372 files changed, 15795 insertions, 5806 deletions
diff --git a/Changes.md b/Changes.md
index fc15e601b6..9f2449c2c3 100644
--- a/Changes.md
+++ b/Changes.md
@@ -43,14 +43,9 @@ within a product configuration .mk file, board config .mk file, or buildspec.mk.
 The path set when running builds now makes the `python` executable point to python 3,
 whereas on previous versions it pointed to python 2. If you still have python 2 scripts,
 you can change the shebang line to use `python2` explicitly. This only applies for
-scripts run directly from makefiles, or from soong genrules. This behavior can be
-temporarily overridden by setting the `BUILD_BROKEN_PYTHON_IS_PYTHON2` environment
-variable to `true`. It's only an environment variable and not a product config variable
-because product config sometimes calls python code.
-
-In addition, `python_*` soong modules no longer allow python 2. This can be temporarily
-overridden by setting the `BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES` product configuration
-variable to `true`.
+scripts run directly from makefiles, or from soong genrules.
+
+In addition, `python_*` soong modules no longer allow python 2.
 
 Python 2 is slated for complete removal in V.
diff --git a/backported_fixes/Android.bp b/backported_fixes/Android.bp
new file mode 100644
index 0000000000..0caea56a57
--- /dev/null
+++ b/backported_fixes/Android.bp
@@ -0,0 +1,144 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
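Regarding the Changes.md hunk above: for a script that still needs Python 2, the fix it
describes is a one-line shebang change. A minimal sketch, with a hypothetical script name
and body shown purely for illustration:

    #!/usr/bin/env python2
    # legacy_tool.py - body unchanged; only the shebang now names `python2` explicitly,
    # since the bare `python` on the build PATH resolves to Python 3.
    print "still running under Python 2"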
+ +package { + default_applicable_licenses: ["Android-Apache-2.0"], + default_team: "trendy_team_android_media_reliability", +} + +genrule { + name: "applied_backported_fixes", + tools: ["applied_backported_fixes_property_writer"], + srcs: [":applied_backported_fix_binpbs"], + out: ["applied_backported_fixes.prop"], + cmd: "$(location applied_backported_fixes_property_writer)" + + " -p $(location applied_backported_fixes.prop)" + + " $(in)", +} + +filegroup { + name: "backported_fixes_proto_file", + srcs: [ + "backported_fixes.proto", + ], +} + +java_library { + name: "backported_fixes_proto", + srcs: ["backported_fixes.proto"], + host_supported: true, + sdk_version: "current", +} + +java_library { + name: "backported_fixes_common", + srcs: ["src/java/com/android/build/backportedfixes/common/*.java"], + static_libs: [ + "backported_fixes_proto", + "guava", + ], + host_supported: true, +} + +java_test_host { + name: "backported_fixes_common_test", + srcs: ["tests/java/com/android/build/backportedfixes/common/*.java"], + static_libs: [ + "backported_fixes_common", + "backported_fixes_proto", + "junit", + "truth", + "truth-liteproto-extension", + "truth-proto-extension", + ], + test_options: { + unit_test: true, + }, + test_suites: ["general-tests"], +} + +java_library { + name: "backported_fixes_main_lib", + srcs: ["src/java/com/android/build/backportedfixes/*.java"], + static_libs: [ + "backported_fixes_common", + "backported_fixes_proto", + "jcommander", + "guava", + ], + host_supported: true, +} + +java_binary_host { + name: "applied_backported_fixes_property_writer", + main_class: "com.android.build.backportedfixes.WriteBackportedFixesPropFile", + static_libs: [ + "backported_fixes_main_lib", + ], +} + +java_binary_host { + name: "backported_fixes_combiner", + main_class: "com.android.build.backportedfixes.CombineBackportedFixes", + static_libs: [ + "backported_fixes_main_lib", + ], +} + +// Combines BackportedFix binary proto files into a single BackportedFixes binary proto file. 
+genrule_defaults { + name: "default_backported_fixes_combiner", + tools: ["backported_fixes_combiner"], + cmd: "$(location backported_fixes_combiner)" + + " -o $(out)" + + " $(in)", +} + +java_test_host { + name: "backported_fixes_main_lib_test", + srcs: ["tests/java/com/android/build/backportedfixes/*.java"], + static_libs: [ + "backported_fixes_main_lib", + "backported_fixes_proto", + "junit", + "truth", + ], + test_options: { + unit_test: true, + }, + test_suites: ["general-tests"], +} + +// Converts BackprotedFix text protos to binary protos +genrule_defaults { + name: "default_backported_fix_binpbs", + tools: ["aprotoc"], + tool_files: [ + ":backported_fixes_proto_file", + ], + cmd: "$(location aprotoc) " + + " --encode=com.android.build.backportedfixes.BackportedFix" + + " $(location :backported_fixes_proto_file)" + + " < $(in)" + + " > $(out); echo $(out)", +} + +gensrcs { + name: "applied_backported_fix_binpbs", + defaults: ["default_backported_fix_binpbs"], + output_extension: "binpb", + srcs: [ + "applied_fixes/*.txtpb", + ], +} diff --git a/backported_fixes/OWNERS b/backported_fixes/OWNERS new file mode 100644 index 0000000000..ac176bf0b4 --- /dev/null +++ b/backported_fixes/OWNERS @@ -0,0 +1,3 @@ +essick@google.com +nchalko@google.com +portmannc@google.com diff --git a/backported_fixes/applied_fixes/ki350037023.txtpb b/backported_fixes/applied_fixes/ki350037023.txtpb new file mode 100644 index 0000000000..456a7aec35 --- /dev/null +++ b/backported_fixes/applied_fixes/ki350037023.txtpb @@ -0,0 +1,19 @@ +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# proto-file: ../backported_fixes.proto +# proto-message: BackportedFix + +known_issue: 350037023 +alias: 1 diff --git a/backported_fixes/backported_fixes.proto b/backported_fixes/backported_fixes.proto new file mode 100644 index 0000000000..91618eebd9 --- /dev/null +++ b/backported_fixes/backported_fixes.proto @@ -0,0 +1,37 @@ +// Copyright (C) 2024 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package com.android.build.backportedfixes; + +option java_multiple_files = true; + +// A list of backported fixes. 
+message BackportedFixes { + repeated BackportedFix fixes = 1; +} + +// A known issue approved for reporting Build.getBackportedFixStatus +message BackportedFix { + + // The issue id from the public bug tracker + // https://issuetracker.google.com/issues/{known_issue} + optional int64 known_issue = 1; + // The alias for the known issue. + // 1 - 1023 are valid aliases + // Must be unique across all backported fixes. + optional int32 alias = 2; +} + diff --git a/backported_fixes/src/java/com/android/build/backportedfixes/CombineBackportedFixes.java b/backported_fixes/src/java/com/android/build/backportedfixes/CombineBackportedFixes.java new file mode 100644 index 0000000000..0592cc187b --- /dev/null +++ b/backported_fixes/src/java/com/android/build/backportedfixes/CombineBackportedFixes.java @@ -0,0 +1,65 @@ + +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.build.backportedfixes; + +import com.android.build.backportedfixes.common.Parser; + +import com.beust.jcommander.JCommander; +import com.beust.jcommander.Parameter; +import com.beust.jcommander.converters.FileConverter; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.List; + + +/** Creates a BackportedFixes binary proto file from a list of BackportedFix proto binary files. */ +public final class CombineBackportedFixes { + + @Parameter(description = "BackportedFix proto binary files", + converter = FileConverter.class, + required = true) + List<File> fixFiles; + @Parameter(description = "Write the BackportedFixes proto binary to this file", + names = {"--out","-o"}, + converter = FileConverter.class, + required = true) + File outFile; + + public static void main(String... argv) throws Exception { + CombineBackportedFixes main = new CombineBackportedFixes(); + JCommander.newBuilder().addObject(main).build().parse(argv); + main.run(); + } + + CombineBackportedFixes() { + } + + private void run() throws Exception { + try (var out = new FileOutputStream(outFile)) { + var fixes = Parser.parseBackportedFixFiles(fixFiles); + writeBackportedFixes(fixes, out); + } + } + + static void writeBackportedFixes(BackportedFixes fixes, OutputStream out) + throws IOException { + fixes.writeTo(out); + } +} diff --git a/backported_fixes/src/java/com/android/build/backportedfixes/WriteBackportedFixesPropFile.java b/backported_fixes/src/java/com/android/build/backportedfixes/WriteBackportedFixesPropFile.java new file mode 100644 index 0000000000..0ffb4ac904 --- /dev/null +++ b/backported_fixes/src/java/com/android/build/backportedfixes/WriteBackportedFixesPropFile.java @@ -0,0 +1,89 @@ + +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.build.backportedfixes; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.android.build.backportedfixes.common.Parser; + +import com.beust.jcommander.JCommander; +import com.beust.jcommander.Parameter; +import com.beust.jcommander.converters.FileConverter; +import com.google.common.io.Files; + +import java.io.File; +import java.io.PrintWriter; +import java.io.Writer; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + + +/** + * Creates backported fix properties file. + * + * <p>Writes BitSet of backported fix aliases from a list of BackportedFix proto binary files and + * writes the property {@value PROPERTY_NAME} to a file. + */ +public final class WriteBackportedFixesPropFile { + + private static final String PROPERTY_NAME = "ro.build.backported_fixes.alias_bitset.long_list"; + @Parameter(description = "BackportedFix proto binary files", + converter = FileConverter.class, + required = true) + List<File> fixFiles; + @Parameter(description = "The file to write the property value to.", + names = {"--property_file", "-p"}, + converter = FileConverter.class, + required = true) + File propertyFile; + + public static void main(String... argv) throws Exception { + WriteBackportedFixesPropFile main = new WriteBackportedFixesPropFile(); + JCommander.newBuilder().addObject(main).build().parse(argv); + main.run(); + } + + WriteBackportedFixesPropFile() { + } + + private void run() throws Exception { + try (var out = Files.newWriter(propertyFile, UTF_8)) { + var fixes = Parser.parseBackportedFixFiles(fixFiles); + writeFixesAsAliasBitSet(fixes, out); + } + } + + static void writeFixesAsAliasBitSet(BackportedFixes fixes, Writer out) { + PrintWriter printWriter = new PrintWriter(out); + printWriter.println("# The following backported fixes have been applied"); + for (var f : fixes.getFixesList()) { + printWriter.printf("# https://issuetracker.google.com/issues/%d with alias %d", + f.getKnownIssue(), f.getAlias()); + printWriter.println(); + } + var bsArray = Parser.getBitSetArray( + fixes.getFixesList().stream().mapToInt(BackportedFix::getAlias).toArray()); + String bsString = Arrays.stream(bsArray).mapToObj(Long::toString).collect( + Collectors.joining(",")); + printWriter.printf("%s=%s", PROPERTY_NAME, bsString); + printWriter.println(); + if (printWriter.checkError()) { + throw new RuntimeException("There was an error writing to " + out.toString()); + } + } +} diff --git a/backported_fixes/src/java/com/android/build/backportedfixes/common/Parser.java b/backported_fixes/src/java/com/android/build/backportedfixes/common/Parser.java new file mode 100644 index 0000000000..6180fdc3da --- /dev/null +++ b/backported_fixes/src/java/com/android/build/backportedfixes/common/Parser.java @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.build.backportedfixes.common; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.android.build.backportedfixes.BackportedFix; +import com.android.build.backportedfixes.BackportedFixes; + +import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableList; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.BitSet; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collector; +import java.util.stream.Collectors; + + +/** Static utilities for working with {@link BackportedFixes}. */ +public final class Parser { + + /** Creates list of FileInputStreams for a list of files. */ + public static ImmutableList<FileInputStream> getFileInputStreams(List<File> fixFiles) throws + FileNotFoundException { + var streams = ImmutableList.<FileInputStream>builder(); + for (var f : fixFiles) { + streams.add(new FileInputStream(f)); + } + return streams.build(); + } + + /** Converts a list of backported fix aliases into a long array representing a {@link BitSet} */ + public static long[] getBitSetArray(int[] aliases) { + BitSet bs = new BitSet(); + for (int a : aliases) { + bs.set(a); + } + return bs.toLongArray(); + } + + /** + * Creates a {@link BackportedFixes} from a list of {@link BackportedFix} binary proto streams. + */ + public static BackportedFixes parseBackportedFixFiles(List<File> fixFiles) + throws IOException { + try { + return fixFiles.stream().map(Parser::tunelFileInputStream) + .map(Parser::tunnelParse) + .sorted(Comparator.comparing(BackportedFix::getKnownIssue)) + .collect(fixCollector()); + + } catch (TunnelException e) { + throw e.rethrow(FileNotFoundException.class, IOException.class); + } + } + + + private static Collector<BackportedFix, ?, BackportedFixes> fixCollector() { + return Collectors.collectingAndThen(Collectors.toList(), fixList -> { + var result = BackportedFixes.newBuilder(); + result.addAllFixes(fixList); + return result.build(); + }); + } + + private static FileInputStream tunelFileInputStream(File file) throws TunnelException { + try { + return new FileInputStream(file); + } catch (FileNotFoundException e) { + throw new TunnelException(e); + } + } + + private static BackportedFix tunnelParse(InputStream s) throws TunnelException { + try { + var fix = BackportedFix.parseFrom(s); + s.close(); + return fix; + } catch (IOException e) { + throw new TunnelException(e); + } + } + + private static class TunnelException extends RuntimeException { + TunnelException(Exception cause) { + super("If you see this TunnelException something went wrong. 
It should always be rethrown as the cause.", cause); + } + + <X extends Exception> RuntimeException rethrow(Class<X> exceptionClazz) throws X { + checkNotNull(exceptionClazz); + Throwables.throwIfInstanceOf(getCause(), exceptionClazz); + throw exception( + getCause(), + "rethrow(%s) doesn't match underlying exception", exceptionClazz); + } + + public <X1 extends Exception, X2 extends Exception> RuntimeException rethrow( + Class<X1> exceptionClazz1, Class<X2> exceptionClazz2) throws X1, X2 { + checkNotNull(exceptionClazz1); + checkNotNull(exceptionClazz2); + Throwables.throwIfInstanceOf(getCause(), exceptionClazz1); + Throwables.throwIfInstanceOf(getCause(), exceptionClazz2); + throw exception( + getCause(), + "rethrow(%s, %s) doesn't match underlying exception", + exceptionClazz1, + exceptionClazz2); + } + + private static ClassCastException exception( + Throwable cause, String message, Object... formatArgs) { + ClassCastException result = new ClassCastException(String.format(message, formatArgs)); + result.initCause(cause); + return result; + } + + } + + private Parser() { + } +} diff --git a/backported_fixes/tests/java/com/android/build/backportedfixes/CombineBackportedFixesTest.java b/backported_fixes/tests/java/com/android/build/backportedfixes/CombineBackportedFixesTest.java new file mode 100644 index 0000000000..21d5f1e676 --- /dev/null +++ b/backported_fixes/tests/java/com/android/build/backportedfixes/CombineBackportedFixesTest.java @@ -0,0 +1,41 @@ + +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.build.backportedfixes; + +import com.google.common.truth.Truth; + +import org.junit.Test; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; + +/** Tests for {@link CombineBackportedFixes}. */ +public class CombineBackportedFixesTest { + + + @Test + public void writeBackportedFixes_default() throws IOException { + // Not much of a test, but there is not much to test. + BackportedFixes fixes = BackportedFixes.newBuilder() + .addFixes(BackportedFix.newBuilder().setKnownIssue(123).build()) + .build(); + var result = new ByteArrayOutputStream(); + CombineBackportedFixes.writeBackportedFixes(fixes, result); + Truth.assertThat(BackportedFixes.parseFrom(result.toByteArray())) + .isEqualTo(fixes); + } +} diff --git a/backported_fixes/tests/java/com/android/build/backportedfixes/WriteBackportedFixesPropFileTest.java b/backported_fixes/tests/java/com/android/build/backportedfixes/WriteBackportedFixesPropFileTest.java new file mode 100644 index 0000000000..3209c15911 --- /dev/null +++ b/backported_fixes/tests/java/com/android/build/backportedfixes/WriteBackportedFixesPropFileTest.java @@ -0,0 +1,64 @@ + +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.build.backportedfixes; + +import com.google.common.truth.Truth; + +import org.junit.Test; + +import java.io.PrintWriter; +import java.io.StringWriter; + +/** Tests for {@link WriteBackportedFixesPropFile}. */ +public class WriteBackportedFixesPropFileTest { + + + @Test + public void writeFixesAsAliasBitSet_default() { + BackportedFixes fixes = BackportedFixes.newBuilder().build(); + var result = new StringWriter(); + + WriteBackportedFixesPropFile.writeFixesAsAliasBitSet(fixes, new PrintWriter(result)); + + Truth.assertThat(result.toString()) + .isEqualTo(""" + # The following backported fixes have been applied + ro.build.backported_fixes.alias_bitset.long_list= + """); + } + + @Test + public void writeFixesAsAliasBitSet_some() { + BackportedFixes fixes = BackportedFixes.newBuilder() + .addFixes(BackportedFix.newBuilder().setKnownIssue(1234L).setAlias(1)) + .addFixes(BackportedFix.newBuilder().setKnownIssue(3L).setAlias(65)) + .addFixes(BackportedFix.newBuilder().setKnownIssue(4L).setAlias(67)) + .build(); + var result = new StringWriter(); + + WriteBackportedFixesPropFile.writeFixesAsAliasBitSet(fixes, new PrintWriter(result)); + + Truth.assertThat(result.toString()) + .isEqualTo(""" + # The following backported fixes have been applied + # https://issuetracker.google.com/issues/1234 with alias 1 + # https://issuetracker.google.com/issues/3 with alias 65 + # https://issuetracker.google.com/issues/4 with alias 67 + ro.build.backported_fixes.alias_bitset.long_list=2,10 + """); + } +} diff --git a/backported_fixes/tests/java/com/android/build/backportedfixes/common/ParserTest.java b/backported_fixes/tests/java/com/android/build/backportedfixes/common/ParserTest.java new file mode 100644 index 0000000000..57a0a40b90 --- /dev/null +++ b/backported_fixes/tests/java/com/android/build/backportedfixes/common/ParserTest.java @@ -0,0 +1,104 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
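The alias bitset asserted in WriteBackportedFixesPropFileTest above comes from
Parser.getBitSetArray: each alias sets bit `alias` of a java.util.BitSet, and
BitSet.toLongArray() is joined with commas into the
ro.build.backported_fixes.alias_bitset.long_list property. A short Python sketch of the
same packing, for illustration only (not part of the change):

    def alias_bitset_longs(aliases):
        # Mimics java.util.BitSet.set(alias) for each alias, then toLongArray().
        if not aliases:
            return []
        words = [0] * (max(aliases) // 64 + 1)
        for a in aliases:
            words[a // 64] |= 1 << (a % 64)
        return words

    # Matches the expected test output for aliases 1, 65 and 67:
    # word 0 has bit 1 set -> 2; word 1 has bits 1 and 3 set -> 10.
    assert alias_bitset_longs([1, 65, 67]) == [2, 10]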
+ */ +package com.android.build.backportedfixes.common; + +import static com.google.common.truth.Truth.assertThat; +import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat; + +import com.android.build.backportedfixes.BackportedFix; +import com.android.build.backportedfixes.BackportedFixes; + +import com.google.common.collect.ImmutableList; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.Files; + +/** Tests for {@link Parser}.*/ +public class ParserTest { + + @Rule + public TemporaryFolder mTempFolder = new TemporaryFolder(); + + @Test + public void getFileInputStreams() throws IOException { + var results = Parser.getFileInputStreams( + ImmutableList.of(Files.createTempFile("test", null).toFile())); + assertThat(results).isNotEmpty(); + } + + + @Test + public void getBitSetArray_empty() { + var results = Parser.getBitSetArray(new int[]{}); + assertThat(results).isEmpty(); + } + + @Test + public void getBitSetArray_2_3_64() { + var results = Parser.getBitSetArray(new int[]{2,3,64}); + assertThat(results).asList().containsExactly(12L,1L).inOrder(); + } + + @Test + public void parseBackportedFixFiles_empty() throws IOException { + var result = Parser.parseBackportedFixFiles(ImmutableList.of()); + assertThat(result).isEqualTo(BackportedFixes.getDefaultInstance()); + } + + + @Test + public void parseBackportedFixFiles_oneBlank() throws IOException { + var result = Parser.parseBackportedFixFiles(ImmutableList.of(mTempFolder.newFile())); + + assertThat(result).isEqualTo( + BackportedFixes.newBuilder() + .addFixes(BackportedFix.getDefaultInstance()) + .build()); + } + + @Test + public void parseBackportedFixFiles_two() throws IOException { + BackportedFix ki123 = BackportedFix.newBuilder() + .setKnownIssue(123) + .setAlias(1) + .build(); + BackportedFix ki456 = BackportedFix.newBuilder() + .setKnownIssue(456) + .setAlias(2) + .build(); + var result = Parser.parseBackportedFixFiles( + ImmutableList.of(tempFile(ki456), tempFile(ki123))); + assertThat(result).isEqualTo( + BackportedFixes.newBuilder() + .addFixes(ki123) + .addFixes(ki456) + .build()); + } + + private File tempFile(BackportedFix fix) throws IOException { + File f = mTempFolder.newFile(); + try (FileOutputStream out = new FileOutputStream(f)) { + fix.writeTo(out); + return f; + } + } +} diff --git a/ci/Android.bp b/ci/Android.bp index 104f517ccd..3f28be4494 100644 --- a/ci/Android.bp +++ b/ci/Android.bp @@ -25,7 +25,7 @@ python_test_host { "build_test_suites_test.py", ], libs: [ - "build_test_suites", + "build_test_suites_lib", "pyfakefs", "ci_test_lib", ], @@ -56,7 +56,7 @@ python_test_host { "build_test_suites_local_test.py", ], libs: [ - "build_test_suites", + "build_test_suites_lib", "pyfakefs", "ci_test_lib", ], @@ -71,11 +71,60 @@ python_test_host { }, } -python_library_host { +python_test_host { + name: "optimized_targets_test", + main: "optimized_targets_test.py", + pkg_path: "testdata", + srcs: [ + "optimized_targets_test.py", + ], + libs: [ + "build_test_suites_lib", + "pyfakefs", + ], + test_options: { + unit_test: true, + }, + data: [ + ":py3-cmd", + ], + version: { + py3: { + embedded_launcher: true, + }, + }, +} + +python_binary_host { name: "build_test_suites", srcs: [ "build_test_suites.py", "optimized_targets.py", + "test_mapping_module_retriever.py", + "build_context.py", + "test_discovery_agent.py", + "metrics_agent.py", + "buildbot.py", + ], + 
main: "build_test_suites.py", + libs: [ + "soong-metrics-proto-py", + ], +} + +python_library_host { + name: "build_test_suites_lib", + srcs: [ + "build_test_suites.py", + "optimized_targets.py", + "test_mapping_module_retriever.py", + "build_context.py", + "test_discovery_agent.py", + "metrics_agent.py", + "buildbot.py", + ], + libs: [ + "soong-metrics-proto-py", ], } diff --git a/ci/build_context.py b/ci/build_context.py new file mode 100644 index 0000000000..c7a1defb57 --- /dev/null +++ b/ci/build_context.py @@ -0,0 +1,67 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Container class for build context with utility functions.""" + +import re + + +class BuildContext: + + def __init__(self, build_context_dict: dict[str, any]): + self.enabled_build_features = set() + for opt in build_context_dict.get('enabledBuildFeatures', []): + self.enabled_build_features.add(opt.get('name')) + self.test_infos = set() + for test_info_dict in build_context_dict.get('testContext', dict()).get( + 'testInfos', [] + ): + self.test_infos.add(self.TestInfo(test_info_dict)) + + def build_target_used(self, target: str) -> bool: + return any(test.build_target_used(target) for test in self.test_infos) + + class TestInfo: + + _DOWNLOAD_OPTS = { + 'test-config-only-zip', + 'test-zip-file-filter', + 'extra-host-shared-lib-zip', + 'sandbox-tests-zips', + 'additional-files-filter', + 'cts-package-name', + } + + def __init__(self, test_info_dict: dict[str, any]): + self.is_test_mapping = False + self.test_mapping_test_groups = set() + self.file_download_options = set() + self.name = test_info_dict.get('name') + self.command = test_info_dict.get('command') + self.extra_options = test_info_dict.get('extraOptions') + for opt in test_info_dict.get('extraOptions', []): + key = opt.get('key') + if key == 'test-mapping-test-group': + self.is_test_mapping = True + self.test_mapping_test_groups.update(opt.get('values', set())) + + if key in self._DOWNLOAD_OPTS: + self.file_download_options.update(opt.get('values', set())) + + def build_target_used(self, target: str) -> bool: + # For all of a targets' outputs, check if any of the regexes used by tests + # to download artifacts would match it. If any of them do then this target + # is necessary. + regex = r'\b(%s)\b' % re.escape(target) + return any(re.search(regex, opt) for opt in self.file_download_options) diff --git a/ci/build_device_and_tests b/ci/build_device_and_tests new file mode 100755 index 0000000000..63d3ce3519 --- /dev/null +++ b/ci/build_device_and_tests @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
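For reference, the BuildContext class introduced above (ci/build_context.py) is what the
planner uses to decide whether a target's outputs are needed: a target counts as used when
its name appears, as a whole word, in any artifact-download option of any test in the
context. A hypothetical usage sketch; the dict literal is illustrative and not taken from a
real BUILD_CONTEXT:

    from build_context import BuildContext  # assumes ci/ is on the import path

    ctx = BuildContext({
        'enabledBuildFeatures': [{'name': 'optimized_build'}],
        'testContext': {'testInfos': [{
            'name': 'cts_module',
            'command': 'cts',
            'extraOptions': [{'key': 'test-zip-file-filter',
                              'values': ['android-cts.zip']}],
        }]},
    })

    assert ctx.build_target_used('cts')      # word-boundary match inside 'android-cts.zip'
    assert not ctx.build_target_used('gts')  # not mentioned in any download option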
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+set -euo pipefail
+
+build/soong/soong_ui.bash --make-mode build_test_suites
+$(build/soong/soong_ui.bash --dumpvar-mode HOST_OUT)/bin/build_test_suites --device-build $@
diff --git a/ci/build_metadata b/ci/build_metadata
new file mode 100755
index 0000000000..3e9218f200
--- /dev/null
+++ b/ci/build_metadata
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -x
+
+source build/make/shell_utils.sh
+
+export TARGET_PRODUCT=aosp_arm64
+export TARGET_RELEASE=trunk_staging
+export TARGET_BUILD_VARIANT=eng
+
+import_build_vars \
+    OUT_DIR \
+    DIST_DIR \
+    HOST_OUT_EXECUTABLES \
+    || exit $?
+
+TARGETS=(
+    all_teams
+    source_tree_size
+    release_config_metadata
+)
+
+# Build modules
+build/soong/bin/m dist ${TARGETS[@]} || exit $?
+
+# List all source files in the tree
+( \
+    $HOST_OUT_EXECUTABLES/source_tree_size -o $DIST_DIR/all_source_tree_files.pb \
+    && gzip -fn $DIST_DIR/all_source_tree_files.pb \
+) || exit $?
diff --git a/ci/build_test_suites b/ci/build_test_suites
index 5aaf2f49b7..74470a8e16 100755
--- a/ci/build_test_suites
+++ b/ci/build_test_suites
@@ -1,4 +1,4 @@
-#!prebuilts/build-tools/linux-x86/bin/py3-cmd -B
+#!/usr/bin/env bash
 #
 # Copyright 2024, The Android Open Source Project
 #
@@ -13,8 +13,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+set -euo pipefail
 
-import build_test_suites
-import sys
-
-build_test_suites.main(sys.argv[1:])
+build/soong/soong_ui.bash --make-mode build_test_suites
+$(build/soong/soong_ui.bash --dumpvar-mode HOST_OUT)/bin/build_test_suites $@
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 75dd9f2f70..d81248b496 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -24,12 +24,18 @@
 import re
 import subprocess
 import sys
 from typing import Callable
 
+from build_context import BuildContext
 import optimized_targets
+import metrics_agent
+import test_discovery_agent
 
 
-REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
+REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP', 'DIST_DIR'])
 
 SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
 LOG_PATH = 'logs/build_test_suites.log'
+# Currently, this prevents the removal of those tags when they exist.
In the future we likely +# want the script to supply 'dist directly +REQUIRED_BUILD_TARGETS = frozenset(['dist', 'droid', 'checkbuild']) class Error(Exception): @@ -53,18 +59,9 @@ class BuildPlanner: any output zip files needed by the build. """ - _DOWNLOAD_OPTS = { - 'test-config-only-zip', - 'test-zip-file-filter', - 'extra-host-shared-lib-zip', - 'sandbox-tests-zips', - 'additional-files-filter', - 'cts-package-name', - } - def __init__( self, - build_context: dict[str, any], + build_context: BuildContext, args: argparse.Namespace, target_optimizations: dict[str, optimized_targets.OptimizedBuildTarget], ): @@ -74,19 +71,31 @@ class BuildPlanner: def create_build_plan(self): - if 'optimized_build' not in self.build_context.get( - 'enabledBuildFeatures', [] - ): + if 'optimized_build' not in self.build_context.enabled_build_features: return BuildPlan(set(self.args.extra_targets), set()) - build_targets = set() - packaging_functions = set() - for target in self.args.extra_targets: - if self._unused_target_exclusion_enabled( - target - ) and not self._build_target_used(target): - continue + if not self.build_context.test_infos: + logging.warning('Build context has no test infos, skipping optimizations.') + for target in self.args.extra_targets: + get_metrics_agent().report_unoptimized_target(target, 'BUILD_CONTEXT has no test infos.') + return BuildPlan(set(self.args.extra_targets), set()) + build_targets = set() + packaging_commands_getters = [] + # In order to roll optimizations out differently between test suites and + # device builds, we have separate flags. + enable_discovery = (('test_suites_zip_test_discovery' + in self.build_context.enabled_build_features + and not self.args.device_build + ) or ( + 'device_zip_test_discovery' + in self.build_context.enabled_build_features + and self.args.device_build + )) and not self.args.test_discovery_info_mode + logging.info(f'Discovery mode is enabled= {enable_discovery}') + preliminary_build_targets = self._collect_preliminary_build_targets(enable_discovery) + + for target in preliminary_build_targets: target_optimizer_getter = self.target_optimizations.get(target, None) if not target_optimizer_getter: build_targets.add(target) @@ -96,69 +105,106 @@ class BuildPlanner: target, self.build_context, self.args ) build_targets.update(target_optimizer.get_build_targets()) - packaging_functions.add(target_optimizer.package_outputs) + packaging_commands_getters.append( + target_optimizer.get_package_outputs_commands + ) + + return BuildPlan(build_targets, packaging_commands_getters) + + def _collect_preliminary_build_targets(self, enable_discovery: bool): + build_targets = set() + try: + test_discovery_zip_regexes = self._get_test_discovery_zip_regexes() + logging.info(f'Discovered test discovery regexes: {test_discovery_zip_regexes}') + except test_discovery_agent.TestDiscoveryError as e: + optimization_rationale = e.message + logging.warning(f'Unable to perform test discovery: {optimization_rationale}') + + for target in self.args.extra_targets: + get_metrics_agent().report_unoptimized_target(target, optimization_rationale) + return self._legacy_collect_preliminary_build_targets() + + for target in self.args.extra_targets: + if target in REQUIRED_BUILD_TARGETS: + build_targets.add(target) + get_metrics_agent().report_unoptimized_target(target, 'Required build target.') + continue + # If nothing is discovered without error, that means nothing is needed. 
+ if not test_discovery_zip_regexes: + get_metrics_agent().report_optimized_target(target) + continue + + regex = r'\b(%s.*)\b' % re.escape(target) + for opt in test_discovery_zip_regexes: + try: + if re.search(regex, opt): + get_metrics_agent().report_unoptimized_target(target, 'Test artifact used.') + build_targets.add(target) + # proceed to next target evaluation + break + get_metrics_agent().report_optimized_target(target) + except Exception as e: + # In case of exception report as unoptimized + build_targets.add(target) + get_metrics_agent().report_unoptimized_target(target, f'Error in parsing test discovery output for {target}: {repr(e)}') + logging.error(f'unable to parse test discovery output: {repr(e)}') + break + # If discovery is not enabled, return the original list + if not enable_discovery: + return self._legacy_collect_preliminary_build_targets() + + return build_targets + + def _legacy_collect_preliminary_build_targets(self): + build_targets = set() + for target in self.args.extra_targets: + if self._unused_target_exclusion_enabled( + target + ) and not self.build_context.build_target_used(target): + continue - return BuildPlan(build_targets, packaging_functions) + build_targets.add(target) + return build_targets def _unused_target_exclusion_enabled(self, target: str) -> bool: - return f'{target}_unused_exclusion' in self.build_context.get( - 'enabledBuildFeatures', [] + return ( + f'{target}_unused_exclusion' + in self.build_context.enabled_build_features ) - def _build_target_used(self, target: str) -> bool: - """Determines whether this target's outputs are used by the test configurations listed in the build context.""" - file_download_regexes = self._aggregate_file_download_regexes() - # For all of a targets' outputs, check if any of the regexes used by tests - # to download artifacts would match it. If any of them do then this target - # is necessary. - for artifact in self._get_target_potential_outputs(target): - for regex in file_download_regexes: - if re.match(regex, artifact): - return True - return False - - def _get_target_potential_outputs(self, target: str) -> set[str]: - tests_suffix = '-tests' - if target.endswith('tests'): - tests_suffix = '' - # This is a list of all the potential zips output by the test suite targets. - # If the test downloads artifacts from any of these zips, we will be - # conservative and avoid skipping the tests. - return { - f'{target}.zip', - f'android-{target}.zip', - f'android-{target}-verifier.zip', - f'{target}{tests_suffix}_list.zip', - f'android-{target}{tests_suffix}_list.zip', - f'{target}{tests_suffix}_host-shared-libs.zip', - f'android-{target}{tests_suffix}_host-shared-libs.zip', - f'{target}{tests_suffix}_configs.zip', - f'android-{target}{tests_suffix}_configs.zip', - } - - def _aggregate_file_download_regexes(self) -> set[re.Pattern]: - """Lists out all test config options to specify targets to download. - - These come in the form of regexes. - """ - all_regexes = set() - for test_info in self._get_test_infos(): - for opt in test_info.get('extraOptions', []): - # check the known list of options for downloading files. 
- if opt.get('key') in self._DOWNLOAD_OPTS: - all_regexes.update( - re.compile(value) for value in opt.get('values', []) - ) - return all_regexes - - def _get_test_infos(self): - return self.build_context.get('testContext', dict()).get('testInfos', []) + def _get_test_discovery_zip_regexes(self) -> set[str]: + build_target_regexes = set() + for test_info in self.build_context.test_infos: + tf_command = self._build_tf_command(test_info) + discovery_agent = test_discovery_agent.TestDiscoveryAgent(tradefed_args=tf_command) + for regex in discovery_agent.discover_test_zip_regexes(): + build_target_regexes.add(regex) + return build_target_regexes + def _build_tf_command(self, test_info) -> list[str]: + command = [test_info.command] + for extra_option in test_info.extra_options: + if not extra_option.get('key'): + continue + arg_key = '--' + extra_option.get('key') + if arg_key == '--build-id': + command.append(arg_key) + command.append(os.environ.get('BUILD_NUMBER')) + continue + if extra_option.get('values'): + for value in extra_option.get('values'): + command.append(arg_key) + command.append(value) + else: + command.append(arg_key) + + return command + @dataclass(frozen=True) class BuildPlan: build_targets: set[str] - packaging_functions: set[Callable[..., None]] + packaging_commands_getters: list[Callable[[], list[list[str]]]] def build_test_suites(argv: list[str]) -> int: @@ -170,19 +216,27 @@ def build_test_suites(argv: list[str]) -> int: Returns: The exit code of the build. """ - args = parse_args(argv) - check_required_env() - build_context = load_build_context() - build_planner = BuildPlanner( - build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS - ) - build_plan = build_planner.create_build_plan() + get_metrics_agent().analysis_start() + try: + args = parse_args(argv) + check_required_env() + build_context = BuildContext(load_build_context()) + build_planner = BuildPlanner( + build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS + ) + build_plan = build_planner.create_build_plan() + except: + raise + finally: + get_metrics_agent().analysis_end() try: execute_build_plan(build_plan) except BuildFailureError as e: logging.error('Build command failed! Check build_log for details.') return e.return_code + finally: + get_metrics_agent().end_reporting() return 0 @@ -193,6 +247,16 @@ def parse_args(argv: list[str]) -> argparse.Namespace: argparser.add_argument( 'extra_targets', nargs='*', help='Extra test suites to build.' 
) + argparser.add_argument( + '--device-build', + action='store_true', + help='Flag to indicate running a device build.', + ) + argparser.add_argument( + '--test_discovery_info_mode', + action='store_true', + help='Flag to enable running test discovery in info only mode.', + ) return argparser.parse_args(argv) @@ -234,14 +298,21 @@ def execute_build_plan(build_plan: BuildPlan): build_command.append(get_top().joinpath(SOONG_UI_EXE_REL_PATH)) build_command.append('--make-mode') build_command.extend(build_plan.build_targets) - + logging.info(f'Running build command: {build_command}') try: run_command(build_command) except subprocess.CalledProcessError as e: raise BuildFailureError(e.returncode) from e - for packaging_function in build_plan.packaging_functions: - packaging_function() + get_metrics_agent().packaging_start() + try: + for packaging_commands_getter in build_plan.packaging_commands_getters: + for packaging_command in packaging_commands_getter(): + run_command(packaging_command) + except subprocess.CalledProcessError as e: + raise BuildFailureError(e.returncode) from e + finally: + get_metrics_agent().packaging_end() def get_top() -> pathlib.Path: @@ -252,6 +323,10 @@ def run_command(args: list[str], stdout=None): subprocess.run(args=args, check=True, stdout=stdout) +def get_metrics_agent(): + return metrics_agent.MetricsAgent.instance() + + def main(argv): dist_dir = os.environ.get('DIST_DIR') if dist_dir: @@ -262,3 +337,7 @@ def main(argv): filename=log_file, ) sys.exit(build_test_suites(argv)) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py index 25c072e2b0..190740f811 100644 --- a/ci/build_test_suites_test.py +++ b/ci/build_test_suites_test.py @@ -32,10 +32,13 @@ import time from typing import Callable import unittest from unittest import mock +from build_context import BuildContext import build_test_suites import ci_test_lib import optimized_targets from pyfakefs import fake_filesystem_unittest +import metrics_agent +import test_discovery_agent class BuildTestSuitesTest(fake_filesystem_unittest.TestCase): @@ -51,6 +54,10 @@ class BuildTestSuitesTest(fake_filesystem_unittest.TestCase): self.addCleanup(subprocess_run_patcher.stop) self.mock_subprocess_run = subprocess_run_patcher.start() + metrics_agent_finalize_patcher = mock.patch('metrics_agent.MetricsAgent.end_reporting') + self.addCleanup(metrics_agent_finalize_patcher.stop) + self.mock_metrics_agent_end = metrics_agent_finalize_patcher.start() + self._setup_working_build_env() def test_missing_target_release_env_var_raises(self): @@ -71,6 +78,12 @@ class BuildTestSuitesTest(fake_filesystem_unittest.TestCase): with self.assert_raises_word(build_test_suites.Error, 'TOP'): build_test_suites.main([]) + def test_missing_dist_dir_env_var_raises(self): + del os.environ['DIST_DIR'] + + with self.assert_raises_word(build_test_suites.Error, 'DIST_DIR'): + build_test_suites.main([]) + def test_invalid_arg_raises(self): invalid_args = ['--invalid_arg'] @@ -107,6 +120,9 @@ class BuildTestSuitesTest(fake_filesystem_unittest.TestCase): self.soong_ui_dir = self.fake_top.joinpath('build/soong') self.soong_ui_dir.mkdir(parents=True, exist_ok=True) + self.logs_dir = self.fake_top.joinpath('dist/logs') + self.logs_dir.mkdir(parents=True, exist_ok=True) + self.soong_ui = self.soong_ui_dir.joinpath('soong_ui.bash') self.soong_ui.touch() @@ -114,6 +130,7 @@ class BuildTestSuitesTest(fake_filesystem_unittest.TestCase): 'TARGET_RELEASE': 'release', 'TARGET_PRODUCT': 
'product', 'TOP': str(self.fake_top), + 'DIST_DIR': str(self.fake_top.joinpath('dist')), }) self.mock_subprocess_run.return_value = 0 @@ -240,21 +257,27 @@ class BuildPlannerTest(unittest.TestCase): class TestOptimizedBuildTarget(optimized_targets.OptimizedBuildTarget): def __init__( - self, target, build_context, args, output_targets, packaging_outputs + self, target, build_context, args, output_targets, packaging_commands ): super().__init__(target, build_context, args) self.output_targets = output_targets - self.packaging_outputs = packaging_outputs + self.packaging_commands = packaging_commands def get_build_targets_impl(self): return self.output_targets - def package_outputs_impl(self): - self.packaging_outputs.add(f'packaging {" ".join(self.output_targets)}') + def get_package_outputs_commands_impl(self): + return self.packaging_commands def get_enabled_flag(self): return f'{self.target}_enabled' + def setUp(self): + test_discovery_agent_patcher = mock.patch('test_discovery_agent.TestDiscoveryAgent.discover_test_zip_regexes') + self.addCleanup(test_discovery_agent_patcher.stop) + self.mock_test_discovery_agent_end = test_discovery_agent_patcher.start() + + def test_build_optimization_off_builds_everything(self): build_targets = {'target_1', 'target_2'} build_planner = self.create_build_planner( @@ -275,14 +298,16 @@ class BuildPlannerTest(unittest.TestCase): build_plan = build_planner.create_build_plan() - self.assertEqual(len(build_plan.packaging_functions), 0) + for packaging_command in self.run_packaging_commands(build_plan): + self.assertEqual(len(packaging_command), 0) def test_build_optimization_on_optimizes_target(self): build_targets = {'target_1', 'target_2'} build_planner = self.create_build_planner( build_targets=build_targets, build_context=self.create_build_context( - enabled_build_features={self.get_target_flag('target_1')} + enabled_build_features=[{'name': self.get_target_flag('target_1')}], + test_context=self.get_test_context('target_1'), ), ) @@ -293,20 +318,20 @@ class BuildPlannerTest(unittest.TestCase): def test_build_optimization_on_packages_target(self): build_targets = {'target_1', 'target_2'} - packaging_outputs = set() + optimized_target_name = self.get_optimized_target_name('target_1') + packaging_commands = [[f'packaging {optimized_target_name}']] build_planner = self.create_build_planner( build_targets=build_targets, build_context=self.create_build_context( - enabled_build_features={self.get_target_flag('target_1')}, + enabled_build_features=[{'name': self.get_target_flag('target_1')}], + test_context=self.get_test_context('target_1'), ), - packaging_outputs=packaging_outputs, + packaging_commands=packaging_commands, ) build_plan = build_planner.create_build_plan() - self.run_packaging_functions(build_plan) - optimized_target_name = self.get_optimized_target_name('target_1') - self.assertIn(f'packaging {optimized_target_name}', packaging_outputs) + self.assertIn(packaging_commands, self.run_packaging_commands(build_plan)) def test_individual_build_optimization_off_doesnt_optimize(self): build_targets = {'target_1', 'target_2'} @@ -320,16 +345,16 @@ class BuildPlannerTest(unittest.TestCase): def test_individual_build_optimization_off_doesnt_package(self): build_targets = {'target_1', 'target_2'} - packaging_outputs = set() + packaging_commands = [['packaging command']] build_planner = self.create_build_planner( build_targets=build_targets, - packaging_outputs=packaging_outputs, + packaging_commands=packaging_commands, ) build_plan = 
build_planner.create_build_plan() - self.run_packaging_functions(build_plan) - self.assertFalse(packaging_outputs) + for packaging_command in self.run_packaging_commands(build_plan): + self.assertEqual(len(packaging_command), 0) def test_target_output_used_target_built(self): build_target = 'test_target' @@ -337,7 +362,7 @@ class BuildPlannerTest(unittest.TestCase): build_targets={build_target}, build_context=self.create_build_context( test_context=self.get_test_context(build_target), - enabled_build_features={'test_target_unused_exclusion'}, + enabled_build_features=[{'name': 'test_target_unused_exclusion'}], ), ) @@ -356,7 +381,7 @@ class BuildPlannerTest(unittest.TestCase): build_targets={build_target}, build_context=self.create_build_context( test_context=test_context, - enabled_build_features={'test_target_unused_exclusion'}, + enabled_build_features=[{'name': 'test_target_unused_exclusion'}], ), ) @@ -372,7 +397,26 @@ class BuildPlannerTest(unittest.TestCase): build_targets={build_target}, build_context=self.create_build_context( test_context=test_context, - enabled_build_features={'test_target_unused_exclusion'}, + enabled_build_features=[{'name': 'test_target_unused_exclusion'}], + ), + ) + + build_plan = build_planner.create_build_plan() + + self.assertSetEqual(build_plan.build_targets, set()) + + def test_target_regex_matching_not_too_broad(self): + build_target = 'test_target' + test_context = self.get_test_context(build_target) + test_context['testInfos'][0]['extraOptions'] = [{ + 'key': 'additional-files-filter', + 'values': [f'.*a{build_target}.*\.zip'], + }] + build_planner = self.create_build_planner( + build_targets={build_target}, + build_context=self.create_build_context( + test_context=test_context, + enabled_build_features=[{'name': 'test_target_unused_exclusion'}], ), ) @@ -383,12 +427,12 @@ class BuildPlannerTest(unittest.TestCase): def create_build_planner( self, build_targets: set[str], - build_context: dict[str, any] = None, + build_context: BuildContext = None, args: argparse.Namespace = None, target_optimizations: dict[ str, optimized_targets.OptimizedBuildTarget ] = None, - packaging_outputs: set[str] = set(), + packaging_commands: list[list[str]] = [], ) -> build_test_suites.BuildPlanner: if not build_context: build_context = self.create_build_context() @@ -398,7 +442,7 @@ class BuildPlannerTest(unittest.TestCase): target_optimizations = self.create_target_optimizations( build_context, build_targets, - packaging_outputs, + packaging_commands, ) return build_test_suites.BuildPlanner( build_context, args, target_optimizations @@ -407,15 +451,17 @@ class BuildPlannerTest(unittest.TestCase): def create_build_context( self, optimized_build_enabled: bool = True, - enabled_build_features: set[str] = set(), + enabled_build_features: list[dict[str, str]] = [], test_context: dict[str, any] = {}, - ) -> dict[str, any]: - build_context = {} - build_context['enabledBuildFeatures'] = enabled_build_features + ) -> BuildContext: + build_context_dict = {} + build_context_dict['enabledBuildFeatures'] = enabled_build_features if optimized_build_enabled: - build_context['enabledBuildFeatures'].add('optimized_build') - build_context['testContext'] = test_context - return build_context + build_context_dict['enabledBuildFeatures'].append( + {'name': 'optimized_build'} + ) + build_context_dict['testContext'] = test_context + return BuildContext(build_context_dict) def create_args( self, extra_build_targets: set[str] = set() @@ -426,16 +472,16 @@ class 
BuildPlannerTest(unittest.TestCase): def create_target_optimizations( self, - build_context: dict[str, any], + build_context: BuildContext, build_targets: set[str], - packaging_outputs: set[str] = set(), + packaging_commands: list[list[str]] = [], ): target_optimizations = dict() for target in build_targets: target_optimizations[target] = functools.partial( self.TestOptimizedBuildTarget, output_targets={self.get_optimized_target_name(target)}, - packaging_outputs=packaging_outputs, + packaging_commands=packaging_commands, ) return target_optimizations @@ -446,10 +492,6 @@ class BuildPlannerTest(unittest.TestCase): def get_optimized_target_name(self, target: str): return f'{target}_optimized' - def run_packaging_functions(self, build_plan: build_test_suites.BuildPlan): - for packaging_function in build_plan.packaging_functions: - packaging_function() - def get_test_context(self, target: str): return { 'testInfos': [ @@ -469,6 +511,12 @@ class BuildPlannerTest(unittest.TestCase): ], } + def run_packaging_commands(self, build_plan: build_test_suites.BuildPlan): + return [ + packaging_command_getter() + for packaging_command_getter in build_plan.packaging_commands_getters + ] + def wait_until( condition_function: Callable[[], bool], diff --git a/ci/buildbot.py b/ci/buildbot.py new file mode 100644 index 0000000000..97097be598 --- /dev/null +++ b/ci/buildbot.py @@ -0,0 +1,43 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for interacting with buildbot, with a simulation in a local environment""" + +import os +import sys + +# Check that the script is running from the root of the tree. Prevents subtle +# errors later, and CI always runs from the root of the tree. +if not os.path.exists("build/make/ci/buildbot.py"): + raise Exception("CI script must be run from the root of the tree instead of: " + + os.getcwd()) + +# Check that we are using the hermetic interpreter +if "prebuilts/build-tools/" not in sys.executable: + raise Exception("CI script must be run using the hermetic interpreter from " + + "prebuilts/build-tools instead of: " + sys.executable) + + +def OutDir(): + "Get the out directory. Will create it if needed." + result = os.environ.get("OUT_DIR", "out") + os.makedirs(result, exist_ok=True) + return result + +def DistDir(): + "Get the dist directory. Will create it if needed." + result = os.environ.get("DIST_DIR", os.path.join(OutDir(), "dist")) + os.makedirs(result, exist_ok=True) + return result + diff --git a/ci/dump_product_config b/ci/dump_product_config new file mode 100755 index 0000000000..77b51dd281 --- /dev/null +++ b/ci/dump_product_config @@ -0,0 +1,353 @@ +#!prebuilts/build-tools/linux-x86/bin/py3-cmd -B + +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Script to collect all of the make variables from all product config combos. + +This script must be run from the root of the source tree. + +See GetArgs() below or run dump_product_config for more information. +""" + +import argparse +import asyncio +import contextlib +import csv +import dataclasses +import json +import multiprocessing +import os +import subprocess +import sys +import time +from typing import List, Dict, Tuple, Optional + +import buildbot + +# We have some BIG variables +csv.field_size_limit(sys.maxsize) + + +class DataclassJSONEncoder(json.JSONEncoder): + """JSONEncoder for our custom types.""" + def default(self, o): + if dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + return super().default(o) + + +def GetProducts(): + """Get the all of the available TARGET_PRODUCT values.""" + try: + stdout = subprocess.check_output(["build/soong/bin/list_products"], text=True) + except subprocess.CalledProcessError: + sys.exit(1) + return [s.strip() for s in stdout.splitlines() if s.strip()] + + +def GetReleases(product): + """For a given product, get the release configs available to it.""" + if True: + # Hard code the list + mainline_products = [ + "module_arm", + "module_x86", + "module_arm64", + "module_riscv64", + "module_x86_64", + "module_arm64only", + "module_x86_64only", + ] + if product in mainline_products: + return ["trunk_staging", "trunk", "mainline"] + else: + return ["trunk_staging", "trunk", "next"] + else: + # Get it from the build system + try: + stdout = subprocess.check_output(["build/soong/bin/list_releases", product], text=True) + except subprocess.CalledProcessError: + sys.exit(1) + return [s.strip() for s in stdout.splitlines() if s.strip()] + + +def GenerateAllLunchTargets(): + """Generate the full list of lunch targets.""" + for product in GetProducts(): + for release in GetReleases(product): + for variant in ["user", "userdebug", "eng"]: + yield (product, release, variant) + + +async def ParallelExec(parallelism, tasks): + ''' + ParallelExec takes a parallelism number, and an iterator of tasks to run. + Then it will run all the tasks, but a maximum of parallelism will be run at + any given time. The tasks must be async functions that accept one argument, + which will be an integer id of the worker that they're running on. 
+ ''' + tasks = iter(tasks) + + overall_start = time.monotonic() + # lists so they can be modified from the inner function + total_duration = [0] + count = [0] + async def dispatch(worker): + while True: + try: + task = next(tasks) + item_start = time.monotonic() + await task(worker) + now = time.monotonic() + item_duration = now - item_start + count[0] += 1 + total_duration[0] += item_duration + sys.stderr.write(f"Timing: Items processed: {count[0]}, Wall time: {now-overall_start:0.1f} sec, Throughput: {(now-overall_start)/count[0]:0.3f} sec per item, Average duration: {total_duration[0]/count[0]:0.1f} sec\n") + except StopIteration: + return + + await asyncio.gather(*[dispatch(worker) for worker in range(parallelism)]) + + +async def DumpProductConfigs(out, generator, out_dir): + """Collects all of the product config data and store it in file.""" + # Write the outer json list by hand so we can stream it + out.write("[") + try: + first_result = [True] # a list so it can be modified from the inner function + def run(lunch): + async def curried(worker): + sys.stderr.write(f"running: {'-'.join(lunch)}\n") + result = await DumpOneProductConfig(lunch, os.path.join(out_dir, f"lunchable_{worker}")) + if first_result[0]: + out.write("\n") + first_result[0] = False + else: + out.write(",\n") + result.dumpToFile(out) + sys.stderr.write(f"finished: {'-'.join(lunch)}\n") + return curried + + await ParallelExec(multiprocessing.cpu_count(), (run(lunch) for lunch in generator)) + finally: + # Close the json regardless of how we exit + out.write("\n]\n") + + +@dataclasses.dataclass(frozen=True) +class Variable: + """A variable name, value and where it was set.""" + name: str + value: str + location: str + + +@dataclasses.dataclass(frozen=True) +class ProductResult: + product: str + release: str + variant: str + board_includes: List[str] + product_includes: Dict[str, List[str]] + product_graph: List[Tuple[str, str]] + board_vars: List[Variable] + product_vars: List[Variable] + + def dumpToFile(self, f): + json.dump(self, f, sort_keys=True, indent=2, cls=DataclassJSONEncoder) + + +@dataclasses.dataclass(frozen=True) +class ProductError: + product: str + release: str + variant: str + error: str + + def dumpToFile(self, f): + json.dump(self, f, sort_keys=True, indent=2, cls=DataclassJSONEncoder) + + +def NormalizeInheritGraph(lists): + """Flatten the inheritance graph to a simple list for easier querying.""" + result = set() + for item in lists: + for i in range(len(item)): + result.add((item[i+1] if i < len(item)-1 else "", item[i])) + return sorted(list(result)) + + +def ParseDump(lunch, filename) -> ProductResult: + """Parses the csv and returns a tuple of the data.""" + def diff(initial, final): + return [after for after in final.values() if + initial.get(after.name, Variable(after.name, "", "<unset>")).value != after.value] + product_initial = {} + product_final = {} + board_initial = {} + board_final = {} + inherit_product = [] # The stack of inherit-product calls + product_includes = {} # Other files included by each of the properly imported files + board_includes = [] # Files included by boardconfig + with open(filename) as f: + phase = "" + for line in csv.reader(f): + if line[0] == "phase": + phase = line[1] + elif line[0] == "val": + # TOOD: We should skip these somewhere else. 
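To make the ParallelExec contract concrete, here is a small sketch of how it can be driven, assuming it sits in the same script as ParallelExec; the sleep-based tasks are purely illustrative:

import asyncio

def make_task(n):
    async def task(worker):            # each task receives the integer worker id
        await asyncio.sleep(0.01)      # stand-in for real work
        print(f"task {n} ran on worker {worker}")
    return task

async def _demo():
    # At most 3 tasks are in flight at any given time; ParallelExec reports
    # per-item throughput on stderr as it goes.
    await ParallelExec(3, (make_task(n) for n in range(10)))

# asyncio.run(_demo())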
+ if line[3].startswith("_ALL_RELEASE_FLAGS"): + continue + if line[3].startswith("PRODUCTS."): + continue + if phase == "PRODUCTS": + if line[2] == "initial": + product_initial[line[3]] = Variable(line[3], line[4], line[5]) + if phase == "PRODUCT-EXPAND": + if line[2] == "final": + product_final[line[3]] = Variable(line[3], line[4], line[5]) + if phase == "BOARD": + if line[2] == "initial": + board_initial[line[3]] = Variable(line[3], line[4], line[5]) + if line[2] == "final": + board_final[line[3]] = Variable(line[3], line[4], line[5]) + elif line[0] == "imported": + imports = [s.strip() for s in line[1].split()] + if imports: + inherit_product.append(imports) + inc = [s.strip() for s in line[2].split()] + for f in inc: + product_includes.setdefault(imports[0], []).append(f) + elif line[0] == "board_config_files": + board_includes += [s.strip() for s in line[1].split()] + return ProductResult( + product = lunch[0], + release = lunch[1], + variant = lunch[2], + product_vars = diff(product_initial, product_final), + board_vars = diff(board_initial, board_final), + product_graph = NormalizeInheritGraph(inherit_product), + product_includes = product_includes, + board_includes = board_includes + ) + + +async def DumpOneProductConfig(lunch, out_dir) -> ProductResult | ProductError: + """Print a single config's lunch info to stdout.""" + product, release, variant = lunch + + dumpconfig_file = os.path.join(out_dir, f"{product}-{release}-{variant}.csv") + + # Run get_build_var to bootstrap soong_ui for this target + env = dict(os.environ) + env["TARGET_PRODUCT"] = product + env["TARGET_RELEASE"] = release + env["TARGET_BUILD_VARIANT"] = variant + env["OUT_DIR"] = out_dir + process = await asyncio.create_subprocess_exec( + "build/soong/bin/get_build_var", + "TARGET_PRODUCT", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env + ) + stdout, _ = await process.communicate() + stdout = stdout.decode() + + if process.returncode != 0: + return ProductError( + product = product, + release = release, + variant = variant, + error = stdout + ) + else: + # Run kati to extract the data + process = await asyncio.create_subprocess_exec( + "prebuilts/build-tools/linux-x86/bin/ckati", + "-f", + "build/make/core/dumpconfig.mk", + f"TARGET_PRODUCT={product}", + f"TARGET_RELEASE={release}", + f"TARGET_BUILD_VARIANT={variant}", + f"DUMPCONFIG_FILE={dumpconfig_file}", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env + ) + stdout, _ = await process.communicate() + if process.returncode != 0: + stdout = stdout.decode() + return ProductError( + product = product, + release = release, + variant = variant, + error = stdout + ) + else: + # Parse and record the output + return ParseDump(lunch, dumpconfig_file) + + +def GetArgs(): + """Parse command line arguments.""" + parser = argparse.ArgumentParser( + description="Collect all of the make variables from product config.", + epilog="NOTE: This script must be run from the root of the source tree.") + parser.add_argument("--lunch", nargs="*") + parser.add_argument("--dist", action="store_true") + + return parser.parse_args() + + +async def main(): + args = GetArgs() + + out_dir = buildbot.OutDir() + + if args.dist: + cm = open(os.path.join(buildbot.DistDir(), "all_product_config.json"), "w") + else: + cm = contextlib.nullcontext(sys.stdout) + + + with cm as out: + if args.lunch: + lunches = [lunch.split("-") for lunch in args.lunch] + fail = False + for i in range(len(lunches)): + if len(lunches[i]) != 3: + sys.stderr.write(f"Malformed lunch targets: 
{args.lunch[i]}\n") + fail = True + if fail: + sys.exit(1) + if len(lunches) == 1: + result = await DumpOneProductConfig(lunches[0], out_dir) + result.dumpToFile(out) + out.write("\n") + else: + await DumpProductConfigs(out, lunches, out_dir) + else: + # All configs mode. This will exec single config mode in parallel + # for each lunch combo. Write output to $DIST_DIR. + await DumpProductConfigs(out, GenerateAllLunchTargets(), out_dir) + + +if __name__ == "__main__": + asyncio.run(main()) + + +# vim: set syntax=python ts=4 sw=4 sts=4: + diff --git a/ci/metrics_agent.py b/ci/metrics_agent.py new file mode 100644 index 0000000000..bc2479eab6 --- /dev/null +++ b/ci/metrics_agent.py @@ -0,0 +1,116 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""MetricsAgent is a singleton class that collects metrics for optimized build.""" + +from enum import Enum +import time +import metrics_pb2 +import os +import logging + + +class MetricsAgent: + _SOONG_METRICS_PATH = 'logs/soong_metrics' + _DIST_DIR = 'DIST_DIR' + _instance = None + + def __init__(self): + raise RuntimeError( + 'MetricsAgent cannot be instantialized, use instance() instead' + ) + + @classmethod + def instance(cls): + if not cls._instance: + cls._instance = cls.__new__(cls) + cls._instance._proto = metrics_pb2.OptimizedBuildMetrics() + cls._instance._init_proto() + cls._instance._target_results = dict() + + return cls._instance + + def _init_proto(self): + self._proto.analysis_perf.name = 'Optimized build analysis time.' + self._proto.packaging_perf.name = 'Optimized build total packaging time.' 
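A brief sketch of how the singleton accessor above is meant to be used, assuming it runs next to the class with the metrics_pb2 bindings importable; this is illustrative, not part of the change:

agent = MetricsAgent.instance()            # lazily creates and caches the single instance
assert agent is MetricsAgent.instance()    # every call returns the same object

try:
    MetricsAgent()                         # direct construction is intentionally disallowed
except RuntimeError:
    pass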
+ + def analysis_start(self): + self._proto.analysis_perf.start_time = time.time_ns() + + def analysis_end(self): + self._proto.analysis_perf.real_time = ( + time.time_ns() - self._proto.analysis_perf.start_time + ) + + def packaging_start(self): + self._proto.packaging_perf.start_time = time.time_ns() + + def packaging_end(self): + self._proto.packaging_perf.real_time = ( + time.time_ns() - self._proto.packaging_perf.start_time + ) + + def report_optimized_target(self, name: str): + target_result = metrics_pb2.OptimizedBuildMetrics.TargetOptimizationResult() + target_result.name = name + target_result.optimized = True + self._target_results[name] = target_result + + def report_unoptimized_target(self, name: str, optimization_rationale: str): + target_result = metrics_pb2.OptimizedBuildMetrics.TargetOptimizationResult() + target_result.name = name + target_result.optimization_rationale = optimization_rationale + target_result.optimized = False + self._target_results[name] = target_result + + def target_packaging_start(self, name: str): + target_result = self._target_results.get(name) + target_result.packaging_perf.start_time = time.time_ns() + self._target_results[name] = target_result + + def target_packaging_end(self, name: str): + target_result = self._target_results.get(name) + target_result.packaging_perf.real_time = ( + time.time_ns() - target_result.packaging_perf.start_time + ) + + def add_target_artifact( + self, + target_name: str, + artifact_name: str, + size: int, + included_modules: set[str], + ): + target_result = self.target_results.get(target_name) + artifact = ( + metrics_pb2.OptimizedBuildMetrics.TargetOptimizationResult.OutputArtifact() + ) + artifact.name = artifact_name + artifact.size = size + for module in included_modules: + artifact.included_modules.add(module) + target_result.output_artifacts.add(artifact) + + def end_reporting(self): + for target_result in self._target_results.values(): + self._proto.target_result.append(target_result) + soong_metrics_proto = metrics_pb2.MetricsBase() + # Read in existing metrics that should have been written out by the soong + # build command so that we don't overwrite them. + with open(os.path.join(os.environ[self._DIST_DIR], self._SOONG_METRICS_PATH), 'rb') as f: + soong_metrics_proto.ParseFromString(f.read()) + soong_metrics_proto.optimized_build_metrics.CopyFrom(self._proto) + logging.info(soong_metrics_proto) + with open(os.path.join(os.environ[self._DIST_DIR], self._SOONG_METRICS_PATH), 'wb') as f: + f.write(soong_metrics_proto.SerializeToString()) diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py index 8a529c7420..688bdd8370 100644 --- a/ci/optimized_targets.py +++ b/ci/optimized_targets.py @@ -14,9 +14,16 @@ # limitations under the License. from abc import ABC -from typing import Self import argparse import functools +import json +import logging +import os +import pathlib +import subprocess + +from build_context import BuildContext +import test_mapping_module_retriever class OptimizedBuildTarget(ABC): @@ -27,10 +34,13 @@ class OptimizedBuildTarget(ABC): build. 
""" + _SOONG_UI_BASH_PATH = 'build/soong/soong_ui.bash' + _PREBUILT_SOONG_ZIP_PATH = 'prebuilts/build-tools/linux-x86/bin/soong_zip' + def __init__( self, target: str, - build_context: dict[str, any], + build_context: BuildContext, args: argparse.Namespace, ): self.target = target @@ -38,19 +48,25 @@ class OptimizedBuildTarget(ABC): self.args = args def get_build_targets(self) -> set[str]: - features = self.build_context.get('enabledBuildFeatures', []) + features = self.build_context.enabled_build_features if self.get_enabled_flag() in features: - return self.get_build_targets_impl() + self.modules_to_build = self.get_build_targets_impl() + return self.modules_to_build + + self.modules_to_build = {self.target} return {self.target} - def package_outputs(self): - features = self.build_context.get('enabledBuildFeatures', []) + def get_package_outputs_commands(self) -> list[list[str]]: + features = self.build_context.enabled_build_features if self.get_enabled_flag() in features: - return self.package_outputs_impl() + return self.get_package_outputs_commands_impl() - def package_outputs_impl(self): + return [] + + def get_package_outputs_commands_impl(self) -> list[list[str]]: raise NotImplementedError( - f'package_outputs_impl not implemented in {type(self).__name__}' + 'get_package_outputs_commands_impl not implemented in' + f' {type(self).__name__}' ) def get_enabled_flag(self): @@ -63,6 +79,88 @@ class OptimizedBuildTarget(ABC): f'get_build_targets_impl not implemented in {type(self).__name__}' ) + def _generate_zip_options_for_items( + self, + prefix: str = '', + relative_root: str = '', + list_files: list[str] | None = None, + files: list[str] | None = None, + directories: list[str] | None = None, + ) -> list[str]: + if not list_files and not files and not directories: + raise RuntimeError( + f'No items specified to be added to zip! Prefix: {prefix}, Relative' + f' root: {relative_root}' + ) + command_segment = [] + # These are all soong_zip options so consult soong_zip --help for specifics. + if prefix: + command_segment.append('-P') + command_segment.append(prefix) + if relative_root: + command_segment.append('-C') + command_segment.append(relative_root) + if list_files: + for list_file in list_files: + command_segment.append('-l') + command_segment.append(list_file) + if files: + for file in files: + command_segment.append('-f') + command_segment.append(file) + if directories: + for directory in directories: + command_segment.append('-D') + command_segment.append(directory) + + return command_segment + + def _query_soong_vars( + self, src_top: pathlib.Path, soong_vars: list[str] + ) -> dict[str, str]: + process_result = subprocess.run( + args=[ + f'{src_top / self._SOONG_UI_BASH_PATH}', + '--dumpvars-mode', + f'--abs-vars={" ".join(soong_vars)}', + ], + env=os.environ, + check=False, + capture_output=True, + text=True, + ) + if not process_result.returncode == 0: + logging.error('soong dumpvars command failed! stderr:') + logging.error(process_result.stderr) + raise RuntimeError('Soong dumpvars failed! See log for stderr.') + + if not process_result.stdout: + raise RuntimeError( + 'Necessary soong variables ' + soong_vars + ' not found.' + ) + + try: + return { + line.split('=')[0]: line.split('=')[1].strip("'") + for line in process_result.stdout.strip().split('\n') + } + except IndexError as e: + raise RuntimeError( + 'Error parsing soong dumpvars output! 
See output here:' + f' {process_result.stdout}', + e, + ) + + def _base_zip_command( + self, src_top: pathlib.Path, dist_dir: pathlib.Path, name: str + ) -> list[str]: + return [ + f'{src_top / self._PREBUILT_SOONG_ZIP_PATH }', + '-d', + '-o', + f'{dist_dir / name}', + ] + class NullOptimizer(OptimizedBuildTarget): """No-op target optimizer. @@ -77,15 +175,38 @@ class NullOptimizer(OptimizedBuildTarget): def get_build_targets(self): return {self.target} - def package_outputs(self): - pass + def get_package_outputs_commands(self): + return [] + + +class ChangeInfo: + + def __init__(self, change_info_file_path): + try: + with open(change_info_file_path) as change_info_file: + change_info_contents = json.load(change_info_file) + except json.decoder.JSONDecodeError: + logging.error(f'Failed to load CHANGE_INFO: {change_info_file_path}') + raise + + self._change_info_contents = change_info_contents + + def find_changed_files(self) -> set[str]: + changed_files = set() + + for change in self._change_info_contents['changes']: + project_path = change.get('projectPath') + '/' + + for revision in change.get('revisions'): + for file_info in revision.get('fileInfos'): + changed_files.add(project_path + file_info.get('path')) + + return changed_files class GeneralTestsOptimizer(OptimizedBuildTarget): """general-tests optimizer - TODO(b/358215235): Implement - This optimizer reads in the list of changed files from the file located in env[CHANGE_INFO] and uses this list alongside the normal TEST MAPPING logic to determine what test mapping modules will run for the given changes. It then @@ -93,8 +214,264 @@ class GeneralTestsOptimizer(OptimizedBuildTarget): normally built. """ + # List of modules that are built alongside general-tests as dependencies. + _REQUIRED_MODULES = frozenset([ + 'cts-tradefed', + 'vts-tradefed', + 'compatibility-host-util', + 'general-tests-shared-libs', + ]) + + def get_build_targets_impl(self) -> set[str]: + change_info_file_path = os.environ.get('CHANGE_INFO') + if not change_info_file_path: + logging.info( + 'No CHANGE_INFO env var found, general-tests optimization disabled.' + ) + return {'general-tests'} + + test_infos = self.build_context.test_infos + test_mapping_test_groups = set() + for test_info in test_infos: + is_test_mapping = test_info.is_test_mapping + current_test_mapping_test_groups = test_info.test_mapping_test_groups + uses_general_tests = test_info.build_target_used('general-tests') + + if uses_general_tests and not is_test_mapping: + logging.info( + 'Test uses general-tests.zip but is not test-mapping, general-tests' + ' optimization disabled.' 
+ ) + return {'general-tests'} + + if is_test_mapping: + test_mapping_test_groups.update(current_test_mapping_test_groups) + + change_info = ChangeInfo(change_info_file_path) + changed_files = change_info.find_changed_files() + + test_mappings = test_mapping_module_retriever.GetTestMappings( + changed_files, set() + ) + + modules_to_build = set(self._REQUIRED_MODULES) + + modules_to_build.update( + test_mapping_module_retriever.FindAffectedModules( + test_mappings, changed_files, test_mapping_test_groups + ) + ) + + return modules_to_build + + def get_package_outputs_commands_impl(self): + src_top = pathlib.Path(os.environ.get('TOP', os.getcwd())) + dist_dir = pathlib.Path(os.environ.get('DIST_DIR')) + + soong_vars = self._query_soong_vars( + src_top, + [ + 'HOST_OUT_TESTCASES', + 'TARGET_OUT_TESTCASES', + 'PRODUCT_OUT', + 'SOONG_HOST_OUT', + 'HOST_OUT', + ], + ) + host_out_testcases = pathlib.Path(soong_vars.get('HOST_OUT_TESTCASES')) + target_out_testcases = pathlib.Path(soong_vars.get('TARGET_OUT_TESTCASES')) + product_out = pathlib.Path(soong_vars.get('PRODUCT_OUT')) + soong_host_out = pathlib.Path(soong_vars.get('SOONG_HOST_OUT')) + host_out = pathlib.Path(soong_vars.get('HOST_OUT')) + + host_paths = [] + target_paths = [] + host_config_files = [] + target_config_files = [] + for module in self.modules_to_build: + # The required modules are handled separately, no need to package. + if module in self._REQUIRED_MODULES: + continue + + host_path = host_out_testcases / module + if os.path.exists(host_path): + host_paths.append(host_path) + self._collect_config_files(src_top, host_path, host_config_files) + + target_path = target_out_testcases / module + if os.path.exists(target_path): + target_paths.append(target_path) + self._collect_config_files(src_top, target_path, target_config_files) + + if not os.path.exists(host_path) and not os.path.exists(target_path): + logging.info(f'No host or target build outputs found for {module}.') + + zip_commands = [] + + zip_commands.extend( + self._get_zip_test_configs_zips_commands( + src_top, + dist_dir, + host_out, + product_out, + host_config_files, + target_config_files, + ) + ) + + zip_command = self._base_zip_command(src_top, dist_dir, 'general-tests.zip') + + # Add host testcases. + if host_paths: + zip_command.extend( + self._generate_zip_options_for_items( + prefix='host', + relative_root=f'{src_top / soong_host_out}', + directories=host_paths, + ) + ) + + # Add target testcases. + if target_paths: + zip_command.extend( + self._generate_zip_options_for_items( + prefix='target', + relative_root=f'{src_top / product_out}', + directories=target_paths, + ) + ) + + # TODO(lucafarsi): Push this logic into a general-tests-minimal build command + # Add necessary tools. These are also hardcoded in general-tests.mk. 
+ framework_path = soong_host_out / 'framework' + + zip_command.extend( + self._generate_zip_options_for_items( + prefix='host/tools', + relative_root=str(framework_path), + files=[ + f"{framework_path / 'cts-tradefed.jar'}", + f"{framework_path / 'compatibility-host-util.jar'}", + f"{framework_path / 'vts-tradefed.jar'}", + ], + ) + ) + + zip_commands.append(zip_command) + return zip_commands + + def _collect_config_files( + self, + src_top: pathlib.Path, + root_dir: pathlib.Path, + config_files: list[str], + ): + for root, dirs, files in os.walk(src_top / root_dir): + for file in files: + if file.endswith('.config'): + config_files.append(root_dir / file) + + def _get_zip_test_configs_zips_commands( + self, + src_top: pathlib.Path, + dist_dir: pathlib.Path, + host_out: pathlib.Path, + product_out: pathlib.Path, + host_config_files: list[str], + target_config_files: list[str], + ) -> tuple[list[str], list[str]]: + """Generate general-tests_configs.zip and general-tests_list.zip. + + general-tests_configs.zip contains all of the .config files that were + built and general-tests_list.zip contains a text file which lists + all of the .config files that are in general-tests_configs.zip. + + general-tests_configs.zip is organized as follows: + / + host/ + testcases/ + test_1.config + test_2.config + ... + target/ + testcases/ + test_1.config + test_2.config + ... + + So the process is we write out the paths to all the host config files into + one + file and all the paths to the target config files in another. We also write + the paths to all the config files into a third file to use for + general-tests_list.zip. + + Args: + dist_dir: dist directory. + host_out: host out directory. + product_out: product out directory. + host_config_files: list of all host config files. + target_config_files: list of all target config files. 
+ + Returns: + The commands to generate general-tests_configs.zip and + general-tests_list.zip + """ + with open( + f"{host_out / 'host_general-tests_list'}", 'w' + ) as host_list_file, open( + f"{product_out / 'target_general-tests_list'}", 'w' + ) as target_list_file, open( + f"{host_out / 'general-tests_list'}", 'w' + ) as list_file: + + for config_file in host_config_files: + host_list_file.write(f'{config_file}' + '\n') + list_file.write('host/' + os.path.relpath(config_file, host_out) + '\n') + + for config_file in target_config_files: + target_list_file.write(f'{config_file}' + '\n') + list_file.write( + 'target/' + os.path.relpath(config_file, product_out) + '\n' + ) + + zip_commands = [] + + tests_config_zip_command = self._base_zip_command( + src_top, dist_dir, 'general-tests_configs.zip' + ) + tests_config_zip_command.extend( + self._generate_zip_options_for_items( + prefix='host', + relative_root=str(host_out), + list_files=[f"{host_out / 'host_general-tests_list'}"], + ) + ) + + tests_config_zip_command.extend( + self._generate_zip_options_for_items( + prefix='target', + relative_root=str(product_out), + list_files=[f"{product_out / 'target_general-tests_list'}"], + ), + ) + + zip_commands.append(tests_config_zip_command) + + tests_list_zip_command = self._base_zip_command( + src_top, dist_dir, 'general-tests_list.zip' + ) + tests_list_zip_command.extend( + self._generate_zip_options_for_items( + relative_root=str(host_out), + files=[f"{host_out / 'general-tests_list'}"], + ) + ) + zip_commands.append(tests_list_zip_command) + + return zip_commands + def get_enabled_flag(self): - return 'general-tests-optimized' + return 'general_tests_optimized' @classmethod def get_optimized_targets(cls) -> dict[str, OptimizedBuildTarget]: diff --git a/ci/optimized_targets_test.py b/ci/optimized_targets_test.py new file mode 100644 index 0000000000..0b0c0ec087 --- /dev/null +++ b/ci/optimized_targets_test.py @@ -0,0 +1,350 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
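Before the tests that follow, it may help to see the shape of one command produced by get_package_outputs_commands(); a sketch using placeholder paths (src_top=/src, dist_dir=/src/out/dist, HOST_OUT=/src/out/host/linux-x86, all assumed for illustration):

# Illustrative shape of the general-tests_list.zip packaging command; commands
# are returned as argument lists for the caller to run, they are not executed here.
cmd = [
    '/src/prebuilts/build-tools/linux-x86/bin/soong_zip',  # src_top / _PREBUILT_SOONG_ZIP_PATH
    '-d',
    '-o', '/src/out/dist/general-tests_list.zip',          # dist_dir / name
    '-C', '/src/out/host/linux-x86',                       # relative_root = HOST_OUT
    '-f', '/src/out/host/linux-x86/general-tests_list',    # the list file written above
]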
+ +"""Tests for optimized_targets.py""" + +import json +import logging +import os +import pathlib +import re +import subprocess +import textwrap +import unittest +from unittest import mock +from build_context import BuildContext +import optimized_targets +from pyfakefs import fake_filesystem_unittest + + +class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase): + + def setUp(self): + self.setUpPyfakefs() + + os_environ_patcher = mock.patch.dict('os.environ', {}) + self.addCleanup(os_environ_patcher.stop) + self.mock_os_environ = os_environ_patcher.start() + + self._setup_working_build_env() + self._write_change_info_file() + test_mapping_dir = pathlib.Path('/project/path/file/path') + test_mapping_dir.mkdir(parents=True) + self._write_test_mapping_file() + + def _setup_working_build_env(self): + self.change_info_file = pathlib.Path('/tmp/change_info') + self._write_soong_ui_file() + self._host_out_testcases = pathlib.Path('/tmp/top/host_out_testcases') + self._host_out_testcases.mkdir(parents=True) + self._target_out_testcases = pathlib.Path('/tmp/top/target_out_testcases') + self._target_out_testcases.mkdir(parents=True) + self._product_out = pathlib.Path('/tmp/top/product_out') + self._product_out.mkdir(parents=True) + self._soong_host_out = pathlib.Path('/tmp/top/soong_host_out') + self._soong_host_out.mkdir(parents=True) + self._host_out = pathlib.Path('/tmp/top/host_out') + self._host_out.mkdir(parents=True) + + self._dist_dir = pathlib.Path('/tmp/top/out/dist') + self._dist_dir.mkdir(parents=True) + + self.mock_os_environ.update({ + 'CHANGE_INFO': str(self.change_info_file), + 'TOP': '/tmp/top', + 'DIST_DIR': '/tmp/top/out/dist', + }) + + def _write_soong_ui_file(self): + soong_path = pathlib.Path('/tmp/top/build/soong') + soong_path.mkdir(parents=True) + with open(os.path.join(soong_path, 'soong_ui.bash'), 'w') as f: + f.write(""" + #/bin/bash + echo HOST_OUT_TESTCASES='/tmp/top/host_out_testcases' + echo TARGET_OUT_TESTCASES='/tmp/top/target_out_testcases' + echo PRODUCT_OUT='/tmp/top/product_out' + echo SOONG_HOST_OUT='/tmp/top/soong_host_out' + echo HOST_OUT='/tmp/top/host_out' + """) + os.chmod(os.path.join(soong_path, 'soong_ui.bash'), 0o666) + + def _write_change_info_file(self): + change_info_contents = { + 'changes': [{ + 'projectPath': '/project/path', + 'revisions': [{ + 'fileInfos': [{ + 'path': 'file/path/file_name', + }], + }], + }] + } + + with open(self.change_info_file, 'w') as f: + json.dump(change_info_contents, f) + + def _write_test_mapping_file(self): + test_mapping_contents = { + 'test-mapping-group': [ + { + 'name': 'test_mapping_module', + }, + ], + } + + with open('/project/path/file/path/TEST_MAPPING', 'w') as f: + json.dump(test_mapping_contents, f) + + def test_general_tests_optimized(self): + optimizer = self._create_general_tests_optimizer() + + build_targets = optimizer.get_build_targets() + + expected_build_targets = set( + optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES + ) + expected_build_targets.add('test_mapping_module') + + self.assertSetEqual(build_targets, expected_build_targets) + + def test_no_change_info_no_optimization(self): + del os.environ['CHANGE_INFO'] + + optimizer = self._create_general_tests_optimizer() + + build_targets = optimizer.get_build_targets() + + self.assertSetEqual(build_targets, {'general-tests'}) + + def test_mapping_groups_unused_module_not_built(self): + test_context = self._create_test_context() + test_context['testInfos'][0]['extraOptions'] = [ + { + 'key': 'additional-files-filter', + 
'values': ['general-tests.zip'], + }, + { + 'key': 'test-mapping-test-group', + 'values': ['unused-test-mapping-group'], + }, + ] + optimizer = self._create_general_tests_optimizer( + build_context=self._create_build_context(test_context=test_context) + ) + + build_targets = optimizer.get_build_targets() + + expected_build_targets = set( + optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES + ) + self.assertSetEqual(build_targets, expected_build_targets) + + def test_general_tests_used_by_non_test_mapping_test_no_optimization(self): + test_context = self._create_test_context() + test_context['testInfos'][0]['extraOptions'] = [{ + 'key': 'additional-files-filter', + 'values': ['general-tests.zip'], + }] + optimizer = self._create_general_tests_optimizer( + build_context=self._create_build_context(test_context=test_context) + ) + + build_targets = optimizer.get_build_targets() + + self.assertSetEqual(build_targets, {'general-tests'}) + + def test_malformed_change_info_raises(self): + with open(self.change_info_file, 'w') as f: + f.write('not change info') + + optimizer = self._create_general_tests_optimizer() + + with self.assertRaises(json.decoder.JSONDecodeError): + build_targets = optimizer.get_build_targets() + + def test_malformed_test_mapping_raises(self): + with open('/project/path/file/path/TEST_MAPPING', 'w') as f: + f.write('not test mapping') + + optimizer = self._create_general_tests_optimizer() + + with self.assertRaises(json.decoder.JSONDecodeError): + build_targets = optimizer.get_build_targets() + + @mock.patch('subprocess.run') + def test_packaging_outputs_success(self, subprocess_run): + subprocess_run.return_value = self._get_soong_vars_output() + optimizer = self._create_general_tests_optimizer() + self._set_up_build_outputs(['test_mapping_module']) + + targets = optimizer.get_build_targets() + package_commands = optimizer.get_package_outputs_commands() + + self._verify_soong_zip_commands(package_commands, ['test_mapping_module']) + + @mock.patch('subprocess.run') + def test_get_soong_dumpvars_fails_raises(self, subprocess_run): + subprocess_run.return_value = self._get_soong_vars_output(return_code=-1) + optimizer = self._create_general_tests_optimizer() + self._set_up_build_outputs(['test_mapping_module']) + + targets = optimizer.get_build_targets() + + with self.assertRaisesRegex(RuntimeError, 'Soong dumpvars failed!'): + package_commands = optimizer.get_package_outputs_commands() + + @mock.patch('subprocess.run') + def test_get_soong_dumpvars_bad_output_raises(self, subprocess_run): + subprocess_run.return_value = self._get_soong_vars_output( + stdout='This output is bad' + ) + optimizer = self._create_general_tests_optimizer() + self._set_up_build_outputs(['test_mapping_module']) + + targets = optimizer.get_build_targets() + + with self.assertRaisesRegex( + RuntimeError, 'Error parsing soong dumpvars output' + ): + package_commands = optimizer.get_package_outputs_commands() + + def _create_general_tests_optimizer(self, build_context: BuildContext = None): + if not build_context: + build_context = self._create_build_context() + return optimized_targets.GeneralTestsOptimizer( + 'general-tests', build_context, None + ) + + def _create_build_context( + self, + general_tests_optimized: bool = True, + test_context: dict[str, any] = None, + ) -> BuildContext: + if not test_context: + test_context = self._create_test_context() + build_context_dict = {} + build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}] + if general_tests_optimized: + 
build_context_dict['enabledBuildFeatures'].append( + {'name': 'general_tests_optimized'} + ) + build_context_dict['testContext'] = test_context + return BuildContext(build_context_dict) + + def _create_test_context(self): + return { + 'testInfos': [ + { + 'name': 'atp_test', + 'target': 'test_target', + 'branch': 'branch', + 'extraOptions': [ + { + 'key': 'additional-files-filter', + 'values': ['general-tests.zip'], + }, + { + 'key': 'test-mapping-test-group', + 'values': ['test-mapping-group'], + }, + ], + 'command': '/tf/command', + 'extraBuildTargets': [ + 'extra_build_target', + ], + }, + ], + } + + def _get_soong_vars_output( + self, return_code: int = 0, stdout: str = '' + ) -> subprocess.CompletedProcess: + return_value = subprocess.CompletedProcess(args=[], returncode=return_code) + if not stdout: + stdout = textwrap.dedent(f"""\ + HOST_OUT_TESTCASES='{self._host_out_testcases}' + TARGET_OUT_TESTCASES='{self._target_out_testcases}' + PRODUCT_OUT='{self._product_out}' + SOONG_HOST_OUT='{self._soong_host_out}' + HOST_OUT='{self._host_out}'""") + + return_value.stdout = stdout + return return_value + + def _set_up_build_outputs(self, targets: list[str]): + for target in targets: + host_dir = self._host_out_testcases / target + host_dir.mkdir() + (host_dir / f'{target}.config').touch() + (host_dir / f'test_file').touch() + + target_dir = self._target_out_testcases / target + target_dir.mkdir() + (target_dir / f'{target}.config').touch() + (target_dir / f'test_file').touch() + + def _verify_soong_zip_commands(self, commands: list[str], targets: list[str]): + """Verify the structure of the zip commands. + + Zip commands have to start with the soong_zip binary path, then are followed + by a couple of options and the name of the file being zipped. Depending on + which zip we are creating look for a few essential items being added in + those zips. + + Args: + commands: list of command lists + targets: list of targets expected to be in general-tests.zip + """ + for command in commands: + self.assertEqual( + '/tmp/top/prebuilts/build-tools/linux-x86/bin/soong_zip', + command[0], + ) + self.assertEqual('-d', command[1]) + self.assertEqual('-o', command[2]) + match (command[3]): + case '/tmp/top/out/dist/general-tests_configs.zip': + self.assertIn(f'{self._host_out}/host_general-tests_list', command) + self.assertIn( + f'{self._product_out}/target_general-tests_list', command + ) + return + case '/tmp/top/out/dist/general-tests_list.zip': + self.assertIn('-f', command) + self.assertIn(f'{self._host_out}/general-tests_list', command) + return + case '/tmp/top/out/dist/general-tests.zip': + for target in targets: + self.assertIn(f'{self._host_out_testcases}/{target}', command) + self.assertIn(f'{self._target_out_testcases}/{target}', command) + self.assertIn( + f'{self._soong_host_out}/framework/cts-tradefed.jar', command + ) + self.assertIn( + f'{self._soong_host_out}/framework/compatibility-host-util.jar', + command, + ) + self.assertIn( + f'{self._soong_host_out}/framework/vts-tradefed.jar', command + ) + return + case _: + self.fail(f'malformed command: {command}') + + +if __name__ == '__main__': + # Setup logging to be silent so unit tests can pass through TF. 
+ logging.disable(logging.ERROR) + unittest.main() diff --git a/ci/test_discovery_agent.py b/ci/test_discovery_agent.py new file mode 100644 index 0000000000..008ee47f8e --- /dev/null +++ b/ci/test_discovery_agent.py @@ -0,0 +1,120 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test discovery agent that uses TradeFed to discover test artifacts.""" +import glob +import json +import logging +import os +import subprocess + + +class TestDiscoveryAgent: + """Test discovery agent.""" + + _TRADEFED_PREBUILT_JAR_RELATIVE_PATH = ( + "vendor/google_tradefederation/prebuilts/filegroups/google-tradefed/" + ) + + _TRADEFED_NO_POSSIBLE_TEST_DISCOVERY_KEY = "NoPossibleTestDiscovery" + + _TRADEFED_TEST_ZIP_REGEXES_LIST_KEY = "TestZipRegexes" + + _TRADEFED_DISCOVERY_OUTPUT_FILE_NAME = "test_discovery_agent.txt" + + def __init__( + self, + tradefed_args: list[str], + test_mapping_zip_path: str = "", + tradefed_jar_revelant_files_path: str = _TRADEFED_PREBUILT_JAR_RELATIVE_PATH, + ): + self.tradefed_args = tradefed_args + self.test_mapping_zip_path = test_mapping_zip_path + self.tradefed_jar_relevant_files_path = tradefed_jar_revelant_files_path + + def discover_test_zip_regexes(self) -> list[str]: + """Discover test zip regexes from TradeFed. + + Returns: + A list of test zip regexes that TF is going to try to pull files from. 
+ """ + test_discovery_output_file_name = os.path.join( + os.environ.get('TOP'), 'out', self._TRADEFED_DISCOVERY_OUTPUT_FILE_NAME + ) + with open( + test_discovery_output_file_name, mode="w+t" + ) as test_discovery_output_file: + java_args = [] + java_args.append("prebuilts/jdk/jdk21/linux-x86/bin/java") + java_args.append("-cp") + java_args.append( + self.create_classpath(self.tradefed_jar_relevant_files_path) + ) + java_args.append( + "com.android.tradefed.observatory.TestZipDiscoveryExecutor" + ) + java_args.extend(self.tradefed_args) + env = os.environ.copy() + env.update({"DISCOVERY_OUTPUT_FILE": test_discovery_output_file.name}) + logging.info(f"Calling test discovery with args: {java_args}") + try: + result = subprocess.run(args=java_args, env=env, text=True, check=True) + logging.info(f"Test zip discovery output: {result.stdout}") + except subprocess.CalledProcessError as e: + raise TestDiscoveryError( + f"Failed to run test discovery, strout: {e.stdout}, strerr:" + f" {e.stderr}, returncode: {e.returncode}" + ) + data = json.loads(test_discovery_output_file.read()) + logging.info(f"Test discovery result file content: {data}") + if ( + self._TRADEFED_NO_POSSIBLE_TEST_DISCOVERY_KEY in data + and data[self._TRADEFED_NO_POSSIBLE_TEST_DISCOVERY_KEY] + ): + raise TestDiscoveryError("No possible test discovery") + if ( + data[self._TRADEFED_TEST_ZIP_REGEXES_LIST_KEY] is None + or data[self._TRADEFED_TEST_ZIP_REGEXES_LIST_KEY] is [] + ): + raise TestDiscoveryError("No test zip regexes returned") + return data[self._TRADEFED_TEST_ZIP_REGEXES_LIST_KEY] + + def discover_test_modules(self) -> list[str]: + """Discover test modules from TradeFed. + + Returns: + A list of test modules that TradeFed is going to execute based on the + TradeFed test args. + """ + return [] + + def create_classpath(self, directory): + """Creates a classpath string from all .jar files in the given directory. + + Args: + directory: The directory to search for .jar files. + + Returns: + A string representing the classpath, with jar files separated by the + OS-specific path separator (e.g., ':' on Linux/macOS, ';' on Windows). + """ + jar_files = glob.glob(os.path.join(directory, "*.jar")) + return os.pathsep.join(jar_files) + + +class TestDiscoveryError(Exception): + """A TestDiscoveryErrorclass.""" + + def __init__(self, message): + super().__init__(message) + self.message = message diff --git a/ci/test_mapping_module_retriever.py b/ci/test_mapping_module_retriever.py index d2c13c0e7d..c93cdd5953 100644 --- a/ci/test_mapping_module_retriever.py +++ b/ci/test_mapping_module_retriever.py @@ -17,11 +17,13 @@ Simple parsing code to scan test_mapping files and determine which modules are needed to build for the given list of changed files. TODO(lucafarsi): Deduplicate from artifact_helper.py """ +# TODO(lucafarsi): Share this logic with the original logic in +# test_mapping_test_retriever.py -from typing import Any, Dict, Set, Text import json import os import re +from typing import Any # Regex to extra test name from the path of test config file. TEST_NAME_REGEX = r'(?:^|.*/)([^/]+)\.config' @@ -39,7 +41,7 @@ TEST_MAPPING = 'TEST_MAPPING' _COMMENTS_RE = re.compile(r'(\"(?:[^\"\\]|\\.)*\"|(?=//))(?://.*)?') -def FilterComments(test_mapping_file: Text) -> Text: +def FilterComments(test_mapping_file: str) -> str: """Remove comments in TEST_MAPPING file to valid format. Only '//' is regarded as comments. 
@@ -52,8 +54,8 @@ def FilterComments(test_mapping_file: Text) -> Text: """ return re.sub(_COMMENTS_RE, r'\1', test_mapping_file) -def GetTestMappings(paths: Set[Text], - checked_paths: Set[Text]) -> Dict[Text, Dict[Text, Any]]: +def GetTestMappings(paths: set[str], + checked_paths: set[str]) -> dict[str, dict[str, Any]]: """Get the affected TEST_MAPPING files. TEST_MAPPING files in source code are packaged into a build artifact @@ -123,3 +125,68 @@ def GetTestMappings(paths: Set[Text], pass return test_mappings + + +def FindAffectedModules( + test_mappings: dict[str, Any], + changed_files: set[str], + test_mapping_test_groups: set[str], +) -> set[str]: + """Find affected test modules. + + Find the affected set of test modules that would run in a test mapping run based on the given test mappings, changed files, and test mapping test group. + + Args: + test_mappings: A set of test mappings returned by GetTestMappings in the following format: + { + 'test_mapping_file_path': { + 'group_name' : [ + 'name': 'module_name', + ], + } + } + changed_files: A set of files changed for the given run. + test_mapping_test_groups: A set of test mapping test groups that are being considered for the given run. + + Returns: + A set of test module names which would run for a test mapping test run with the given parameters. + """ + + modules = set() + + for test_mapping in test_mappings.values(): + for group_name, group in test_mapping.items(): + # If a module is not in any of the test mapping groups being tested skip + # it. + if group_name not in test_mapping_test_groups: + continue + + for entry in group: + module_name = entry.get('name') + + if not module_name: + continue + + file_patterns = entry.get('file_patterns') + if not file_patterns: + modules.add(module_name) + continue + + if matches_file_patterns(file_patterns, changed_files): + modules.add(module_name) + + return modules + +def MatchesFilePatterns( + file_patterns: list[set], changed_files: set[str] +) -> bool: + """Checks if any of the changed files match any of the file patterns. + + Args: + file_patterns: A list of file patterns to match against. + changed_files: A set of files to check against the file patterns. + + Returns: + True if any of the changed files match any of the file patterns. + """ + return any(re.search(pattern, "|".join(changed_files)) for pattern in file_patterns) diff --git a/cogsetup.sh b/cogsetup.sh deleted file mode 100644 index ef1485d5f2..0000000000 --- a/cogsetup.sh +++ /dev/null @@ -1,71 +0,0 @@ -# -# Copyright (C) 2023 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# This file is executed by build/envsetup.sh, and can use anything -# defined in envsetup.sh. -function _create_out_symlink_for_cog() { - if [[ "${OUT_DIR}" == "" ]]; then - OUT_DIR="out" - fi - - # getoutdir ensures paths are absolute. 
envsetup could be called from a - # directory other than the root of the source tree - local outdir=$(getoutdir) - if [[ -L "${outdir}" ]]; then - return - fi - if [ -d "${outdir}" ]; then - echo -e "\tOutput directory ${outdir} cannot be present in a Cog workspace." - echo -e "\tDelete \"${outdir}\" or create a symlink from \"${outdir}\" to a directory outside your workspace." - return 1 - fi - - DEFAULT_OUTPUT_DIR="${HOME}/.cog/android-build-out" - mkdir -p ${DEFAULT_OUTPUT_DIR} - ln -s ${DEFAULT_OUTPUT_DIR} ${outdir} -} - -# This function sets up the build environment to be appropriate for Cog. -function _setup_cog_env() { - _create_out_symlink_for_cog - if [ "$?" -eq "1" ]; then - echo -e "\e[0;33mWARNING:\e[00m Cog environment setup failed!" - return 1 - fi - - export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog" - - # Running repo command within Cog workspaces is not supported, so override - # it with this function. If the user is running repo within a Cog workspace, - # we'll fail with an error, otherwise, we run the original repo command with - # the given args. - if ! ORIG_REPO_PATH=`which repo`; then - return 0 - fi - function repo { - if [[ "${PWD}" == /google/cog/* ]]; then - echo "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces." - return 1 - fi - ${ORIG_REPO_PATH} "$@" - } -} - -if [[ "${PWD}" != /google/cog/* ]]; then - echo -e "\e[01;31mERROR:\e[0m This script must be run from a Cog workspace." -fi - -_setup_cog_env diff --git a/core/Makefile b/core/Makefile index 96588e3976..7b4f58dfbd 100644 --- a/core/Makefile +++ b/core/Makefile @@ -169,7 +169,7 @@ $(foreach cf,$(unique_product_copy_files_pairs), \ $(eval $(call copy-xml-file-checked,$(_src),$(_fulldest))),\ $(if $(and $(filter %.jar,$(_dest)),$(filter $(basename $(notdir $(_dest))),$(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))),\ $(eval $(call copy-and-uncompress-dexs,$(_src),$(_fulldest))), \ - $(if $(filter init%rc,$(notdir $(_dest)))$(filter %/etc/init,$(dir $(_dest))),\ + $(if $(filter init%rc,$(notdir $(_dest)))$(filter %/etc/init/,$(dir $(_dest))),\ $(eval $(call copy-init-script-file-checked,$(_src),$(_fulldest))),\ $(if $(and $(filter true,$(check_elf_prebuilt_product_copy_files)), \ $(filter bin lib lib64,$(subst /,$(space),$(_dest)))), \ @@ -192,6 +192,34 @@ product_copy_files_ignored := unique_product_copy_files_pairs := unique_product_copy_files_destinations := + +# Returns a list of EXTRA_INSTALL_ZIPS trios whose primary file is contained within $(1) +# The trios will contain the primary installed file : the directory to unzip the zip to : the zip +define relevant-extra-install-zips +$(strip $(foreach p,$(EXTRA_INSTALL_ZIPS), \ + $(if $(filter $(call word-colon,1,$(p)),$(1)), \ + $(p)))) +endef + +# Writes a text file that contains all of the files that will be inside a partition. +# All the file paths will be relative to the partition's staging directory. +# It will also take into account files inside zips listed in EXTRA_INSTALL_ZIPS. 
+# +# Arguments: +# $(1): Output file +# $(2): The partition's staging directory +# $(3): Files to include in the partition +define write-partition-file-list +$(1): PRIVATE_FILES := $(subst $(2)/,,$(filter $(2)/%,$(3))) +$(1): PRIVATE_EXTRA_INSTALL_ZIPS := $(call relevant-extra-install-zips,$(filter $(2)/%,$(3))) +$(1): $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(foreach p,$(call relevant-extra-install-zips,$(filter $(2)/%,$(3))),$(call word-colon,3,$(p))) + @echo Writing $$@ + rm -f $$@ + echo -n > $$@ + $$(foreach f,$$(PRIVATE_FILES),echo "$$(f)" >> $$@$$(newline)) + $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(2) $$(PRIVATE_EXTRA_INSTALL_ZIPS) >> $$@ +endef + # ----------------------------------------------------------------- # Returns the max allowed size for an image suitable for hash verification # (e.g., boot.img, recovery.img, etc). @@ -692,7 +720,7 @@ endif BOARD_KERNEL_MODULE_DIRS += top -# Default to not generating modules.dep for kernel modules on system +# Default to not generating modules.load for kernel modules on system # side. We should only load these modules if they are depended by vendor # side modules. ifeq ($(BOARD_SYSTEM_KERNEL_MODULES_LOAD),) @@ -717,7 +745,7 @@ $(foreach kmd,$(BOARD_KERNEL_MODULE_DIRS), \ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-charger-load,$(kmd))) \ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-charger-load,$(kmd))) \ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \ - $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT_SYSTEM)),system,modules.load,,$(kmd))) \ + $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT)),system,modules.load,,$(kmd))) \ $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(kmd))),\ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,$(GENERIC_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd))))) @@ -844,6 +872,7 @@ SOONG_CONV := $(sort $(SOONG_CONV)) SOONG_CONV_DATA := $(call intermediates-dir-for,PACKAGING,soong_conversion)/soong_conv_data $(SOONG_CONV_DATA): @rm -f $@ + @touch $@ # This file must be present even if SOONG_CONV is empty. 
@$(foreach s,$(SOONG_CONV),echo "$(s),$(SOONG_CONV.$(s).TYPE),$(sort $(SOONG_CONV.$(s).PROBLEMS)),$(sort $(filter-out $(SOONG_ALREADY_CONV),$(SOONG_CONV.$(s).DEPS))),$(sort $(SOONG_CONV.$(s).MAKEFILES)),$(sort $(SOONG_CONV.$(s).INSTALLED))" >>$@;) $(call declare-1p-target,$(SOONG_CONV_DATA),build) @@ -856,11 +885,6 @@ $(SOONG_TO_CONVERT): $(SOONG_CONV_DATA) $(SOONG_TO_CONVERT_SCRIPT) $(call declare-1p-target,$(SOONG_TO_CONVERT),build) $(call dist-for-goals,droidcore-unbundled,$(SOONG_TO_CONVERT)) -$(PRODUCT_OUT)/product_packages.txt: - @rm -f $@ - echo "" > $@ - $(foreach x,$(PRODUCT_PACKAGES),echo $(x) >> $@$(newline)) - MK2BP_CATALOG_SCRIPT := build/make/tools/mk2bp_catalog.py PRODUCT_PACKAGES_TXT := $(PRODUCT_OUT)/product_packages.txt MK2BP_REMAINING_HTML := $(PRODUCT_OUT)/mk2bp_remaining.html @@ -941,27 +965,12 @@ systemimage: # ----------------------------------------------------------------- -.PHONY: event-log-tags - -# Produce an event logs tag file for everything we know about, in order -# to properly allocate numbers. Then produce a file that's filtered -# for what's going to be installed. - -all_event_log_tags_file := $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt - event_log_tags_file := $(TARGET_OUT)/etc/event-log-tags # Include tags from all packages that we know about all_event_log_tags_src := \ $(sort $(foreach m, $(ALL_MODULES), $(ALL_MODULES.$(m).EVENT_LOG_TAGS))) -$(all_event_log_tags_file): PRIVATE_SRC_FILES := $(all_event_log_tags_src) -$(all_event_log_tags_file): $(all_event_log_tags_src) $(MERGETAGS) build/make/tools/event_log_tags.py - $(hide) mkdir -p $(dir $@) - $(hide) $(MERGETAGS) -o $@ $(PRIVATE_SRC_FILES) - -$(call declare-0p-target,$(all_event_log_tags_file)) - # Include tags from all packages included in this product, plus all # tags that are part of the system (ie, not in a vendor/ or device/ # directory). @@ -973,13 +982,13 @@ event_log_tags_src := \ $(filter-out vendor/% device/% out/%,$(all_event_log_tags_src))) $(event_log_tags_file): PRIVATE_SRC_FILES := $(event_log_tags_src) -$(event_log_tags_file): PRIVATE_MERGED_FILE := $(all_event_log_tags_file) -$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py +$(event_log_tags_file): $(event_log_tags_src) $(MERGETAGS) $(hide) mkdir -p $(dir $@) - $(hide) $(MERGETAGS) -o $@ -m $(PRIVATE_MERGED_FILE) $(PRIVATE_SRC_FILES) + $(hide) $(MERGETAGS) -o $@ $(PRIVATE_SRC_FILES) $(eval $(call declare-0p-target,$(event_log_tags_file))) +.PHONY: event-log-tags event-log-tags: $(event_log_tags_file) ALL_DEFAULT_INSTALLED_MODULES += $(event_log_tags_file) @@ -1268,8 +1277,11 @@ boototapackage_16k: $(BUILT_BOOT_OTA_PACKAGE_16K) endif +ramdisk_intermediates :=$= $(call intermediates-dir-for,PACKAGING,ramdisk) +$(eval $(call write-partition-file-list,$(ramdisk_intermediates)/file_list.txt,$(TARGET_RAMDISK_OUT),$(INTERNAL_RAMDISK_FILES))) + +# The value of RAMDISK_NODE_LIST is defined in system/core/rootdir/Android.bp. # This file contains /dev nodes description added to the generic ramdisk -RAMDISK_NODE_LIST := $(PRODUCT_OUT)/ramdisk_node_list # We just build this directly to the install location. 
INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET) @@ -1565,6 +1577,7 @@ $(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_INIT_BOOT_KEY_PATH) $(AVBTOOL) add_hash_footer \ --image $@ \ $(call get-partition-size-argument,$(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE)) \ + --salt $$(sha256sum $(BUILD_NUMBER_FILE) $(BUILD_DATETIME_FILE) | cut -d " " -f 1 | tr -d '\n') \ --partition_name init_boot $(INTERNAL_AVB_INIT_BOOT_SIGNING_ARGS) \ $(BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS) @@ -1623,6 +1636,8 @@ INTERNAL_VENDOR_RAMDISK_FILES := $(filter $(TARGET_VENDOR_RAMDISK_OUT)/%, \ $(ALL_DEFAULT_INSTALLED_MODULES)) INTERNAL_VENDOR_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot)/vendor_ramdisk.cpio$(RAMDISK_EXT) +vendor_ramdisk_intermediates :=$= $(call intermediates-dir-for,PACKAGING,vendor_ramdisk) +$(eval $(call write-partition-file-list,$(vendor_ramdisk_intermediates)/file_list.txt,$(TARGET_VENDOR_RAMDISK_OUT),$(INTERNAL_VENDOR_RAMDISK_FILES))) # Exclude recovery files in the default vendor ramdisk if including a standalone # recovery ramdisk in vendor_boot. @@ -1677,12 +1692,13 @@ ifdef INTERNAL_KERNEL_CMDLINE INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_cmdline "$(INTERNAL_KERNEL_CMDLINE)" endif -ifdef INTERNAL_BOOTCONFIG +ifneq (, $(INTERNAL_BOOTCONFIG)$(INTERNAL_BOOTCONFIG_FILE)) INTERNAL_VENDOR_BOOTCONFIG_TARGET := $(PRODUCT_OUT)/vendor-bootconfig.img $(INTERNAL_VENDOR_BOOTCONFIG_TARGET): rm -f $@ $(foreach param,$(INTERNAL_BOOTCONFIG), \ printf "%s\n" $(param) >> $@;) + cat $(INTERNAL_BOOTCONFIG_FILE) >> $@ INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_bootconfig $(INTERNAL_VENDOR_BOOTCONFIG_TARGET) endif @@ -1737,6 +1753,7 @@ $(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_VENDOR_BOOTIMAGE_KE $(AVBTOOL) add_hash_footer \ --image $@ \ $(call get-partition-size-argument,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE)) \ + --salt $$(sha256sum $(BUILD_NUMBER_FILE) $(BUILD_DATETIME_FILE) | cut -d " " -f 1 | tr -d '\n') \ --partition_name vendor_boot $(INTERNAL_AVB_VENDOR_BOOT_SIGNING_ARGS) \ $(BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS) else @@ -1976,7 +1993,8 @@ ALL_INSTALLED_NOTICE_FILES := \ # $1 installed file path, e.g. out/target/product/vsoc_x86_64/system_ext/etc/NOTICE.xml.gz define is-notice-file -$(if $(findstring $1,$(ALL_INSTALLED_NOTICE_FILES)),Y) +$(if $(filter true,$(PRODUCT_USE_SOONG_NOTICE_XML)),, \ + $(if $(findstring $1,$(ALL_INSTALLED_NOTICE_FILES)),Y)) endef # Notice files are copied to TARGET_OUT_NOTICE_FILES as a side-effect of their module @@ -2379,7 +2397,7 @@ $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\ $(hide) echo "root_dir=$(TARGET_ROOT_OUT)" >> $(1) $(if $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)),\ $(hide) echo "use_dynamic_partition_size=true" >> $(1)) -$(if $(COPY_IMAGES_FOR_TARGET_FILES_ZIP),\ +$(if $(USE_FIXED_TIMESTAMP_IMG_FILES)$(COPY_IMAGES_FOR_TARGET_FILES_ZIP),\ $(hide) echo "use_fixed_timestamp=true" >> $(1)) $(if $(3),$(hide) $(foreach kv,$(3),echo "$(kv)" >> $(1);)) $(hide) sort -o $(1) $(1) @@ -2625,7 +2643,7 @@ ifndef TARGET_PRIVATE_RES_DIRS TARGET_PRIVATE_RES_DIRS := $(wildcard $(TARGET_DEVICE_DIR)/recovery/res) endif recovery_resource_deps := $(shell find $(recovery_resources_common) \ - $(TARGET_PRIVATE_RES_DIRS) -type f) + $(TARGET_PRIVATE_RES_DIRS) -type f -not -name "*.bp") recovery_resource_deps += $(generated_recovery_text_files) @@ -2898,6 +2916,9 @@ ifneq ($(BOARD_NAND_SPARE_SIZE),) $(error MTD device is no longer supported and thus BOARD_NAND_SPARE_SIZE is deprecated.) 
endif +recovery_intermediates := $(call intermediates-dir-for,PACKAGING,recovery) +$(eval $(call write-partition-file-list,$(recovery_intermediates)/file_list.txt,$(TARGET_RECOVERY_OUT),$(INTERNAL_RECOVERYIMAGE_FILES))) + # ----------------------------------------------------------------- # Build debug ramdisk and debug boot image. @@ -3417,8 +3438,10 @@ endif # PRODUCT_FSVERITY_GENERATE_METADATA # system image INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES)) +ifdef BUILDING_SYSTEM_IMAGE INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \ $(ALL_DEFAULT_INSTALLED_MODULES))) +endif # Create symlink /system/vendor to /vendor if necessary. ifdef BOARD_USES_VENDORIMAGE @@ -3468,31 +3491,6 @@ endif FULL_SYSTEMIMAGE_DEPS += $(INTERNAL_ROOT_FILES) $(INSTALLED_FILES_FILE_ROOT) -# Returns a list of EXTRA_INSTALL_ZIPS trios whose primary file is contained within $(1) -# The trios will contain the primary installed file : the directory to unzip the zip to : the zip -define relevant-extra-install-zips -$(strip $(foreach p,$(EXTRA_INSTALL_ZIPS), \ - $(if $(filter $(call word-colon,1,$(p)),$(1)), \ - $(p)))) -endef - -# Writes a text file that contains all of the files that will be inside a partition. -# All the file paths will be relative to the partition's staging directory. -# It will also take into account files inside zips listed in EXTRA_INSTALL_ZIPS. -# -# Arguments: -# $(1): Output file -# $(2): The partition's staging directory -# $(3): Files to include in the partition -define write-partition-file-list -$(1): $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(foreach p,$(call relevant-extra-install-zips,$(filter $(2)/%,$(3))),$(call word-colon,3,$(p))) - @echo Writing $$@ - rm -f $$@ - echo -n > $$@ - $$(foreach f,$(subst $(2)/,,$(filter $(2)/%,$(3))),echo "$$(f)" >> $$@$$(newline)) - $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(2) $(call relevant-extra-install-zips,$(filter $(2)/%,$(3))) >> $$@ -endef - # ----------------------------------------------------------------- ifdef BUILDING_SYSTEM_IMAGE @@ -3500,16 +3498,20 @@ ifdef BUILDING_SYSTEM_IMAGE # Collect all available stub libraries installed in system and install with predefined linker configuration # Also append LLNDK libraries in the APEX as required libs SYSTEM_LINKER_CONFIG := $(TARGET_OUT)/etc/linker.config.pb -SYSTEM_LINKER_CONFIG_SOURCE := $(call intermediates-dir-for,ETC,system_linker_config)/system_linker_config +SYSTEM_LINKER_CONFIG_SOURCE := system/core/rootdir/etc/linker.config.json $(SYSTEM_LINKER_CONFIG): PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE := $(SYSTEM_LINKER_CONFIG_SOURCE) $(SYSTEM_LINKER_CONFIG): $(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE) | conv_linker_config @echo Creating linker config: $@ @mkdir -p $(dir $@) - @rm -f $@ - $(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $(PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE) \ + @rm -f $@ $@.step1 + $(HOST_OUT_EXECUTABLES)/conv_linker_config proto --force -s $(PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE) -o $@.step1 + $(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $@.step1 \ --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)" $(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key requireLibs \ --value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)" + $(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key provideLibs \ + --value "$(foreach lib,$(PRODUCT_EXTRA_STUB_LIBRARIES), $(lib).so)" + 
rm -f $@.step1 $(call declare-1p-target,$(SYSTEM_LINKER_CONFIG),) $(call declare-license-deps,$(SYSTEM_LINKER_CONFIG),$(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE)) @@ -3562,14 +3564,24 @@ ifneq ($(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE),) file_list_diff := $(HOST_OUT_EXECUTABLES)/file_list_diff$(HOST_EXECUTABLE_SUFFIX) system_file_diff_timestamp := $(systemimage_intermediates)/file_diff.timestamp +# The build configuration to build the REL version may have more files to allow. +# Use allowlist_next in addition to the allowlist in this case. +system_file_diff_allowlist_next := +ifeq (REL,$(PLATFORM_VERSION_CODENAME)) +system_file_diff_allowlist_next := $(ALL_MODULES.system_image_diff_allowlist_next.INSTALLED) +$(system_file_diff_timestamp): PRIVATE_ALLOWLIST_NEXT := $(system_file_diff_allowlist_next) +endif $(system_file_diff_timestamp): \ $(systemimage_intermediates)/file_list.txt \ $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \ $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \ + $(system_file_diff_allowlist_next) \ $(file_list_diff) $(file_list_diff) $(systemimage_intermediates)/file_list.txt \ $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \ - $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) + $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) \ + --allowlists $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \ + $(PRIVATE_ALLOWLIST_NEXT) touch $@ $(BUILT_SYSTEMIMAGE): $(system_file_diff_timestamp) @@ -3587,10 +3599,10 @@ ifeq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true) ifeq ($(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE),) $(error PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE must be set if USE_SOONG_DEFINED_SYSTEM_IMAGE is true) endif -soong_defined_system_image := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) -$(BUILT_SYSTEMIMAGE): $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(soong_defined_system_image) -$(eval $(call copy-one-file, $(soong_defined_system_image), $(BUILT_SYSTEMIMAGE))) -soong_defined_system_image := +SOONG_DEFINED_SYSTEM_IMAGE_PATH := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) +SOONG_DEFINED_SYSTEM_IMAGE_BASE := $(dir $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST)) +$(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(SOONG_DEFINED_SYSTEM_IMAGE_PATH) +$(eval $(call copy-one-file, $(SOONG_DEFINED_SYSTEM_IMAGE_PATH), $(BUILT_SYSTEMIMAGE))) else $(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(call build-systemimage-target,$@) @@ -3675,10 +3687,10 @@ platform-java: # ----------------------------------------------------------------- # data partition image INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT_DATA)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES)) +ifdef BUILDING_USERDATA_IMAGE INTERNAL_USERDATAIMAGE_FILES := \ $(filter $(TARGET_OUT_DATA)/%,$(ALL_DEFAULT_INSTALLED_MODULES)) -ifdef BUILDING_USERDATA_IMAGE userdataimage_intermediates := \ $(call intermediates-dir-for,PACKAGING,userdata) BUILT_USERDATAIMAGE_TARGET := $(PRODUCT_OUT)/userdata.img @@ -3995,6 +4007,21 @@ INTERNAL_PRODUCTIMAGE_FILES := \ $(filter $(TARGET_OUT_PRODUCT)/%,\ $(ALL_DEFAULT_INSTALLED_MODULES)) +# Install product/etc/linker.config.pb with PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS +product_linker_config_file := 
$(TARGET_OUT_PRODUCT)/etc/linker.config.pb +$(product_linker_config_file): private_linker_config_fragments := $(PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS) +$(product_linker_config_file): $(INTERNAL_PRODUCTIMAGE_FILES) | $(HOST_OUT_EXECUTABLES)/conv_linker_config + @echo Creating linker config: $@ + @mkdir -p $(dir $@) + @rm -f $@ + $(HOST_OUT_EXECUTABLES)/conv_linker_config proto \ + --source $(call normalize-path-list,$(private_linker_config_fragments)) \ + --output $@ +$(call define declare-1p-target,$(product_linker_config_file),) +INTERNAL_PRODUCTIMAGE_FILES += $(product_linker_config_file) +ALL_DEFAULT_INSTALLED_MODULES += $(product_linker_config_file) + + INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt INSTALLED_FILES_JSON_PRODUCT := $(INSTALLED_FILES_FILE_PRODUCT:.txt=.json) $(INSTALLED_FILES_FILE_PRODUCT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT) @@ -4436,6 +4463,25 @@ INTERNAL_PVMFWIMAGE_FILES := $(call module-target-built-files,pvmfw_img) INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,pvmfw_embedded_key_pub_bin) INTERNAL_PVMFW_SYMBOL := $(TARGET_OUT_EXECUTABLES_UNSTRIPPED)/pvmfw +# If pvmfw target is not available and there is a prebuilt available use prebuilt +# NOTE: This is only a temporary feature for x86_64 and is not meant to be supported for long. +# TODO(b/391333413): Don't allow use of pvmfw prebuilts as soon as it is possible +ifeq ($(INTERNAL_PVMFWIMAGE_FILES),) +ifneq ($(PRODUCT_PVMFW_IMAGE_PREBUILT),) +INTERNAL_PVMFWIMAGE_FILES := $(call module-target-built-files,$(PRODUCT_PVMFW_IMAGE_PREBUILT)) +INTERNAL_PVMFW_SYMBOL := + +ifneq ($(PRODUCT_PVMFW_BIN_PREBUILT),) +INSTALLED_PVMFW_BINARY_TARGET := $(call module-target-built-files,$(PRODUCT_PVMFW_BIN_PREBUILT)) +endif # PRODUCT_PVMFW_BIN_PREBUILT + +ifneq ($(PRODUCT_PVMFW_EMBEDDED_AVBKEY_PREBUILT),) +INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,$(PRODUCT_PVMFW_EMBEDDED_AVBKEY_PREBUILT)) +endif # PRODUCT_PVMFW_EMBEDDED_AVBKEY_PREBUILT + +endif # PRODUCT_PVMFW_IMAGE_PREBUILT +endif # INTERNAL_PVMFWIMAGE_FILES + $(call declare-1p-container,$(INSTALLED_PVMFWIMAGE_TARGET),) $(call declare-container-license-deps,$(INSTALLED_PVMFWIMAGE_TARGET),$(INTERNAL_PVMFWIMAGE_FILES),$(PRODUCT_OUT)/:/) @@ -5006,6 +5052,10 @@ define build-chained-vbmeta-image $(foreach image,$(BOARD_AVB_$(call to-upper,$(1))), \ --include_descriptors_from_image $(call images-for-partitions,$(image))) \ --output $@ + # libavb expects to be able to read the maximum vbmeta size, so we must provide a partition + # which matches this or the read will fail. + # See external/avb/libavb/avb_slot_verify.c#VBMETA_MAX_SIZE + truncate -s 65536 $@ endef ifdef BUILDING_SYSTEM_IMAGE @@ -5064,6 +5114,10 @@ define build-vbmetaimage-target $(PRIVATE_AVB_VBMETA_SIGNING_ARGS) \ $(BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS) \ --output $@ + # libavb expects to be able to read the maximum vbmeta size, so we must provide a partition + # which matches this or the read will fail. 
+ # See external/avb/libavb/avb_slot_verify.c#VBMETA_MAX_SIZE + truncate -s 65536 $@ $(hide) rm -rf $(AVB_CHAIN_KEY_DIR) endef @@ -5131,11 +5185,13 @@ INTERNAL_ALLIMAGES_FILES := \ # Run apex_sepolicy_tests for all installed APEXes ifeq (,$(TARGET_BUILD_UNBUNDLED)) +ifneq (,$(filter ext4 erofs,$(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE))) intermediate := $(call intermediates-dir-for,PACKAGING,apex_sepolicy_tests) apex_dirs := \ $(TARGET_OUT)/apex/% \ $(TARGET_OUT_SYSTEM_EXT)/apex/% \ $(TARGET_OUT_VENDOR)/apex/% \ + $(TARGET_OUT_ODM)/apex/% \ $(TARGET_OUT_PRODUCT)/apex/% \ apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES))) @@ -5146,11 +5202,10 @@ apex_dirs := define _run_apex_sepolicy_tests $2: $1 \ $(HOST_OUT_EXECUTABLES)/apex_sepolicy_tests \ - $(HOST_OUT_EXECUTABLES)/deapexer \ - $(HOST_OUT_EXECUTABLES)/debugfs_static + $(HOST_OUT_EXECUTABLES)/apex-ls @rm -rf $$@ @mkdir -p $(dir $$@) - $(HOST_OUT_EXECUTABLES)/apex_sepolicy_tests --all -f <($(HOST_OUT_EXECUTABLES)/deapexer --debugfs_path $(HOST_OUT_EXECUTABLES)/debugfs_static list -Z $$<) + $(HOST_OUT_EXECUTABLES)/apex_sepolicy_tests --all -f <($(HOST_OUT_EXECUTABLES)/apex-ls -Z $$<) @touch $$@ endef @@ -5169,6 +5224,7 @@ droid_targets: run_apex_sepolicy_tests apex_files := intermediate := +endif # PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE endif # TARGET_BUILD_UNBUNDLED # ----------------------------------------------------------------- @@ -5188,6 +5244,7 @@ apex_dirs := \ $(TARGET_OUT_PRODUCT)/apex/% \ $(TARGET_OUT_SYSTEM_EXT)/apex/% \ $(TARGET_OUT_VENDOR)/apex/% \ + $(TARGET_OUT_ODM)/apex/% \ apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES))) @@ -5206,6 +5263,7 @@ $(APEX_INFO_FILE): $(HOST_OUT_EXECUTABLES)/apexd_host $(apex_files) --system_ext_path $(TARGET_OUT_SYSTEM_EXT) \ --product_path $(TARGET_OUT_PRODUCT) \ --vendor_path $(TARGET_OUT_VENDOR) \ + --odm_path $(TARGET_OUT_ODM) \ --apex_path $(APEX_OUT) apex_files := @@ -5262,7 +5320,7 @@ $(vintffm_log): $(HOST_OUT_EXECUTABLES)/vintffm $(check_vintf_system_deps) $(APE --dirmap /system_ext:$(TARGET_OUT_SYSTEM_EXT) \ --dirmap /product:$(TARGET_OUT_PRODUCT) \ --dirmap /apex:$(APEX_OUT) \ - $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 ) + system/libhidl/vintfdata/frozen > $@ 2>&1 ) || ( cat $@ && exit 1 ) $(call declare-1p-target,$(vintffm_log)) @@ -5401,7 +5459,8 @@ ifeq (default,$(ENABLE_UFFD_GC)) ifneq (,$(BUILT_KERNEL_VERSION_FILE)) $(BUILT_KERNEL_VERSION_FILE_FOR_UFFD_GC): $(BUILT_KERNEL_VERSION_FILE) $(BUILT_KERNEL_VERSION_FILE_FOR_UFFD_GC): - cp $(BUILT_KERNEL_VERSION_FILE) $(BUILT_KERNEL_VERSION_FILE_FOR_UFFD_GC) + if ! cmp -s $(BUILT_KERNEL_VERSION_FILE) $@ ; then cp $(BUILT_KERNEL_VERSION_FILE) $@; fi +.KATI_RESTAT: $(BUILT_KERNEL_VERSION_FILE_FOR_UFFD_GC) else # We make this a warning rather than an error to avoid breaking too many builds. When it happens, # we use a placeholder as the kernel version, which is consumed by uffd_gc_utils.py. 
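# A minimal sketch of the write-if-changed idiom used in the change above, with
# hypothetical my_src/my_copy paths: the recipe rewrites its output only when the
# content has really changed, and .KATI_RESTAT lets ninja re-check the timestamp
# afterwards, so an unchanged file does not retrigger everything that depends on it.
#
#   $(my_copy): $(my_src)
#           if ! cmp -s $(my_src) $@; then cp $(my_src) $@; fi
#   .KATI_RESTAT: $(my_copy)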
@@ -5610,7 +5669,9 @@ else endif endif # INSTALLED_BOOTIMAGE_TARGET == "" ifeq ($(recovery_fstab),) - build_ota_package := false + ifeq ($(filter $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.fstab,$(INTERNAL_RECOVERYIMAGE_FILES)),) + build_ota_package := false + endif endif endif # PRODUCT_BUILD_GENERIC_OTA_PACKAGE @@ -5635,6 +5696,7 @@ INTERNAL_OTATOOLS_MODULES := \ brotli \ bsdiff \ build_image \ + build_mixed_kernels_ramdisk_host \ build_super_image \ build_verity_metadata \ build_verity_tree \ @@ -6130,6 +6192,9 @@ $(BUILT_TARGET_FILES_PACKAGE): zip_root := $(intermediates)/$(name) $(BUILT_TARGET_FILES_DIR): zip_root := $(intermediates)/$(name) $(BUILT_TARGET_FILES_DIR): intermediates := $(intermediates) +ifneq ($(SOONG_DEFINED_SYSTEM_IMAGE_PATH),) + $(BUILT_TARGET_FILES_DIR): $(SOONG_DEFINED_SYSTEM_IMAGE_PATH) +endif # $(1): Directory to copy # $(2): Location to copy it to @@ -6143,11 +6208,14 @@ endef built_ota_tools := + # We can't build static executables when SANITIZE_TARGET=address ifeq (,$(filter address, $(SANITIZE_TARGET))) +ifeq (false,$(AB_OTA_UPDATER)) built_ota_tools += \ $(call intermediates-dir-for,EXECUTABLES,updater)/updater endif +endif $(BUILT_TARGET_FILES_DIR): PRIVATE_OTA_TOOLS := $(built_ota_tools) @@ -6344,6 +6412,10 @@ ifdef BUILDING_VENDOR_BOOT_IMAGE endif endif +ifdef BUILDING_VENDOR_KERNEL_BOOT_IMAGE + $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_KERNEL_RAMDISK_FILES) +endif + ifdef BUILDING_RECOVERY_IMAGE # TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like other # BUILT_TARGET_FILES_PACKAGE dependencies because currently there're cp/rsync/rm @@ -6458,8 +6530,11 @@ $(BUILT_TARGET_FILES_DIR): \ $(INSTALLED_RAMDISK_TARGET) \ $(INSTALLED_DTBIMAGE_TARGET) \ $(INSTALLED_2NDBOOTLOADER_TARGET) \ + $(INSTALLED_VENDOR_KERNEL_RAMDISK_TARGET) \ $(BUILT_RAMDISK_16K_TARGET) \ $(BUILT_KERNEL_16K_TARGET) \ + $(BUILT_BOOTIMAGE_16K_TARGET) \ + $(INSTALLED_DTBOIMAGE_16KB_TARGET) \ $(BOARD_PREBUILT_DTBOIMAGE) \ $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) \ $(BOARD_RECOVERY_ACPIO) \ @@ -6613,8 +6688,13 @@ endif endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET ifdef BUILDING_SYSTEM_IMAGE @# Contents of the system image +ifneq ($(SOONG_DEFINED_SYSTEM_IMAGE_PATH),) + $(hide) $(call package_files-copy-root, \ + $(SOONG_DEFINED_SYSTEM_IMAGE_BASE)/system/system,$(zip_root)/SYSTEM) +else $(hide) $(call package_files-copy-root, \ $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM) +endif else ifdef INSTALLED_BUILD_PROP_TARGET @# Copy the system build.prop even if not building a system image @# because add_img_to_target_files may need it to build other partition @@ -6808,14 +6888,22 @@ ifdef BOARD_PREBUILT_DTBOIMAGE $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES $(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/ endif # BOARD_PREBUILT_DTBOIMAGE -ifdef BUILT_KERNEL_16K_TARGET +ifdef BOARD_KERNEL_PATH_16K $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES $(hide) cp $(BUILT_KERNEL_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/ -endif # BUILT_KERNEL_16K_TARGET -ifdef BUILT_RAMDISK_16K_TARGET +endif # BOARD_KERNEL_PATH_16K +ifdef BOARD_KERNEL_MODULES_16K $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES $(hide) cp $(BUILT_RAMDISK_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/ -endif # BUILT_RAMDISK_16K_TARGET +endif # BOARD_KERNEL_MODULES_16K +ifdef BUILT_BOOTIMAGE_16K_TARGET + $(hide) mkdir -p $(zip_root)/IMAGES + $(hide) cp $(BUILT_BOOTIMAGE_16K_TARGET) $(zip_root)/IMAGES/ +endif # BUILT_BOOTIMAGE_16K_TARGET +ifdef INSTALLED_DTBOIMAGE_16KB_TARGET + $(hide) mkdir -p $(zip_root)/IMAGES + 
$(hide) cp $(INSTALLED_DTBOIMAGE_16KB_TARGET) $(zip_root)/IMAGES/ +endif # INSTALLED_DTBOIMAGE_16KB_TARGET ifeq ($(BOARD_USES_PVMFWIMAGE),true) $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES $(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/ @@ -6883,6 +6971,33 @@ ifdef BOARD_KERNEL_PAGESIZE $(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/INIT_BOOT/pagesize endif # BOARD_KERNEL_PAGESIZE endif # BUILDING_INIT_BOOT_IMAGE +ifdef BOARD_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_EROFS_COMPRESS_HINTS) $(zip_root)/META/erofs_default_compress_hints.txt +endif +ifdef BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_erofs_compress_hints.txt +endif +ifdef BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_ext_erofs_compress_hints.txt +endif +ifdef BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/product_erofs_compress_hints.txt +endif +ifdef BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_erofs_compress_hints.txt +endif +ifdef BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_erofs_compress_hints.txt +endif +ifdef BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_dlkm_erofs_compress_hints.txt +endif +ifdef BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_dlkm_erofs_compress_hints.txt +endif +ifdef BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_dlkm_erofs_compress_hints.txt +endif ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),) $(call fs_config,$(zip_root)/VENDOR_BOOT/RAMDISK,) > $(zip_root)/META/vendor_boot_filesystem_config.txt endif @@ -7859,7 +7974,7 @@ haiku-presubmit: $(SOONG_PRESUBMIT_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_PRESUBMIT_ $(call dist-for-goals,haiku-presubmit,$(SOONG_PRESUBMIT_FUZZ_PACKAGING_ARCH_MODULES)) # ----------------------------------------------------------------- -# Extract platform fonts used in Layoutlib +# Extract additional data files used in Layoutlib include $(BUILD_SYSTEM)/layoutlib_data.mk # ----------------------------------------------------------------- @@ -7885,30 +8000,84 @@ PACKED_IMAGE_ARCHIVE_TARGET := $(PACK_IMAGE_TARGET).gz $(PACKED_IMAGE_ARCHIVE_TARGET): $(PACK_IMAGE_TARGET) | $(GZIP) $(GZIP) -fk $(PACK_IMAGE_TARGET) -droidcore-unbundled: $(PACKED_IMAGE_ARCHIVE_TARGET) - $(call dist-for-goals,dist_files,$(PACKED_IMAGE_ARCHIVE_TARGET)) +.PHONY: pack-image +pack-image: $(PACK_IMAGE_TARGET) + endif # PACK_DESKTOP_FILESYSTEM_IMAGES # ----------------------------------------------------------------- # Desktop pack recovery image hook. 
-ifneq (,$(strip $(PACK_DESKTOP_RECOVERY_IMAGE))) +ifeq ($(BOARD_USES_DESKTOP_RECOVERY_IMAGE),true) PACK_RECOVERY_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_recovery_image.bin +PACK_RECOVERY_IMAGE_ARGS := --noarchive --recovery + +ifneq (,$(strip $(PACK_RECOVERY_IMAGE_EXPERIMENTAL))) +PACK_RECOVERY_IMAGE_ARGS += --experimental +endif # PACK_RECOVERY_IMAGE_EXPERIMENTAL $(PACK_RECOVERY_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT) - $(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive --recovery + $(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) $(PACK_RECOVERY_IMAGE_ARGS) PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET := $(PACK_RECOVERY_IMAGE_TARGET).gz $(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET): $(PACK_RECOVERY_IMAGE_TARGET) | $(GZIP) $(GZIP) -fk $(PACK_RECOVERY_IMAGE_TARGET) -droidcore-unbundled: $(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET) - $(call dist-for-goals,dist_files,$(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET)) -endif # PACK_DESKTOP_RECOVERY_IMAGE +.PHONY: pack-recovery-image +pack-recovery-image: $(PACK_RECOVERY_IMAGE_TARGET) + +endif # BOARD_USES_DESKTOP_RECOVERY_IMAGE + +# ----------------------------------------------------------------- +# Desktop pack update image hook. +ifeq ($(BOARD_USES_DESKTOP_UPDATE_IMAGE),true) +PACK_UPDATE_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_update_image.bin +PACK_UPDATE_IMAGE_ARGS := --noarchive --update + +ifneq (,$(strip $(PACK_UPDATE_IMAGE_EXPERIMENTAL))) +PACK_UPDATE_IMAGE_ARGS += --experimental +endif # PACK_UPDATE_IMAGE_EXPERIMENTAL + +$(PACK_UPDATE_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT) + $(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) $(PACK_UPDATE_IMAGE_ARGS) + +PACKED_UPDATE_IMAGE_ARCHIVE_TARGET := $(PACK_UPDATE_IMAGE_TARGET).gz + +$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET): $(PACK_UPDATE_IMAGE_TARGET) | $(GZIP) + $(GZIP) -fk $(PACK_UPDATE_IMAGE_TARGET) + +$(call dist-for-goals,dist_files,$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET)) + +.PHONY: pack-update-image +pack-update-image: $(PACK_UPDATE_IMAGE_TARGET) + +endif # BOARD_USES_DESKTOP_UPDATE_IMAGE + +PACK_MIGRATION_IMAGE_SCRIPT := $(HOST_OUT_EXECUTABLES)/pack_migration_image + +# ----------------------------------------------------------------- +# Desktop pack migration image hook. 
+ifeq ($(ANDROID_DESKTOP_MIGRATION_IMAGE),true) +PACK_MIGRATION_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_migration_image.bin + +$(PACK_MIGRATION_IMAGE_TARGET): $(IMAGES) $(PACK_MIGRATION_IMAGE_SCRIPT) + $(PACK_MIGRATION_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive + +PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET := $(PACK_MIGRATION_IMAGE_TARGET).gz + +$(PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET): $(PACK_MIGRATION_IMAGE_TARGET) | $(GZIP) + $(GZIP) -fk $(PACK_MIGRATION_IMAGE_TARGET) + +$(call dist-for-goals,dist_files,$(PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET)) + +.PHONY: pack-migration-image +pack-migration-image: $(PACK_MIGRATION_IMAGE_TARGET) + +endif # ANDROID_DESKTOP_MIGRATION_IMAGE # ----------------------------------------------------------------- # OS Licensing diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk index 915f55f349..a205ab50b8 100644 --- a/core/android_soong_config_vars.mk +++ b/core/android_soong_config_vars.mk @@ -30,12 +30,35 @@ $(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE) $(call soong_config_set_bool,ANDROID,BOARD_USES_RECOVERY_AS_BOOT,$(BOARD_USES_RECOVERY_AS_BOOT)) $(call soong_config_set_bool,ANDROID,BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT,$(BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT)) $(call add_soong_config_var,ANDROID,CHECK_DEV_TYPE_VIOLATIONS) +$(call soong_config_set_bool,ANDROID,HAS_BOARD_SYSTEM_EXT_PREBUILT_DIR,$(if $(BOARD_SYSTEM_EXT_PREBUILT_DIR),true,false)) +$(call soong_config_set_bool,ANDROID,HAS_BOARD_PRODUCT_PREBUILT_DIR,$(if $(BOARD_PRODUCT_PREBUILT_DIR),true,false)) $(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_VERSION) $(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_COMPAT_VERSIONS) $(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT) +$(call soong_config_set_bool,ANDROID,RELEASE_BOARD_API_LEVEL_FROZEN,$(RELEASE_BOARD_API_LEVEL_FROZEN)) $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_DRMSERVER) $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64) $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER) +$(call soong_config_set_bool,ANDROID,TARGET_SUPPORTS_32_BIT_APPS,$(if $(filter true,$(TARGET_SUPPORTS_32_BIT_APPS)),true,false)) +$(call soong_config_set_bool,ANDROID,TARGET_SUPPORTS_64_BIT_APPS,$(if $(filter true,$(TARGET_SUPPORTS_64_BIT_APPS)),true,false)) +$(call add_soong_config_var,ANDROID,BOARD_GENFS_LABELS_VERSION) +$(call soong_config_set_bool,ANDROID,PRODUCT_FSVERITY_GENERATE_METADATA,$(if $(filter true,$(PRODUCT_FSVERITY_GENERATE_METADATA)),true,false)) + +$(call add_soong_config_var,ANDROID,ADDITIONAL_M4DEFS,$(if $(BOARD_SEPOLICY_M4DEFS),$(addprefix -D,$(BOARD_SEPOLICY_M4DEFS)))) + +# For BUILDING_GSI +$(call soong_config_set_bool,gsi,building_gsi,$(if $(filter true,$(BUILDING_GSI)),true,false)) + +# For bootable/recovery +RECOVERY_API_VERSION := 3 +RECOVERY_FSTAB_VERSION := 2 +$(call soong_config_set, recovery, recovery_api_version, $(RECOVERY_API_VERSION)) +$(call soong_config_set, recovery, recovery_fstab_version, $(RECOVERY_FSTAB_VERSION)) +$(call soong_config_set_bool, recovery ,target_userimages_use_f2fs ,$(if $(TARGET_USERIMAGES_USE_F2FS),true,false)) +$(call soong_config_set_bool, recovery ,has_board_cacheimage_partition_size ,$(if $(BOARD_CACHEIMAGE_PARTITION_SIZE),true,false)) +ifdef TARGET_RECOVERY_UI_LIB + $(call soong_config_set_string_list, recovery, target_recovery_ui_lib, $(TARGET_RECOVERY_UI_LIB)) +endif # For Sanitizers $(call soong_config_set_bool,ANDROID,ASAN_ENABLED,$(if $(filter 
address,$(SANITIZE_TARGET)),true,false)) @@ -47,6 +70,8 @@ $(call soong_config_set_bool,ANDROID,GCOV_COVERAGE,$(NATIVE_COVERAGE)) $(call soong_config_set_bool,ANDROID,CLANG_COVERAGE,$(CLANG_COVERAGE)) $(call soong_config_set,ANDROID,SCUDO_ALLOCATION_RING_BUFFER_SIZE,$(PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE)) +$(call soong_config_set_bool,ANDROID,EMMA_INSTRUMENT,$(if $(filter true,$(EMMA_INSTRUMENT)),true,false)) + # PRODUCT_PRECOMPILED_SEPOLICY defaults to true. Explicitly check if it's "false" or not. $(call soong_config_set_bool,ANDROID,PRODUCT_PRECOMPILED_SEPOLICY,$(if $(filter false,$(PRODUCT_PRECOMPILED_SEPOLICY)),false,true)) @@ -58,6 +83,13 @@ endif # The default value of ART_BUILD_HOST_DEBUG is true $(call soong_config_set_bool,art_module,art_build_host_debug,$(if $(filter false,$(ART_BUILD_HOST_DEBUG)),false,true)) +# For chre +$(call soong_config_set_bool,chre,chre_daemon_lpma_enabled,$(if $(filter true,$(CHRE_DAEMON_LPMA_ENABLED)),true,false)) +$(call soong_config_set_bool,chre,chre_dedicated_transport_channel_enabled,$(if $(filter true,$(CHRE_DEDICATED_TRANSPORT_CHANNEL_ENABLED)),true,false)) +$(call soong_config_set_bool,chre,chre_log_atom_extension_enabled,$(if $(filter true,$(CHRE_LOG_ATOM_EXTENSION_ENABLED)),true,false)) +$(call soong_config_set_bool,chre,building_vendor_image,$(if $(filter true,$(BUILDING_VENDOR_IMAGE)),true,false)) +$(call soong_config_set_bool,chre,chre_usf_daemon_enabled,$(if $(filter true,$(CHRE_USF_DAEMON_ENABLED)),true,false)) + ifdef TARGET_BOARD_AUTO $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO)) endif @@ -71,15 +103,13 @@ $(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static endif endif -# TODO(b/308187800): some internal modules set `prefer` to true on the prebuilt apex module, -# and set that to false when `ANDROID.module_build_from_source` is true. -# Set this soong config variable to true for now, and cleanup `prefer` as part of b/308187800 -$(call add_soong_config_var_value,ANDROID,module_build_from_source,true) - # Enable SystemUI optimizations by default unless explicitly set. SYSTEMUI_OPTIMIZE_JAVA ?= true $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA) +# Flag for enabling compose for Launcher. 
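# (Like the other calls in this file, soong_config_set below records the value as
# SOONG_CONFIG_ANDROID_release_enable_compose_in_launcher, which Soong exposes to
# soong_config_module_type users in the ANDROID namespace.)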
+$(call soong_config_set,ANDROID,release_enable_compose_in_launcher,$(RELEASE_ENABLE_COMPOSE_IN_LAUNCHER)) + ifdef PRODUCT_AVF_ENABLED $(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED)) endif @@ -94,12 +124,14 @@ ifdef PRODUCT_AVF_MICRODROID_GUEST_GKI_VERSION $(call add_soong_config_var_value,ANDROID,avf_microdroid_guest_gki_version,$(PRODUCT_AVF_MICRODROID_GUEST_GKI_VERSION)) endif -ifdef PRODUCT_MEMCG_V2_FORCE_ENABLED -$(call add_soong_config_var_value,ANDROID,memcg_v2_force_enabled,$(PRODUCT_MEMCG_V2_FORCE_ENABLED)) +ifdef TARGET_BOOTS_16K +$(call soong_config_set_bool,ANDROID,target_boots_16k,$(filter true,$(TARGET_BOOTS_16K))) endif ifdef PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED $(call add_soong_config_var_value,ANDROID,cgroup_v2_sys_app_isolation,$(PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED)) +else +$(call add_soong_config_var_value,ANDROID,cgroup_v2_sys_app_isolation,true) endif $(call add_soong_config_var_value,ANDROID,release_avf_allow_preinstalled_apps,$(RELEASE_AVF_ALLOW_PREINSTALLED_APPS)) @@ -162,6 +194,14 @@ else $(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,true) endif +# Add nfc build flag to soong +ifneq ($(RELEASE_PACKAGE_NFC_STACK),NfcNci) + $(call soong_config_set,bootclasspath,nfc_apex_bootclasspath_fragment,true) +endif + +# Add uwb build flag to soong +$(call soong_config_set,bootclasspath,release_ranging_stack,$(RELEASE_RANGING_STACK)) + # Add crashrecovery build flag to soong $(call soong_config_set,ANDROID,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE)) # Add crashrecovery file move flags to soong, for both platform and module @@ -175,10 +215,24 @@ endif # Required as platform_bootclasspath is using this namespace $(call soong_config_set,bootclasspath,release_crashrecovery_module,$(RELEASE_CRASHRECOVERY_MODULE)) +# Add uprobestats build flag to soong +$(call soong_config_set,ANDROID,release_uprobestats_module,$(RELEASE_UPROBESTATS_MODULE)) +# Add uprobestats file move flags to soong, for both platform and module +ifeq (true,$(RELEASE_UPROBESTATS_FILE_MOVE)) + $(call soong_config_set,ANDROID,uprobestats_files_in_module,true) + $(call soong_config_set,ANDROID,uprobestats_files_in_platform,false) +else + $(call soong_config_set,ANDROID,uprobestats_files_in_module,false) + $(call soong_config_set,ANDROID,uprobestats_files_in_platform,true) +endif + # Enable Profiling module. Also used by platform_bootclasspath. $(call soong_config_set,ANDROID,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE)) $(call soong_config_set,bootclasspath,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE)) +# Move VCN from platform to the Tethering module; used by both platform and module +$(call soong_config_set,ANDROID,is_vcn_in_mainline,$(RELEASE_MOVE_VCN_TO_MAINLINE)) + # Add perf-setup build flag to soong # Note: BOARD_PERFSETUP_SCRIPT location must be under platform_testing/scripts/perf-setup/. 
ifdef BOARD_PERFSETUP_SCRIPT @@ -187,3 +241,86 @@ endif # Add target_use_pan_display flag for hardware/libhardware:gralloc.default $(call soong_config_set_bool,gralloc,target_use_pan_display,$(if $(filter true,$(TARGET_USE_PAN_DISPLAY)),true,false)) + +# Add use_camera_v4l2_hal flag for hardware/libhardware/modules/camera/3_4:camera.v4l2 +$(call soong_config_set_bool,camera,use_camera_v4l2_hal,$(if $(filter true,$(USE_CAMERA_V4L2_HAL)),true,false)) + +# Add audioserver_multilib flag for hardware/interfaces/soundtrigger/2.0/default:android.hardware.soundtrigger@2.0-impl +ifneq ($(strip $(AUDIOSERVER_MULTILIB)),) + $(call soong_config_set,soundtrigger,audioserver_multilib,$(AUDIOSERVER_MULTILIB)) +endif + +# Add sim_count, disable_rild_oem_hook, and use_aosp_rild flag for ril related modules +$(call soong_config_set,ril,sim_count,$(SIM_COUNT)) +ifneq ($(DISABLE_RILD_OEM_HOOK), false) + $(call soong_config_set_bool,ril,disable_rild_oem_hook,true) +endif +ifneq ($(ENABLE_VENDOR_RIL_SERVICE), true) + $(call soong_config_set_bool,ril,use_aosp_rild,true) +endif + +# Export target_board_platform to soong for hardware/google/graphics/common/libmemtrack:memtrack.$(TARGET_BOARD_PLATFORM) +$(call soong_config_set,ANDROID,target_board_platform,$(TARGET_BOARD_PLATFORM)) + +# Export board_uses_scaler_m2m1shot and board_uses_align_restriction to soong for hardware/google/graphics/common/libscaler:libexynosscaler +$(call soong_config_set_bool,google_graphics,board_uses_scaler_m2m1shot,$(if $(filter true,$(BOARD_USES_SCALER_M2M1SHOT)),true,false)) +$(call soong_config_set_bool,google_graphics,board_uses_align_restriction,$(if $(filter true,$(BOARD_USES_ALIGN_RESTRICTION)),true,false)) + +# Export related variables to soong for hardware/google/graphics/common/libacryl:libacryl +ifdef BOARD_LIBACRYL_DEFAULT_COMPOSITOR + $(call soong_config_set,acryl,libacryl_default_compositor,$(BOARD_LIBACRYL_DEFAULT_COMPOSITOR)) +endif +ifdef BOARD_LIBACRYL_DEFAULT_SCALER + $(call soong_config_set,acryl,libacryl_default_scaler,$(BOARD_LIBACRYL_DEFAULT_SCALER)) +endif +ifdef BOARD_LIBACRYL_DEFAULT_BLTER + $(call soong_config_set,acryl,libacryl_default_blter,$(BOARD_LIBACRYL_DEFAULT_BLTER)) +endif +ifdef BOARD_LIBACRYL_G2D_HDR_PLUGIN + #BOARD_LIBACRYL_G2D_HDR_PLUGIN is set in each board config + $(call soong_config_set_bool,acryl,libacryl_use_g2d_hdr_plugin,true) +endif + +# Export related variables to soong for hardware/google/graphics/common/BoardConfigCFlags.mk +$(call soong_config_set_bool,google_graphics,hwc_no_support_skip_validate,$(if $(filter true,$(HWC_NO_SUPPORT_SKIP_VALIDATE)),true,false)) +$(call soong_config_set_bool,google_graphics,hwc_support_color_transform,$(if $(filter true,$(HWC_SUPPORT_COLOR_TRANSFORM)),true,false)) +$(call soong_config_set_bool,google_graphics,hwc_support_render_intent,$(if $(filter true,$(HWC_SUPPORT_RENDER_INTENT)),true,false)) +$(call soong_config_set_bool,google_graphics,board_uses_virtual_display,$(if $(filter true,$(BOARD_USES_VIRTUAL_DISPLAY)),true,false)) +$(call soong_config_set_bool,google_graphics,board_uses_dt,$(if $(filter true,$(BOARD_USES_DT)),true,false)) +$(call soong_config_set_bool,google_graphics,board_uses_decon_64bit_address,$(if $(filter true,$(BOARD_USES_DECON_64BIT_ADDRESS)),true,false)) +$(call soong_config_set_bool,google_graphics,board_uses_hdrui_gles_conversion,$(if $(filter true,$(BOARD_USES_HDRUI_GLES_CONVERSION)),true,false)) +$(call soong_config_set_bool,google_graphics,uses_idisplay_intf_sec,$(if $(filter true,$(USES_IDISPLAY_INTF_SEC)),true,false)) + +# 
Variables for fs_config +$(call soong_config_set_bool,fs_config,vendor,$(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),true,false)) +$(call soong_config_set_bool,fs_config,oem,$(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),true,false)) +$(call soong_config_set_bool,fs_config,odm,$(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),true,false)) +$(call soong_config_set_bool,fs_config,vendor_dlkm,$(if $(BOARD_USES_VENDOR_DLKMIMAGE)$(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE),true,false)) +$(call soong_config_set_bool,fs_config,odm_dlkm,$(if $(BOARD_USES_ODM_DLKMIMAGE)$(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE),true,false)) +$(call soong_config_set_bool,fs_config,system_dlkm,$(if $(BOARD_USES_SYSTEM_DLKMIMAGE)$(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE),true,false)) + +# Variables for telephony +$(call soong_config_set_bool,telephony,sec_cp_secure_boot,$(if $(filter true,$(SEC_CP_SECURE_BOOT)),true,false)) +$(call soong_config_set_bool,telephony,cbd_protocol_sit,$(if $(filter true,$(CBD_PROTOCOL_SIT)),true,false)) +$(call soong_config_set_bool,telephony,use_radioexternal_hal_aidl,$(if $(filter true,$(USE_RADIOEXTERNAL_HAL_AIDL)),true,false)) + +# Variables for hwcomposer.$(TARGET_BOARD_PLATFORM) +$(call soong_config_set_bool,google_graphics,board_uses_hwc_services,$(if $(filter true,$(BOARD_USES_HWC_SERVICES)),true,false)) + +# Variables for controlling android.hardware.composer.hwc3-service.pixel +$(call soong_config_set,google_graphics,board_hwc_version,$(BOARD_HWC_VERSION)) + +# Flag ExcludeExtractApk is to support "extract_apk" property for the following conditions. +ifneq ($(WITH_DEXPREOPT),true) + $(call soong_config_set_bool,PrebuiltGmsCore,ExcludeExtractApk,true) +endif +ifeq ($(DONT_DEXPREOPT_PREBUILTS),true) + $(call soong_config_set_bool,PrebuiltGmsCore,ExcludeExtractApk,true) +endif +ifeq ($(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY),true) + $(call soong_config_set_bool,PrebuiltGmsCore,ExcludeExtractApk,true) +endif + +# Variables for extra branches +# TODO(b/383238397): Use bootstrap_go_package to enable extra flags. +-include vendor/google/build/extra_soong_config_vars.mk diff --git a/core/base_rules.mk b/core/base_rules.mk index 1135003998..5363e0fbf9 100644 --- a/core/base_rules.mk +++ b/core/base_rules.mk @@ -340,7 +340,7 @@ LOCAL_BUILT_MODULE := $(intermediates)/$(my_built_module_stem) ifneq (,$(LOCAL_SOONG_INSTALLED_MODULE)) ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK)) - $(call pretty-error, LOCAL_SOONG_INSTALLED_MODULE can only be used from $(SOONG_ANDROID_MK)) + $(call pretty-error, LOCAL_MODULE_MAKEFILE can only be used from $(SOONG_ANDROID_MK)) endif # Use the install path requested by Soong. 
LOCAL_INSTALLED_MODULE := $(LOCAL_SOONG_INSTALLED_MODULE) diff --git a/core/binary.mk b/core/binary.mk index 1e98bc08fb..ea862be6b4 100644 --- a/core/binary.mk +++ b/core/binary.mk @@ -174,7 +174,7 @@ my_allow_undefined_symbols := true endif endif -my_ndk_sysroot_include := +my_ndk_sysroot := my_ndk_sysroot_lib := my_api_level := 10000 @@ -207,11 +207,9 @@ ifneq ($(LOCAL_SDK_VERSION),) my_built_ndk := $(SOONG_OUT_DIR)/ndk my_ndk_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_TRIPLE) - my_ndk_sysroot_include := \ - $(my_built_ndk)/sysroot/usr/include \ - $(my_built_ndk)/sysroot/usr/include/$(my_ndk_triple) \ + my_ndk_sysroot := $(my_built_ndk)/sysroot - my_ndk_sysroot_lib := $(my_built_ndk)/sysroot/usr/lib/$(my_ndk_triple)/$(my_ndk_api) + my_ndk_sysroot_lib := $(my_ndk_sysroot)/usr/lib/$(my_ndk_triple)/$(my_ndk_api) # The bionic linker now has support for packed relocations and gnu style # hashes (which are much faster!), but shipping to older devices requires @@ -330,18 +328,20 @@ ifneq ($(call module-in-vendor-or-product),) ifneq ($(LOCAL_IN_VENDOR),) # Vendor modules have LOCAL_IN_VENDOR my_cflags += -D__ANDROID_VENDOR__ - - ifeq ($(BOARD_API_LEVEL),) - # TODO(b/314036847): This is a fallback for UDC targets. - # This must be a build failure when UDC is no longer built from this source tree. - my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION) - else - my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL) - endif else ifneq ($(LOCAL_IN_PRODUCT),) # Product modules have LOCAL_IN_PRODUCT my_cflags += -D__ANDROID_PRODUCT__ endif + + # Define __ANDROID_VENDOR_API__ for both product and vendor variants because + # they both use the same LLNDK libraries. + ifeq ($(BOARD_API_LEVEL),) + # TODO(b/314036847): This is a fallback for UDC targets. + # This must be a build failure when UDC is no longer built from this source tree. 
+ my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION) + else + my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL) + endif endif ifndef LOCAL_IS_HOST_MODULE @@ -1626,19 +1626,6 @@ my_ldlibs += $(my_cxx_ldlibs) ########################################################### ifndef LOCAL_IS_HOST_MODULE -ifeq ($(call module-in-vendor-or-product),true) - my_target_global_c_includes := - my_target_global_c_system_includes := $(TARGET_OUT_HEADERS) -else ifdef LOCAL_SDK_VERSION - my_target_global_c_includes := - my_target_global_c_system_includes := $(my_ndk_stl_include_path) $(my_ndk_sysroot_include) -else - my_target_global_c_includes := $(SRC_HEADERS) \ - $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES) - my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \ - $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES) -endif - my_target_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CFLAGS) my_target_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags) my_target_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags) @@ -1654,6 +1641,22 @@ else my_target_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LDFLAGS) endif # my_use_clang_lld +ifeq ($(call module-in-vendor-or-product),true) + my_target_global_c_includes := + my_target_global_c_system_includes := $(TARGET_OUT_HEADERS) + my_target_global_cflags += -nostdlibinc +else ifdef LOCAL_SDK_VERSION + my_target_global_c_includes := + my_target_global_c_system_includes := $(my_ndk_stl_include_path) + my_target_global_cflags += --sysroot $(my_ndk_sysroot) +else + my_target_global_c_includes := $(SRC_HEADERS) \ + $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES) + my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \ + $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES) + my_target_global_cflags += -nostdlibinc +endif + my_target_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)TRIPLE) ifndef LOCAL_IS_HOST_MODULE my_target_triple_flag := -target $(my_target_triple)$(my_api_level) diff --git a/core/board_config.mk b/core/board_config.mk index d3f0493a6c..ad89c0313b 100644 --- a/core/board_config.mk +++ b/core/board_config.mk @@ -27,6 +27,7 @@ _board_strip_readonly_list += BOARD_INSTALLER_CMDLINE _board_strip_readonly_list += BOARD_KERNEL_CMDLINE _board_strip_readonly_list += BOARD_BOOT_HEADER_VERSION _board_strip_readonly_list += BOARD_BOOTCONFIG +_board_strip_readonly_list += BOARD_BOOTCONFIG_FILE _board_strip_readonly_list += BOARD_KERNEL_BASE _board_strip_readonly_list += BOARD_USES_GENERIC_AUDIO _board_strip_readonly_list += BOARD_USES_RECOVERY_AS_BOOT @@ -237,6 +238,7 @@ else .KATI_READONLY := TARGET_DEVICE_DIR endif +$(call dump-phase-start,BOARD,,,, build/make/core/board_config.mk) ifndef RBC_PRODUCT_CONFIG include $(board_config_mk) else @@ -261,6 +263,7 @@ else include $(OUT_DIR)/rbc/rbc_board_config_results.mk endif +$(call dump-phase-end, build/make/core/board_config.mk) ifneq (,$(and $(TARGET_ARCH),$(TARGET_ARCH_SUITE))) $(error $(board_config_mk) erroneously sets both TARGET_ARCH and TARGET_ARCH_SUITE) @@ -288,6 +291,7 @@ $(foreach var,$(_board_true_false_vars), \ include $(BUILD_SYSTEM)/board_config_wifi.mk # Set up soong config for "soong_config_value_variable". 
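# (The "-include" form below is GNU make's optional include: a file that does not
# exist in the current source tree is silently skipped, so trees without these
# hardware/vendor projects still configure cleanly.)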
+-include hardware/interfaces/configstore/1.1/default/surfaceflinger.mk -include vendor/google/build/soong/soong_config_namespace/camera.mk # Default *_CPU_VARIANT_RUNTIME to CPU_VARIANT if unspecified. @@ -309,9 +313,10 @@ endif .KATI_READONLY := $(_board_strip_readonly_list) INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE) -ifneq (,$(BOARD_BOOTCONFIG)) +ifneq (,$(BOARD_BOOTCONFIG)$(BOARD_BOOTCONFIG_FILE)) INTERNAL_KERNEL_CMDLINE += bootconfig INTERNAL_BOOTCONFIG := $(BOARD_BOOTCONFIG) + INTERNAL_BOOTCONFIG_FILE := $(BOARD_BOOTCONFIG_FILE) endif ifneq ($(filter %64,$(TARGET_ARCH)),) @@ -920,6 +925,18 @@ ifeq ($(PRODUCT_BUILD_PVMFW_IMAGE),true) endif .KATI_READONLY := BOARD_USES_PVMFWIMAGE +BOARD_USES_DESKTOP_RECOVERY_IMAGE := +ifeq ($(PRODUCT_BUILD_DESKTOP_RECOVERY_IMAGE),true) + BOARD_USES_DESKTOP_RECOVERY_IMAGE := true +endif +.KATI_READONLY := BOARD_USES_DESKTOP_RECOVERY_IMAGE + +BOARD_USES_DESKTOP_UPDATE_IMAGE := +ifeq ($(PRODUCT_BUILD_DESKTOP_UPDATE_IMAGE),true) + BOARD_USES_DESKTOP_UPDATE_IMAGE := true +endif +.KATI_READONLY := BOARD_USES_DESKTOP_UPDATE_IMAGE + ########################################### # Ensure consistency among TARGET_RECOVERY_UPDATER_LIBS, AB_OTA_UPDATER, and PRODUCT_OTA_FORCE_NON_AB_PACKAGE. TARGET_RECOVERY_UPDATER_LIBS ?= diff --git a/core/BUILD.bazel b/core/combo/arch/arm64/armv9-2a.mk index f4869d4833..69ffde014b 100644 --- a/core/BUILD.bazel +++ b/core/combo/arch/arm64/armv9-2a.mk @@ -1,28 +1,18 @@ +# # Copyright (C) 2023 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -# limitations under the License - -# Export tradefed templates for tests. -exports_files( - glob(["*.xml"]), -) +# limitations under the License. +# -# Export proguard flag files for r8. -filegroup( - name = "global_proguard_flags", - srcs = [ - "proguard.flags", - "proguard_basic_keeps.flags", - ], - visibility = ["//visibility:public"], -) +# .mk file required to support build for the ARMv9.2-A arch variant. +# The file just needs to be present, it does not need to contain anything. diff --git a/core/combo/arch/x86/alderlake.mk b/core/combo/arch/x86/alderlake.mk new file mode 100644 index 0000000000..a7ae6ed679 --- /dev/null +++ b/core/combo/arch/x86/alderlake.mk @@ -0,0 +1,6 @@ +# Configuration for Linux on x86. +# Generating binaries for processors +# that have AVX2 feature flag +# + +ARCH_X86_HAVE_SSE4_1 := true diff --git a/core/combo/arch/x86_64/alderlake.mk b/core/combo/arch/x86_64/alderlake.mk new file mode 100644 index 0000000000..a7ae6ed679 --- /dev/null +++ b/core/combo/arch/x86_64/alderlake.mk @@ -0,0 +1,6 @@ +# Configuration for Linux on x86. 
+# Generating binaries for processors +# that have AVX2 feature flag +# + +ARCH_X86_HAVE_SSE4_1 := true diff --git a/core/config.mk b/core/config.mk index bd905dcd6b..b89292400b 100644 --- a/core/config.mk +++ b/core/config.mk @@ -173,6 +173,7 @@ $(KATI_obsolete_var BOARD_PREBUILT_PVMFWIMAGE,pvmfw.bin is now built in AOSP and $(KATI_obsolete_var BUILDING_PVMFW_IMAGE,BUILDING_PVMFW_IMAGE is no longer used) $(KATI_obsolete_var BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(KATI_obsolete_var FS_GET_STATS) +$(KATI_obsolete_var BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES) # Used to force goals to build. Only use for conditionally defined goals. .PHONY: FORCE @@ -329,6 +330,18 @@ $(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(filter true,$3)) $(eval SOONG_CONFIG_TYPE_$(strip $1)_$(strip $2):=bool) endef +# soong_config_set_string_list is the same as soong_config_set, but it will +# also type the variable as a list of strings, so that when using select() expressions +# in blueprint files they can use list values instead of strings. +# The values of the list must be space-separated. +# $1 is the namespace. $2 is the variable name. $3 is the variable value. +# Ex: $(call soong_config_set_string_list,acme,COOL_LIBS,a b) +define soong_config_set_string_list +$(call soong_config_define_internal,$1,$2) \ +$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(strip $3)) +$(eval SOONG_CONFIG_TYPE_$(strip $1)_$(strip $2):=string_list) +endef + # soong_config_append appends to the value of the variable in the given Soong # config namespace. If the variable does not exist, it will be defined. If the # namespace does not exist, it will be defined. @@ -363,8 +376,7 @@ endif # configs, generally for cross-cutting features. # Build broken variables that should be treated as booleans -_build_broken_bool_vars := \ - BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES \ +_build_broken_bool_vars := # Build broken variables that should be treated as lists _build_broken_list_vars := \ @@ -432,13 +444,6 @@ else endif .KATI_READONLY := TARGET_MAX_PAGE_SIZE_SUPPORTED -ifdef PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE - TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := $(PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE) -else - TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false -endif -.KATI_READONLY := TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE - # Boolean variable determining if AOSP relies on bionic's PAGE_SIZE macro. 
ifdef PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO TARGET_NO_BIONIC_PAGE_SIZE_MACRO := $(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO) @@ -725,8 +730,8 @@ SYMBOLS_MAP := $(HOST_OUT_EXECUTABLES)/symbols_map PROGUARD_HOME := external/proguard PROGUARD := $(PROGUARD_HOME)/bin/proguard.sh PROGUARD_DEPS := $(PROGUARD) $(PROGUARD_HOME)/lib/proguard.jar -JAVATAGS := build/make/tools/java-event-log-tags.py -MERGETAGS := build/make/tools/merge-event-log-tags.py +JAVATAGS := $(HOST_OUT_EXECUTABLES)/java-event-log-tags +MERGETAGS := $(HOST_OUT_EXECUTABLES)/merge-event-log-tags APPEND2SIMG := $(HOST_OUT_EXECUTABLES)/append2simg VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata @@ -758,50 +763,23 @@ endif .KATI_READONLY := \ PRODUCT_COMPATIBLE_PROPERTY -# Boolean variable determining if Treble is fully enabled -PRODUCT_FULL_TREBLE := false -ifneq ($(PRODUCT_FULL_TREBLE_OVERRIDE),) - PRODUCT_FULL_TREBLE := $(PRODUCT_FULL_TREBLE_OVERRIDE) -else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),) - #$(warning no product shipping level defined) -else ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),26),) - PRODUCT_FULL_TREBLE := true -endif - -requirements := \ - PRODUCT_TREBLE_LINKER_NAMESPACES \ - PRODUCT_ENFORCE_VINTF_MANIFEST - -# If it is overriden, then the requirement override is taken, otherwise it's -# PRODUCT_FULL_TREBLE -$(foreach req,$(requirements),$(eval \ - $(req) := $(if $($(req)_OVERRIDE),$($(req)_OVERRIDE),$(PRODUCT_FULL_TREBLE)))) -# If the requirement is false for any reason, then it's not PRODUCT_FULL_TREBLE -$(foreach req,$(requirements),$(eval \ - PRODUCT_FULL_TREBLE := $(if $(filter false,$($(req))),false,$(PRODUCT_FULL_TREBLE)))) - -PRODUCT_FULL_TREBLE_OVERRIDE ?= -$(foreach req,$(requirements),$(eval $(req)_OVERRIDE ?=)) - -# used to be a part of PRODUCT_FULL_TREBLE, but now always set it +# TODO: remove all code referencing these, and remove override variables +PRODUCT_FULL_TREBLE := true PRODUCT_NOTICE_SPLIT := true +PRODUCT_TREBLE_LINKER_NAMESPACES := true +PRODUCT_ENFORCE_VINTF_MANIFEST := true # TODO(b/114488870): disallow PRODUCT_FULL_TREBLE_OVERRIDE from being used. .KATI_READONLY := \ - PRODUCT_FULL_TREBLE_OVERRIDE \ - $(foreach req,$(requirements),$(req)_OVERRIDE) \ - $(requirements) \ PRODUCT_FULL_TREBLE \ + PRODUCT_TREBLE_LINKER_NAMESPACES \ + PRODUCT_ENFORCE_VINTF_MANIFEST \ PRODUCT_NOTICE_SPLIT \ -ifneq ($(PRODUCT_FULL_TREBLE),true) - $(warning This device does not have Treble enabled. This is unsafe.) -endif - -$(KATI_obsolete_var $(foreach req,$(requirements),$(req)_OVERRIDE) \ - ,This should be referenced without the _OVERRIDE suffix.) - -requirements := +# TODO(b/114488870): remove all sets of these everwhere, and disallow them to be used +$(KATI_obsolete_var PRODUCT_TREBLE_LINKER_NAMESPACES_OVERRIDE,Deprecated.) +$(KATI_obsolete_var PRODUCT_ENFORCE_VINTF_MANIFEST_OVERRIDE,Deprecated.) +$(KATI_obsolete_var PRODUCT_FULL_TREBLE_OVERRIDE,Deprecated.) # BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED can be true only if early-mount of # partitions is supported. But the early-mount must be supported for full @@ -811,6 +789,24 @@ ifeq ($(PRODUCT_FULL_TREBLE),true) BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true endif +ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),36),) + ifneq ($(NEED_AIDL_NDK_PLATFORM_BACKEND),) + $(error Must not set NEED_AIDL_NDK_PLATFORM_BACKEND, but it is set to: $(NEED_AIDL_NDK_PLATFORM_BACKEND). Support will be removed.) 
+ endif +endif + +ifdef PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE + TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := $(PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE) +else ifeq (true,$(TARGET_BUILD_UNBUNDLED)) + # unbundled builds may not have updated build sources + TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false +else ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),36),) + TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := true +else + TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false +endif +.KATI_READONLY := TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE + # Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching # devices if unset. ifndef BOARD_SYSTEMSDK_VERSIONS @@ -833,12 +829,6 @@ endif .KATI_READONLY := BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES ifdef PRODUCT_SHIPPING_API_LEVEL - board_api_level := $(firstword $(BOARD_API_LEVEL) $(BOARD_SHIPPING_API_LEVEL)) - ifneq (,$(board_api_level)) - min_systemsdk_version := $(call math_min,$(board_api_level),$(PRODUCT_SHIPPING_API_LEVEL)) - else - min_systemsdk_version := $(PRODUCT_SHIPPING_API_LEVEL) - endif ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),) ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),) $(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set) @@ -875,20 +865,28 @@ BOARD_SEPOLICY_VERS := $(PLATFORM_SEPOLICY_VERSION) .KATI_READONLY := PLATFORM_SEPOLICY_VERSION BOARD_SEPOLICY_VERS # A list of SEPolicy versions, besides PLATFORM_SEPOLICY_VERSION, that the framework supports. -PLATFORM_SEPOLICY_COMPAT_VERSIONS := $(filter-out $(PLATFORM_SEPOLICY_VERSION), \ +PLATFORM_SEPOLICY_COMPAT_VERSIONS := \ 29.0 \ 30.0 \ 31.0 \ 32.0 \ 33.0 \ 34.0 \ + +PLATFORM_SEPOLICY_COMPAT_VERSIONS += $(foreach ver,\ 202404 \ - ) + 202504 \ + ,$(if $(filter true,$(call math_gt,$(PLATFORM_SEPOLICY_VERSION),$(ver))),$(ver))) .KATI_READONLY := \ PLATFORM_SEPOLICY_COMPAT_VERSIONS \ PLATFORM_SEPOLICY_VERSION \ +BOARD_GENFS_LABELS_VERSION ?= $(BOARD_API_LEVEL) +ifeq ($(call math_gt,$(BOARD_API_LEVEL),$(BOARD_GENFS_LABELS_VERSION)),true) + $(error BOARD_GENFS_LABELS_VERSION ($(BOARD_GENFS_LABELS_VERSION)) must be greater than or equal to BOARD_API_LEVEL ($(BOARD_API_LEVEL))) +endif + ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true) ifneq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true) $(error PRODUCT_USE_DYNAMIC_PARTITIONS must be true when PRODUCT_RETROFIT_DYNAMIC_PARTITIONS \ @@ -1201,6 +1199,11 @@ RSCOMPAT_NO_USAGEIO_API_LEVELS := 8 9 10 11 12 13 APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION) +# Add BUILD_NUMBER to apps if PRODUCT_BUILD_APPS_WITH_BUILD_NUMBER is defined. +ifeq ($(PRODUCT_BUILD_APPS_WITH_BUILD_NUMBER),true) + APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)-$(BUILD_NUMBER_FROM_FILE) +endif + # ANDROID_WARNING_ALLOWED_PROJECTS is generated by build/soong. 
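# (As an illustration with hypothetical values: if ANDROID_WARNING_ALLOWED_PROJECTS
# contained "external/foo/% device/bar/%", then
# $(call find_warning_allowed_projects,external/foo/lib) would expand to
# "external/foo/lib/", while a project outside the allow list expands to empty.)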
define find_warning_allowed_projects $(filter $(ANDROID_WARNING_ALLOWED_PROJECTS),$(1)/) @@ -1254,7 +1257,19 @@ ifeq ($(TARGET_SYSTEM_EXT_PROP),) TARGET_SYSTEM_EXT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/system_ext.prop) endif -.KATI_READONLY += TARGET_SYSTEM_PROP TARGET_SYSTEM_EXT_PROP +ifeq ($(TARGET_PRODUCT_PROP),) +TARGET_PRODUCT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/product.prop) +endif + +ifeq ($(TARGET_ODM_PROP),) +TARGET_ODM_PROP := $(wildcard $(TARGET_DEVICE_DIR)/odm.prop) +endif + +.KATI_READONLY := \ + TARGET_SYSTEM_PROP \ + TARGET_SYSTEM_EXT_PROP \ + TARGET_PRODUCT_PROP \ + TARGET_ODM_PROP \ include $(BUILD_SYSTEM)/sysprop_config.mk @@ -1262,8 +1277,15 @@ include $(BUILD_SYSTEM)/sysprop_config.mk # consistency with those defined in BoardConfig.mk files. include $(BUILD_SYSTEM)/android_soong_config_vars.mk -SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).variables -SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).extra.variables +# EMMA_INSTRUMENT is set to true when coverage is enabled. Create a suffix to +# differentiate the coverage version of the ninja files; this saves the ~5 minutes +# of build time otherwise spent regenerating ninja. +ifeq (true,$(EMMA_INSTRUMENT)) +COVERAGE_SUFFIX := .coverage +endif + +SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).variables +SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).extra.variables ifeq ($(CALLED_FROM_SETUP),true) include $(BUILD_SYSTEM)/ninja_config.mk @@ -1273,10 +1295,6 @@ endif SOONG_VARIABLES := SOONG_EXTRA_VARIABLES := --include external/ltp/android/ltp_package_list.mk -DEFAULT_DATA_OUT_MODULES := ltp $(ltp_packages) -.KATI_READONLY := DEFAULT_DATA_OUT_MODULES - include $(BUILD_SYSTEM)/dumpvar.mk ifdef BOARD_VNDK_VERSION @@ -1291,3 +1309,58 @@ ifeq (false,$(SYSTEM_OPTIMIZE_JAVA)) $(error SYSTEM_OPTIMIZE_JAVA must be enabled when FULL_SYSTEM_OPTIMIZE_JAVA is enabled) endif endif + +# ----------------------------------------------------------------- +# Define fingerprint, thumbprint, and version tags for the current build +# +# BUILD_VERSION_TAGS is a comma-separated list of tags chosen by the device +# implementer that further distinguishes the build. Here, we are adding a +# mandatory tag that identifies the signing config of the build. +BUILD_VERSION_TAGS := $(BUILD_VERSION_TAGS) +ifeq ($(TARGET_BUILD_TYPE),debug) + BUILD_VERSION_TAGS += debug +endif +# The "test-keys" tag marks builds signed with the old test keys, +# which are available in the SDK. "dev-keys" marks builds signed with +# non-default dev keys (usually private keys from a vendor directory). +# Both of these tags will be removed and replaced with "release-keys" +# when the target-files is signed in a post-build step. +ifeq ($(DEFAULT_SYSTEM_DEV_CERTIFICATE),build/make/target/product/security/testkey) +BUILD_KEYS := test-keys +else +BUILD_KEYS := dev-keys +endif +BUILD_VERSION_TAGS += $(BUILD_KEYS) +BUILD_VERSION_TAGS := $(subst $(space),$(comma),$(sort $(BUILD_VERSION_TAGS))) + +# BUILD_FINGERPRINT is used to uniquely identify the combined build and +# product; used by the OTA server.
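# For example (hypothetical values), the fingerprint assembled below has the shape
# brand/product/device:platform_version/build_id/build_number:variant/tags, e.g.
#   Android/aosp_arm64/generic_arm64:15/AP4A.250105.002/12345678:userdebug/test-keys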
+ifeq (,$(strip $(BUILD_FINGERPRINT))) + BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER_FROM_FILE):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS) +endif + +BUILD_FINGERPRINT_FILE := $(PRODUCT_OUT)/build_fingerprint.txt +ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) >$(BUILD_FINGERPRINT_FILE).tmp && (if ! cmp -s $(BUILD_FINGERPRINT_FILE).tmp $(BUILD_FINGERPRINT_FILE); then mv $(BUILD_FINGERPRINT_FILE).tmp $(BUILD_FINGERPRINT_FILE); else rm $(BUILD_FINGERPRINT_FILE).tmp; fi) && grep " " $(BUILD_FINGERPRINT_FILE))) + $(error BUILD_FINGERPRINT cannot contain spaces: "$(file <$(BUILD_FINGERPRINT_FILE))") +endif +BUILD_FINGERPRINT_FROM_FILE := $$(cat $(BUILD_FINGERPRINT_FILE)) +# unset it for safety. +BUILD_FINGERPRINT := + +# BUILD_THUMBPRINT is used to uniquely identify the system build; used by the +# OTA server. This purposefully excludes any product-specific variables. +ifeq (,$(strip $(BUILD_THUMBPRINT))) + BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER_FROM_FILE):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS) +endif + +BUILD_THUMBPRINT_FILE := $(PRODUCT_OUT)/build_thumbprint.txt +ifeq ($(strip $(HAS_BUILD_NUMBER)),true) +$(BUILD_THUMBPRINT_FILE): $(BUILD_NUMBER_FILE) +endif +ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_THUMBPRINT) >$(BUILD_THUMBPRINT_FILE) && grep " " $(BUILD_THUMBPRINT_FILE))) + $(error BUILD_THUMBPRINT cannot contain spaces: "$(file <$(BUILD_THUMBPRINT_FILE))") +endif +# unset it for safety. +BUILD_THUMBPRINT_FILE := +BUILD_THUMBPRINT := diff --git a/core/definitions.mk b/core/definitions.mk index cd1b36e4c7..1ab6388838 100644 --- a/core/definitions.mk +++ b/core/definitions.mk @@ -1555,7 +1555,7 @@ endef define transform-logtags-to-java @mkdir -p $(dir $@) @echo "logtags: $@ <= $<" -$(hide) $(JAVATAGS) -o $@ $< $(PRIVATE_MERGED_TAG) +$(hide) $(JAVATAGS) -o $@ $< endef @@ -2605,7 +2605,87 @@ define dump-words-to-file @$(call emit-line,$(wordlist 108501,109000,$(1)),$(2)) @$(call emit-line,$(wordlist 109001,109500,$(1)),$(2)) @$(call emit-line,$(wordlist 109501,110000,$(1)),$(2)) - @$(if $(wordlist 110001,110002,$(1)),$(error dump-words-to-file: Too many words ($(words $(1))))) + @$(call emit-line,$(wordlist 110001,110500,$(1)),$(2)) + @$(call emit-line,$(wordlist 110501,111000,$(1)),$(2)) + @$(call emit-line,$(wordlist 111001,111500,$(1)),$(2)) + @$(call emit-line,$(wordlist 111501,112000,$(1)),$(2)) + @$(call emit-line,$(wordlist 112001,112500,$(1)),$(2)) + @$(call emit-line,$(wordlist 112501,113000,$(1)),$(2)) + @$(call emit-line,$(wordlist 113001,113500,$(1)),$(2)) + @$(call emit-line,$(wordlist 113501,114000,$(1)),$(2)) + @$(call emit-line,$(wordlist 114001,114500,$(1)),$(2)) + @$(call emit-line,$(wordlist 114501,115000,$(1)),$(2)) + @$(call emit-line,$(wordlist 115001,115500,$(1)),$(2)) + @$(call emit-line,$(wordlist 115501,116000,$(1)),$(2)) + @$(call emit-line,$(wordlist 116001,116500,$(1)),$(2)) + @$(call emit-line,$(wordlist 116501,117000,$(1)),$(2)) + @$(call emit-line,$(wordlist 117001,117500,$(1)),$(2)) + @$(call emit-line,$(wordlist 117501,118000,$(1)),$(2)) + @$(call emit-line,$(wordlist 118001,118500,$(1)),$(2)) + @$(call emit-line,$(wordlist 118501,119000,$(1)),$(2)) + @$(call emit-line,$(wordlist 119001,119500,$(1)),$(2)) + @$(call emit-line,$(wordlist 119501,120000,$(1)),$(2)) + @$(call emit-line,$(wordlist 120001,120500,$(1)),$(2)) + @$(call emit-line,$(wordlist 120501,121000,$(1)),$(2)) + @$(call emit-line,$(wordlist 
121001,121500,$(1)),$(2)) + @$(call emit-line,$(wordlist 121501,122000,$(1)),$(2)) + @$(call emit-line,$(wordlist 122001,122500,$(1)),$(2)) + @$(call emit-line,$(wordlist 122501,123000,$(1)),$(2)) + @$(call emit-line,$(wordlist 123001,123500,$(1)),$(2)) + @$(call emit-line,$(wordlist 123501,124000,$(1)),$(2)) + @$(call emit-line,$(wordlist 124001,124500,$(1)),$(2)) + @$(call emit-line,$(wordlist 124501,125000,$(1)),$(2)) + @$(call emit-line,$(wordlist 125001,125500,$(1)),$(2)) + @$(call emit-line,$(wordlist 125501,126000,$(1)),$(2)) + @$(call emit-line,$(wordlist 126001,126500,$(1)),$(2)) + @$(call emit-line,$(wordlist 126501,127000,$(1)),$(2)) + @$(call emit-line,$(wordlist 127001,127500,$(1)),$(2)) + @$(call emit-line,$(wordlist 127501,128000,$(1)),$(2)) + @$(call emit-line,$(wordlist 128001,128500,$(1)),$(2)) + @$(call emit-line,$(wordlist 128501,129000,$(1)),$(2)) + @$(call emit-line,$(wordlist 129001,129500,$(1)),$(2)) + @$(call emit-line,$(wordlist 129501,130000,$(1)),$(2)) + @$(call emit-line,$(wordlist 130001,130500,$(1)),$(2)) + @$(call emit-line,$(wordlist 130501,131000,$(1)),$(2)) + @$(call emit-line,$(wordlist 131001,131500,$(1)),$(2)) + @$(call emit-line,$(wordlist 131501,132000,$(1)),$(2)) + @$(call emit-line,$(wordlist 132001,132500,$(1)),$(2)) + @$(call emit-line,$(wordlist 132501,133000,$(1)),$(2)) + @$(call emit-line,$(wordlist 133001,133500,$(1)),$(2)) + @$(call emit-line,$(wordlist 133501,134000,$(1)),$(2)) + @$(call emit-line,$(wordlist 134001,134500,$(1)),$(2)) + @$(call emit-line,$(wordlist 134501,135000,$(1)),$(2)) + @$(call emit-line,$(wordlist 135001,135500,$(1)),$(2)) + @$(call emit-line,$(wordlist 135501,136000,$(1)),$(2)) + @$(call emit-line,$(wordlist 136001,136500,$(1)),$(2)) + @$(call emit-line,$(wordlist 136501,137000,$(1)),$(2)) + @$(call emit-line,$(wordlist 137001,137500,$(1)),$(2)) + @$(call emit-line,$(wordlist 137501,138000,$(1)),$(2)) + @$(call emit-line,$(wordlist 138001,138500,$(1)),$(2)) + @$(call emit-line,$(wordlist 138501,139000,$(1)),$(2)) + @$(call emit-line,$(wordlist 139001,139500,$(1)),$(2)) + @$(call emit-line,$(wordlist 139501,140000,$(1)),$(2)) + @$(call emit-line,$(wordlist 140001,140500,$(1)),$(2)) + @$(call emit-line,$(wordlist 140501,141000,$(1)),$(2)) + @$(call emit-line,$(wordlist 141001,141500,$(1)),$(2)) + @$(call emit-line,$(wordlist 141501,142000,$(1)),$(2)) + @$(call emit-line,$(wordlist 142001,142500,$(1)),$(2)) + @$(call emit-line,$(wordlist 142501,143000,$(1)),$(2)) + @$(call emit-line,$(wordlist 143001,143500,$(1)),$(2)) + @$(call emit-line,$(wordlist 143501,144000,$(1)),$(2)) + @$(call emit-line,$(wordlist 144001,144500,$(1)),$(2)) + @$(call emit-line,$(wordlist 144501,145000,$(1)),$(2)) + @$(call emit-line,$(wordlist 145001,145500,$(1)),$(2)) + @$(call emit-line,$(wordlist 145501,146000,$(1)),$(2)) + @$(call emit-line,$(wordlist 146001,146500,$(1)),$(2)) + @$(call emit-line,$(wordlist 146501,147000,$(1)),$(2)) + @$(call emit-line,$(wordlist 147001,147500,$(1)),$(2)) + @$(call emit-line,$(wordlist 147501,148000,$(1)),$(2)) + @$(call emit-line,$(wordlist 148001,148500,$(1)),$(2)) + @$(call emit-line,$(wordlist 148501,149000,$(1)),$(2)) + @$(call emit-line,$(wordlist 149001,149500,$(1)),$(2)) + @$(call emit-line,$(wordlist 149501,150000,$(1)),$(2)) + @$(if $(wordlist 150001,150002,$(1)),$(error dump-words-to-file: Too many words ($(words $(1))))) endef # Return jar arguments to compress files in a given directory # $(1): directory diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk index 906d7f0163..88e0cc7452 100644 --- 
a/core/dex_preopt.mk +++ b/core/dex_preopt.mk @@ -13,28 +13,6 @@ else install-on-system-other = $(filter-out $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(basename $(notdir $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(1))))) endif -# Install boot images for testing on host. We exclude framework image as it is not part of art manifest. -my_boot_image_arch := HOST_ARCH -my_boot_image_out := $(HOST_OUT) -my_boot_image_syms := $(HOST_OUT)/symbols -HOST_BOOT_IMAGE_MODULE := \ - $(foreach my_boot_image_name,art_host,$(strip \ - $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \ - $(my_boot_image_module))) -HOST_BOOT_IMAGE := $(call module-installed-files,$(HOST_BOOT_IMAGE_MODULE)) -ifdef HOST_2ND_ARCH - my_boot_image_arch := HOST_2ND_ARCH - 2ND_HOST_BOOT_IMAGE_MODULE := \ - $(foreach my_boot_image_name,art_host,$(strip \ - $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \ - $(my_boot_image_module))) - 2ND_HOST_BOOT_IMAGE := $(call module-installed-files,$(2ND_HOST_BOOT_IMAGE_MODULE)) -endif -my_boot_image_arch := -my_boot_image_out := -my_boot_image_syms := -my_boot_image_module := - # Build the boot.zip which contains the boot jars and their compilation output # We can do this only if preopt is enabled and if the product uses libart config (which sets the # default properties for preopting). diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk index d51de33273..f1e9fb59b7 100644 --- a/core/dex_preopt_config.mk +++ b/core/dex_preopt_config.mk @@ -1,4 +1,4 @@ -DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt.config +DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt${COVERAGE_SUFFIX}.config ENABLE_PREOPT := true ENABLE_PREOPT_BOOT_IMAGES := true diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk deleted file mode 100644 index a2c9942a41..0000000000 --- a/core/dex_preopt_libart.mk +++ /dev/null @@ -1,109 +0,0 @@ -#################################### -# ART boot image installation -# Input variables: -# my_boot_image_name: the boot image to install -# my_boot_image_arch: the architecture to install (e.g. TARGET_ARCH, not expanded) -# my_boot_image_out: the install directory (e.g. $(PRODUCT_OUT)) -# my_boot_image_syms: the symbols director (e.g. $(TARGET_OUT_UNSTRIPPED)) -# -# Output variables: -# my_boot_image_module: the created module name. Empty if no module is created. -# -# Install the boot images compiled by Soong. -# Create a module named dexpreopt_bootjar.$(my_boot_image_name)_$($(my_boot_image_arch)) -# that installs all of boot image files. -# If there is no file to install for $(my_boot_image_name), for example when -# building an unbundled build, then no module is created. -# -#################################### - -# Takes a list of src:dest install pairs and returns a new list with a path -# prefixed to each dest value. -# $(1): list of src:dest install pairs -# $(2): path to prefix to each dest value -define prefix-copy-many-files-dest -$(foreach v,$(1),$(call word-colon,1,$(v)):$(2)$(call word-colon,2,$(v))) -endef - -# Converts an architecture-specific vdex path into a location that can be shared -# between architectures. -define vdex-shared-install-path -$(dir $(patsubst %/,%,$(dir $(1))))$(notdir $(1)) -endef - -# Takes a list of src:dest install pairs of vdex files and returns a new list -# where each dest has been rewritten to the shared location for vdex files. 
-define vdex-copy-many-files-shared-dest -$(foreach v,$(1),$(call word-colon,1,$(v)):$(call vdex-shared-install-path,$(call word-colon,2,$(v)))) -endef - -# Creates a rule to symlink an architecture specific vdex file to the shared -# location for that vdex file. -define symlink-vdex-file -$(strip \ - $(call symlink-file,\ - $(call vdex-shared-install-path,$(1)),\ - ../$(notdir $(1)),\ - $(1))\ - $(1)) -endef - -# Takes a list of src:dest install pairs of vdex files and creates rules to -# symlink each dest to the shared location for that vdex file. -define symlink-vdex-files -$(foreach v,$(1),$(call symlink-vdex-file,$(call word-colon,2,$(v)))) -endef - -my_boot_image_module := - -my_suffix := $(my_boot_image_name)_$($(my_boot_image_arch)) -my_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_out)) -my_vdex_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_VDEX_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_out)) -my_vdex_copy_shared_pairs := $(call vdex-copy-many-files-shared-dest,$(my_vdex_copy_pairs)) -ifeq (,$(filter %_2ND_ARCH,$(my_boot_image_arch))) - # Only install the vdex to the shared location for the primary architecture. - my_copy_pairs += $(my_vdex_copy_shared_pairs) -endif - -my_unstripped_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_UNSTRIPPED_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_syms)) - -# Generate the boot image module only if there is any file to install. -ifneq (,$(strip $(my_copy_pairs))) - my_first_pair := $(firstword $(my_copy_pairs)) - my_rest_pairs := $(wordlist 2,$(words $(my_copy_pairs)),$(my_copy_pairs)) - - my_first_src := $(call word-colon,1,$(my_first_pair)) - my_first_dest := $(call word-colon,2,$(my_first_pair)) - - my_installed := $(call copy-many-files,$(my_copy_pairs)) - my_unstripped_installed := $(call copy-many-files,$(my_unstripped_copy_pairs)) - - my_symlinks := $(call symlink-vdex-files,$(my_vdex_copy_pairs)) - - # We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM). - LOCAL_PATH := $(BUILD_SYSTEM) - # Hack to let these pseudo-modules wrapped around Soong modules use LOCAL_SOONG_INSTALLED_MODULE. - LOCAL_MODULE_MAKEFILE := $(SOONG_ANDROID_MK) - - include $(CLEAR_VARS) - LOCAL_MODULE := dexpreopt_bootjar.$(my_suffix) - LOCAL_PREBUILT_MODULE_FILE := $(my_first_src) - LOCAL_MODULE_PATH := $(dir $(my_first_dest)) - LOCAL_MODULE_STEM := $(notdir $(my_first_dest)) - LOCAL_SOONG_INSTALL_PAIRS := $(my_copy_pairs) - LOCAL_SOONG_INSTALL_SYMLINKS := $(my_symlinks) - LOCAL_SOONG_INSTALLED_MODULE := $(my_first_dest) - LOCAL_SOONG_LICENSE_METADATA := $(DEXPREOPT_IMAGE_LICENSE_METADATA_$(my_suffix)) - ifneq (,$(strip $(filter HOST_%,$(my_boot_image_arch)))) - LOCAL_IS_HOST_MODULE := true - endif - LOCAL_MODULE_CLASS := ETC - include $(BUILD_PREBUILT) - $(LOCAL_BUILT_MODULE): | $(my_unstripped_installed) - # Installing boot.art causes all boot image bits to be installed. - # Keep this old behavior in case anyone still needs it. - $(LOCAL_INSTALLED_MODULE): $(wordlist 2,$(words $(my_installed)),$(my_installed)) $(my_symlinks) - $(my_all_targets): $(my_installed) $(my_symlinks) - - my_boot_image_module := $(LOCAL_MODULE) -endif # my_copy_pairs != empty diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk index 640fe10f9c..eb4c822dc5 100644 --- a/core/dumpconfig.mk +++ b/core/dumpconfig.mk @@ -56,7 +56,7 @@ BUILD_DATETIME_FILE := $(OUT_DIR)/build_date.txt # Escape quotation marks for CSV, and wraps in quotation marks. 
define escape-for-csv -"$(subst ","",$1)" +"$(subst ","",$(subst $(newline), ,$1))" endef # Args: @@ -68,7 +68,7 @@ endef # Args: # $(1): include stack define dump-import-done -$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1)))) +$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1)),$(filter-out $(1),$(MAKEFILE_LIST)))) endef # Args: diff --git a/core/envsetup.mk b/core/envsetup.mk index c063f60a15..f82e861abf 100644 --- a/core/envsetup.mk +++ b/core/envsetup.mk @@ -417,6 +417,7 @@ HOST_OUT_SDK_ADDON := $(HOST_OUT)/sdk_addon HOST_OUT_NATIVE_TESTS := $(HOST_OUT)/nativetest64 HOST_OUT_COVERAGE := $(HOST_OUT)/coverage HOST_OUT_TESTCASES := $(HOST_OUT)/testcases +HOST_OUT_ETC := $(HOST_OUT)/etc .KATI_READONLY := \ HOST_OUT_EXECUTABLES \ HOST_OUT_SHARED_LIBRARIES \ @@ -425,7 +426,8 @@ HOST_OUT_TESTCASES := $(HOST_OUT)/testcases HOST_OUT_SDK_ADDON \ HOST_OUT_NATIVE_TESTS \ HOST_OUT_COVERAGE \ - HOST_OUT_TESTCASES + HOST_OUT_TESTCASES \ + HOST_OUT_ETC HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT)/bin HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib diff --git a/core/java.mk b/core/java.mk index 5fbc916859..41a1b1ba84 100644 --- a/core/java.mk +++ b/core/java.mk @@ -140,8 +140,7 @@ ifneq ($(strip $(logtags_sources)),) logtags_java_sources := $(patsubst %.logtags,%.java,$(addprefix $(intermediates.COMMON)/logtags/, $(logtags_sources))) logtags_sources := $(addprefix $(LOCAL_PATH)/, $(logtags_sources)) -$(logtags_java_sources): PRIVATE_MERGED_TAG := $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt -$(logtags_java_sources): $(intermediates.COMMON)/logtags/%.java: $(LOCAL_PATH)/%.logtags $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt $(JAVATAGS) build/make/tools/event_log_tags.py +$(logtags_java_sources): $(intermediates.COMMON)/logtags/%.java: $(LOCAL_PATH)/%.logtags $(JAVATAGS) $(transform-logtags-to-java) else diff --git a/core/java_common.mk b/core/java_common.mk index a21f062029..f574b7623e 100644 --- a/core/java_common.mk +++ b/core/java_common.mk @@ -32,7 +32,7 @@ ifeq (,$(LOCAL_JAVA_LANGUAGE_VERSION)) else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS)) # TODO(ccross): allow 1.9 for current and unbundled once we have SDK system modules LOCAL_JAVA_LANGUAGE_VERSION := 1.8 - else ifeq ($(EXPERIMENTAL_TARGET_JAVA_VERSION_21),true) + else ifeq ($(RELEASE_TARGET_JAVA_21),true) LOCAL_JAVA_LANGUAGE_VERSION := 21 else LOCAL_JAVA_LANGUAGE_VERSION := 17 diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk index 46393acb12..4b6eea7616 100644 --- a/core/java_prebuilt_internal.mk +++ b/core/java_prebuilt_internal.mk @@ -172,6 +172,12 @@ framework_res_package_export := \ endif endif +# transitive-res-packages is only populated for Soong modules for now, but needs +# to exist so that other Make modules can depend on it. Create an empty file. +my_transitive_res_packages := $(intermediates.COMMON)/transitive-res-packages +$(my_transitive_res_packages): + touch $@ + my_res_package := $(intermediates.COMMON)/package-res.apk # We needed only very few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones. 
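[Editor's note, not part of the patch] The dumpconfig.mk hunk above makes escape-for-csv flatten embedded newlines before CSV-quoting. A minimal sketch of the resulting behavior, assuming GNU Make and the $(newline) single-newline helper that the build system defines elsewhere:
# Hypothetical usage of the patched macro; not taken from the patch itself.
$(info $(call escape-for-csv,two$(newline)lines with "quotes"))
# Prints: "two lines with ""quotes"""
# Newlines collapse to spaces, double quotes are doubled, and the whole value is
# wrapped in quotes so it survives as a single CSV field.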
diff --git a/core/layoutlib_data.mk b/core/layoutlib_data.mk index e45f7efe16..dabcfb20c9 100644 --- a/core/layoutlib_data.mk +++ b/core/layoutlib_data.mk @@ -31,8 +31,18 @@ $(keyboards): $(KEYBOARD_TEMP)/%.kcm: frameworks/base/data/keyboards/%.kcm $(hide) mkdir -p $(dir $@) $(hide) cp -vf $< $@ -# List of all data files - font files, font configuration files, key character map files -LAYOUTLIB_FILES := $(fonts_device) $(font_config) $(keyboards) +HYPHEN_TEMP := $(call intermediates-dir-for,PACKAGING,hyphen,HOST,COMMON) + +# The hyphenation pattern files needed to support text hyphenation +hyphen := $(filter $(TARGET_OUT)/usr/hyphen-data/%.hyb, $(INTERNAL_SYSTEMIMAGE_FILES)) +hyphen := $(addprefix $(HYPHEN_TEMP)/, $(notdir $(hyphen))) + +$(hyphen): $(HYPHEN_TEMP)/%: $(TARGET_OUT)/usr/hyphen-data/% + $(hide) mkdir -p $(dir $@) + $(hide) cp -vf $< $@ + +# List of all data files - font files, font configuration files, key character map files, hyphenation pattern files +LAYOUTLIB_FILES := $(fonts_device) $(font_config) $(keyboards) $(hyphen) .PHONY: layoutlib layoutlib-tests layoutlib layoutlib-tests: $(LAYOUTLIB_FILES) @@ -40,6 +50,7 @@ layoutlib layoutlib-tests: $(LAYOUTLIB_FILES) $(call dist-for-goals, layoutlib, $(foreach m,$(fonts_device), $(m):layoutlib_native/fonts/$(notdir $(m)))) $(call dist-for-goals, layoutlib, $(foreach m,$(font_config), $(m):layoutlib_native/fonts/$(notdir $(m)))) $(call dist-for-goals, layoutlib, $(foreach m,$(keyboards), $(m):layoutlib_native/keyboards/$(notdir $(m)))) +$(call dist-for-goals, layoutlib, $(foreach m,$(hyphen), $(m):layoutlib_native/hyphen-data/$(notdir $(m)))) FONT_TEMP := font_config := @@ -66,11 +77,19 @@ $(call dist-for-goals,layoutlib,$(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop:lay # Resource files from frameworks/base/core/res/res LAYOUTLIB_RES := $(call intermediates-dir-for,PACKAGING,layoutlib-res,HOST,COMMON) LAYOUTLIB_RES_FILES := $(shell find frameworks/base/core/res/res -type f -not -path 'frameworks/base/core/res/res/values-m[nc]c*' | sort) -$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES) +EMULATED_OVERLAYS_FILES := $(shell find frameworks/base/packages/overlays/*/res/ | sort) +DEVICE_OVERLAYS_FILES := $(shell find device/generic/goldfish/phone/overlay/frameworks/base/packages/overlays/*/AndroidOverlay/res/ | sort) +$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES) rm -rf $@ - echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist.txt - $(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist.txt -o $(LAYOUTLIB_RES)/temp.zip - rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp.zip + echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist_res.txt + $(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist_res.txt -o $(LAYOUTLIB_RES)/temp_res.zip + echo $(EMULATED_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt + $(SOONG_ZIP) -C frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt -o $(LAYOUTLIB_RES)/temp_emulated_overlays.zip + echo $(DEVICE_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_device_overlays.txt + $(SOONG_ZIP) -C device/generic/goldfish/phone/overlay/frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_device_overlays.txt -o $(LAYOUTLIB_RES)/temp_device_overlays.zip + rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_res.zip + 
unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_emulated_overlays.zip + unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_device_overlays.zip rm -rf $(LAYOUTLIB_RES)/compiled && mkdir $(LAYOUTLIB_RES)/compiled && $(HOST_OUT_EXECUTABLES)/aapt2 compile $(LAYOUTLIB_RES)/data/res/**/*.9.png -o $(LAYOUTLIB_RES)/compiled printf '<?xml version="1.0" encoding="utf-8"?>\n<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.google.android.layoutlib" />' > $(LAYOUTLIB_RES)/AndroidManifest.xml $(HOST_OUT_EXECUTABLES)/aapt2 link -R $(LAYOUTLIB_RES)/compiled/* -o $(LAYOUTLIB_RES)/compiled.apk --manifest $(LAYOUTLIB_RES)/AndroidManifest.xml @@ -78,7 +97,7 @@ $(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $ for f in $(LAYOUTLIB_RES)/compiled_apk/res/*; do mv "$$f" "$${f/-v4/}";done for f in $(LAYOUTLIB_RES)/compiled_apk/res/**/*.9.png; do mv "$$f" "$${f/.9.png/.compiled.9.png}";done cp -r $(LAYOUTLIB_RES)/compiled_apk/res $(LAYOUTLIB_RES)/data - $(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/res -o $@ + $(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/ -o $@ $(call dist-for-goals,layoutlib,$(LAYOUTLIB_RES)/layoutlib-res.zip:layoutlib_native/res.zip) @@ -87,6 +106,7 @@ LAYOUTLIB_SBOM := $(call intermediates-dir-for,PACKAGING,layoutlib-sbom,HOST) _layoutlib_font_config_files := $(sort $(wildcard frameworks/base/data/fonts/*.xml)) _layoutlib_fonts_files := $(filter $(TARGET_OUT)/fonts/%.ttf $(TARGET_OUT)/fonts/%.ttc $(TARGET_OUT)/fonts/%.otf, $(INTERNAL_SYSTEMIMAGE_FILES)) _layoutlib_keyboard_files := $(sort $(wildcard frameworks/base/data/keyboards/*.kcm)) +_layoutlib_hyphen_files := $(filter $(TARGET_OUT)/usr/hyphen-data/%.hyb, $(INTERNAL_SYSTEMIMAGE_FILES)) # Find out files disted with layoutlib in Soong. ### Filter out static libraries for Windows and files already handled in make. 
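[Editor's note, not part of the patch] The layoutlib hunks above repeatedly use GNU Make's $(filter) with a single % wildcard to pull the .hyb hyphenation files out of INTERNAL_SYSTEMIMAGE_FILES. A standalone sketch with made-up paths:
# Hypothetical values for illustration only.
TARGET_OUT := out/target/product/generic/system
INTERNAL_SYSTEMIMAGE_FILES := \
    $(TARGET_OUT)/usr/hyphen-data/hyph-en-us.hyb \
    $(TARGET_OUT)/fonts/Roboto-Regular.ttf
# Keeps only the entry matching the pattern:
$(info $(filter $(TARGET_OUT)/usr/hyphen-data/%.hyb,$(INTERNAL_SYSTEMIMAGE_FILES)))
# -> out/target/product/generic/system/usr/hyphen-data/hyph-en-us.hyb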
@@ -116,6 +136,13 @@ $(LAYOUTLIB_SBOM)/sbom-metadata.csv: echo data/keyboards/$(notdir $f),frameworks/base/data/keyboards,prebuilt_etc,,,,,$f,,, >> $@; \ ) + $(foreach f,$(_layoutlib_hyphen_files), \ + $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \ + $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \ + $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \ + echo data/hyphen-data/$(notdir $f),$(_module_path),$(_soong_module_type),,,,,$f,,, >> $@; \ + ) + $(foreach f,$(_layoutlib_files_disted_by_soong), \ $(eval _prebuilt_module_file := $(call word-colon,1,$f)) \ $(eval _dist_file := $(call word-colon,2,$f)) \ @@ -132,16 +159,26 @@ $(LAYOUTLIB_SBOM)/sbom-metadata.csv: echo $(_path),,,,,,Y,$f,,, >> $@; \ ) + $(foreach f,$(EMULATED_OVERLAYS_FILES), \ + $(eval _path := $(subst frameworks/base/packages,data,$f)) \ + echo $(_path),,,,,,Y,$f,,, >> $@; \ + ) + + $(foreach f,$(DEVICE_OVERLAYS_FILES), \ + $(eval _path := $(subst device/generic/goldfish/phone/overlay/frameworks/base/packages,data,$f)) \ + echo $(_path),,,,,,Y,$f,,, >> $@; \ + ) + .PHONY: layoutlib-sbom layoutlib-sbom: $(LAYOUTLIB_SBOM)/layoutlib.spdx.json -$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES) +$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(_layoutlib_hyphen_files) $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES) rm -rf $@ $(GEN_SBOM) --output_file $@ --metadata $(LAYOUTLIB_SBOM)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --module_name "layoutlib" --json $(call dist-for-goals,layoutlib,$(LAYOUTLIB_SBOM)/layoutlib.spdx.json:layoutlib_native/sbom/layoutlib.spdx.json) # Generate SBOM of framework_res.jar that is created in release_layoutlib.sh. -# The generated SBOM contains placeholders for release_layotlib.sh to substitute, and the placeholders include: +# The generated SBOM contains placeholders for release_layoutlib.sh to substitute, and the placeholders include: # document name, document namespace, document creation info, organization and SHA1 value of framework_res.jar. 
GEN_SBOM_FRAMEWORK_RES := $(HOST_OUT_EXECUTABLES)/generate-sbom-framework_res .PHONY: layoutlib-framework_res-sbom diff --git a/core/main.mk b/core/main.mk index 27ba526f60..d670397531 100644 --- a/core/main.mk +++ b/core/main.mk @@ -31,8 +31,7 @@ endif .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),SOONG_CONFIG_$(n)) .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),$(foreach k,$(SOONG_CONFIG_$(n)),SOONG_CONFIG_$(n)_$(k))) -include $(SOONG_MAKEVARS_MK) - +include $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk YACC :=$= $(BISON) -d include $(BUILD_SYSTEM)/clang/config.mk @@ -46,11 +45,6 @@ BUILD_HOSTNAME_FILE := $(SOONG_OUT_DIR)/build_hostname.txt $(KATI_obsolete_var BUILD_HOSTNAME,Use BUILD_HOSTNAME_FROM_FILE instead) $(KATI_obsolete_var FILE_NAME_TAG,https://android.googlesource.com/platform/build/+/master/Changes.md#FILE_NAME_TAG) -$(BUILD_NUMBER_FILE): - # empty rule to prevent dangling rule error for a file that is written by soong_ui -$(BUILD_HOSTNAME_FILE): - # empty rule to prevent dangling rule error for a file that is written by soong_ui - .KATI_RESTAT: $(BUILD_NUMBER_FILE) .KATI_RESTAT: $(BUILD_HOSTNAME_FILE) @@ -84,6 +78,8 @@ $(shell mkdir -p $(EMPTY_DIRECTORY) && rm -rf $(EMPTY_DIRECTORY)/*) -include test/cts-root/tools/build/config.mk # WVTS-specific config. -include test/wvts/tools/build/config.mk +# DTS-specific config. +-include test/dts/tools/build/config.mk # Clean rules @@ -276,17 +272,36 @@ FULL_BUILD := true # Include all of the makefiles in the system # -subdir_makefiles := $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT).mk $(SOONG_ANDROID_MK) +subdir_makefiles := \ + $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk \ + $(SOONG_ANDROID_MK) \ + build/make/target/board/android-info.mk + # Android.mk files are only used on Linux builds, Mac only supports Android.bp ifeq ($(HOST_OS),linux) - subdir_makefiles += $(file <$(OUT_DIR)/.module_paths/Android.mk.list) + ifeq ($(PRODUCT_IGNORE_ALL_ANDROIDMK),true) + allowed_androidmk_files := + ifdef PRODUCT_ANDROIDMK_ALLOWLIST_FILE + -include $(PRODUCT_ANDROIDMK_ALLOWLIST_FILE) + endif + allowed_androidmk_files += $(PRODUCT_ALLOWED_ANDROIDMK_FILES) + subdir_makefiles += $(filter $(allowed_androidmk_files),$(file <$(OUT_DIR)/.module_paths/Android.mk.list)) + allowed_androidmk_files := + else + subdir_makefiles += $(file <$(OUT_DIR)/.module_paths/Android.mk.list) + endif endif -subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT).mk + +subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk + subdir_makefiles_total := $(words int $(subdir_makefiles) post finish) .KATI_READONLY := subdir_makefiles_total $(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk))) +# Build bootloader.img/radio.img, and unpack the partitions. +-include vendor/google_devices/$(TARGET_SOC)/prebuilts/misc_bins/update_bootloader_radio_image.mk + # For an unbundled image, we can skip blueprint_tools because unbundled image # aims to remove a large number framework projects from the manifest, the # sources or dependencies for these tools may be missing from the tree. 
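[Editor's note, not part of the patch] The core/main.mk hunk above introduces an opt-in mode that ignores every Android.mk except an allowlist. A hedged sketch of how a product makefile might drive it; the variable names are the ones added by this patch, the device paths are invented:
# In a product .mk (illustrative only):
PRODUCT_IGNORE_ALL_ANDROIDMK := true
# Either list allowed files directly...
PRODUCT_ALLOWED_ANDROIDMK_FILES := device/example/foo/Android.mk
# ...or point at a makefile that appends to allowed_androidmk_files, e.g.:
PRODUCT_ANDROIDMK_ALLOWLIST_FILE := device/example/foo/allowed_androidmk.mk
# where allowed_androidmk.mk would contain lines such as:
#   allowed_androidmk_files += vendor/example/common/Android.mk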
@@ -295,6 +310,9 @@ droid_targets : blueprint_tools checkbuild: blueprint_tests endif +# Create necessary directories and symlinks in the root filesystem +include system/core/rootdir/create_root_structure.mk + endif # dont_bother ifndef subdir_makefiles_total @@ -679,12 +697,12 @@ endef # Scan all modules in general-tests, device-tests and other selected suites and # flatten the shared library dependencies. define update-host-shared-libs-deps-for-suites -$(foreach suite,general-tests device-tests vts tvts art-host-tests host-unit-tests,\ +$(foreach suite,general-tests device-tests vts tvts art-host-tests host-unit-tests camera-hal-tests,\ $(foreach m,$(COMPATIBILITY.$(suite).MODULES),\ $(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\ $(foreach dep,$(my_deps),\ $(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\ - $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests),\ + $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests camera-hal-tests),\ $(eval my_testcases := $(HOST_OUT_TESTCASES)),\ $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\ $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\ @@ -973,8 +991,8 @@ endef # Returns modules included automatically as a result of certain BoardConfig # variables being set. define auto-included-modules - llndk_in_system \ - $(if $(DEVICE_MANIFEST_FILE),vendor_manifest.xml) \ + $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver)) \ + llndk.libraries.txt \ $(if $(DEVICE_MANIFEST_SKUS),$(foreach sku, $(DEVICE_MANIFEST_SKUS),vendor_manifest_$(sku).xml)) \ $(if $(ODM_MANIFEST_FILES),odm_manifest.xml) \ $(if $(ODM_MANIFEST_SKUS),$(foreach sku, $(ODM_MANIFEST_SKUS),odm_manifest_$(sku).xml)) \ @@ -1376,6 +1394,7 @@ droidcore-unbundled: $(filter $(HOST_OUT_ROOT)/%,$(modules_to_install)) \ $(INSTALLED_RAMDISK_TARGET) \ $(INSTALLED_BOOTIMAGE_TARGET) \ $(INSTALLED_INIT_BOOT_IMAGE_TARGET) \ + $(INSTALLED_DTBOIMAGE_TARGET) \ $(INSTALLED_RADIOIMAGE_TARGET) \ $(INSTALLED_DEBUG_RAMDISK_TARGET) \ $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \ @@ -1735,10 +1754,6 @@ dump-files: @echo $(sort $(patsubst $(PRODUCT_OUT)/%,%,$(filter $(PRODUCT_OUT)/%,$(modules_to_install)))) | tr -s ' ' '\n' @echo Successfully dumped product target file list. -.PHONY: nothing -nothing: - @echo Successfully read the makefiles. - .PHONY: tidy_only tidy_only: @echo Successfully make tidy_only. 
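[Editor's note, not part of the patch] In the auto-included-modules change above, each entry in PRODUCT_EXTRA_VNDK_VERSIONS now maps to the matching VNDK APEX module. A small worked expansion under a hypothetical setting:
# Illustrative value, not from the patch:
PRODUCT_EXTRA_VNDK_VERSIONS := 33 34
# $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver))
# expands to: com.android.vndk.v33 com.android.vndk.v34
# so those modules (alongside llndk.libraries.txt) join the auto-included list.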
@@ -1858,6 +1873,11 @@ ifndef INSTALLED_RECOVERYIMAGE_TARGET filter_out_files += $(PRODUCT_OUT)/recovery/% endif +# userdata.img +ifndef BUILDING_USERDATA_IMAGE +filter_out_files += $(PRODUCT_OUT)/data/% +endif + installed_files := $(sort $(filter-out $(filter_out_files),$(filter $(PRODUCT_OUT)/%,$(modules_to_install)))) else installed_files := $(apps_only_installed_files) @@ -1886,20 +1906,20 @@ $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-metadata.csv: $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \ $(eval _is_build_prop := $(call is-build-prop,$f)) \ $(eval _is_notice_file := $(call is-notice-file,$f)) \ - $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \ $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \ $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \ $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \ $(eval _is_kernel_modules_blocklist := $(if $(findstring $f,$(ALL_KERNEL_MODULES_BLOCKLIST)),Y)) \ $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \ - $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \ + $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file) $(product_linker_config_file)),Y)) \ $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \ $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \ $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \ - $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \ + $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \ $(eval _static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES))) \ $(eval _whole_static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES))) \ - $(eval _license_text := $(if $(filter $(_build_output_path),$(ALL_NON_MODULES)),$(ALL_NON_MODULES.$(_build_output_path).NOTICES))) \ + $(eval _license_text := $(if $(filter $(_build_output_path),$(ALL_NON_MODULES)),$(ALL_NON_MODULES.$(_build_output_path).NOTICES),\ + $(if $(_is_partition_compat_symlink),build/soong/licenses/LICENSE))) \ echo '$(_build_output_path),$(_module_path),$(_is_soong_module),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_static_libs),$(_whole_static_libs),$(_license_text)' >> $@; \ ) diff --git a/core/misc_prebuilt_internal.mk b/core/misc_prebuilt_internal.mk index a56220772c..b14b9ce032 100644 --- a/core/misc_prebuilt_internal.mk +++ b/core/misc_prebuilt_internal.mk @@ -25,7 +25,7 @@ endif include $(BUILD_SYSTEM)/base_rules.mk -ifneq ($(filter 
init%rc,$(notdir $(LOCAL_INSTALLED_MODULE)))$(filter %/etc/init,$(dir $(LOCAL_INSTALLED_MODULE))),) +ifneq ($(filter init%rc,$(notdir $(LOCAL_INSTALLED_MODULE)))$(filter %/etc/init/,$(dir $(LOCAL_INSTALLED_MODULE))),) $(eval $(call copy-init-script-file-checked,$(my_prebuilt_src_file),$(LOCAL_BUILT_MODULE))) else $(LOCAL_BUILT_MODULE) : $(my_prebuilt_src_file) diff --git a/core/os_licensing.mk b/core/os_licensing.mk index 1e1b7df7a9..97e55a7685 100644 --- a/core/os_licensing.mk +++ b/core/os_licensing.mk @@ -17,13 +17,17 @@ $(eval $(call xml-notice-rule,$(target_notice_file_xml_gz),"System image",$(syst $(eval $(call text-notice-rule,$(target_notice_file_txt),"System image",$(system_notice_file_message),$(SYSTEM_NOTICE_DEPS),$(SYSTEM_NOTICE_DEPS))) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(installed_notice_html_or_xml_gz): $(target_notice_file_xml_gz) $(copy-file-to-target) endif +endif $(call declare-1p-target,$(target_notice_file_xml_gz)) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(call declare-1p-target,$(installed_notice_html_or_xml_gz)) endif +endif .PHONY: vendorlicense vendorlicense: $(call corresponding-license-metadata, $(VENDOR_NOTICE_DEPS)) reportmissinglicenses @@ -40,12 +44,16 @@ $(eval $(call xml-notice-rule,$(target_vendor_notice_file_xml_gz),"Vendor image" "Notices for files contained in all filesystem images except system/system_ext/product/odm/vendor_dlkm/odm_dlkm in this directory:", \ $(VENDOR_NOTICE_DEPS),$(VENDOR_NOTICE_DEPS))) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz) $(copy-file-to-target) +endif $(call declare-1p-target,$(target_vendor_notice_file_xml_gz)) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(call declare-1p-target,$(installed_vendor_notice_xml_gz)) endif +endif .PHONY: odmlicense odmlicense: $(call corresponding-license-metadata, $(ODM_NOTICE_DEPS)) reportmissinglicenses @@ -59,12 +67,16 @@ $(eval $(call xml-notice-rule,$(target_odm_notice_file_xml_gz),"ODM filesystem i "Notices for files contained in the odm filesystem image in this directory:", \ $(ODM_NOTICE_DEPS),$(ODM_NOTICE_DEPS))) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(installed_odm_notice_xml_gz): $(target_odm_notice_file_xml_gz) $(copy-file-to-target) +endif $(call declare-1p-target,$(target_odm_notice_file_xml_gz)) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(call declare-1p-target,$(installed_odm_notice_xml_gz)) endif +endif .PHONY: oemlicense oemlicense: $(call corresponding-license-metadata, $(OEM_NOTICE_DEPS)) reportmissinglicenses @@ -81,12 +93,16 @@ $(eval $(call xml-notice-rule,$(target_product_notice_file_xml_gz),"Product imag "Notices for files contained in the product filesystem image in this directory:", \ $(PRODUCT_NOTICE_DEPS),$(PRODUCT_NOTICE_DEPS))) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(installed_product_notice_xml_gz): $(target_product_notice_file_xml_gz) $(copy-file-to-target) +endif $(call declare-1p-target,$(target_product_notice_file_xml_gz)) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(call declare-1p-target,$(installed_product_notice_xml_gz)) endif +endif .PHONY: systemextlicense systemextlicense: $(call corresponding-license-metadata, $(SYSTEM_EXT_NOTICE_DEPS)) reportmissinglicenses @@ -100,12 +116,16 @@ $(eval $(call xml-notice-rule,$(target_system_ext_notice_file_xml_gz),"System_ex "Notices for files contained in the system_ext filesystem image in this directory:", \ $(SYSTEM_EXT_NOTICE_DEPS),$(SYSTEM_EXT_NOTICE_DEPS))) +ifneq 
($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(installed_system_ext_notice_xml_gz): $(target_system_ext_notice_file_xml_gz) $(copy-file-to-target) +endif $(call declare-1p-target,$(target_system_ext_notice_file_xml_gz)) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(call declare-1p-target,$(installed_system_ext_notice_xml_gz)) endif +endif .PHONY: vendor_dlkmlicense vendor_dlkmlicense: $(call corresponding-license-metadata, $(VENDOR_DLKM_NOTICE_DEPS)) reportmissinglicenses @@ -119,12 +139,16 @@ $(eval $(call xml-notice-rule,$(target_vendor_dlkm_notice_file_xml_gz),"Vendor_d "Notices for files contained in the vendor_dlkm filesystem image in this directory:", \ $(VENDOR_DLKM_NOTICE_DEPS),$(VENDOR_DLKM_NOTICE_DEPS))) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(installed_vendor_dlkm_notice_xml_gz): $(target_vendor_dlkm_notice_file_xml_gz) $(copy-file-to-target) +endif $(call declare-1p-target,$(target_vendor_dlkm_notice_file_xml_gz)) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(call declare-1p-target,$(installed_vendor_dlkm_notice_xml_gz)) endif +endif .PHONY: odm_dlkmlicense odm_dlkmlicense: $(call corresponding-license-metadata, $(ODM_DLKM_NOTICE_DEPS)) reportmissinglicenses @@ -138,12 +162,16 @@ $(eval $(call xml-notice-rule,$(target_odm_dlkm_notice_file_xml_gz),"ODM_dlkm fi "Notices for files contained in the odm_dlkm filesystem image in this directory:", \ $(ODM_DLKM_NOTICE_DEPS),$(ODM_DLKM_NOTICE_DEPS))) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(installed_odm_dlkm_notice_xml_gz): $(target_odm_dlkm_notice_file_xml_gz) $(copy-file-to-target) +endif $(call declare-1p-target,$(target_odm_dlkm_notice_file_xml_gz)) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(call declare-1p-target,$(installed_odm_dlkm_notice_xml_gz)) endif +endif .PHONY: system_dlkmlicense system_dlkmlicense: $(call corresponding-license-metadata, $(SYSTEM_DLKM_NOTICE_DEPS)) reportmissinglicenses @@ -157,11 +185,15 @@ $(eval $(call xml-notice-rule,$(target_system_dlkm_notice_file_xml_gz),"System_d "Notices for files contained in the system_dlkm filesystem image in this directory:", \ $(SYSTEM_DLKM_NOTICE_DEPS),$(SYSTEM_DLKM_NOTICE_DEPS))) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(installed_system_dlkm_notice_xml_gz): $(target_system_dlkm_notice_file_xml_gz) $(copy-file-to-target) +endif $(call declare-1p-target,$(target_system_dlkm_notice_file_xml_gz)) +ifneq ($(PRODUCT_USE_SOONG_NOTICE_XML),true) $(call declare-1p-target,$(installed_sysetm_dlkm_notice_xml_gz)) endif +endif endif # not TARGET_BUILD_APPS diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk index a77956bdea..a371a00461 100644 --- a/core/packaging/flags.mk +++ b/core/packaging/flags.mk @@ -18,13 +18,13 @@ # # TODO: Should we do all of the images in $(IMAGES_TO_BUILD)? 
-_FLAG_PARTITIONS := product system system_ext vendor +_FLAG_PARTITIONS := product system vendor # ----------------------------------------------------------------- # Aconfig Flags -# Create a summary file of build flags for each partition +# Create a summary file of build flags for a single partition # $(1): built aconfig flags file (out) # $(2): installed aconfig flags file (out) # $(3): the partition (in) @@ -36,12 +36,15 @@ $(strip $(1)): $(ACONFIG) $(strip $(4)) mkdir -p $$(dir $$(PRIVATE_OUT)) $$(if $$(PRIVATE_IN), \ $$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \ - --filter container:$$(strip $(3)) $$(addprefix --cache ,$$(PRIVATE_IN)), \ + --filter container:$(strip $(3))+state:ENABLED \ + --filter container:$(strip $(3))+permission:READ_WRITE \ + $$(addprefix --cache ,$$(PRIVATE_IN)), \ echo -n > $$(PRIVATE_OUT) \ ) $(call copy-one-file, $(1), $(2)) endef + # Create a summary file of build flags for each partition # $(1): built aconfig flags file (out) # $(2): installed aconfig flags file (out) @@ -59,16 +62,22 @@ $(strip $(1)): $(ACONFIG) $(strip $(3)) $(call copy-one-file, $(1), $(2)) endef - $(foreach partition, $(_FLAG_PARTITIONS), \ $(eval aconfig_flag_summaries_protobuf.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.pb) \ $(eval $(call generate-partition-aconfig-flag-file, \ - $(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \ - $(aconfig_flag_summaries_protobuf.$(partition)), \ - $(partition), \ - $(sort $(foreach m,$(call register-names-for-partition, $(partition)), \ + $(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \ + $(aconfig_flag_summaries_protobuf.$(partition)), \ + $(partition), \ + $(sort \ + $(foreach m, $(call register-names-for-partition, $(partition)), \ $(ALL_MODULES.$(m).ACONFIG_FILES) \ - )), \ + ) \ + $(if $(filter system, $(partition)), \ + $(foreach m, $(call register-names-for-partition, system_ext), \ + $(ALL_MODULES.$(m).ACONFIG_FILES) \ + ) \ + ) \ + ) \ )) \ ) @@ -90,42 +99,61 @@ $(eval $(call generate-global-aconfig-flag-file, \ # $(1): built aconfig flags storage package map file (out) # $(2): built aconfig flags storage flag map file (out) # $(3): built aconfig flags storage flag val file (out) -# $(4): installed aconfig flags storage package map file (out) -# $(5): installed aconfig flags storage flag map file (out) -# $(6): installed aconfig flags storage flag value file (out) -# $(7): input aconfig files for the partition (in) -# $(8): partition name +# $(4): built aconfig flags storage flag info file (out) +# $(5): installed aconfig flags storage package map file (out) +# $(6): installed aconfig flags storage flag map file (out) +# $(7): installed aconfig flags storage flag value file (out) +# $(8): installed aconfig flags storage flag info file (out) +# $(9): input aconfig files for the partition (in) +# $(10): partition name define generate-partition-aconfig-storage-file $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1))) -$(eval $(strip $(1)): PRIVATE_IN := $(strip $(7))) -$(strip $(1)): $(ACONFIG) $(strip $(7)) +$(eval $(strip $(1)): PRIVATE_IN := $(strip $(9))) + +ifneq (,$(RELEASE_FINGERPRINT_ACONFIG_PACKAGES)) +STORAGE_FILE_VERSION := 2 +else +STORAGE_FILE_VERSION := 1 +endif + +$(strip $(1)): $(ACONFIG) $(strip $(9)) mkdir -p $$(dir $$(PRIVATE_OUT)) $$(if $$(PRIVATE_IN), \ - $$(ACONFIG) create-storage --container $(8) --file package_map --out $$(PRIVATE_OUT) \ + $$(ACONFIG) create-storage --container $(10) --file package_map --out $$(PRIVATE_OUT) --version $$(STORAGE_FILE_VERSION) \ $$(addprefix 
--cache ,$$(PRIVATE_IN)), \ ) touch $$(PRIVATE_OUT) $(eval $(strip $(2)): PRIVATE_OUT := $(strip $(2))) -$(eval $(strip $(2)): PRIVATE_IN := $(strip $(7))) -$(strip $(2)): $(ACONFIG) $(strip $(7)) +$(eval $(strip $(2)): PRIVATE_IN := $(strip $(9))) +$(strip $(2)): $(ACONFIG) $(strip $(9)) mkdir -p $$(dir $$(PRIVATE_OUT)) $$(if $$(PRIVATE_IN), \ - $$(ACONFIG) create-storage --container $(8) --file flag_map --out $$(PRIVATE_OUT) \ + $$(ACONFIG) create-storage --container $(10) --file flag_map --out $$(PRIVATE_OUT) --version $$(STORAGE_FILE_VERSION) \ $$(addprefix --cache ,$$(PRIVATE_IN)), \ ) touch $$(PRIVATE_OUT) $(eval $(strip $(3)): PRIVATE_OUT := $(strip $(3))) -$(eval $(strip $(3)): PRIVATE_IN := $(strip $(7))) -$(strip $(3)): $(ACONFIG) $(strip $(7)) +$(eval $(strip $(3)): PRIVATE_IN := $(strip $(9))) +$(strip $(3)): $(ACONFIG) $(strip $(9)) mkdir -p $$(dir $$(PRIVATE_OUT)) $$(if $$(PRIVATE_IN), \ - $$(ACONFIG) create-storage --container $(8) --file flag_val --out $$(PRIVATE_OUT) \ + $$(ACONFIG) create-storage --container $(10) --file flag_val --out $$(PRIVATE_OUT) --version $$(STORAGE_FILE_VERSION) \ $$(addprefix --cache ,$$(PRIVATE_IN)), \ ) touch $$(PRIVATE_OUT) -$(call copy-one-file, $(strip $(1)), $(4)) -$(call copy-one-file, $(strip $(2)), $(5)) -$(call copy-one-file, $(strip $(3)), $(6)) +$(eval $(strip $(4)): PRIVATE_OUT := $(strip $(4))) +$(eval $(strip $(4)): PRIVATE_IN := $(strip $(9))) +$(strip $(4)): $(ACONFIG) $(strip $(9)) + mkdir -p $$(dir $$(PRIVATE_OUT)) + $$(if $$(PRIVATE_IN), \ + $$(ACONFIG) create-storage --container $(10) --file flag_info --out $$(PRIVATE_OUT) --version $$(STORAGE_FILE_VERSION) \ + $$(addprefix --cache ,$$(PRIVATE_IN)), \ + ) + touch $$(PRIVATE_OUT) +$(call copy-one-file, $(strip $(1)), $(5)) +$(call copy-one-file, $(strip $(2)), $(6)) +$(call copy-one-file, $(strip $(3)), $(7)) +$(call copy-one-file, $(strip $(4)), $(8)) endef ifeq ($(RELEASE_CREATE_ACONFIG_STORAGE_FILE),true) @@ -133,13 +161,16 @@ $(foreach partition, $(_FLAG_PARTITIONS), \ $(eval aconfig_storage_package_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/package.map) \ $(eval aconfig_storage_flag_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.map) \ $(eval aconfig_storage_flag_val.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.val) \ + $(eval aconfig_storage_flag_info.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.info) \ $(eval $(call generate-partition-aconfig-storage-file, \ $(TARGET_OUT_FLAGS)/$(partition)/package.map, \ $(TARGET_OUT_FLAGS)/$(partition)/flag.map, \ $(TARGET_OUT_FLAGS)/$(partition)/flag.val, \ + $(TARGET_OUT_FLAGS)/$(partition)/flag.info, \ $(aconfig_storage_package_map.$(partition)), \ $(aconfig_storage_flag_map.$(partition)), \ $(aconfig_storage_flag_val.$(partition)), \ + $(aconfig_storage_flag_info.$(partition)), \ $(aconfig_flag_summaries_protobuf.$(partition)), \ $(partition), \ )) \ @@ -155,6 +186,7 @@ required_flags_files := \ $(aconfig_storage_package_map.$(partition)) \ $(aconfig_storage_flag_map.$(partition)) \ $(aconfig_storage_flag_val.$(partition)) \ + $(aconfig_storage_flag_info.$(partition)) \ )) ALL_DEFAULT_INSTALLED_MODULES += $(required_flags_files) @@ -174,5 +206,5 @@ $(foreach partition, $(_FLAG_PARTITIONS), \ $(eval aconfig_storage_package_map.$(partition):=) \ $(eval aconfig_storage_flag_map.$(partition):=) \ $(eval aconfig_storage_flag_val.$(partition):=) \ + $(eval aconfig_storage_flag_info.$(partition):=) \ ) - diff --git a/core/product.mk b/core/product.mk index 
8d86d92684..1fbc3eef51 100644 --- a/core/product.mk +++ b/core/product.mk @@ -284,6 +284,9 @@ _product_list_vars += PRODUCT_EXTRA_VNDK_VERSIONS # Whether APEX should be compressed or not _product_single_value_vars += PRODUCT_COMPRESSED_APEX +# Default fs type for APEX payload image (apex_payload.img) +_product_single_value_vars += PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE + # VNDK version of product partition. It can be 'current' if the product # partitions uses PLATFORM_VNDK_VERSION. _product_single_value_vars += PRODUCT_PRODUCT_VNDK_VERSION @@ -366,6 +369,8 @@ _product_single_value_vars += PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE _product_single_value_vars += PRODUCT_BUILD_VBMETA_IMAGE _product_single_value_vars += PRODUCT_BUILD_SUPER_EMPTY_IMAGE _product_single_value_vars += PRODUCT_BUILD_PVMFW_IMAGE +_product_single_value_vars += PRODUCT_BUILD_DESKTOP_RECOVERY_IMAGE +_product_single_value_vars += PRODUCT_BUILD_DESKTOP_UPDATE_IMAGE # List of boot jars delivered via updatable APEXes, following the same format as # PRODUCT_BOOT_JARS. @@ -390,20 +395,6 @@ _product_single_value_vars += PRODUCT_OTA_FORCE_NON_AB_PACKAGE # If set, Java module in product partition cannot use hidden APIs. _product_single_value_vars += PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE -# If set, only java_sdk_library can be used at inter-partition dependency. -# Note: Build error if BOARD_VNDK_VERSION is not set while -# PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is true, because -# PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY has no meaning if -# BOARD_VNDK_VERSION is not set. -# Note: When PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE is not set, there are -# no restrictions at dependency between system and product partition. -_product_single_value_vars += PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY - -# Allowlist for PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY option. -# Listed modules are allowed at inter-partition dependency even if it isn't -# a java_sdk_library module. -_product_list_vars += PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST - # Install a copy of the debug policy to the system_ext partition, and allow # init-second-stage to load debug policy from system_ext. # This option is only meant to be set by compliance GSI targets. @@ -436,8 +427,9 @@ _product_single_value_vars += PRODUCT_MEMCG_V2_FORCE_ENABLED # If true, the cgroup v2 hierarchy will be split into apps/system subtrees _product_single_value_vars += PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED -# List of .json files to be merged/compiled into vendor/etc/linker.config.pb +# List of .json files to be merged/compiled into vendor/etc/linker.config.pb and product/etc/linker.config.pb _product_list_vars += PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS +_product_list_vars += PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS # Whether to use userfaultfd GC. # Possible values are: @@ -493,9 +485,29 @@ _product_single_value_vars += PRODUCT_16K_DEVELOPER_OPTION # by this flag. _product_single_value_vars += PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG +# If set, the default value of the versionName of apps will include the build number. +_product_single_value_vars += PRODUCT_BUILD_APPS_WITH_BUILD_NUMBER + # If set, build would generate system image from Soong-defined module. _product_single_value_vars += PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE +# List of stub libraries specific to the product that are already present in the system image and +# should be included in the system_linker_config. 
+_product_list_vars += PRODUCT_EXTRA_STUB_LIBRARIES + +# If set to true, all Android.mk files will be ignored. +_product_single_value_vars += PRODUCT_IGNORE_ALL_ANDROIDMK +# When PRODUCT_IGNORE_ALL_ANDROIDMK is set to true, this variable will be used to allow some Android.mk files. +_product_list_vars += PRODUCT_ALLOWED_ANDROIDMK_FILES +# When PRODUCT_IGNORE_ALL_ANDROIDMK is set to true, path of file that contains a list of allowed Android.mk files +_product_single_value_vars += PRODUCT_ANDROIDMK_ALLOWLIST_FILE +# Setting PRODUCT_SOONG_ONLY will cause the build to default to --soong-only mode, and the main +# kati invocation will not be run. +_product_single_value_vars += PRODUCT_SOONG_ONLY + +# If set to true, use NOTICE.xml.gz generated by soong +_product_single_value_vars += PRODUCT_USE_SOONG_NOTICE_XML + .KATI_READONLY := _product_single_value_vars _product_list_vars _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars) diff --git a/core/product_config.mk b/core/product_config.mk index 738d4cff58..019d711403 100644 --- a/core/product_config.mk +++ b/core/product_config.mk @@ -424,10 +424,12 @@ ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE endif endif -$(foreach pair,$(PRODUCT_APEX_BOOT_JARS), \ - $(eval jar := $(call word-colon,2,$(pair))) \ - $(if $(findstring $(jar), $(PRODUCT_BOOT_JARS)), \ - $(error A jar in PRODUCT_APEX_BOOT_JARS must not be in PRODUCT_BOOT_JARS, but $(jar) is))) +$(foreach apexpair,$(PRODUCT_APEX_BOOT_JARS), \ + $(foreach platformpair,$(PRODUCT_BOOT_JARS), \ + $(eval apexjar := $(call word-colon,2,$(apexpair))) \ + $(eval platformjar := $(call word-colon,2,$(platformpair))) \ + $(if $(filter $(apexjar), $(platformjar)), \ + $(error A jar in PRODUCT_APEX_BOOT_JARS must not be in PRODUCT_BOOT_JARS, but $(apexjar) is)))) ENFORCE_SYSTEM_CERTIFICATE := $(PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT) ENFORCE_SYSTEM_CERTIFICATE_ALLOW_LIST := $(PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST) @@ -466,10 +468,17 @@ $(foreach c,$(PRODUCT_SANITIZER_MODULE_CONFIGS),\ $(eval SANITIZER.$(TARGET_PRODUCT).$(m).CONFIG := $(cf)))) _psmc_modules := -# Reset ADB keys for non-debuggable builds -ifeq (,$(filter eng userdebug,$(TARGET_BUILD_VARIANT))) +# Reset ADB keys. If RELEASE_BUILD_USE_VARIANT_FLAGS is set look for +# the value of a dedicated flag. Otherwise check if build variant is +# non-debuggable. +ifneq (,$(RELEASE_BUILD_USE_VARIANT_FLAGS)) +ifneq (,$(RELEASE_BUILD_PURGE_PRODUCT_ADB_KEYS)) PRODUCT_ADB_KEYS := endif +else ifeq (,$(filter eng userdebug,$(TARGET_BUILD_VARIANT))) + PRODUCT_ADB_KEYS := +endif + ifneq ($(filter-out 0 1,$(words $(PRODUCT_ADB_KEYS))),) $(error Only one file may be in PRODUCT_ADB_KEYS: $(PRODUCT_ADB_KEYS)) endif @@ -532,6 +541,17 @@ ifdef OVERRIDE_PRODUCT_COMPRESSED_APEX PRODUCT_COMPRESSED_APEX := $(OVERRIDE_PRODUCT_COMPRESSED_APEX) endif +ifdef OVERRIDE_PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE + PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE := $(OVERRIDE_PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE) +else ifeq ($(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE),) + # Use ext4 as a default payload fs type + PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE := ext4 +endif +ifeq ($(filter ext4 erofs,$(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE)),) + $(error PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE should be either erofs or ext4,\ + not $(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE).) +endif + $(KATI_obsolete_var OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS \ ,Use PRODUCT_EXTRA_VNDK_VERSIONS instead) @@ -602,7 +622,12 @@ else # Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level. 
# In this case, the VSR API level is the minimum of the PRODUCT_SHIPPING_API_LEVEL # and RELEASE_BOARD_API_LEVEL - VSR_VENDOR_API_LEVEL := $(call math_min,$(VSR_VENDOR_API_LEVEL),$(RELEASE_BOARD_API_LEVEL)) + board_api_level := $(RELEASE_BOARD_API_LEVEL) + ifdef BOARD_API_LEVEL_PROP_OVERRIDE + board_api_level := $(BOARD_API_LEVEL_PROP_OVERRIDE) + endif + VSR_VENDOR_API_LEVEL := $(call math_min,$(VSR_VENDOR_API_LEVEL),$(board_api_level)) + board_api_level := endif endif .KATI_READONLY := VSR_VENDOR_API_LEVEL @@ -674,4 +699,12 @@ $(foreach image, \ product-build-image-config := +ifdef PRODUCT_SOONG_ONLY + ifneq ($(PRODUCT_SOONG_ONLY),true) + ifneq ($(PRODUCT_SOONG_ONLY),false) + $(error PRODUCT_SOONG_ONLY can only be true, false or unset) + endif + endif +endif + $(call readonly-product-vars) diff --git a/core/product_config.rbc b/core/product_config.rbc index 59e2c95903..20344f4f87 100644 --- a/core/product_config.rbc +++ b/core/product_config.rbc @@ -382,6 +382,11 @@ def _soong_config_set(g, nsname, var, value): _soong_config_namespace(g, nsname) g[_soong_config_namespaces_key][nsname][var]=_mkstrip(value) +def _soong_config_set_bool(g, nsname, var, value): + """Assigns the value to the variable in the namespace, and marks it as a boolean.""" + _soong_config_set(g, nsname, var, _filter("true", value)) + g["SOONG_CONFIG_TYPE_%s_%s" % (nsname, var)] = "bool" + def _soong_config_append(g, nsname, var, value): """Appends to the value of the variable in the namespace.""" _soong_config_namespace(g, nsname) @@ -861,6 +866,7 @@ rblf = struct( soong_config_namespace = _soong_config_namespace, soong_config_append = _soong_config_append, soong_config_set = _soong_config_set, + soong_config_set_bool = _soong_config_set_bool, soong_config_get = _soong_config_get, abspath = _abspath, add_product_dex_preopt_module_config = _add_product_dex_preopt_module_config, diff --git a/core/proguard.flags b/core/proguard.flags index aa406b983e..dc32e15e4a 100644 --- a/core/proguard.flags +++ b/core/proguard.flags @@ -1,14 +1,3 @@ -# We have moved -dontobfuscate and -dontoptimize to the makefiles. -# dex does not like code run through proguard optimize and preverify steps. -# -dontoptimize --dontpreverify - -# Don't obfuscate. We only need dead code striping. -# -dontobfuscate - -# Add this flag in your package's own configuration if it's needed. -#-flattenpackagehierarchy - # Keep classes and members with the platform-defined @VisibleForTesting annotation. -keep @com.android.internal.annotations.VisibleForTesting class * -keepclassmembers class * { @@ -38,6 +27,17 @@ @com.android.internal.annotations.KeepForWeakReference <fields>; } +# Needed to ensure callback field references are kept in their respective +# owning classes when the downstream callback registrars only store weak refs. 
+-if @com.android.internal.annotations.WeaklyReferencedCallback class * +-keepclassmembers,allowaccessmodification class * { + <1> *; +} +-if class * extends @com.android.internal.annotations.WeaklyReferencedCallback ** +-keepclassmembers,allowaccessmodification class * { + <1> *; +} + # Understand the common @Keep annotation from various Android packages: # * android.support.annotation # * androidx.annotation diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags index f6b34b8217..a9416d5df0 100644 --- a/core/proguard_basic_keeps.flags +++ b/core/proguard_basic_keeps.flags @@ -1,7 +1,3 @@ -# Some classes in the libraries extend package private classes to chare common functionality -# that isn't explicitly part of the API --dontskipnonpubliclibraryclasses -dontskipnonpubliclibraryclassmembers - # Preserve line number information for debugging stack traces. -keepattributes SourceFile,LineNumberTable diff --git a/core/ravenwood_test_config_template.xml b/core/ravenwood_test_config_template.xml index 2f21baedf7..9e9dd762ff 100644 --- a/core/ravenwood_test_config_template.xml +++ b/core/ravenwood_test_config_template.xml @@ -22,6 +22,7 @@ <option name="use-ravenwood-resources" value="true" /> <option name="exclude-paths" value="java" /> <option name="null-device" value="true" /> + <option name="do-not-swallow-runner-errors" value="true" /> {EXTRA_CONFIGS} diff --git a/core/release_config.mk b/core/release_config.mk index fe2170ede4..68e115f0c4 100644 --- a/core/release_config.mk +++ b/core/release_config.mk @@ -146,6 +146,9 @@ ifneq (,$(_use_protobuf)) # This will also set ALL_RELEASE_CONFIGS_FOR_PRODUCT and _used_files for us. $(eval include $(_flags_file)) $(KATI_extra_file_deps $(OUT_DIR)/release-config $(protobuf_map_files) $(_flags_file)) + ifneq (,$(_disallow_lunch_use)) + $(error Release config ${TARGET_RELEASE} is disallowed for build. Please use one of: $(ALL_RELEASE_CONFIGS_FOR_PRODUCT)) + endif else # This is the first pass of product config. 
$(eval include $(_flags_varmk)) diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml index b1d0c2f4fa..509ac7bfba 100644 --- a/core/robolectric_test_config_template.xml +++ b/core/robolectric_test_config_template.xml @@ -18,13 +18,22 @@ <option name="test-suite-tag" value="robolectric" /> <option name="test-suite-tag" value="robolectric-tests" /> - <option name="java-folder" value="prebuilts/jdk/jdk21/linux-x86/" /> <option name="exclude-paths" value="java" /> <option name="use-robolectric-resources" value="true" /> + <!-- attempt to always show Tradefed errors --> + <option name="do-not-swallow-runner-errors" value="true" /> + + <!-- prevent Tradefed from hanging indefinitely in CI --> + <option name="socket-timeout" value="600000" /> + <option name="test-case-timeout" value="2m" /> + {EXTRA_CONFIGS} <test class="com.android.tradefed.testtype.IsolatedHostTest" > + + {EXTRA_TEST_RUNNER_CONFIGS} + <option name="jar" value="{MODULE}.jar" /> <option name="java-flags" value="--add-modules=jdk.compiler"/> <option name="java-flags" value="--add-opens=java.base/java.lang=ALL-UNNAMED"/> @@ -33,5 +42,15 @@ <option name="java-flags" value="--add-opens=java.base/jdk.internal.util.random=ALL-UNNAMED"/> <!-- b/251387255 --> <option name="java-flags" value="--add-opens=java.base/java.io=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/java.net=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/java.nio=ALL-UNNAMED"/> <!-- required for ShadowVMRuntime --> + <option name="java-flags" value="--add-opens=java.base/java.security=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/java.text=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/java.util=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/jdk.internal.access=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.desktop/java.awt.font=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED"/> </test> </configuration> diff --git a/core/soong_android_app_set.mk b/core/soong_android_app_set.mk index ec3d8c85cb..d97980d2ba 100644 --- a/core/soong_android_app_set.mk +++ b/core/soong_android_app_set.mk @@ -9,10 +9,6 @@ endif LOCAL_BUILT_MODULE_STEM := package.apk LOCAL_INSTALLED_MODULE_STEM := $(notdir $(LOCAL_PREBUILT_MODULE_FILE)) -# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE -# to avoid checkbuilds making an extra copy of every module. -LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE) - ####################################### include $(BUILD_SYSTEM)/base_rules.mk ####################################### diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk index 3aa244c77f..ab9227f676 100644 --- a/core/soong_app_prebuilt.mk +++ b/core/soong_app_prebuilt.mk @@ -29,16 +29,6 @@ full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.ja full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar -# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE -# to avoid checkbuilds making an extra copy of every module. 
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_CLASSES_JAR) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_HEADER_JAR) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_FULL_MANIFEST_FILE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEXPREOPT_CONFIG) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEX_JAR) - ####################################### include $(BUILD_SYSTEM)/base_rules.mk ####################################### @@ -234,30 +224,6 @@ my_common := COMMON include $(BUILD_SYSTEM)/link_type.mk endif # !LOCAL_IS_HOST_MODULE -ifeq (,$(filter tests,$(LOCAL_MODULE_TAGS))) - ifdef LOCAL_SOONG_DEVICE_RRO_DIRS - $(call append_enforce_rro_sources, \ - $(my_register_name), \ - false, \ - $(LOCAL_FULL_MANIFEST_FILE), \ - $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \ - $(LOCAL_SOONG_DEVICE_RRO_DIRS), \ - vendor \ - ) - endif - - ifdef LOCAL_SOONG_PRODUCT_RRO_DIRS - $(call append_enforce_rro_sources, \ - $(my_register_name), \ - false, \ - $(LOCAL_FULL_MANIFEST_FILE), \ - $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \ - $(LOCAL_SOONG_PRODUCT_RRO_DIRS), \ - product \ - ) - endif -endif - ifdef LOCAL_PREBUILT_COVERAGE_ARCHIVE my_coverage_dir := $(TARGET_OUT_COVERAGE)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_module_path)) my_coverage_copy_pairs := $(foreach f,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(f):$(my_coverage_dir)/$(notdir $(f))) diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk index a1c64786ee..da608322f2 100644 --- a/core/soong_cc_rust_prebuilt.mk +++ b/core/soong_cc_rust_prebuilt.mk @@ -38,10 +38,6 @@ ifndef LOCAL_UNINSTALLABLE_MODULE endif endif -# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE -# to avoid checkbuilds making an extra copy of every module. -LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE) - my_check_same_vndk_variants := same_vndk_variants_stamp := ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true) @@ -61,7 +57,7 @@ ifeq ($(my_check_same_vndk_variants),true) # Note that because `checkbuild` doesn't check LOCAL_BUILT_MODULE for soong-built modules adding # the timestamp to LOCAL_BUILT_MODULE isn't enough. It is skipped when the vendor variant # isn't used at all and it may break in the downstream trees. 
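
The soong_cc_rust_prebuilt.mk hunk just below switches LOCAL_ADDITIONAL_CHECKED_MODULE from ":=" to "+=", presumably so the VNDK-variant stamp is appended to whatever was already collected rather than replacing it. For reference, the GNU Make behaviour being relied on (the variable name is reused purely for illustration):

    # save as append-demo.mk and run: make -f append-demo.mk
    # ":=" overwrites the previous value...
    LOCAL_ADDITIONAL_CHECKED_MODULE := out/a.stamp
    LOCAL_ADDITIONAL_CHECKED_MODULE := out/b.stamp
    $(info with := : $(LOCAL_ADDITIONAL_CHECKED_MODULE))

    # ...while "+=" keeps both entries.
    LOCAL_ADDITIONAL_CHECKED_MODULE := out/a.stamp
    LOCAL_ADDITIONAL_CHECKED_MODULE += out/b.stamp
    $(info with += : $(LOCAL_ADDITIONAL_CHECKED_MODULE))

    all: ; @:
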
- LOCAL_ADDITIONAL_CHECKED_MODULE := $(same_vndk_variants_stamp) + LOCAL_ADDITIONAL_CHECKED_MODULE += $(same_vndk_variants_stamp) endif ####################################### diff --git a/core/soong_config.mk b/core/soong_config.mk index 5fca203148..dcd654dd2b 100644 --- a/core/soong_config.mk +++ b/core/soong_config.mk @@ -1,5 +1,5 @@ -SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk -SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk +SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk +SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk include $(BUILD_SYSTEM)/art_config.mk include $(BUILD_SYSTEM)/dex_preopt_config.mk @@ -26,7 +26,7 @@ ifeq ($(WRITE_SOONG_VARIABLES),true) $(shell mkdir -p $(dir $(SOONG_VARIABLES))) $(call json_start) -$(call add_json_str, Make_suffix, -$(TARGET_PRODUCT)) +$(call add_json_str, Make_suffix, -$(TARGET_PRODUCT)$(COVERAGE_SUFFIX)) $(call add_json_str, BuildId, $(BUILD_ID)) $(call add_json_str, BuildFingerprintFile, build_fingerprint.txt) @@ -150,6 +150,7 @@ $(call add_json_bool, ArtUseReadBarrier, $(call invert_bool,$(fi $(call add_json_str, BtConfigIncludeDir, $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR)) $(call add_json_list, DeviceKernelHeaders, $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS)) $(call add_json_str, VendorApiLevel, $(BOARD_API_LEVEL)) +$(call add_json_str, VendorApiLevelPropOverride, $(BOARD_API_LEVEL_PROP_OVERRIDE)) $(call add_json_list, ExtraVndkVersions, $(PRODUCT_EXTRA_VNDK_VERSIONS)) $(call add_json_list, DeviceSystemSdkVersions, $(BOARD_SYSTEMSDK_VERSIONS)) $(call add_json_list, Platform_systemsdk_versions, $(PLATFORM_SYSTEMSDK_VERSIONS)) @@ -182,10 +183,21 @@ $(call add_json_bool, Enforce_vintf_manifest, $(filter true,$(PRODUCT $(call add_json_bool, Uml, $(filter true,$(TARGET_USER_MODE_LINUX))) $(call add_json_str, VendorPath, $(TARGET_COPY_OUT_VENDOR)) +$(call add_json_str, VendorDlkmPath, $(TARGET_COPY_OUT_VENDOR_DLKM)) +$(call add_json_bool, BuildingVendorImage, $(BUILDING_VENDOR_IMAGE)) $(call add_json_str, OdmPath, $(TARGET_COPY_OUT_ODM)) +$(call add_json_bool, BuildingOdmImage, $(BUILDING_ODM_IMAGE)) +$(call add_json_str, OdmDlkmPath, $(TARGET_COPY_OUT_ODM_DLKM)) $(call add_json_str, ProductPath, $(TARGET_COPY_OUT_PRODUCT)) +$(call add_json_bool, BuildingProductImage, $(BUILDING_PRODUCT_IMAGE)) $(call add_json_str, SystemExtPath, $(TARGET_COPY_OUT_SYSTEM_EXT)) +$(call add_json_str, SystemDlkmPath, $(TARGET_COPY_OUT_SYSTEM_DLKM)) +$(call add_json_str, OemPath, $(TARGET_COPY_OUT_OEM)) $(call add_json_bool, MinimizeJavaDebugInfo, $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO))) +$(call add_json_str, RecoveryPath, $(TARGET_COPY_OUT_RECOVERY)) +$(call add_json_bool, BuildingRecoveryImage, $(BUILDING_RECOVERY_IMAGE)) +$(call add_json_str, UserdataPath, $(TARGET_COPY_OUT_DATA)) +$(call add_json_bool, BuildingUserdataImage, $(BUILDING_USERDATA_IMAGE)) $(call add_json_bool, UseGoma, $(filter-out false,$(USE_GOMA))) $(call add_json_bool, UseRBE, $(filter-out false,$(USE_RBE))) @@ -206,6 +218,7 @@ $(call add_json_list, BoardSepolicyM4Defs, $(BOARD_SEPOLICY_M4DEFS $(call add_json_str, BoardSepolicyVers, $(BOARD_SEPOLICY_VERS)) $(call add_json_str, SystemExtSepolicyPrebuiltApiDir, $(BOARD_SYSTEM_EXT_PREBUILT_DIR)) $(call add_json_str, ProductSepolicyPrebuiltApiDir, $(BOARD_PRODUCT_PREBUILT_DIR)) +$(call add_json_str, BoardPlatform, $(TARGET_BOARD_PLATFORM)) $(call add_json_str, 
PlatformSepolicyVersion, $(PLATFORM_SEPOLICY_VERSION)) $(call add_json_list, PlatformSepolicyCompatVersions, $(PLATFORM_SEPOLICY_COMPAT_VERSIONS)) @@ -235,6 +248,14 @@ $(call add_json_list, ProductPrivateSepolicyDirs, $(PRODUCT_PRIVATE_SEPOL $(call add_json_list, TargetFSConfigGen, $(TARGET_FS_CONFIG_GEN)) +# Although USE_SOONG_DEFINED_SYSTEM_IMAGE determines whether to use the system image specified by +# PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE, PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE is still used to compare +# installed files between make and soong, regardless of the USE_SOONG_DEFINED_SYSTEM_IMAGE setting. +$(call add_json_bool, UseSoongSystemImage, $(filter true,$(USE_SOONG_DEFINED_SYSTEM_IMAGE))) +$(call add_json_str, ProductSoongDefinedSystemImage, $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)) + +$(call add_json_bool, UseSoongNoticeXML, $(filter true,$(PRODUCT_USE_SOONG_NOTICE_XML))) + $(call add_json_map, VendorVars) $(foreach namespace,$(sort $(SOONG_CONFIG_NAMESPACES)),\ $(call add_json_map, $(namespace))\ @@ -258,14 +279,8 @@ $(call end_json_map) $(call add_json_bool, EnforceProductPartitionInterface, $(filter true,$(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE))) $(call add_json_str, DeviceCurrentApiLevelForVendorModules, $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)) -$(call add_json_bool, EnforceInterPartitionJavaSdkLibrary, $(filter true,$(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY))) -$(call add_json_list, InterPartitionJavaLibraryAllowList, $(PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST)) - $(call add_json_bool, CompressedApex, $(filter true,$(PRODUCT_COMPRESSED_APEX))) - -ifndef APEX_BUILD_FOR_PRE_S_DEVICES -$(call add_json_bool, TrimmedApex, $(filter true,$(PRODUCT_TRIMMED_APEX))) -endif +$(call add_json_str, DefaultApexPayloadType, $(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE)) $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))) @@ -285,7 +300,6 @@ $(call add_json_bool, BuildBrokenClangCFlags, $(filter true,$(BUILD $(call add_json_bool, GenruleSandboxing, $(if $(GENRULE_SANDBOXING),$(filter true,$(GENRULE_SANDBOXING)),$(if $(filter true,$(BUILD_BROKEN_GENRULE_SANDBOXING)),,true))) $(call add_json_bool, BuildBrokenEnforceSyspropOwner, $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER))) $(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW))) -$(call add_json_bool, BuildBrokenUsesSoongPython2Modules, $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES))) $(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE))) $(call add_json_bool, BuildBrokenIncorrectPartitionImages, $(filter true,$(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES))) $(call add_json_list, BuildBrokenInputDirModules, $(BUILD_BROKEN_INPUT_DIR_MODULES)) @@ -313,6 +327,8 @@ $(call add_json_list, AfdoProfiles, $(ALL_AFDO_PROFILES)) $(call add_json_str, ProductManufacturer, $(PRODUCT_MANUFACTURER)) $(call add_json_str, ProductBrand, $(PRODUCT_BRAND)) +$(call add_json_str, ProductDevice, $(PRODUCT_DEVICE)) +$(call add_json_str, ProductModel, $(PRODUCT_MODEL)) $(call add_json_str, ReleaseVersion, $(_RELEASE_VERSION)) $(call add_json_list, ReleaseAconfigValueSets, $(RELEASE_ACONFIG_VALUE_SETS)) @@ -342,6 +358,9 @@ $(call add_json_list, OemProperties, $(PRODUCT_OEM_PROPERTIES)) $(call add_json_list, SystemPropFiles, $(TARGET_SYSTEM_PROP)) $(call add_json_list, SystemExtPropFiles, $(TARGET_SYSTEM_EXT_PROP)) +$(call add_json_list, ProductPropFiles, $(TARGET_PRODUCT_PROP)) +$(call 
add_json_list, OdmPropFiles, $(TARGET_ODM_PROP)) +$(call add_json_list, VendorPropFiles, $(TARGET_VENDOR_PROP)) # Do not set ArtTargetIncludeDebugBuild into any value if PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD is not set, # to have the same behavior from runtime_libart.mk. @@ -353,6 +372,220 @@ _config_enable_uffd_gc := \ $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default) $(call add_json_str, EnableUffdGc, $(_config_enable_uffd_gc)) _config_enable_uffd_gc := +$(call add_json_str, BoardKernelVersion, $(BOARD_KERNEL_VERSION)) + +$(call add_json_list, DeviceFrameworkCompatibilityMatrixFile, $(DEVICE_FRAMEWORK_COMPATIBILITY_MATRIX_FILE)) +$(call add_json_list, DeviceProductCompatibilityMatrixFile, $(DEVICE_PRODUCT_COMPATIBILITY_MATRIX_FILE)) +$(call add_json_list, BoardAvbSystemAddHashtreeFooterArgs, $(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS)) +$(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE))) + +$(call add_json_str, AdbKeys, $(PRODUCT_ADB_KEYS)) + +$(call add_json_map, PartitionVarsForSoongMigrationOnlyDoNotUse) + $(call add_json_str, ProductDirectory, $(dir $(INTERNAL_PRODUCT))) + + $(call add_json_map,PartitionQualifiedVariables) + $(foreach image_type,INIT_BOOT BOOT VENDOR_BOOT SYSTEM VENDOR CACHE USERDATA PRODUCT SYSTEM_EXT OEM ODM VENDOR_DLKM ODM_DLKM SYSTEM_DLKM VBMETA VBMETA_SYSTEM VBMETA_SYSTEM_DLKM VBMETA_VENDOR_DLKM, \ + $(call add_json_map,$(call to-lower,$(image_type))) \ + $(call add_json_bool, BuildingImage, $(filter true,$(BUILDING_$(image_type)_IMAGE))) \ + $(call add_json_bool, PrebuiltImage, $(filter true,$(BOARD_PREBUILT_$(image_type)IMAGE))) \ + $(call add_json_str, BoardErofsCompressor, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESSOR)) \ + $(call add_json_str, BoardErofsCompressHints, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESS_HINTS)) \ + $(call add_json_str, BoardErofsPclusterSize, $(BOARD_$(image_type)IMAGE_EROFS_PCLUSTER_SIZE)) \ + $(call add_json_str, BoardExtfsInodeCount, $(BOARD_$(image_type)IMAGE_EXTFS_INODE_COUNT)) \ + $(call add_json_str, BoardExtfsRsvPct, $(BOARD_$(image_type)IMAGE_EXTFS_RSV_PCT)) \ + $(call add_json_str, BoardF2fsSloadCompressFlags, $(BOARD_$(image_type)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS)) \ + $(call add_json_str, BoardFileSystemCompress, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_COMPRESS)) \ + $(call add_json_str, BoardFileSystemType, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_TYPE)) \ + $(call add_json_str, BoardJournalSize, $(BOARD_$(image_type)IMAGE_JOURNAL_SIZE)) \ + $(call add_json_str, BoardPartitionReservedSize, $(BOARD_$(image_type)IMAGE_PARTITION_RESERVED_SIZE)) \ + $(call add_json_str, BoardPartitionSize, $(BOARD_$(image_type)IMAGE_PARTITION_SIZE)) \ + $(call add_json_str, BoardSquashfsBlockSize, $(BOARD_$(image_type)IMAGE_SQUASHFS_BLOCK_SIZE)) \ + $(call add_json_str, BoardSquashfsCompressor, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR)) \ + $(call add_json_str, BoardSquashfsCompressorOpt, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR_OPT)) \ + $(call add_json_str, BoardSquashfsDisable4kAlign, $(BOARD_$(image_type)IMAGE_SQUASHFS_DISABLE_4K_ALIGN)) \ + $(call add_json_str, BoardAvbKeyPath, $(BOARD_AVB_$(image_type)_KEY_PATH)) \ + $(call add_json_str, BoardAvbAlgorithm, $(BOARD_AVB_$(image_type)_ALGORITHM)) \ + $(call add_json_str, BoardAvbRollbackIndex, $(BOARD_AVB_$(image_type)_ROLLBACK_INDEX)) \ + $(call add_json_str, BoardAvbRollbackIndexLocation, $(BOARD_AVB_$(image_type)_ROLLBACK_INDEX_LOCATION)) \ + $(call add_json_str, BoardAvbAddHashtreeFooterArgs, 
$(BOARD_AVB_$(image_type)_ADD_HASHTREE_FOOTER_ARGS)) \ + $(call add_json_str, ProductBaseFsPath, $(PRODUCT_$(image_type)_BASE_FS_PATH)) \ + $(call add_json_str, ProductHeadroom, $(PRODUCT_$(image_type)_HEADROOM)) \ + $(call add_json_str, ProductVerityPartition, $(PRODUCT_$(image_type)_VERITY_PARTITION)) \ + $(call end_json_map) \ + ) + $(call end_json_map) + + $(call add_json_bool, TargetUserimagesUseExt2, $(filter true,$(TARGET_USERIMAGES_USE_EXT2))) + $(call add_json_bool, TargetUserimagesUseExt3, $(filter true,$(TARGET_USERIMAGES_USE_EXT3))) + $(call add_json_bool, TargetUserimagesUseExt4, $(filter true,$(TARGET_USERIMAGES_USE_EXT4))) + + $(call add_json_bool, TargetUserimagesSparseExtDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))) + $(call add_json_bool, TargetUserimagesSparseErofsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED))) + $(call add_json_bool, TargetUserimagesSparseSquashfsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))) + $(call add_json_bool, TargetUserimagesSparseF2fsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED))) + + $(call add_json_str, BoardErofsCompressor, $(BOARD_EROFS_COMPRESSOR)) + $(call add_json_str, BoardErofsCompressorHints, $(BOARD_EROFS_COMPRESS_HINTS)) + $(call add_json_str, BoardErofsPclusterSize, $(BOARD_EROFS_PCLUSTER_SIZE)) + $(call add_json_str, BoardErofsShareDupBlocks, $(BOARD_EROFS_SHARE_DUP_BLOCKS)) + $(call add_json_str, BoardErofsUseLegacyCompression, $(BOARD_EROFS_USE_LEGACY_COMPRESSION)) + $(call add_json_str, BoardExt4ShareDupBlocks, $(BOARD_EXT4_SHARE_DUP_BLOCKS)) + $(call add_json_str, BoardFlashLogicalBlockSize, $(BOARD_FLASH_LOGICAL_BLOCK_SIZE)) + $(call add_json_str, BoardFlashEraseBlockSize, $(BOARD_FLASH_ERASE_BLOCK_SIZE)) + $(call add_json_bool, BuildingVbmetaImage, $(BUILDING_VBMETA_IMAGE)) + + # boot image stuff + $(call add_json_bool, BuildingRamdiskImage, $(filter true,$(BUILDING_RAMDISK_IMAGE))) + $(call add_json_bool, ProductBuildBootImage, $(filter true,$(PRODUCT_BUILD_BOOT_IMAGE))) + $(call add_json_str, ProductBuildVendorBootImage, $(PRODUCT_BUILD_VENDOR_BOOT_IMAGE)) + $(call add_json_bool, ProductBuildInitBootImage, $(filter true,$(PRODUCT_BUILD_INIT_BOOT_IMAGE))) + $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))) + $(call add_json_str, BoardPrebuiltBootimage, $(BOARD_PREBUILT_BOOT_IMAGE)) + $(call add_json_str, BoardPrebuiltInitBootimage, $(BOARD_PREBUILT_INIT_BOOT_IMAGE)) + $(call add_json_str, BoardBootimagePartitionSize, $(BOARD_BOOTIMAGE_PARTITION_SIZE)) + $(call add_json_str, BoardVendorBootimagePartitionSize, $(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE)) + $(call add_json_str, BoardInitBootimagePartitionSize, $(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE)) + $(call add_json_str, BoardBootHeaderVersion, $(BOARD_BOOT_HEADER_VERSION)) + $(call add_json_str, TargetKernelPath, $(TARGET_KERNEL_PATH)) + $(call add_json_bool, BoardUsesGenericKernelImage, $(BOARD_USES_GENERIC_KERNEL_IMAGE)) + $(call add_json_str, BootSecurityPatch, $(BOOT_SECURITY_PATCH)) + $(call add_json_str, InitBootSecurityPatch, $(INIT_BOOT_SECURITY_PATCH)) + $(call add_json_str, VendorSecurityPatch, $(VENDOR_SECURITY_PATCH)) + $(call add_json_str, OdmSecurityPatch, $(ODM_SECURITY_PATCH)) + $(call add_json_str, SystemDlkmSecurityPatch, $(SYSTEM_DLKM_SECURITY_PATCH)) + $(call add_json_str, VendorDlkmSecurityPatch, $(VENDOR_DLKM_SECURITY_PATCH)) + $(call add_json_str, OdmDlkmSecurityPatch, $(ODM_DLKM_SECURITY_PATCH)) + $(call add_json_bool, 
BoardIncludeDtbInBootimg, $(BOARD_INCLUDE_DTB_IN_BOOTIMG)) + $(call add_json_list, InternalKernelCmdline, $(INTERNAL_KERNEL_CMDLINE)) + $(call add_json_list, InternalBootconfig, $(INTERNAL_BOOTCONFIG)) + $(call add_json_str, InternalBootconfigFile, $(INTERNAL_BOOTCONFIG_FILE)) + + $(call add_json_bool, BuildingSystemOtherImage, $(BUILDING_SYSTEM_OTHER_IMAGE)) + + # super image stuff + $(call add_json_bool, ProductUseDynamicPartitions, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))) + $(call add_json_bool, ProductRetrofitDynamicPartitions, $(filter true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS))) + $(call add_json_bool, ProductBuildSuperPartition, $(filter true,$(PRODUCT_BUILD_SUPER_PARTITION))) + $(call add_json_str, BoardSuperPartitionSize, $(BOARD_SUPER_PARTITION_SIZE)) + $(call add_json_str, BoardSuperPartitionMetadataDevice, $(BOARD_SUPER_PARTITION_METADATA_DEVICE)) + $(call add_json_list, BoardSuperPartitionBlockDevices, $(BOARD_SUPER_PARTITION_BLOCK_DEVICES)) + $(call add_json_map, BoardSuperPartitionGroups) + $(foreach group, $(BOARD_SUPER_PARTITION_GROUPS), \ + $(call add_json_map, $(group)) \ + $(call add_json_str, GroupSize, $(BOARD_$(call to-upper,$(group))_SIZE)) \ + $(if $(BOARD_$(call to-upper,$(group))_PARTITION_LIST), \ + $(call add_json_list, PartitionList, $(BOARD_$(call to-upper,$(group))_PARTITION_LIST))) \ + $(call end_json_map)) + $(call end_json_map) + $(call add_json_bool, ProductVirtualAbOta, $(filter true,$(PRODUCT_VIRTUAL_AB_OTA))) + $(call add_json_bool, ProductVirtualAbOtaRetrofit, $(filter true,$(PRODUCT_VIRTUAL_AB_OTA_RETROFIT))) + $(call add_json_bool, ProductVirtualAbCompression, $(filter true,$(PRODUCT_VIRTUAL_AB_COMPRESSION))) + $(call add_json_str, ProductVirtualAbCompressionMethod, $(PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD)) + $(call add_json_str, ProductVirtualAbCompressionFactor, $(PRODUCT_VIRTUAL_AB_COMPRESSION_FACTOR)) + $(call add_json_str, ProductVirtualAbCowVersion, $(PRODUCT_VIRTUAL_AB_COW_VERSION)) + $(call add_json_bool, AbOtaUpdater, $(filter true,$(AB_OTA_UPDATER))) + + # Avb (android verified boot) stuff + $(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE))) + $(call add_json_str, BoardAvbAlgorithm, $(BOARD_AVB_ALGORITHM)) + $(call add_json_str, BoardAvbKeyPath, $(BOARD_AVB_KEY_PATH)) + $(call add_json_str, BoardAvbRollbackIndex, $(BOARD_AVB_ROLLBACK_INDEX)) + $(call add_json_map, ChainedVbmetaPartitions) + $(foreach partition,system vendor $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),\ + $(call add_json_map, $(partition)) \ + $(call add_json_list,Partitions,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition)))) \ + $(call add_json_str,Key,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_KEY_PATH)) \ + $(call add_json_str,Algorithm,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_ALGORITHM)) \ + $(call add_json_str,RollbackIndex,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_ROLLBACK_INDEX)) \ + $(call add_json_str,RollbackIndexLocation,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_ROLLBACK_INDEX_LOCATION)) \ + $(call end_json_map)) + $(call end_json_map) + + $(call add_json_bool, ProductUseDynamicPartitionSize, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE))) + $(call add_json_bool, CopyImagesForTargetFilesZip, $(filter true,$(COPY_IMAGES_FOR_TARGET_FILES_ZIP))) + + $(call add_json_list, ProductPackages, $(PRODUCT_PACKAGES)) + $(call add_json_list, ProductPackagesDebug, $(PRODUCT_PACKAGES_DEBUG)) + + # Used to generate /vendor/linker.config.pb + $(call add_json_list, VendorLinkerConfigSrcs, 
$(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS)) + $(call add_json_list, ProductLinkerConfigSrcs, $(PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS)) + + # Used to generate _dlkm partitions + $(call add_json_bool, BuildingSystemDlkmImage, $(BUILDING_SYSTEM_DLKM_IMAGE)) + $(call add_json_list, SystemKernelModules, $(BOARD_SYSTEM_KERNEL_MODULES)) + $(call add_json_str, SystemKernelBlocklistFile, $(BOARD_SYSTEM_KERNEL_MODULES_BLOCKLIST_FILE)) + $(call add_json_list, SystemKernelLoadModules, $(BOARD_SYSTEM_KERNEL_MODULES_LOAD)) + $(call add_json_bool, BuildingVendorDlkmImage, $(BUILDING_VENDOR_DLKM_IMAGE)) + $(call add_json_list, VendorKernelModules, $(BOARD_VENDOR_KERNEL_MODULES)) + $(call add_json_str, VendorKernelBlocklistFile, $(BOARD_VENDOR_KERNEL_MODULES_BLOCKLIST_FILE)) + $(call add_json_bool, BuildingOdmDlkmImage, $(BUILDING_ODM_DLKM_IMAGE)) + $(call add_json_list, OdmKernelModules, $(BOARD_ODM_KERNEL_MODULES)) + $(call add_json_str, OdmKernelBlocklistFile, $(BOARD_ODM_KERNEL_MODULES_BLOCKLIST_FILE)) + $(call add_json_list, VendorRamdiskKernelModules, $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES)) + $(call add_json_str, VendorRamdiskKernelBlocklistFile, $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES_BLOCKLIST_FILE)) + $(call add_json_list, VendorRamdiskKernelLoadModules, $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES_LOAD)) + $(call add_json_str, VendorRamdiskKernelOptionsFile, $(BOARD_VENDOR_RAMDISK_KERNEL_MODULES_OPTIONS_FILE)) + + # Used to generate /vendor/build.prop + $(call add_json_list, BoardInfoFiles, $(if $(TARGET_BOARD_INFO_FILES),$(TARGET_BOARD_INFO_FILES),$(firstword $(TARGET_BOARD_INFO_FILE) $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt)))) + $(call add_json_str, BootLoaderBoardName, $(TARGET_BOOTLOADER_BOARD_NAME)) + + $(call add_json_list, ProductCopyFiles, $(PRODUCT_COPY_FILES)) + + # Used to generate fsv meta + $(call add_json_bool, ProductFsverityGenerateMetadata, $(PRODUCT_FSVERITY_GENERATE_METADATA)) + + # Used to generate recovery partition + $(call add_json_str, TargetScreenDensity, $(TARGET_SCREEN_DENSITY)) + + # Used to generate /recovery/root/build.prop + $(call add_json_map, PrivateRecoveryUiProperties) + $(call add_json_str, animation_fps, $(TARGET_RECOVERY_UI_ANIMATION_FPS)) + $(call add_json_str, margin_height, $(TARGET_RECOVERY_UI_MARGIN_HEIGHT)) + $(call add_json_str, margin_width, $(TARGET_RECOVERY_UI_MARGIN_WIDTH)) + $(call add_json_str, menu_unusable_rows, $(TARGET_RECOVERY_UI_MENU_UNUSABLE_ROWS)) + $(call add_json_str, progress_bar_baseline, $(TARGET_RECOVERY_UI_PROGRESS_BAR_BASELINE)) + $(call add_json_str, touch_low_threshold, $(TARGET_RECOVERY_UI_TOUCH_LOW_THRESHOLD)) + $(call add_json_str, touch_high_threshold, $(TARGET_RECOVERY_UI_TOUCH_HIGH_THRESHOLD)) + $(call add_json_str, vr_stereo_offset, $(TARGET_RECOVERY_UI_VR_STEREO_OFFSET)) + $(call add_json_str, brightness_file, $(TARGET_RECOVERY_UI_BRIGHTNESS_FILE)) + $(call add_json_str, max_brightness_file, $(TARGET_RECOVERY_UI_MAX_BRIGHTNESS_FILE)) + $(call add_json_str, brightness_normal_percent, $(TARGET_RECOVERY_UI_BRIGHTNESS_NORMAL)) + $(call add_json_str, brightness_dimmed_percent, $(TARGET_RECOVERY_UI_BRIGHTNESS_DIMMED)) + $(call end_json_map) + + $(call add_json_str, PrebuiltBootloader, $(BOARD_PREBUILT_BOOTLOADER)) + + # Used to generate userdata partition + $(call add_json_str, ProductFsCasefold, $(PRODUCT_FS_CASEFOLD)) + $(call add_json_str, ProductQuotaProjid, $(PRODUCT_QUOTA_PROJID)) + $(call add_json_str, ProductFsCompression, $(PRODUCT_FS_COMPRESSION)) + +$(call end_json_map) + +# For converting vintf_data +$(call 
add_json_list, DeviceMatrixFile, $(DEVICE_MATRIX_FILE)) +$(call add_json_list, ProductManifestFiles, $(PRODUCT_MANIFEST_FILES)) +$(call add_json_list, SystemManifestFile, $(DEVICE_FRAMEWORK_MANIFEST_FILE)) +SYSTEM_EXT_HWSERVICE_FILES := +ifeq ($(PRODUCT_HIDL_ENABLED),true) + ifneq ($(filter hwservicemanager,$(PRODUCT_PACKAGES)),) + SYSTEM_EXT_HWSERVICE_FILES += system/hwservicemanager/hwservicemanager_no_max.xml + else + $(error If PRODUCT_HIDL_ENABLED is set, hwservicemanager must be added to PRODUCT_PACKAGES explicitly) + endif +else + ifneq ($(filter hwservicemanager,$(PRODUCT_PACKAGES)),) + SYSTEM_EXT_HWSERVICE_FILES += system/hwservicemanager/hwservicemanager.xml + else ifneq ($(filter hwservicemanager,$(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34)),) + SYSTEM_EXT_HWSERVICE_FILES += system/hwservicemanager/hwservicemanager.xml + endif +endif +$(call add_json_list, SystemExtManifestFiles, $(SYSTEM_EXT_MANIFEST_FILES) $(SYSTEM_EXT_HWSERVICE_FILES)) +$(call add_json_list, DeviceManifestFiles, $(DEVICE_MANIFEST_FILE)) +$(call add_json_list, OdmManifestFiles, $(ODM_MANIFEST_FILES)) $(call json_end) diff --git a/core/soong_extra_config.mk b/core/soong_extra_config.mk index 76da0d702b..8eee50ae00 100644 --- a/core/soong_extra_config.mk +++ b/core/soong_extra_config.mk @@ -43,6 +43,7 @@ $(call add_json_list, PRODUCT_VENDOR_PROPERTIES, $(call collapse-prop-pa $(call add_json_list, PRODUCT_PRODUCT_PROPERTIES, $(call collapse-prop-pairs,PRODUCT_PRODUCT_PROPERTIES)) $(call add_json_list, PRODUCT_ODM_PROPERTIES, $(call collapse-prop-pairs,PRODUCT_ODM_PROPERTIES)) $(call add_json_list, PRODUCT_PROPERTY_OVERRIDES, $(call collapse-prop-pairs,PRODUCT_PROPERTY_OVERRIDES)) +$(call add_json_list, PRODUCT_DEFAULT_PROPERTY_OVERRIDES, $(call collapse-prop-pairs,PRODUCT_DEFAULT_PROPERTY_OVERRIDES)) $(call add_json_str, BootloaderBoardName, $(TARGET_BOOTLOADER_BOARD_NAME)) @@ -79,7 +80,7 @@ $(call add_json_bool, PropertySplitEnabled, $(filter true,$(BOARD_PROPERTY_OVERR $(call add_json_str, ScreenDensity, $(TARGET_SCREEN_DENSITY)) -$(call add_json_bool, UsesVulkan, $(filter true,$(TARGET_USES_VULKAN))) +$(call add_json_str, UsesVulkan, $(TARGET_USES_VULKAN)) $(call add_json_bool, ZygoteForce64, $(filter true,$(ZYGOTE_FORCE_64))) @@ -90,6 +91,10 @@ $(call add_json_list, BuildVersionTags, $(BUILD_VERSION_TAGS)) $(call add_json_bool, ProductNotDebuggableInUserdebug, $(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG)) +$(call add_json_bool, UsesProductImage, $(filter true,$(BOARD_USES_PRODUCTIMAGE))) + +$(call add_json_bool, TargetBoots16K, $(filter true,$(TARGET_BOOTS_16K))) + $(call json_end) $(shell mkdir -p $(dir $(SOONG_EXTRA_VARIABLES))) diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk index f74bb6d975..8c3882f364 100644 --- a/core/soong_java_prebuilt.mk +++ b/core/soong_java_prebuilt.mk @@ -21,19 +21,6 @@ full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.ja full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar common_javalib.jar := $(intermediates.COMMON)/javalib.jar -ifdef LOCAL_SOONG_AAR - LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_AAR) -endif - -# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE -# to avoid checkbuilds making an extra copy of every module. 
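
On the soong_extra_config.mk change above that turns UsesVulkan from add_json_bool into add_json_str: the bool form is normally gated with $(filter true,...), which collapses every value other than the literal word "true" (including an unset variable) to false, while the string form forwards the raw value, presumably so Soong can tell "unset" apart from an explicit "false". A tiny GNU Make sketch of that filter gate, with a made-up value:

    # save as filter-true-demo.mk and run: make -f filter-true-demo.mk
    TARGET_USES_VULKAN := false

    # Only the literal word "true" survives the filter; "false" and "" both vanish.
    $(info seen as a bool  : '$(filter true,$(TARGET_USES_VULKAN))')
    # The string form keeps the original tri-state value.
    $(info seen as a string: '$(TARGET_USES_VULKAN)')

    all: ; @:
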
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_HEADER_JAR) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_FULL_MANIFEST_FILE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEXPREOPT_CONFIG) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEX_JAR) - ####################################### include $(BUILD_SYSTEM)/base_rules.mk ####################################### diff --git a/core/sysprop.mk b/core/sysprop.mk index 6d65e19efa..9a9f509333 100644 --- a/core/sysprop.mk +++ b/core/sysprop.mk @@ -122,11 +122,19 @@ $(2): $(POST_PROCESS_PROPS) $(INTERNAL_BUILD_ID_MAKEFILE) $(3) $(6) $(BUILT_KERN ifneq ($(strip $(7)), true) $(hide) $$(call generate-common-build-props,$(call to-lower,$(strip $(1))),$$@) endif + # Make and Soong use different intermediate files to build vendor/build.prop. + # Although the sysprop contents are same, the absolute paths of android-info.prop are different. + # Print the filename for the intermediate files (files in OUT_DIR). + # This helps with validating mk->soong migration of android partitions. $(hide) $(foreach file,$(strip $(3)),\ if [ -f "$(file)" ]; then\ echo "" >> $$@;\ echo "####################################" >> $$@;\ - echo "# from $(file)" >> $$@;\ + $(if $(filter $(OUT_DIR)/%,$(file)), \ + echo "# from $(notdir $(file))" >> $$@;\ + ,\ + echo "# from $(file)" >> $$@;\ + )\ echo "####################################" >> $$@;\ cat $(file) >> $$@;\ fi;) @@ -152,61 +160,6 @@ endif $(call declare-1p-target,$(2)) endef -# ----------------------------------------------------------------- -# Define fingerprint, thumbprint, and version tags for the current build -# -# BUILD_VERSION_TAGS is a comma-separated list of tags chosen by the device -# implementer that further distinguishes the build. It's basically defined -# by the device implementer. Here, we are adding a mandatory tag that -# identifies the signing config of the build. -BUILD_VERSION_TAGS := $(BUILD_VERSION_TAGS) -ifeq ($(TARGET_BUILD_TYPE),debug) - BUILD_VERSION_TAGS += debug -endif -# The "test-keys" tag marks builds signed with the old test keys, -# which are available in the SDK. "dev-keys" marks builds signed with -# non-default dev keys (usually private keys from a vendor directory). -# Both of these tags will be removed and replaced with "release-keys" -# when the target-files is signed in a post-build step. -ifeq ($(DEFAULT_SYSTEM_DEV_CERTIFICATE),build/make/target/product/security/testkey) -BUILD_KEYS := test-keys -else -BUILD_KEYS := dev-keys -endif -BUILD_VERSION_TAGS += $(BUILD_KEYS) -BUILD_VERSION_TAGS := $(subst $(space),$(comma),$(sort $(BUILD_VERSION_TAGS))) - -# BUILD_FINGERPRINT is used used to uniquely identify the combined build and -# product; used by the OTA server. -ifeq (,$(strip $(BUILD_FINGERPRINT))) - BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER_FROM_FILE):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS) -endif - -BUILD_FINGERPRINT_FILE := $(PRODUCT_OUT)/build_fingerprint.txt -ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) >$(BUILD_FINGERPRINT_FILE) && grep " " $(BUILD_FINGERPRINT_FILE))) - $(error BUILD_FINGERPRINT cannot contain spaces: "$(file <$(BUILD_FINGERPRINT_FILE))") -endif -BUILD_FINGERPRINT_FROM_FILE := $$(cat $(BUILD_FINGERPRINT_FILE)) -# unset it for safety. 
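
Back on the sysprop.mk build-properties hunk above: for prop fragments that live under OUT_DIR, the generated "# from ..." header now records only the basename, so a Make-generated and a Soong-generated build.prop can be compared byte-for-byte even though their intermediate paths differ. A rough GNU Make sketch of that classification; the paths are invented:

    # save as header-demo.mk and run: make -f header-demo.mk
    OUT_DIR    := out
    prop_files := device/acme/vendor.prop $(OUT_DIR)/soong/intermediates/android-info.prop

    # Files under OUT_DIR keep only $(notdir ...); source-tree files keep the full path.
    $(foreach f,$(prop_files),\
      $(info from $(if $(filter $(OUT_DIR)/%,$(f)),$(notdir $(f)),$(f))))

    all: ; @:
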
-BUILD_FINGERPRINT := - -# BUILD_THUMBPRINT is used to uniquely identify the system build; used by the -# OTA server. This purposefully excludes any product-specific variables. -ifeq (,$(strip $(BUILD_THUMBPRINT))) - BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER_FROM_FILE):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS) -endif - -BUILD_THUMBPRINT_FILE := $(PRODUCT_OUT)/build_thumbprint.txt -ifeq ($(strip $(HAS_BUILD_NUMBER)),true) -$(BUILD_THUMBPRINT_FILE): $(BUILD_NUMBER_FILE) -endif -ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_THUMBPRINT) >$(BUILD_THUMBPRINT_FILE) && grep " " $(BUILD_THUMBPRINT_FILE))) - $(error BUILD_THUMBPRINT cannot contain spaces: "$(file <$(BUILD_THUMBPRINT_FILE))") -endif -# unset it for safety. -BUILD_THUMBPRINT_FILE := -BUILD_THUMBPRINT := - KNOWN_OEM_THUMBPRINT_PROPERTIES := \ ro.product.brand \ ro.product.name \ @@ -230,7 +183,7 @@ _prop_files_ := $(if $(TARGET_VENDOR_PROP),\ $(TARGET_VENDOR_PROP),\ $(wildcard $(TARGET_DEVICE_DIR)/vendor.prop)) -android_info_prop := $(call intermediates-dir-for,ETC,android_info_prop)/android_info.prop +android_info_prop := $(call intermediates-dir-for,ETC,android_info_prop)/android-info.prop $(android_info_prop): $(INSTALLED_ANDROID_INFO_TXT_TARGET) cat $< | grep 'require version-' | sed -e 's/require version-/ro.build.expect./g' > $@ @@ -266,156 +219,47 @@ $(eval $(call declare-1p-target,$(INSTALLED_VENDOR_BUILD_PROP_TARGET))) # ----------------------------------------------------------------- # product/etc/build.prop # - -_prop_files_ := $(if $(TARGET_PRODUCT_PROP),\ - $(TARGET_PRODUCT_PROP),\ - $(wildcard $(TARGET_DEVICE_DIR)/product.prop)) - -# Order matters here. When there are duplicates, the last one wins. -# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter -_prop_vars_ := \ - ADDITIONAL_PRODUCT_PROPERTIES \ - PRODUCT_PRODUCT_PROPERTIES +# product/etc/build.prop is built by Soong. See product-build.prop module in +# build/soong/Android.bp. INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/etc/build.prop -ifdef PRODUCT_OEM_PROPERTIES -import_oem_prop := $(call intermediates-dir-for,ETC,import_oem_prop)/oem.prop - -$(import_oem_prop): - $(hide) echo "####################################" >> $@; \ - echo "# PRODUCT_OEM_PROPERTIES" >> $@; \ - echo "####################################" >> $@; - $(hide) $(foreach prop,$(PRODUCT_OEM_PROPERTIES), \ - echo "import /oem/oem.prop $(prop)" >> $@;) - -_footers_ := $(import_oem_prop) -else -_footers_ := -endif - -# Skip common /product properties generation if device released before R and -# has no product partition. This is the first part of the check. -ifeq ($(call math_lt,$(if $(PRODUCT_SHIPPING_API_LEVEL),$(PRODUCT_SHIPPING_API_LEVEL),30),30), true) - _skip_common_properties := true -endif - -# The second part of the check - always generate common properties for the -# devices with product partition regardless of shipping level. 
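
The android_info_prop rule above (its intermediate is now spelled android-info.prop, presumably to line up with the name used on the Soong side) lifts the "require version-*" lines from android-info.txt into ro.build.expect.* properties with nothing more than grep and sed. With a made-up input line, the transform looks like this:

    # save as expect-demo.mk and run: make -f expect-demo.mk
    # Prints: ro.build.expect.bootloader=1.0
    demo: ; @echo 'require version-bootloader=1.0' | sed -e 's/require version-/ro.build.expect./g'
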
-ifneq ($(BOARD_USES_PRODUCTIMAGE),) - _skip_common_properties := -endif - -$(eval $(call build-properties,\ - product,\ - $(INSTALLED_PRODUCT_BUILD_PROP_TARGET),\ - $(_prop_files_),\ - $(_prop_vars_),\ - $(empty),\ - $(_footers_),\ - $(_skip_common_properties))) - -$(eval $(call declare-1p-target,$(INSTALLED_PRODUCT_BUILD_PROP_TARGET))) - -_skip_common_properties := - # ---------------------------------------------------------------- # odm/etc/build.prop # -_prop_files_ := $(if $(TARGET_ODM_PROP),\ - $(TARGET_ODM_PROP),\ - $(wildcard $(TARGET_DEVICE_DIR)/odm.prop)) - -# Order matters here. When there are duplicates, the last one wins. -# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter -_prop_vars_ := \ - ADDITIONAL_ODM_PROPERTIES \ - PRODUCT_ODM_PROPERTIES +# odm/etc/build.prop is built by Soong. See odm-build.prop module in +# build/soong/Android.bp. INSTALLED_ODM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM)/etc/build.prop -$(eval $(call build-properties,\ - odm,\ - $(INSTALLED_ODM_BUILD_PROP_TARGET),\ - $(_prop_files_),\ - $(_prop_vars_),\ - $(empty),\ - $(empty),\ - $(empty))) - -$(eval $(call declare-1p-target,$(INSTALLED_ODM_BUILD_PROP_TARGET))) # ---------------------------------------------------------------- # vendor_dlkm/etc/build.prop -# - -INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_VENDOR_DLKM)/etc/build.prop -$(eval $(call build-properties,\ - vendor_dlkm,\ - $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET),\ - $(empty),\ - $(empty),\ - $(empty),\ - $(empty),\ - $(empty))) - -$(eval $(call declare-1p-target,$(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET))) - -# ---------------------------------------------------------------- # odm_dlkm/etc/build.prop -# - -INSTALLED_ODM_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM_DLKM)/etc/build.prop -$(eval $(call build-properties,\ - odm_dlkm,\ - $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET),\ - $(empty),\ - $(empty),\ - $(empty),\ - $(empty),\ - $(empty))) - -$(eval $(call declare-1p-target,$(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET))) - -# ---------------------------------------------------------------- # system_dlkm/build.prop -# +# These are built by Soong. See build/soong/Android.bp +INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_VENDOR_DLKM)/etc/build.prop +INSTALLED_ODM_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM_DLKM)/etc/build.prop INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_SYSTEM_DLKM)/etc/build.prop -$(eval $(call build-properties,\ - system_dlkm,\ - $(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET),\ - $(empty),\ - $(empty),\ - $(empty),\ - $(empty),\ - $(empty))) - -$(eval $(call declare-1p-target,$(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET))) +ALL_DEFAULT_INSTALLED_MODULES += \ + $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET) \ + $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET) \ + $(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET) \ # ----------------------------------------------------------------- # system_ext/etc/build.prop # -# system_ext/build.prop is built by Soong. See system-build.prop module in +# system_ext/etc/build.prop is built by Soong. See system-build.prop module in # build/soong/Android.bp. 
INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET := $(TARGET_OUT_SYSTEM_EXT)/etc/build.prop -# ---------------------------------------------------------------- -# ramdisk/boot/etc/build.prop -# - RAMDISK_BUILD_PROP_REL_PATH := system/etc/ramdisk/build.prop +ifeq (true,$(BOARD_USES_RECOVERY_AS_BOOT)) +INSTALLED_RAMDISK_BUILD_PROP_TARGET := $(TARGET_RECOVERY_ROOT_OUT)/first_stage_ramdisk/$(RAMDISK_BUILD_PROP_REL_PATH) +else INSTALLED_RAMDISK_BUILD_PROP_TARGET := $(TARGET_RAMDISK_OUT)/$(RAMDISK_BUILD_PROP_REL_PATH) -$(eval $(call build-properties,\ - bootimage,\ - $(INSTALLED_RAMDISK_BUILD_PROP_TARGET),\ - $(empty),\ - $(empty),\ - $(empty),\ - $(empty),\ - $(empty))) - -$(eval $(call declare-1p-target,$(INSTALLED_RAMDISK_BUILD_PROP_TARGET))) +endif ALL_INSTALLED_BUILD_PROP_FILES := \ $(INSTALLED_BUILD_PROP_TARGET) \ diff --git a/core/sysprop_config.mk b/core/sysprop_config.mk index 543b86b0e0..199150347c 100644 --- a/core/sysprop_config.mk +++ b/core/sysprop_config.mk @@ -16,24 +16,8 @@ $(foreach name, $(_additional_prop_var_names),\ _additional_prop_var_names := $(KATI_obsolete_var ADDITIONAL_SYSTEM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead) - -# Add the system server compiler filter if they are specified for the product. -ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)) -ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER) -endif - -# Add the 16K developer option if it is defined for the product. -ifeq ($(PRODUCT_16K_DEVELOPER_OPTION),true) -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=true -else -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=false -endif - -ifeq ($(TARGET_BOOTS_16K),true) -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.page_size=16384 -else -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.page_size=4096 -endif +$(KATI_obsolete_var ADDITIONAL_ODM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead) +$(KATI_obsolete_var ADDITIONAL_PRODUCT_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead) # Add cpu properties for bionic and ART. ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.arch=$(TARGET_ARCH) @@ -107,8 +91,12 @@ endif # Build system set BOARD_API_LEVEL to show the api level of the vendor API surface. # This must not be altered outside of build system. ifdef BOARD_API_LEVEL -ADDITIONAL_VENDOR_PROPERTIES += \ - ro.board.api_level=$(BOARD_API_LEVEL) + ADDITIONAL_VENDOR_PROPERTIES += \ + ro.board.api_level?=$(BOARD_API_LEVEL) + ifdef BOARD_API_LEVEL_PROP_OVERRIDE + ADDITIONAL_VENDOR_PROPERTIES += \ + ro.board.api_level=$(BOARD_API_LEVEL_PROP_OVERRIDE) + endif endif # RELEASE_BOARD_API_LEVEL_FROZEN is true when the vendor API surface is frozen. ifdef RELEASE_BOARD_API_LEVEL_FROZEN @@ -146,35 +134,16 @@ ADDITIONAL_VENDOR_PROPERTIES += \ ro.build.ab_update=$(AB_OTA_UPDATER) endif -ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS) - ifeq ($(AB_OTA_UPDATER),true) -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS))) ADDITIONAL_VENDOR_PROPERTIES += ro.vendor.build.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS))) endif -# Set this property for VTS to skip large page size tests on unsupported devices. 
-ADDITIONAL_PRODUCT_PROPERTIES += \ - ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED) - -ifeq ($(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO),true) -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.no_bionic_page_size_macro=true -endif - user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT)) config_enable_uffd_gc := \ $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default) -# This is a temporary system property that controls the ART module. The plan is -# to remove it by Aug 2025, at which time Mainline updates of the ART module -# will ignore it as well. -# If the value is "default", it will be mangled by post_process_props.py. -ADDITIONAL_PRODUCT_PROPERTIES += ro.dalvik.vm.enable_uffd_gc=$(config_enable_uffd_gc) - -ADDITIONAL_PRODUCT_PROPERTIES := $(strip $(ADDITIONAL_PRODUCT_PROPERTIES)) ADDITIONAL_VENDOR_PROPERTIES := $(strip $(ADDITIONAL_VENDOR_PROPERTIES)) .KATI_READONLY += \ - ADDITIONAL_PRODUCT_PROPERTIES \ ADDITIONAL_VENDOR_PROPERTIES diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk index c95f6e7878..eb54faeffe 100644 --- a/core/tasks/art-host-tests.mk +++ b/core/tasks/art-host-tests.mk @@ -47,21 +47,16 @@ $(art_host_tests_zip) : $(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_l $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \ echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \ done - grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \ - -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \ -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list \ -sha256 grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true - grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true $(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \ - -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \ - -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list + -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list grep $(HOST_OUT) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true $(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_host_shared_libs_zip) \ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list - grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list $(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list art-host-tests: $(art_host_tests_zip) diff --git a/core/tasks/autorepro.mk b/core/tasks/autorepro.mk new file mode 100644 index 0000000000..2f81f9bf85 --- /dev/null +++ b/core/tasks/autorepro.mk @@ -0,0 +1,39 @@ +# Copyright (C) 2022 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ifneq ($(wildcard test/sts/README-autorepro.md),) +test_suite_name := autorepro +test_suite_tradefed := sts-tradefed +test_suite_readme := test/sts/README-autorepro.md +autorepro_zip := $(HOST_OUT)/$(test_suite_name)/autorepro.zip + +include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk + +autorepro_plugin_skel := $(call intermediates-dir-for,ETC,autorepro-plugin-skel.zip)/autorepro-plugin-skel.zip + +$(autorepro_zip): AUTOREPRO_ZIP := $(compatibility_zip) +$(autorepro_zip): AUTOREPRO_PLUGIN_SKEL := $(autorepro_plugin_skel) +$(autorepro_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(autorepro_plugin_skel) + rm -f $@ $(AUTOREPRO_ZIP)_filtered + $(ZIP2ZIP) -i $(AUTOREPRO_ZIP) -o $(AUTOREPRO_ZIP)_filtered \ + -x android-autorepro/tools/sts-tradefed-tests.jar \ + 'android-autorepro/tools/*:autorepro/src/main/resources/sts-tradefed-tools/' + $(MERGE_ZIPS) $@ $(AUTOREPRO_ZIP)_filtered $(AUTOREPRO_PLUGIN_SKEL) + rm -f $(AUTOREPRO_ZIP)_filtered + +.PHONY: autorepro +autorepro: $(autorepro_zip) +$(call dist-for-goals, autorepro, $(autorepro_zip)) + +endif diff --git a/target/product/gsi/Android.mk b/core/tasks/check-abi-dump-list.mk index 36897fef8e..81d549e46f 100644 --- a/target/product/gsi/Android.mk +++ b/core/tasks/check-abi-dump-list.mk @@ -1,4 +1,16 @@ -LOCAL_PATH:= $(call my-dir) +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. ##################################################################### # Check the generate list against the latest list stored in the @@ -109,60 +121,3 @@ $(check-abi-dump-list-timestamp): $(if $(added_vndk_abi_dumps)$(added_platform_abi_dumps),exit 1) $(hide) mkdir -p $(dir $@) $(hide) touch $@ - -##################################################################### -# VNDK package and snapshot. 
- -include $(CLEAR_VARS) - -LOCAL_MODULE := vndk_apex_snapshot_package -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver)) -include $(BUILD_PHONY_PACKAGE) - -##################################################################### -# Define Phony module to install LLNDK modules which are installed in -# the system image -include $(CLEAR_VARS) -LOCAL_MODULE := llndk_in_system -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE - -# Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB -LOCAL_REQUIRED_MODULES := \ - $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES)) \ - llndk.libraries.txt - - -include $(BUILD_PHONY_PACKAGE) - -##################################################################### -# init.gsi.rc, GSI-specific init script. - -include $(CLEAR_VARS) -LOCAL_MODULE := init.gsi.rc -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_SRC_FILES := $(LOCAL_MODULE) -LOCAL_MODULE_CLASS := ETC -LOCAL_SYSTEM_EXT_MODULE := true -LOCAL_MODULE_RELATIVE_PATH := init - -include $(BUILD_PREBUILT) - - -include $(CLEAR_VARS) -LOCAL_MODULE := init.vndk-nodef.rc -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_SRC_FILES := $(LOCAL_MODULE) -LOCAL_MODULE_CLASS := ETC -LOCAL_SYSTEM_EXT_MODULE := true -LOCAL_MODULE_RELATIVE_PATH := gsi - -include $(BUILD_PREBUILT) diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk index 294cb577e2..d9f615021c 100644 --- a/core/tasks/cts.mk +++ b/core/tasks/cts.mk @@ -97,12 +97,28 @@ cts_api_map_exe := $(HOST_OUT_EXECUTABLES)/cts-api-map coverage_out := $(HOST_OUT)/cts-api-coverage api_map_out := $(HOST_OUT)/cts-api-map -cts_jar_files := $(api_map_out)/api_map_files.txt +cts_jar_files := $(api_map_out)/cts_jar_files.txt +cts_interactive_jar_files := $(api_map_out)/cts_interactive_jar_files.txt +cts_all_jar_files := $(api_map_out)/cts_all_jar_files.txt + $(cts_jar_files): PRIVATE_API_MAP_FILES := $(sort $(COMPATIBILITY.cts.API_MAP_FILES)) $(cts_jar_files): mkdir -p $(dir $@) echo $(PRIVATE_API_MAP_FILES) > $@ +$(cts_interactive_jar_files): PRIVATE_API_MAP_FILES := $(sort $(COMPATIBILITY.cts-interactive.API_MAP_FILES)) +$(cts_interactive_jar_files): $(SOONG_ANDROID_CTS_VERIFIER_APP_LIST) + mkdir -p $(dir $@) + cp $< $@ + echo $(PRIVATE_API_MAP_FILES) >> $@ + +$(cts_all_jar_files): PRIVATE_API_MAP_FILES := $(sort $(COMPATIBILITY.cts.API_MAP_FILES) \ + $(COMPATIBILITY.cts-interactive.API_MAP_FILES)) +$(cts_all_jar_files): $(SOONG_ANDROID_CTS_VERIFIER_APP_LIST) + mkdir -p $(dir $@) + cp $< $@ + echo $(PRIVATE_API_MAP_FILES) >> $@ + api_xml_description := $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml napi_text_description := cts/tools/cts-api-coverage/etc/ndk-api.xml @@ -124,13 +140,14 @@ cts-combined-xml-coverage-report := $(coverage_out)/combined-coverage.xml cts_api_coverage_dependencies := $(cts_api_coverage_exe) $(dexdeps_exe) $(api_xml_description) $(napi_xml_description) cts_system_api_coverage_dependencies := $(cts_api_coverage_exe) $(dexdeps_exe) $(system_api_xml_description) -cts-api-xml-api-map-report := $(api_map_out)/api-map.xml 
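
The cts_interactive_jar_files / cts_all_jar_files rules above use the usual automatic-variable idiom: copy the Soong-generated CTS Verifier app list ($<) into the target ($@) and then append the API map files. A minimal GNU Make sketch of that copy-then-append pattern, with invented file names and contents:

    # save as jarlist-demo.mk and run: make -f jarlist-demo.mk
    seed.txt: ; @printf 'from-soong-app-list\n' > $@
    all_files.txt: seed.txt ; @cp $< $@ && echo extra-api-map-files >> $@ && cat $@
    .DEFAULT_GOAL := all_files.txt
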
-cts-api-html-api-map-report := $(api_map_out)/api-map.html -cts-system-api-xml-api-map-report := $(api_map_out)/system-api-map.xml -cts-system-api-html-api-map-report := $(api_map_out)/system-api-map.html +cts-system-api-map-xml-report := $(api_map_out)/cts-system-api-map.xml +cts-interactive-system-api-map-xml-report := $(api_map_out)/cts-interactive-system-api-map.xml +cts-combined-system-api-map-xml-report := $(api_map_out)/cts-combined-system-api-map.xml +cts-combined-system-api-map-html-report := $(api_map_out)/cts-combined-system-api-map.html cts_system_api_map_dependencies := $(cts_api_map_exe) $(system_api_xml_description) $(cts_jar_files) -cts_api_map_dependencies := $(cts_api_map_exe) $(api_xml_description) $(cts_jar_files) +cts_interactive_system_api_map_dependencies := $(cts_api_map_exe) $(system_api_xml_description) $(cts_interactive_jar_files) +cts_combined_system_api_map_dependencies := $(cts_api_map_exe) $(system_api_xml_description) $(cts_all_jar_files) android_cts_zip := $(HOST_OUT)/cts/android-cts.zip cts_verifier_apk := $(call intermediates-dir-for,APPS,CtsVerifier)/package.apk @@ -210,45 +227,42 @@ cts-combined-xml-coverage : $(cts-combined-xml-coverage-report) .PHONY: cts-coverage-report-all cts-api-coverage cts-coverage-report-all: cts-test-coverage cts-verifier-coverage cts-combined-coverage cts-combined-xml-coverage -$(cts-system-api-xml-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe) -$(cts-system-api-xml-api-map-report): PRIVATE_API_XML_DESC := $(system_api_xml_description) -$(cts-system-api-xml-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files) -$(cts-system-api-xml-api-map-report) : $(android_cts_zip) $(cts_system_api_map_dependencies) | $(ACP) +$(cts-system-api-map-xml-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe) +$(cts-system-api-map-xml-report): PRIVATE_API_XML_DESC := $(system_api_xml_description) +$(cts-system-api-map-xml-report): PRIVATE_JAR_FILES := $(cts_jar_files) +$(cts-system-api-map-xml-report) : $(android_cts_zip) $(cts_system_api_map_dependencies) | $(ACP) $(call generate-api-map-report-cts,"CTS System API MAP Report - XML",\ $(PRIVATE_JAR_FILES),xml) -$(cts-system-api-html-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe) -$(cts-system-api-html-api-map-report): PRIVATE_API_XML_DESC := $(system_api_xml_description) -$(cts-system-api-html-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files) -$(cts-system-api-html-api-map-report) : $(android_cts_zip) $(cts_system_api_map_dependencies) | $(ACP) - $(call generate-api-map-report-cts,"CTS System API MAP Report - HTML",\ - $(PRIVATE_JAR_FILES),html) +$(cts-interactive-system-api-map-xml-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe) +$(cts-interactive-system-api-map-xml-report): PRIVATE_API_XML_DESC := $(system_api_xml_description) +$(cts-interactive-system-api-map-xml-report): PRIVATE_JAR_FILES := $(cts_interactive_jar_files) +$(cts-interactive-system-api-map-xml-report) : $(verifier_zip) $(cts_interactive_system_api_map_dependencies) | $(ACP) + $(call generate-api-map-report-cts,"CTS Interactive System API MAP Report - XML",\ + $(PRIVATE_JAR_FILES),xml) -$(cts-api-xml-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe) -$(cts-api-xml-api-map-report): PRIVATE_API_XML_DESC := $(api_xml_description) -$(cts-api-xml-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files) -$(cts-api-xml-api-map-report) : $(android_cts_zip) $(cts_api_map_dependencies) | $(ACP) - $(call generate-api-map-report-cts,"CTS API MAP Report - XML",\ 
+$(cts-combined-system-api-map-xml-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe) +$(cts-combined-system-api-map-xml-report): PRIVATE_API_XML_DESC := $(system_api_xml_description) +$(cts-combined-system-api-map-xml-report): PRIVATE_JAR_FILES := $(cts_all_jar_files) +$(cts-combined-system-api-map-xml-report) : $(verifier_zip) $(android_cts_zip) $(cts_combined_system_api_map_dependencies) | $(ACP) + $(call generate-api-map-report-cts,"CTS Combined System API MAP Report - XML",\ $(PRIVATE_JAR_FILES),xml) -$(cts-api-html-api-map-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe) -$(cts-api-html-api-map-report): PRIVATE_API_XML_DESC := $(api_xml_description) -$(cts-api-html-api-map-report): PRIVATE_JAR_FILES := $(cts_jar_files) -$(cts-api-html-api-map-report) : $(android_cts_zip) $(cts_api_map_dependencies) | $(ACP) - $(call generate-api-map-report-cts,"CTS API MAP Report - HTML",\ +$(cts-combined-system-api-map-html-report): PRIVATE_CTS_API_MAP_EXE := $(cts_api_map_exe) +$(cts-combined-system-api-map-html-report): PRIVATE_API_XML_DESC := $(system_api_xml_description) +$(cts-combined-system-api-map-html-report): PRIVATE_JAR_FILES := $(cts_all_jar_files) +$(cts-combined-system-api-map-html-report) : $(verifier_zip) $(android_cts_zip) $(cts_combined_system_api_map_dependencies) | $(ACP) + $(call generate-api-map-report-cts,"CTS Combined System API MAP Report - HTML",\ $(PRIVATE_JAR_FILES),html) -.PHONY: cts-system-api-xml-api-map -cts-system-api-xml-api-map : $(cts-system-api-xml-api-map-report) - -.PHONY: cts-system-api-html-api-map -cts-system-api-html-api-map : $(cts-system-api-html-api-map-report) +.PHONY: cts-system-api-map-xml +cts-system-api-map-xml : $(cts-system-api-map-xml-report) -.PHONY: cts-api-xml-api-map -cts-api-xml-api-map : $(cts-api-xml-api-map-report) +.PHONY: cts-interactive-system-api-map-xml +cts-interactive-system-api-map-xml: $(cts-interactive-system-api-map-xml-report) -.PHONY: cts-api-html-api-map -cts-api-html-api-map : $(cts-api-html-api-map-report) +.PHONY: cts-combined-system-api-map-xml +cts-combined-system-api-map-xml : $(cts-combined-system-api-map-xml-report) .PHONY: cts-api-map-all @@ -268,15 +282,13 @@ ALL_TARGETS.$(cts-combined-coverage-report).META_LIC:=$(module_license_metadata) ALL_TARGETS.$(cts-combined-xml-coverage-report).META_LIC:=$(module_license_metadata) # Put the test api map report in the dist dir if "cts-api-map-all" is among the build goals. 
-$(call dist-for-goals, cts-api-map-all, $(cts-system-api-xml-api-map-report):cts-system-api-xml-api-map-report.xml) -$(call dist-for-goals, cts-api-map-all, $(cts-system-api-html-api-map-report):cts-system-api-html-api-map-report.html) -$(call dist-for-goals, cts-api-map-all, $(cts-api-xml-api-map-report):cts-api-xml-api-map-report.xml) -$(call dist-for-goals, cts-api-map-all, $(cts-api-html-api-map-report):cts-api-html-api-map-report.html) +$(call dist-for-goals, cts-api-map-all, $(cts-combined-system-api-map-xml-report):cts-api-map-report.xml) +$(call dist-for-goals, cts-api-map-all, $(cts-combined-system-api-map-html-report):cts-api-map-report.html) -ALL_TARGETS.$(cts-system-api-xml-api-map-report).META_LIC:=$(module_license_metadata) -ALL_TARGETS.$(cts-system-api-html-api-map-report).META_LIC:=$(module_license_metadata) -ALL_TARGETS.$(cts-api-xml-api-map-report).META_LIC:=$(module_license_metadata) -ALL_TARGETS.$(cts-api-html-api-map-report).META_LIC:=$(module_license_metadata) +ALL_TARGETS.$(cts-system-api-map-xml-report).META_LIC:=$(module_license_metadata) +ALL_TARGETS.$(cts-interactive-system-api-map-xml-report).META_LIC:=$(module_license_metadata) +ALL_TARGETS.$(cts-combined-system-api-map-xml-report).META_LIC:=$(module_license_metadata) +ALL_TARGETS.$(cts-combined-system-api-map-html-report).META_LIC:=$(module_license_metadata) # Arguments; # 1 - Name of the report printed out on the screen @@ -301,18 +313,19 @@ endef # Reset temp vars cts_api_coverage_dependencies := cts_system_api_coverage_dependencies := -cts_api_map_dependencies := cts_system_api_map_dependencies := +cts_interactive_system_api_map_dependencies := +cts_combined_system_api_map_dependencies := cts-combined-coverage-report := cts-combined-xml-coverage-report := cts-verifier-coverage-report := cts-test-coverage-report := cts-system-api-coverage-report := cts-system-api-xml-coverage-report := -cts-api-xml-api-map-report := -cts-api-html-api-map-report := -cts-system-api-xml-api-map-report := -cts-system-api-html-api-map-report := +cts-system-api-map-xml-report := +cts-interactive-system-api-map-xml-report := +cts-combined-system-api-map-xml-report := +cts-combined-system-api-map-html-report := api_xml_description := api_text_description := system_api_xml_description := diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk index 5850c4ed73..6164c2e94b 100644 --- a/core/tasks/device-tests.mk +++ b/core/tasks/device-tests.mk @@ -14,6 +14,7 @@ .PHONY: device-tests +.PHONY: device-tests-host-shared-libs device-tests-zip := $(PRODUCT_OUT)/device-tests.zip # Create an artifact to include a list of test config files in device-tests. 
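Both core/tasks/cts.mk and core/tasks/device-tests.mk above follow the same packaging idiom: a rule builds a zip (or report) artifact, a phony goal depends on it, and dist-for-goals copies it into DIST_DIR only when that goal is requested. A minimal sketch of the idiom, using hypothetical names (my-report, my_report_zip, MY_REPORT_INPUT_FILES) rather than anything defined in this change:

my_report_zip := $(PRODUCT_OUT)/my-report.zip

$(my_report_zip): PRIVATE_FILES := $(MY_REPORT_INPUT_FILES)
$(my_report_zip): $(MY_REPORT_INPUT_FILES) $(SOONG_ZIP)
	echo $(PRIVATE_FILES) | tr " " "\n" > $@.list
	$(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT) -l $@.list
	rm -f $@.list

.PHONY: my-report
my-report: $(my_report_zip)

# Only packaged into DIST_DIR when "my-report" is among the build goals;
# the :my-report.zip suffix renames the artifact inside the dist directory.
$(call dist-for-goals, my-report, $(my_report_zip):my-report.zip)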
@@ -23,37 +24,45 @@ device-tests-configs-zip := $(PRODUCT_OUT)/device-tests_configs.zip my_host_shared_lib_for_device_tests := $(call copy-many-files,$(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES)) device_tests_host_shared_libs_zip := $(PRODUCT_OUT)/device-tests_host-shared-libs.zip -$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip) +$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip) $(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list $(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests) -$(device-tests-zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip) $(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP) - rm -f $@-shared-libs.list echo $(sort $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) | tr " " "\n" > $@.list grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \ echo $$shared_lib >> $@-host.list; \ - echo $$shared_lib >> $@-shared-libs.list; \ done - grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list -sha256 $(hide) $(SOONG_ZIP) -d -o $(device-tests-configs-zip) \ -P host -C $(HOST_OUT) -l $@-host-test-configs.list \ -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list - $(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \ - -P host -C $(HOST_OUT) -l $@-host-shared-libs.list rm -f $(PRIVATE_device_tests_list) $(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_tests_list) $(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_tests_list) $(hide) $(SOONG_ZIP) -d -o $(device-tests-list-zip) -C $(dir $@) -f $(PRIVATE_device_tests_list) rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \ - $@-shared-libs.list $@-host-shared-libs.list $(PRIVATE_device_tests_list) + $(PRIVATE_device_tests_list) + +$(device_tests_host_shared_libs_zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip) +$(device_tests_host_shared_libs_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests) +$(device_tests_host_shared_libs_zip) : $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP) + rm -f $@-shared-libs.list + $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \ + echo $$shared_lib >> $@-shared-libs.list; \ + done + grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true + $(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \ + -P host -C $(HOST_OUT) -l $@-host-shared-libs.list device-tests: $(device-tests-zip) +device-tests-host-shared-libs: $(device_tests_host_shared_libs_zip) + $(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip)) +$(call dist-for-goals, 
device-tests-host-shared-libs, $(device_tests_host_shared_libs_zip)) $(call declare-1p-container,$(device-tests-zip),) $(call declare-container-license-deps,$(device-tests-zip),$(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests),$(PRODUCT_OUT)/:/) diff --git a/core/tasks/dts.mk b/core/tasks/dts.mk new file mode 100644 index 0000000000..8f090828d9 --- /dev/null +++ b/core/tasks/dts.mk @@ -0,0 +1,28 @@ +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Desktop test suite +ifneq ($(wildcard test/dts/tools/dts-tradefed/README),) +test_suite_name := dts +test_suite_tradefed := dts-tradefed +test_suite_readme := test/dts/tools/dts-tradefed/README +test_suite_tools := $(HOST_OUT_JAVA_LIBRARIES)/ats_console_deploy.jar \ + $(HOST_OUT_JAVA_LIBRARIES)/ats_olc_server_local_mode_deploy.jar + +include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk + +.PHONY: dts +dts: $(compatibility_zip) $(compatibility_tests_list_zip) +$(call dist-for-goals, dts, $(compatibility_zip) $(compatibility_tests_list_zip)) +endif diff --git a/core/tasks/general-tests-shared-libs.mk b/core/tasks/general-tests-shared-libs.mk deleted file mode 100644 index 240514073e..0000000000 --- a/core/tasks/general-tests-shared-libs.mk +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (C) 2024 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -.PHONY: general-tests-shared-libs - -intermediates_dir := $(call intermediates-dir-for,PACKAGING,general-tests-shared-libs) - -general_tests_shared_libs_zip := $(PRODUCT_OUT)/general-tests_host-shared-libs.zip - -# Filter shared entries between general-tests and device-tests's HOST_SHARED_LIBRARY.FILES, -# to avoid warning about overriding commands. 
-my_host_shared_lib_for_general_tests := \ - $(foreach m,$(filter $(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES),\ - $(COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES)),$(call word-colon,2,$(m))) -my_general_tests_shared_lib_files := \ - $(filter-out $(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES),\ - $(COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES)) - -my_host_shared_lib_for_general_tests += $(call copy-many-files,$(my_general_tests_shared_lib_files)) - -$(general_tests_shared_libs_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir) -$(general_tests_shared_libs_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_general_tests) -$(general_tests_shared_libs_zip) : PRIVATE_general_host_shared_libs_zip := $(general_tests_shared_libs_zip) -$(general_tests_shared_libs_zip) : $(my_host_shared_lib_for_general_tests) $(SOONG_ZIP) - rm -rf $(PRIVATE_INTERMEDIATES_DIR) - mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools - $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \ - echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \ - done - grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true - $(SOONG_ZIP) -d -o $(PRIVATE_general_host_shared_libs_zip) \ - -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list - -general-tests-shared-libs: $(general_tests_shared_libs_zip) -$(call dist-for-goals, general-tests-shared-libs, $(general_tests_shared_libs_zip)) - -$(call declare-1p-container,$(general_tests_shared_libs_zip),) -$(call declare-container-license-deps,$(general_tests_shared_libs_zip),$(my_host_shared_lib_for_general_tests),$(PRODUCT_OUT)/:/) - -intermediates_dir := -general_tests_shared_libs_zip := diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk index d6fc0722ef..dcfcfad62f 100644 --- a/core/tasks/general-tests.mk +++ b/core/tasks/general-tests.mk @@ -27,32 +27,61 @@ general_tests_list_zip := $(PRODUCT_OUT)/general-tests_list.zip # Create an artifact to include all test config files in general-tests. general_tests_configs_zip := $(PRODUCT_OUT)/general-tests_configs.zip -# Copy kernel test modules to testcases directories -include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk -ltp_copy_pairs := \ - $(call target-native-copy-pairs,$(kernel_ltp_modules),$(kernel_ltp_host_out)) -copy_ltp_tests := $(call copy-many-files,$(ltp_copy_pairs)) +# Filter shared entries between general-tests and device-tests's HOST_SHARED_LIBRARY.FILES, +# to avoid warning about overriding commands. 
+my_host_shared_lib_for_general_tests := \ + $(foreach m,$(filter $(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES),\ + $(COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES)),$(call word-colon,2,$(m))) +my_general_tests_shared_lib_files := \ + $(filter-out $(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES),\ + $(COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES)) -# PHONY target to be used to build and test `vts_ltp_tests` without building full vts -.PHONY: vts_kernel_ltp_tests -vts_kernel_ltp_tests: $(copy_ltp_tests) +my_host_shared_lib_for_general_tests += $(call copy-many-files,$(my_general_tests_shared_lib_files)) -general_tests_shared_libs_zip := $(PRODUCT_OUT)/general-tests_host-shared-libs.zip +my_host_shared_lib_symlinks := \ + $(filter $(COMPATIBILITY.host-unit-tests.SYMLINKS),\ + $(COMPATIBILITY.general-tests.SYMLINKS)) + +my_general_tests_symlinks := \ + $(filter-out $(COMPATIBILITY.camera-hal-tests.SYMLINKS),\ + $(filter-out $(COMPATIBILITY.host-unit-tests.SYMLINKS),\ + $(COMPATIBILITY.general-tests.SYMLINKS))) + +my_symlinks_for_general_tests := $(foreach f,$(my_general_tests_symlinks),\ + $(strip $(eval _cmf_tuple := $(subst :, ,$(f))) \ + $(eval _cmf_dep := $(word 1,$(_cmf_tuple))) \ + $(eval _cmf_src := $(word 2,$(_cmf_tuple))) \ + $(eval _cmf_dest := $(word 3,$(_cmf_tuple))) \ + $(call symlink-file,$(_cmf_dep),$(_cmf_src),$(_cmf_dest)) \ + $(_cmf_dest))) + +# In this one directly take the overlap into the zip since we can't rewrite rules +my_symlinks_for_general_tests += $(foreach f,$(my_host_shared_lib_symlinks),\ + $(strip $(eval _cmf_tuple := $(subst :, ,$(f))) \ + $(eval _cmf_dep := $(word 1,$(_cmf_tuple))) \ + $(eval _cmf_src := $(word 2,$(_cmf_tuple))) \ + $(eval _cmf_dest := $(word 3,$(_cmf_tuple))) \ + $(_cmf_dest))) -$(general_tests_zip) : $(general_tests_shared_libs_zip) -$(general_tests_zip) : $(copy_ltp_tests) -$(general_tests_zip) : PRIVATE_KERNEL_LTP_HOST_OUT := $(kernel_ltp_host_out) $(general_tests_zip) : PRIVATE_general_tests_list_zip := $(general_tests_list_zip) $(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip) $(general_tests_configs_zip) $(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools) $(general_tests_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir) +$(general_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_general_tests) +$(general_tests_zip) : PRIVATE_SYMLINKS := $(my_symlinks_for_general_tests) $(general_tests_zip) : PRIVATE_general_tests_configs_zip := $(general_tests_configs_zip) -$(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(COMPATIBILITY.general-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES) $(general_tests_tools) $(SOONG_ZIP) +$(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(my_host_shared_lib_for_general_tests) $(COMPATIBILITY.general-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES) $(general_tests_tools) $(my_symlinks_for_general_tests) $(SOONG_ZIP) rm -rf $(PRIVATE_INTERMEDIATES_DIR) rm -f $@ $(PRIVATE_general_tests_list_zip) mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools echo $(sort $(COMPATIBILITY.general-tests.FILES) $(COMPATIBILITY.general-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list - find $(PRIVATE_KERNEL_LTP_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list + for symlink in $(PRIVATE_SYMLINKS); do \ + echo $$symlink >> $(PRIVATE_INTERMEDIATES_DIR)/list; \ + done + $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \ + echo 
$$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \ + done + grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true @@ -62,6 +91,7 @@ $(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(COMPATIBILITY.gene -P host -C $(PRIVATE_INTERMEDIATES_DIR) -D $(PRIVATE_INTERMEDIATES_DIR)/tools \ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \ -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \ + -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list \ -sha256 $(SOONG_ZIP) -d -o $(PRIVATE_general_tests_configs_zip) \ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \ @@ -82,3 +112,8 @@ general_tests_zip := general_tests_list_zip := general_tests_configs_zip := general_tests_shared_libs_zip := +my_host_shared_lib_for_general_tests := +my_symlinks_for_general_tests := +my_general_tests_shared_lib_files := +my_general_tests_symlinks := +my_host_shared_lib_symlinks := diff --git a/core/tasks/mcts.mk b/core/tasks/mcts.mk index 09a41915eb..02e916a38d 100644 --- a/core/tasks/mcts.mk +++ b/core/tasks/mcts.mk @@ -15,7 +15,8 @@ ifneq ($(wildcard test/mts/README.md),) mcts_test_suites := -mcts_test_suites += mcts +mcts_all_test_suites := +mcts_all_test_suites += mcts $(foreach module, $(mts_modules), $(eval mcts_test_suites += mcts-$(module))) @@ -29,4 +30,14 @@ $(foreach suite, $(mcts_test_suites), \ $(eval $(call dist-for-goals, $(suite), $(compatibility_zip))) \ ) +$(foreach suite, $(mcts_all_test_suites), \ + $(eval test_suite_name := $(suite)) \ + $(eval test_suite_tradefed := mcts-tradefed) \ + $(eval test_suite_readme := test/mts/README.md) \ + $(eval include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk) \ + $(eval .PHONY: $(suite)) \ + $(eval $(suite): $(compatibility_zip)) \ + $(eval $(call dist-for-goals, $(suite), $(compatibility_zip))) \ +) + endif diff --git a/core/tasks/meta-lic.mk b/core/tasks/meta-lic.mk index 24adfc827f..620b1e29ae 100644 --- a/core/tasks/meta-lic.mk +++ b/core/tasks/meta-lic.mk @@ -225,3 +225,6 @@ $(eval $(call declare-1p-copy-files,packages/services/Car,)) # Moved here from hardware/libhardware_legacy/Android.mk $(eval $(call declare-1p-copy-files,hardware/libhardware_legacy,)) + +# Moved here from system/core/rootdir/Android.mk +$(eval $(call declare-1p-copy-files,system/core/rootdir,)) diff --git a/core/tasks/mke2fs-dist.mk b/core/tasks/mke2fs-dist.mk new file mode 100644 index 0000000000..3540c1f985 --- /dev/null +++ b/core/tasks/mke2fs-dist.mk @@ -0,0 +1,22 @@ +# Copyright (C) 2024 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# TODO: After Soong's recovery partition variation can be set to selectable +# and the meta_lic file duplication issue is resolved, move it to the +# dist section of the corresponding module's Android.bp. +my_dist_files := $(HOST_OUT_EXECUTABLES)/mke2fs +my_dist_files += $(HOST_OUT_EXECUTABLES)/make_f2fs +my_dist_files += $(HOST_OUT_EXECUTABLES)/make_f2fs_casefold +$(call dist-for-goals,dist_files sdk,$(my_dist_files)) +my_dist_files := diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk index 75936685a4..dd01f9667c 100644 --- a/core/tasks/module-info.mk +++ b/core/tasks/module-info.mk @@ -13,7 +13,7 @@ define write-optional-json-bool $(if $(strip $(2)),'$(COMMA)$(strip $(1)): "$(strip $(2))"') endef -SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT).json +SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT)${COVERAGE_SUFFIX}.json $(MODULE_INFO_JSON): PRIVATE_SOONG_MODULE_INFO := $(SOONG_MODULE_INFO) $(MODULE_INFO_JSON): PRIVATE_MERGE_JSON_OBJECTS := $(HOST_OUT_EXECUTABLES)/merge_module_info_json @@ -50,6 +50,8 @@ $(MODULE_INFO_JSON): $(SOONG_MODULE_INFO) $(call write-optional-json-list, "host_dependencies", $(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET))) \ $(call write-optional-json-list, "target_dependencies", $(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST))) \ $(call write-optional-json-bool, "test_module_config_base", $(ALL_MODULES.$(m).TEST_MODULE_CONFIG_BASE)) \ + $(call write-optional-json-bool, "make", $(if $(ALL_MODULES.$(m).IS_SOONG_MODULE),,true)) \ + $(call write-optional-json-bool, "make_generated_module_info", true) \ '}')'\n}\n' >> $@.tmp $(PRIVATE_MERGE_JSON_OBJECTS) -o $@ $(PRIVATE_SOONG_MODULE_INFO) $@.tmp rm $@.tmp diff --git a/core/tasks/tools/vts-kernel-tests.mk b/core/tasks/prebuilt_tradefed.mk index e727dc1f55..96c57d5633 100644 --- a/core/tasks/tools/vts-kernel-tests.mk +++ b/core/tasks/prebuilt_tradefed.mk @@ -1,4 +1,4 @@ -# Copyright (C) 2022 The Android Open Source Project +# Copyright (C) 2020 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,13 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. --include external/ltp/android/ltp_package_list.mk +ifeq (,$(wildcard tools/tradefederation/core)) +.PHONY: tradefed-core +tradefed-core: tradefed atest_tradefed.sh +.PHONY: tradefed-all +tradefed-all: tradefed atest_tradefed.sh -include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk - -# Copy kernel test modules to testcases directories -kernel_ltp_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_ltp_tests -kernel_ltp_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_ltp_tests -kernel_ltp_modules := \ - ltp \ - $(ltp_packages) +$(call dist-for-goals, tradefed, $(HOST_OUT)/etc/tradefed.zip) +endif diff --git a/core/tasks/sts-sdk.mk b/core/tasks/sts-sdk.mk deleted file mode 100644 index b8ce5bf4a5..0000000000 --- a/core/tasks/sts-sdk.mk +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (C) 2022 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -ifneq ($(wildcard test/sts/README-sts-sdk.md),) -test_suite_name := sts-sdk -test_suite_tradefed := sts-tradefed -test_suite_readme := test/sts/README-sts-sdk.md -sts_sdk_zip := $(HOST_OUT)/$(test_suite_name)/sts-sdk.zip - -include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk - -sts_sdk_plugin_skel := $(call intermediates-dir-for,ETC,sts-sdk-plugin-skel.zip)/sts-sdk-plugin-skel.zip - -$(sts_sdk_zip): STS_SDK_ZIP := $(compatibility_zip) -$(sts_sdk_zip): STS_SDK_PLUGIN_SKEL := $(sts_sdk_plugin_skel) -$(sts_sdk_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(sts_sdk_plugin_skel) - rm -f $@ $(STS_SDK_ZIP)_filtered - $(ZIP2ZIP) -i $(STS_SDK_ZIP) -o $(STS_SDK_ZIP)_filtered \ - -x android-sts-sdk/tools/sts-tradefed-tests.jar \ - 'android-sts-sdk/tools/*:plugin/src/main/resources/sts-tradefed-tools/' \ - 'android-sts-sdk/jdk/**/*:plugin/src/main/resources/jdk/' - $(MERGE_ZIPS) $@ $(STS_SDK_ZIP)_filtered $(STS_SDK_PLUGIN_SKEL) - rm -f $(STS_SDK_ZIP)_filtered - -.PHONY: sts-sdk -sts-sdk: $(sts_sdk_zip) -$(call dist-for-goals, sts-sdk, $(sts_sdk_zip)) - -endif diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk index 86c23f83d5..f205cea156 100644 --- a/core/tasks/tools/compatibility.mk +++ b/core/tasks/tools/compatibility.mk @@ -27,9 +27,10 @@ # compatibility_zip: the path to the output zip file. special_mts_test_suites := -special_mts_test_suites += mcts special_mts_test_suites += $(mts_modules) -ifneq ($(filter $(special_mts_test_suites),$(subst -, ,$(test_suite_name))),) +ifneq ($(filter $(special_mts_test_suites),$(patsubst mcts-%,%,$(test_suite_name))),) + test_suite_subdir := android-mts +else ifneq ($(filter $(special_mts_test_suites),$(patsubst mts-%,%,$(test_suite_name))),) test_suite_subdir := android-mts else test_suite_subdir := android-$(test_suite_name) diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk deleted file mode 100644 index 1a819f2172..0000000000 --- a/core/tasks/tools/vts_package_utils.mk +++ /dev/null @@ -1,34 +0,0 @@ -# -# Copyright (C) 2020 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# $(1): List of target native files to copy. -# $(2): Copy destination directory. -# Evaluates to a list of ":"-separated pairs src:dst. 
-define target-native-copy-pairs -$(foreach m,$(1),\ - $(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\ - $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\ - $(foreach i, $(sort $(_built_files)),\ - $(eval bui_ins := $(subst :,$(space),$(i)))\ - $(eval ins := $(word 2,$(bui_ins)))\ - $(if $(filter $(TARGET_OUT_ROOT)/%,$(ins)),\ - $(eval bui := $(word 1,$(bui_ins)))\ - $(eval my_copy_dest := $(patsubst data/%,DATA/%,\ - $(patsubst system/%,DATA/%,\ - $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\ - $(call declare-copy-target-license-metadata,$(2)/$(my_copy_dest),$(bui))\ - $(bui):$(2)/$(my_copy_dest)))) -endef diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk index 61bf13695d..47c360de52 100644 --- a/core/tasks/tradefed-tests-list.mk +++ b/core/tasks/tradefed-tests-list.mk @@ -15,6 +15,11 @@ # List all TradeFed tests from COMPATIBILITY.tradefed_tests_dir .PHONY: tradefed-tests-list +COMPATIBILITY.tradefed_tests_dir := \ + $(COMPATIBILITY.tradefed_tests_dir) \ + tools/tradefederation/core/res/config \ + tools/tradefederation/core/javatests/res/config + tradefed_tests := $(foreach dir, $(COMPATIBILITY.tradefed_tests_dir), \ $(eval tradefed_tests += $(shell find $(dir) -type f -name "*.xml"))) diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk index 1eeb0789ec..11bb932f7d 100644 --- a/core/tasks/vts-core-tests.mk +++ b/core/tasks/vts-core-tests.mk @@ -16,15 +16,6 @@ test_suite_name := vts test_suite_tradefed := vts-tradefed test_suite_readme := test/vts/tools/vts-core-tradefed/README -include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk - -ltp_copy_pairs := \ - $(call target-native-copy-pairs,$(kernel_ltp_modules),$(kernel_ltp_vts_out)) - -copy_ltp_tests := $(call copy-many-files,$(ltp_copy_pairs)) - -test_suite_extra_deps := $(copy_ltp_tests) - include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk .PHONY: vts diff --git a/core/version_util.mk b/core/version_util.mk index 0e346347bb..2bf41ecceb 100644 --- a/core/version_util.mk +++ b/core/version_util.mk @@ -23,6 +23,7 @@ # PLATFORM_DISPLAY_VERSION # PLATFORM_SDK_VERSION # PLATFORM_SDK_EXTENSION_VERSION +# PLATFORM_BASE_SDK_EXTENSION_VERSION # PLATFORM_VERSION_CODENAME # DEFAULT_APP_TARGET_SDK # BUILD_ID @@ -67,8 +68,16 @@ endif PLATFORM_SDK_EXTENSION_VERSION := $(RELEASE_PLATFORM_SDK_EXTENSION_VERSION) .KATI_READONLY := PLATFORM_SDK_EXTENSION_VERSION -# This is the sdk extension version that PLATFORM_SDK_VERSION ships with. -PLATFORM_BASE_SDK_EXTENSION_VERSION := $(PLATFORM_SDK_EXTENSION_VERSION) +ifdef PLATFORM_BASE_SDK_EXTENSION_VERSION + $(error Do not set PLATFORM_BASE_SDK_EXTENSION_VERSION directly. Use RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION. value: $(PLATFORM_BASE_SDK_EXTENSION_VERSION)) +endif +ifdef RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION + # This is the sdk extension version that PLATFORM_SDK_VERSION ships with. + PLATFORM_BASE_SDK_EXTENSION_VERSION := $(RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION) +else + # Fallback to PLATFORM_SDK_EXTENSION_VERSION if RELEASE_PLATFORM_BASE_SDK_EXTENSION_VERSION is undefined. 
+ PLATFORM_BASE_SDK_EXTENSION_VERSION := $(PLATFORM_SDK_EXTENSION_VERSION) +endif .KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION ifdef PLATFORM_VERSION_CODENAME diff --git a/envsetup.sh b/envsetup.sh index 06dadd3f38..554a220f1d 100644 --- a/envsetup.sh +++ b/envsetup.sh @@ -362,7 +362,6 @@ function addcompletions() packages/modules/adb/adb.bash system/core/fastboot/fastboot.bash tools/asuite/asuite.sh - prebuilts/bazel/common/bazel-complete.bash ) # Completion can be disabled selectively to allow users to use non-standard completion. # e.g. @@ -442,6 +441,7 @@ function print_lunch_menu() function lunch() { local answer + setup_cog_env_if_needed if [[ $# -gt 1 ]]; then echo "usage: lunch [target]" >&2 @@ -1079,10 +1079,7 @@ function source_vendorsetup() { done done - if [[ "${PWD}" == /google/cog/* ]]; then - f="build/make/cogsetup.sh" - echo "including $f"; . "$T/$f" - fi + setup_cog_env_if_needed } function showcommands() { diff --git a/packaging/distdir.mk b/packaging/distdir.mk index 153ecf65b1..97ed95a569 100644 --- a/packaging/distdir.mk +++ b/packaging/distdir.mk @@ -45,5 +45,3 @@ ifeq ($(DIST),true) endif copy-one-dist-file := -DIST_GOAL_OUTPUT_PAIRS := -DIST_SRC_DST_PAIRS := diff --git a/packaging/main_soong_only.mk b/packaging/main_soong_only.mk new file mode 100644 index 0000000000..f29e5f6f0d --- /dev/null +++ b/packaging/main_soong_only.mk @@ -0,0 +1,60 @@ +# Copyright (C) 2025 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ifndef KATI +$(error Only Kati is supported.) +endif + +$(info [1/4] initializing packaging system ...) + +.KATI_READONLY := KATI_PACKAGE_MK_DIR + +include build/make/common/core.mk +include build/make/common/strings.mk + +# Define well-known goals and their dependency graph that they've +# traditionally had in make builds. Also it's important to define +# droid first so that it's built by default. + +.PHONY: droid +droid: droid_targets + +.PHONY: droid_targets +droid_targets: droidcore dist_files + +.PHONY: dist_files +dist_files: + +.PHONY: droidcore +droidcore: droidcore-unbundled + +.PHONY: droidcore-unbundled +droidcore-unbundled: + +$(info [2/4] including distdir.mk ...) + +include build/make/packaging/distdir.mk + +$(info [3/4] defining phony modules ...) + +include $(OUT_DIR)/soong/soong_phony_targets.mk + +goals := $(sort $(foreach pair,$(DIST_GOAL_OUTPUT_PAIRS),$(call word-colon,1,$(pair)))) +$(foreach goal,$(goals), \ + $(eval .PHONY: $$(goal)) \ + $(eval $$(goal):) \ + $(if $(call streq,$(DIST),true),\ + $(eval $$(goal): _dist_$$(goal)))) + +$(info [4/4] writing packaging rules ...) diff --git a/shell_utils.sh b/shell_utils.sh index 86f3f49f50..3124db598c 100644 --- a/shell_utils.sh +++ b/shell_utils.sh @@ -63,6 +63,70 @@ function require_lunch } fi +# This function sets up the build environment to be appropriate for Cog. +function setup_cog_env_if_needed() { + local top=$(gettop) + + # return early if not in a cog workspace + if [[ ! 
"$top" =~ ^/google/cog ]]; then + return 0 + fi + + setup_cog_symlink + + export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog" + + # Running repo command within Cog workspaces is not supported, so override + # it with this function. If the user is running repo within a Cog workspace, + # we'll fail with an error, otherwise, we run the original repo command with + # the given args. + if ! ORIG_REPO_PATH=`which repo`; then + return 0 + fi + function repo { + if [[ "${PWD}" == /google/cog/* ]]; then + echo -e "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces." + kill -INT $$ # exits the script without exiting the user's shell + fi + ${ORIG_REPO_PATH} "$@" + } +} + +# creates a symlink for the out/ dir when inside a cog workspace. +function setup_cog_symlink() { + local out_dir=$(getoutdir) + local top=$(gettop) + + # return early if out dir is already a symlink + if [[ -L "$out_dir" ]]; then + return 0 + fi + + # return early if out dir is not in the workspace + if [[ ! "$out_dir" =~ ^$top/ ]]; then + return 0 + fi + + local link_destination="${HOME}/.cog/android-build-out" + + # remove existing out/ dir if it exists + if [[ -d "$out_dir" ]]; then + echo "Detected existing out/ directory in the Cog workspace which is not supported. Repairing workspace by removing it and creating the symlink to ~/.cog/android-build-out" + if ! rm -rf "$out_dir"; then + echo "Failed to remove existing out/ directory: $out_dir" >&2 + kill -INT $$ # exits the script without exiting the user's shell + fi + fi + + # create symlink + echo "Creating symlink: $out_dir -> $link_destination" + mkdir -p ${link_destination} + if ! ln -s "$link_destination" "$out_dir"; then + echo "Failed to create cog symlink: $out_dir -> $link_destination" >&2 + kill -INT $$ # exits the script without exiting the user's shell + fi +} + function getoutdir { local top=$(gettop) @@ -114,11 +178,11 @@ function _wrap_build() echo -n "${color_failed}#### failed to build some targets " fi if [ $hours -gt 0 ] ; then - printf "(%02g:%02g:%02g (hh:mm:ss))" $hours $mins $secs + printf "(%02d:%02d:%02d (hh:mm:ss))" $hours $mins $secs elif [ $mins -gt 0 ] ; then - printf "(%02g:%02g (mm:ss))" $mins $secs + printf "(%02d:%02d (mm:ss))" $mins $secs elif [ $secs -gt 0 ] ; then - printf "(%s seconds)" $secs + printf "(%d seconds)" $secs fi echo " ####${color_reset}" echo @@ -150,3 +214,19 @@ function log_tool_invocation() ' SIGINT SIGTERM SIGQUIT EXIT } +# Import the build variables supplied as arguments into this shell's environment. +# For absolute variables, prefix the variable name with a '/'. For example: +# import_build_vars OUT_DIR DIST_DIR /HOST_OUT_EXECUTABLES +# Returns nonzero if the build command failed. Stderr is passed through. +function import_build_vars() +{ + require_top + local script + script=$(cd $TOP && build/soong/bin/get_build_vars "$@") + local ret=$? + if [ $ret -ne 0 ] ; then + return $ret + fi + eval "$script" + return $? 
+} diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk index 67e31dfa5f..8a62796e51 100644 --- a/target/board/BoardConfigGsiCommon.mk +++ b/target/board/BoardConfigGsiCommon.mk @@ -69,6 +69,11 @@ BOARD_SUPER_PARTITION_SIZE := 3229614080 BOARD_SUPER_PARTITION_GROUPS := gsi_dynamic_partitions BOARD_GSI_DYNAMIC_PARTITIONS_PARTITION_LIST := system BOARD_GSI_DYNAMIC_PARTITIONS_SIZE := 3221225472 + +# Build pvmfw with GSI: b/376363989 +ifeq (true,$(PRODUCT_BUILD_PVMFW_IMAGE)) +BOARD_PVMFWIMAGE_PARTITION_SIZE := 0x00100000 +endif endif # TODO(b/123695868, b/146149698): diff --git a/target/board/Android.mk b/target/board/android-info.mk index 8133af9a7f..36be0025ad 100644 --- a/target/board/Android.mk +++ b/target/board/android-info.mk @@ -51,29 +51,6 @@ $(call declare-0p-target,$(INSTALLED_ANDROID_INFO_TXT_TARGET)) # Copy compatibility metadata to the device. -# Device Manifest -ifdef DEVICE_MANIFEST_FILE -# $(DEVICE_MANIFEST_FILE) can be a list of files -include $(CLEAR_VARS) -LOCAL_MODULE := vendor_manifest.xml -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution -LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice -LOCAL_MODULE_STEM := manifest.xml -LOCAL_MODULE_CLASS := ETC -LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc/vintf - -GEN := $(local-generated-sources-dir)/manifest.xml -$(GEN): PRIVATE_DEVICE_MANIFEST_FILE := $(DEVICE_MANIFEST_FILE) -$(GEN): $(DEVICE_MANIFEST_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf - BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \ - PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \ - $(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \ - -i $(call normalize-path-list,$(PRIVATE_DEVICE_MANIFEST_FILE)) - -LOCAL_PREBUILT_MODULE_FILE := $(GEN) -include $(BUILD_PREBUILT) -endif - # DEVICE_MANIFEST_SKUS: a list of SKUS where DEVICE_MANIFEST_<sku>_FILES is defined. ifdef DEVICE_MANIFEST_SKUS @@ -112,30 +89,6 @@ _add_device_sku_manifest := endif # DEVICE_MANIFEST_SKUS -# ODM manifest -ifdef ODM_MANIFEST_FILES -# ODM_MANIFEST_FILES is a list of files that is combined and installed as the default ODM manifest. -include $(CLEAR_VARS) -LOCAL_MODULE := odm_manifest.xml -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution -LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice -LOCAL_MODULE_STEM := manifest.xml -LOCAL_MODULE_CLASS := ETC -LOCAL_MODULE_RELATIVE_PATH := vintf -LOCAL_ODM_MODULE := true - -GEN := $(local-generated-sources-dir)/manifest.xml -$(GEN): PRIVATE_SRC_FILES := $(ODM_MANIFEST_FILES) -$(GEN): $(ODM_MANIFEST_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf - # Set VINTF_IGNORE_TARGET_FCM_VERSION to true because it should only be in device manifest. - VINTF_IGNORE_TARGET_FCM_VERSION=true \ - $(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \ - -i $(call normalize-path-list,$(PRIVATE_SRC_FILES)) - -LOCAL_PREBUILT_MODULE_FILE := $(GEN) -include $(BUILD_PREBUILT) -endif # ODM_MANIFEST_FILES - # ODM_MANIFEST_SKUS: a list of SKUS where ODM_MANIFEST_<sku>_FILES are defined. ifdef ODM_MANIFEST_SKUS diff --git a/target/board/generic_64bitonly_x86_64/device.mk b/target/board/generic_64bitonly_x86_64/device.mk index bb49057abf..5edf5e0822 100644 --- a/target/board/generic_64bitonly_x86_64/device.mk +++ b/target/board/generic_64bitonly_x86_64/device.mk @@ -17,8 +17,3 @@ ifdef NET_ETH0_STARTONBOOT PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1 endif - -# Ensure we package the BIOS files too. 
-PRODUCT_HOST_PACKAGES += \ - bios.bin \ - vgabios-cirrus.bin \ diff --git a/target/board/generic_x86/device.mk b/target/board/generic_x86/device.mk index 60f0cc33f1..27fb310c2c 100644 --- a/target/board/generic_x86/device.mk +++ b/target/board/generic_x86/device.mk @@ -17,8 +17,3 @@ ifdef NET_ETH0_STARTONBOOT PRODUCT_VENDOR_PROPERTIES += net.eth0.startonboot=1 endif - -# Ensure we package the BIOS files too. -PRODUCT_HOST_PACKAGES += \ - bios.bin \ - vgabios-cirrus.bin \ diff --git a/target/product/app_function_extensions.mk b/target/product/app_function_extensions.mk new file mode 100644 index 0000000000..a61afdc5ab --- /dev/null +++ b/target/product/app_function_extensions.mk @@ -0,0 +1,22 @@ +# +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# The app function sidecar extensions + +# /system_ext packages +PRODUCT_PACKAGES += \ + com.google.android.appfunctions.sidecar \ + appfunctions.sidecar.xml diff --git a/target/product/base_product.mk b/target/product/base_product.mk index 0ac220bb16..acfc6534f8 100644 --- a/target/product/base_product.mk +++ b/target/product/base_product.mk @@ -25,3 +25,8 @@ PRODUCT_PACKAGES += \ product_compatibility_matrix.xml \ product_manifest.xml \ selinux_policy_product \ + product-build.prop \ + +# Packages included only for eng or userdebug builds, previously debug tagged +PRODUCT_PACKAGES_DEBUG += \ + adb_keys \ diff --git a/target/product/base_system.mk b/target/product/base_system.mk index 5b54051303..40e2aa14a8 100644 --- a/target/product/base_system.mk +++ b/target/product/base_system.mk @@ -17,7 +17,7 @@ # Base modules and settings for the system partition. 
PRODUCT_PACKAGES += \ abx \ - aconfigd \ + aconfigd-system \ adbd_system_api \ aflags \ am \ @@ -52,7 +52,7 @@ PRODUCT_PACKAGES += \ com.android.adbd \ com.android.adservices \ com.android.appsearch \ - com.android.btservices \ + com.android.bt \ com.android.configinfrastructure \ com.android.conscrypt \ com.android.devicelock \ @@ -83,7 +83,6 @@ PRODUCT_PACKAGES += \ CtsShimPrivPrebuilt \ debuggerd\ device_config \ - DeviceDiagnostics \ dmctl \ dnsmasq \ dmesgd \ @@ -179,6 +178,7 @@ PRODUCT_PACKAGES += \ libmedia \ libmedia_jni \ libmediandk \ + libmonkey_jni \ libmtp \ libnetd_client \ libnetlink \ @@ -205,6 +205,7 @@ PRODUCT_PACKAGES += \ libstdc++ \ libsysutils \ libui \ + libuprobestats_client \ libusbhost \ libutils \ libvintf_jni \ @@ -212,6 +213,7 @@ PRODUCT_PACKAGES += \ libwilhelm \ linker \ llkd \ + llndk_libs \ lmkd \ LocalTransport \ locksettings \ @@ -240,6 +242,7 @@ PRODUCT_PACKAGES += \ PackageInstaller \ package-shareduid-allowlist.xml \ passwd_system \ + pbtombstone \ perfetto \ perfetto-extras \ ping \ @@ -247,6 +250,7 @@ PRODUCT_PACKAGES += \ pintool \ platform.xml \ pm \ + prefetch \ preinstalled-packages-asl-files.xml \ preinstalled-packages-platform.xml \ preinstalled-packages-strict-signature.xml \ @@ -275,7 +279,6 @@ PRODUCT_PACKAGES += \ Shell \ shell_and_utilities_system \ sm \ - snapshotctl \ snapuserd \ storaged \ surfaceflinger \ @@ -288,11 +291,11 @@ PRODUCT_PACKAGES += \ tombstoned \ traced \ traced_probes \ + tradeinmode \ tune2fs \ uiautomator \ uinput \ uncrypt \ - uprobestats \ usbd \ vdc \ vintf \ @@ -310,6 +313,17 @@ ifeq ($(RELEASE_CRASHRECOVERY_MODULE),true) endif +# When we release uprobestats module +ifeq ($(RELEASE_UPROBESTATS_MODULE),true) + PRODUCT_PACKAGES += \ + com.android.uprobestats \ + +else + PRODUCT_PACKAGES += \ + uprobestats \ + +endif + # These packages are not used on Android TV ifneq ($(PRODUCT_IS_ATV),true) PRODUCT_PACKAGES += \ @@ -346,6 +360,23 @@ ifeq ($(RELEASE_USE_WEBVIEW_BOOTSTRAP_MODULE),true) com.android.webview.bootstrap endif +# Only add the jar when it is not in the Tethering module. Otherwise, +# it will be added via com.android.tethering +ifneq ($(RELEASE_MOVE_VCN_TO_MAINLINE),true) + PRODUCT_PACKAGES += \ + framework-connectivity-b +endif + +ifneq (,$(RELEASE_RANGING_STACK)) + PRODUCT_PACKAGES += \ + com.android.ranging +endif + +ifeq ($(RELEASE_MEMORY_MANAGEMENT_DAEMON),true) + PRODUCT_PACKAGES += \ + mm_daemon +endif + # VINTF data for system image PRODUCT_PACKAGES += \ system_manifest.xml \ @@ -420,6 +451,7 @@ PRODUCT_HOST_PACKAGES += \ lpdump \ mke2fs \ mkfs.erofs \ + pbtombstone \ resize2fs \ sgdisk \ sqlite3 \ @@ -458,6 +490,7 @@ PRODUCT_VENDOR_PROPERTIES += ro.zygote?=zygote32 PRODUCT_SYSTEM_PROPERTIES += debug.atrace.tags.enableflags=0 PRODUCT_SYSTEM_PROPERTIES += persist.traced.enable=1 +PRODUCT_SYSTEM_PROPERTIES += ro.surface_flinger.game_default_frame_rate_override=60 # Include kernel configs. 
PRODUCT_PACKAGES += \ @@ -466,7 +499,6 @@ PRODUCT_PACKAGES += \ # Packages included only for eng or userdebug builds, previously debug tagged PRODUCT_PACKAGES_DEBUG := \ - adb_keys \ adevice_fingerprint \ arping \ dmuserd \ @@ -482,12 +514,14 @@ PRODUCT_PACKAGES_DEBUG := \ logtagd.rc \ ot-cli-ftd \ ot-ctl \ + overlay_remounter \ procrank \ profcollectd \ profcollectctl \ record_binder \ servicedispatcher \ showmap \ + snapshotctl \ sqlite3 \ ss \ start_with_lockagent \ @@ -534,3 +568,4 @@ $(call inherit-product,$(SRC_TARGET_DIR)/product/updatable_apex.mk) $(call soong_config_set, bionic, large_system_property_node, $(RELEASE_LARGE_SYSTEM_PROPERTY_NODE)) $(call soong_config_set, Aconfig, read_from_new_storage, $(RELEASE_READ_FROM_NEW_STORAGE)) +$(call soong_config_set, SettingsLib, legacy_avatar_picker_app_enabled, $(if $(RELEASE_AVATAR_PICKER_APP),,true)) diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk index 5b1cae578f..16fc7fd906 100644 --- a/target/product/base_vendor.mk +++ b/target/product/base_vendor.mk @@ -17,7 +17,6 @@ # Base modules and settings for recovery. PRODUCT_PACKAGES += \ adbd.recovery \ - android.hardware.health@2.0-impl-default.recovery \ build_flag_vendor \ cgroups.recovery.json \ charger.recovery \ @@ -72,7 +71,10 @@ PRODUCT_PACKAGES += \ passwd_odm \ passwd_vendor \ selinux_policy_nonsystem \ + selinux_policy_vendor \ + selinux_policy_odm \ shell_and_utilities_vendor \ + odm-build.prop \ # libhealthloop BPF filter. This is in base_vendor.mk because libhealthloop must # be a static library and because the Android build system ignores 'required' @@ -104,6 +106,7 @@ PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29 += \ # VINTF data for vendor image PRODUCT_PACKAGES += \ vendor_compatibility_matrix.xml \ + vendor_manifest.xml \ # Base modules and settings for the debug ramdisk, which is then packed # into a boot-debug.img and a vendor_boot-debug.img. 
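The base_system.mk hunks above repeat one release-flag idiom: a RELEASE_* build flag selects between shipping a mainline APEX and the platform-built package, and the same flag is plumbed through to Soong with soong_config_set. A condensed sketch of that idiom, with hypothetical names (RELEASE_EXAMPLE_MODULE, com.android.example, example_daemon, and the "example" Soong config namespace):

ifeq ($(RELEASE_EXAMPLE_MODULE),true)
  # Flag on: ship the APEX, which carries the binary.
  PRODUCT_PACKAGES += \
    com.android.example \

else
  # Flag off: keep installing the platform copy directly.
  PRODUCT_PACKAGES += \
    example_daemon \

endif

# Make the same flag visible to Android.bp modules via a Soong config variable.
$(call soong_config_set, example, release_example_module, $(RELEASE_EXAMPLE_MODULE))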
diff --git a/target/product/build_variables.mk b/target/product/build_variables.mk index 9fc9ff9dc1..e99ab0686a 100644 --- a/target/product/build_variables.mk +++ b/target/product/build_variables.mk @@ -20,5 +20,14 @@ # Control libbinder client caching $(call soong_config_set, libbinder, release_libbinder_client_cache, $(RELEASE_LIBBINDER_CLIENT_CACHE)) +# Control caching while adding service in libbinder cache +$(call soong_config_set, libbinder, release_libbinder_addservice_cache, $(RELEASE_LIBBINDER_ADDSERVICE_CACHE)) + +# Remove static list in libbinder cache +$(call soong_config_set, libbinder, release_libbinder_remove_cache_static_list, $(RELEASE_LIBBINDER_REMOVE_CACHE_STATIC_LIST)) + # Use the configured release of sqlite $(call soong_config_set, libsqlite3, release_package_libsqlite3, $(RELEASE_PACKAGE_LIBSQLITE3)) + +# Use the configured MessageQueue implementation +$(call soong_config_set, messagequeue, release_package_messagequeue_implementation, $(RELEASE_PACKAGE_MESSAGEQUEUE_IMPLEMENTATION)) diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk index 1a3f2cf0e8..e543ccf035 100644 --- a/target/product/default_art_config.mk +++ b/target/product/default_art_config.mk @@ -65,7 +65,7 @@ PRODUCT_APEX_BOOT_JARS := \ com.android.adservices:framework-adservices \ com.android.adservices:framework-sdksandbox \ com.android.appsearch:framework-appsearch \ - com.android.btservices:framework-bluetooth \ + com.android.bt:framework-bluetooth \ com.android.configinfrastructure:framework-configinfrastructure \ com.android.conscrypt:conscrypt \ com.android.devicelock:framework-devicelock \ @@ -76,6 +76,7 @@ PRODUCT_APEX_BOOT_JARS := \ com.android.mediaprovider:framework-mediaprovider \ com.android.mediaprovider:framework-pdf \ com.android.mediaprovider:framework-pdf-v \ + com.android.mediaprovider:framework-photopicker \ com.android.ondevicepersonalization:framework-ondevicepersonalization \ com.android.os.statsd:framework-statsd \ com.android.permission:framework-permission \ @@ -113,6 +114,23 @@ ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true) endif +ifneq (,$(RELEASE_RANGING_STACK)) + PRODUCT_APEX_BOOT_JARS += \ + com.android.uwb:framework-ranging \ + $(call soong_config_set,bootclasspath,release_ranging_stack,true) +endif + +# Check if VCN should be built into the tethering module or not +ifeq ($(RELEASE_MOVE_VCN_TO_MAINLINE),true) + PRODUCT_APEX_BOOT_JARS += \ + com.android.tethering:framework-connectivity-b \ + +else + PRODUCT_BOOT_JARS += \ + framework-connectivity-b \ + +endif + # List of system_server classpath jars delivered via apex. # Keep the list sorted by module names and then library names. # Note: For modules available in Q, DO NOT add new entries here. @@ -153,7 +171,7 @@ PRODUCT_STANDALONE_SYSTEM_SERVER_JARS := \ # Keep the list sorted by module names and then library names. # Note: For modules available in Q, DO NOT add new entries here. PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS := \ - com.android.btservices:service-bluetooth \ + com.android.bt:service-bluetooth \ com.android.devicelock:service-devicelock \ com.android.os.statsd:service-statsd \ com.android.scheduling:service-scheduling \ @@ -168,6 +186,11 @@ ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true) endif +ifneq (,$(RELEASE_RANGING_STACK)) + PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS += \ + com.android.uwb:service-ranging +endif + # Overrides the (apex, jar) pairs above when determining the on-device location. 
The format is: # <old_apex>:<old_jar>:<new_apex>:<new_jar> PRODUCT_CONFIGURED_JAR_LOCATION_OVERRIDES := \ diff --git a/target/product/full_x86.mk b/target/product/full_x86.mk index 07f6472844..a1b71caaed 100644 --- a/target/product/full_x86.mk +++ b/target/product/full_x86.mk @@ -32,11 +32,6 @@ ifdef NET_ETH0_STARTONBOOT PRODUCT_VENDOR_PROPERTIES += net.eth0.startonboot=1 endif -# Ensure we package the BIOS files too. -PRODUCT_HOST_PACKAGES += \ - bios.bin \ - vgabios-cirrus.bin \ - # Enable dynamic partition size PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true diff --git a/target/product/fullmte.mk b/target/product/fullmte.mk index b62249601e..fed66e7ef2 100644 --- a/target/product/fullmte.mk +++ b/target/product/fullmte.mk @@ -20,7 +20,7 @@ # For more details, see: # https://source.android.com/docs/security/test/memory-safety/arm-mte ifeq ($(filter memtag_heap,$(SANITIZE_TARGET)),) - SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap memtag_stack) + SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap memtag_stack memtag_globals) SANITIZE_TARGET_DIAG := $(strip $(SANITIZE_TARGET_DIAG) memtag_heap) endif PRODUCT_PRODUCT_PROPERTIES += persist.arm64.memtag.default=sync diff --git a/target/product/generic/Android.bp b/target/product/generic/Android.bp new file mode 100644 index 0000000000..82b6e76f72 --- /dev/null +++ b/target/product/generic/Android.bp @@ -0,0 +1,1012 @@ +generic_rootdirs = [ + "apex", + "bootstrap-apex", + "config", + "data", + "data_mirror", + "debug_ramdisk", + "dev", + "linkerconfig", + "metadata", + "mnt", + "odm", + "odm_dlkm", + "oem", + "postinstall", + "proc", + "second_stage_resources", + "storage", + "sys", + "system", + "system_dlkm", + "tmp", + "vendor", + "vendor_dlkm", +] + +android_rootdirs = [ + "system_ext", + "product", +] + +generic_symlinks = [ + { + target: "/system/bin/init", + name: "init", + }, + { + target: "/system/etc", + name: "etc", + }, + { + target: "/system/bin", + name: "bin", + }, + { + target: "/vendor", + name: "system/vendor", + }, + { + target: "/system_dlkm/lib/modules", + name: "system/lib/modules", + }, + { + target: "/data/user_de/0/com.android.shell/files/bugreports", + name: "bugreports", + }, + { + target: "/sys/kernel/debug", + name: "d", + }, + { + target: "/storage/self/primary", + name: "sdcard", + }, + { + target: "/product/etc/security/adb_keys", + name: "adb_keys", + }, + // For Treble Generic System Image (GSI), system-as-root GSI needs to work on both devices with + // and without /odm partition. Those symlinks are for devices without /odm partition. For + // devices with /odm partition, mount odm.img under /odm will hide those symlinks. 
+ { + target: "/vendor/odm/app", + name: "odm/app", + }, + { + target: "/vendor/odm/bin", + name: "odm/bin", + }, + { + target: "/vendor/odm/etc", + name: "odm/etc", + }, + { + target: "/vendor/odm/firmware", + name: "odm/firmware", + }, + { + target: "/vendor/odm/framework", + name: "odm/framework", + }, + { + target: "/vendor/odm/lib", + name: "odm/lib", + }, + { + target: "/vendor/odm/lib64", + name: "odm/lib64", + }, + { + target: "/vendor/odm/overlay", + name: "odm/overlay", + }, + { + target: "/vendor/odm/priv-app", + name: "odm/priv-app", + }, + { + target: "/vendor/odm/usr", + name: "odm/usr", + }, +] + +android_symlinks = [ + { + target: "/product", + name: "system/product", + }, + { + target: "/system_ext", + name: "system/system_ext", + }, + { + target: "/data/cache", + name: "cache", + }, +] + +filegroup { + name: "generic_system_sign_key", + srcs: [":avb_testkey_rsa4096"], +} + +phony { + name: "generic_system_fonts", + required: [ + "AndroidClock.ttf", + "CarroisGothicSC-Regular.ttf", + "ComingSoon.ttf", + "CutiveMono.ttf", + "DancingScript-Regular.ttf", + "DroidSansMono.ttf", + "NotoColorEmoji.ttf", + "NotoColorEmojiFlags.ttf", + "NotoNaskhArabic-Bold.ttf", + "NotoNaskhArabic-Regular.ttf", + "NotoNaskhArabicUI-Bold.ttf", + "NotoNaskhArabicUI-Regular.ttf", + "NotoSansAdlam-VF.ttf", + "NotoSansAhom-Regular.otf", + "NotoSansAnatolianHieroglyphs-Regular.otf", + "NotoSansArmenian-VF.ttf", + "NotoSansAvestan-Regular.ttf", + "NotoSansBalinese-Regular.ttf", + "NotoSansBamum-Regular.ttf", + "NotoSansBassaVah-Regular.otf", + "NotoSansBatak-Regular.ttf", + "NotoSansBengali-VF.ttf", + "NotoSansBengaliUI-VF.ttf", + "NotoSansBhaiksuki-Regular.otf", + "NotoSansBrahmi-Regular.ttf", + "NotoSansBuginese-Regular.ttf", + "NotoSansBuhid-Regular.ttf", + "NotoSansCJK-Regular.ttc", + "NotoSansCanadianAboriginal-Regular.ttf", + "NotoSansCarian-Regular.ttf", + "NotoSansChakma-Regular.otf", + "NotoSansCham-Bold.ttf", + "NotoSansCham-Regular.ttf", + "NotoSansCherokee-Regular.ttf", + "NotoSansCoptic-Regular.ttf", + "NotoSansCuneiform-Regular.ttf", + "NotoSansCypriot-Regular.ttf", + "NotoSansDeseret-Regular.ttf", + "NotoSansDevanagari-VF.ttf", + "NotoSansDevanagariUI-VF.ttf", + "NotoSansEgyptianHieroglyphs-Regular.ttf", + "NotoSansElbasan-Regular.otf", + "NotoSansEthiopic-VF.ttf", + "NotoSansGeorgian-VF.ttf", + "NotoSansGlagolitic-Regular.ttf", + "NotoSansGothic-Regular.ttf", + "NotoSansGrantha-Regular.ttf", + "NotoSansGujarati-Bold.ttf", + "NotoSansGujarati-Regular.ttf", + "NotoSansGujaratiUI-Bold.ttf", + "NotoSansGujaratiUI-Regular.ttf", + "NotoSansGunjalaGondi-Regular.otf", + "NotoSansGurmukhi-VF.ttf", + "NotoSansGurmukhiUI-VF.ttf", + "NotoSansHanifiRohingya-Regular.otf", + "NotoSansHanunoo-Regular.ttf", + "NotoSansHatran-Regular.otf", + "NotoSansHebrew-Bold.ttf", + "NotoSansHebrew-Regular.ttf", + "NotoSansImperialAramaic-Regular.ttf", + "NotoSansInscriptionalPahlavi-Regular.ttf", + "NotoSansInscriptionalParthian-Regular.ttf", + "NotoSansJavanese-Regular.otf", + "NotoSansKaithi-Regular.ttf", + "NotoSansKannada-VF.ttf", + "NotoSansKannadaUI-VF.ttf", + "NotoSansKayahLi-Regular.ttf", + "NotoSansKharoshthi-Regular.ttf", + "NotoSansKhmer-VF.ttf", + "NotoSansKhmerUI-Bold.ttf", + "NotoSansKhmerUI-Regular.ttf", + "NotoSansKhojki-Regular.otf", + "NotoSansLao-Bold.ttf", + "NotoSansLao-Regular.ttf", + "NotoSansLaoUI-Bold.ttf", + "NotoSansLaoUI-Regular.ttf", + "NotoSansLepcha-Regular.ttf", + "NotoSansLimbu-Regular.ttf", + "NotoSansLinearA-Regular.otf", + "NotoSansLinearB-Regular.ttf", + 
"NotoSansLisu-Regular.ttf", + "NotoSansLycian-Regular.ttf", + "NotoSansLydian-Regular.ttf", + "NotoSansMalayalam-VF.ttf", + "NotoSansMalayalamUI-VF.ttf", + "NotoSansMandaic-Regular.ttf", + "NotoSansManichaean-Regular.otf", + "NotoSansMarchen-Regular.otf", + "NotoSansMasaramGondi-Regular.otf", + "NotoSansMedefaidrin-VF.ttf", + "NotoSansMeeteiMayek-Regular.ttf", + "NotoSansMeroitic-Regular.otf", + "NotoSansMiao-Regular.otf", + "NotoSansModi-Regular.ttf", + "NotoSansMongolian-Regular.ttf", + "NotoSansMro-Regular.otf", + "NotoSansMultani-Regular.otf", + "NotoSansMyanmar-Bold.otf", + "NotoSansMyanmar-Medium.otf", + "NotoSansMyanmar-Regular.otf", + "NotoSansMyanmarUI-Bold.otf", + "NotoSansMyanmarUI-Medium.otf", + "NotoSansMyanmarUI-Regular.otf", + "NotoSansNKo-Regular.ttf", + "NotoSansNabataean-Regular.otf", + "NotoSansNewTaiLue-Regular.ttf", + "NotoSansNewa-Regular.otf", + "NotoSansOgham-Regular.ttf", + "NotoSansOlChiki-Regular.ttf", + "NotoSansOldItalic-Regular.ttf", + "NotoSansOldNorthArabian-Regular.otf", + "NotoSansOldPermic-Regular.otf", + "NotoSansOldPersian-Regular.ttf", + "NotoSansOldSouthArabian-Regular.ttf", + "NotoSansOldTurkic-Regular.ttf", + "NotoSansOriya-Bold.ttf", + "NotoSansOriya-Regular.ttf", + "NotoSansOriyaUI-Bold.ttf", + "NotoSansOriyaUI-Regular.ttf", + "NotoSansOsage-Regular.ttf", + "NotoSansOsmanya-Regular.ttf", + "NotoSansPahawhHmong-Regular.otf", + "NotoSansPalmyrene-Regular.otf", + "NotoSansPauCinHau-Regular.otf", + "NotoSansPhagsPa-Regular.ttf", + "NotoSansPhoenician-Regular.ttf", + "NotoSansRejang-Regular.ttf", + "NotoSansRunic-Regular.ttf", + "NotoSansSamaritan-Regular.ttf", + "NotoSansSaurashtra-Regular.ttf", + "NotoSansSharada-Regular.otf", + "NotoSansShavian-Regular.ttf", + "NotoSansSinhala-VF.ttf", + "NotoSansSinhalaUI-VF.ttf", + "NotoSansSoraSompeng-Regular.otf", + "NotoSansSoyombo-VF.ttf", + "NotoSansSundanese-Regular.ttf", + "NotoSansSylotiNagri-Regular.ttf", + "NotoSansSymbols-Regular-Subsetted.ttf", + "NotoSansSymbols-Regular-Subsetted2.ttf", + "NotoSansSyriacEastern-Regular.ttf", + "NotoSansSyriacEstrangela-Regular.ttf", + "NotoSansSyriacWestern-Regular.ttf", + "NotoSansTagalog-Regular.ttf", + "NotoSansTagbanwa-Regular.ttf", + "NotoSansTaiLe-Regular.ttf", + "NotoSansTaiTham-Regular.ttf", + "NotoSansTaiViet-Regular.ttf", + "NotoSansTakri-VF.ttf", + "NotoSansTamil-VF.ttf", + "NotoSansTamilUI-VF.ttf", + "NotoSansTelugu-VF.ttf", + "NotoSansTeluguUI-VF.ttf", + "NotoSansThaana-Bold.ttf", + "NotoSansThaana-Regular.ttf", + "NotoSansThai-Bold.ttf", + "NotoSansThai-Regular.ttf", + "NotoSansThaiUI-Bold.ttf", + "NotoSansThaiUI-Regular.ttf", + "NotoSansTifinagh-Regular.otf", + "NotoSansUgaritic-Regular.ttf", + "NotoSansVai-Regular.ttf", + "NotoSansWancho-Regular.otf", + "NotoSansWarangCiti-Regular.otf", + "NotoSansYi-Regular.ttf", + "NotoSerif-Bold.ttf", + "NotoSerif-BoldItalic.ttf", + "NotoSerif-Italic.ttf", + "NotoSerif-Regular.ttf", + "NotoSerifArmenian-VF.ttf", + "NotoSerifBengali-VF.ttf", + "NotoSerifCJK-Regular.ttc", + "NotoSerifDevanagari-VF.ttf", + "NotoSerifDogra-Regular.ttf", + "NotoSerifEthiopic-VF.ttf", + "NotoSerifGeorgian-VF.ttf", + "NotoSerifGujarati-VF.ttf", + "NotoSerifGurmukhi-VF.ttf", + "NotoSerifHebrew-Bold.ttf", + "NotoSerifHebrew-Regular.ttf", + "NotoSerifHentaigana.ttf", + "NotoSerifKannada-VF.ttf", + "NotoSerifKhmer-Bold.otf", + "NotoSerifKhmer-Regular.otf", + "NotoSerifLao-Bold.ttf", + "NotoSerifLao-Regular.ttf", + "NotoSerifMalayalam-VF.ttf", + "NotoSerifMyanmar-Bold.otf", + "NotoSerifMyanmar-Regular.otf", + 
"NotoSerifNyiakengPuachueHmong-VF.ttf", + "NotoSerifSinhala-VF.ttf", + "NotoSerifTamil-VF.ttf", + "NotoSerifTelugu-VF.ttf", + "NotoSerifThai-Bold.ttf", + "NotoSerifThai-Regular.ttf", + "NotoSerifTibetan-VF.ttf", + "NotoSerifYezidi-VF.ttf", + "Roboto-Regular.ttf", + "RobotoFlex-Regular.ttf", + "RobotoStatic-Regular.ttf", + "SourceSansPro-Bold.ttf", + "SourceSansPro-BoldItalic.ttf", + "SourceSansPro-Italic.ttf", + "SourceSansPro-Regular.ttf", + "SourceSansPro-SemiBold.ttf", + "SourceSansPro-SemiBoldItalic.ttf", + "font_fallback.xml", + "fonts.xml", + ], +} + +android_filesystem_defaults { + name: "system_ext_image_defaults", + deps: [ + /////////////////////////////////////////// + // base_system_ext + /////////////////////////////////////////// + "build_flag_system_ext", + "fs_config_dirs_system_ext", + "fs_config_files_system_ext", + "group_system_ext", + "passwd_system_ext", + "SatelliteClient", + "selinux_policy_system_ext", + "system_ext_manifest.xml", + "system_ext-build.prop", + // Base modules when shipping api level is less than or equal to 34 + "hwservicemanager", + "android.hidl.allocator@1.0-service", + + /////////////////////////////////////////// + // window_extensions_base + /////////////////////////////////////////// + "androidx.window.extensions", + "androidx.window.sidecar", + + /////////////////////////////////////////// + // base_system + /////////////////////////////////////////// + "charger", + ] + select(release_flag("RELEASE_APPFUNCTION_SIDECAR"), { + true: [ + "com.android.extensions.appfunctions", + "appfunctions.extension.xml", + ], + default: [], + }), +} + +android_filesystem_defaults { + name: "product_image_defaults", + deps: [ + /////////////////////////////////////////// + // media_product + /////////////////////////////////////////// + "webview", + + /////////////////////////////////////////// + // base_product + /////////////////////////////////////////// + + // Base modules and settings for the product partition. 
+ "build_flag_product", + "fs_config_dirs_product", + "fs_config_files_product", + "group_product", + "ModuleMetadata", + "passwd_product", + "product_compatibility_matrix.xml", + "product_manifest.xml", + "selinux_policy_product", + "product-build.prop", + + // AUDIO + "frameworks_sounds", + ] + select(product_variable("debuggable"), { + // Packages included only for eng or userdebug builds, previously debug tagged + true: ["adb_keys"], + default: [], + }), +} + +system_image_fsverity_default = { + inputs: [ + "etc/boot-image.prof", + "etc/classpaths/*.pb", + "etc/dirty-image-objects", + "etc/preloaded-classes", + "framework/*", + "framework/*/*", // framework/{arch} + "framework/oat/*/*", // framework/oat/{arch} + ], + libs: [":framework-res{.export-package.apk}"], +} + +android_filesystem_defaults { + name: "system_image_defaults", + partition_name: "system", + base_dir: "system", + dirs: generic_rootdirs, + symlinks: generic_symlinks, + file_contexts: ":plat_file_contexts", + linker_config: { + gen_linker_config: true, + linker_config_srcs: [":system_linker_config_json_file"], + }, + fsverity: { + inputs: select(soong_config_variable("ANDROID", "PRODUCT_FSVERITY_GENERATE_METADATA"), { + true: [ + "etc/boot-image.prof", + "etc/classpaths/*.pb", + "etc/dirty-image-objects", + "etc/preloaded-classes", + "framework/*", + "framework/*/*", // framework/{arch} + "framework/oat/*/*", // framework/oat/{arch} + ], + default: [], + }), + libs: select(soong_config_variable("ANDROID", "PRODUCT_FSVERITY_GENERATE_METADATA"), { + true: [":framework-res{.export-package.apk}"], + default: [], + }), + }, + build_logtags: true, + gen_aconfig_flags_pb: true, + + compile_multilib: "both", + + use_avb: true, + avb_private_key: ":generic_system_sign_key", + avb_algorithm: "SHA256_RSA4096", + avb_hash_algorithm: "sha256", + + deps: [ + "abx", + "aconfigd-system", + "aflags", + "am", + "android.software.credentials.prebuilt.xml", // generic_system + "android.software.webview.prebuilt.xml", // media_system + "android.software.window_magnification.prebuilt.xml", // handheld_system + "android.system.suspend-service", + "apexd", + "appops", + "approved-ogki-builds.xml", // base_system + "appwidget", + "atrace", + "audioserver", + "bcc", + "blank_screen", + "blkid", + "bmgr", + "bootanimation", + "bootstat", + "bpfloader", + "bu", + "bugreport", + "bugreportz", + "cameraserver", + "cgroups.json", + "cmd", + "content", + "cppreopts.sh", // generic_system + "credstore", + "debuggerd", + "device_config", + "dirty-image-objects", + "dmctl", + "dmesgd", + "dnsmasq", + "dpm", + "dump.erofs", + "dumpstate", + "dumpsys", + "e2fsck", + "enhanced-confirmation.xml", // base_system + "etc_hosts", + "flags_health_check", + "framework-audio_effects.xml", // for handheld // handheld_system + "framework-sysconfig.xml", + "fs_config_dirs_system", + "fs_config_files_system", + "fsck.erofs", + "fsck.f2fs", // for media_system + "fsck_msdos", + "fsverity-release-cert-der", + "gatekeeperd", + "gpu_counter_producer", + "gpuservice", + "group_system", + "gsi_tool", + "gsid", + "heapprofd", + "hid", + "hiddenapi-package-whitelist.xml", // from runtime_libart + "idc_data", + "idmap2", + "idmap2d", + "ime", + "incident", + "incident-helper-cmd", + "incident_helper", + "incidentd", + "init.environ.rc-soong", + "init.usb.configfs.rc", + "init.usb.rc", + "init.zygote32.rc", + "init.zygote64.rc", + "init.zygote64_32.rc", + "initial-package-stopped-states.xml", + "input", + "installd", + "ip", // base_system + "iptables", + "kcmdlinectrl", + 
"kernel-lifetimes.xml", // base_system + "keychars_data", + "keylayout_data", + "keystore2", + "ld.mc", + "llkd", // base_system + "lmkd", // base_system + "locksettings", // base_system + "logcat", // base_system + "logd", // base_system + "lpdump", // base_system + "lshal", // base_system + "make_f2fs", // media_system + "mdnsd", // base_system + "media_profiles_V1_0.dtd", // base_system + "mediacodec.policy", // base_system + "mediaextractor", // base_system + "mediametrics", // base_system + "misctrl", // from base_system + "mke2fs", // base_system + "mkfs.erofs", // base_system + "monkey", // base_system + "mtectrl", // base_system + "ndc", // base_system + "netd", // base_system + "netutils-wrapper-1.0", // full_base + "notice_xml_system", + "odsign", // base_system + "otapreopt_script", // generic_system + "package-shareduid-allowlist.xml", // base_system + "passwd_system", // base_system + "pbtombstone", // base_system + "perfetto", // base_system + "ping", // base_system + "ping6", // base_system + "pintool", // base_system + "platform.xml", // base_system + "pm", // base_system + "prefetch", //base_system + "preinstalled-packages-asl-files.xml", // base_system + "preinstalled-packages-platform-generic-system.xml", // generic_system + "preinstalled-packages-platform-handheld-system.xml", // handheld_system + "preinstalled-packages-platform.xml", // base_system + "preinstalled-packages-strict-signature.xml", // base_system + "preloaded-classes", // ok + "printflags", // base_system + "privapp-permissions-platform.xml", // base_system + "prng_seeder", // base_system + "public.libraries.android.txt", + "recovery-persist", // base_system + "recovery-refresh", // generic_system + "requestsync", // media_system + "resize2fs", // base_system + "rss_hwm_reset", // base_system + "run-as", // base_system + "schedtest", // base_system + "screencap", // base_system + "screenrecord", // handheld_system + "sdcard", // base_system + "secdiscard", // base_system + "sensorservice", // base_system + "service", // base_system + "servicemanager", // base_system + "settings", // base_system + "sfdo", // base_system + "sgdisk", // base_system + "sm", // base_system + "snapuserd", // base_system + "storaged", // base_system + "surfaceflinger", // base_system + "svc", // base_system + "system_manifest.xml", // base_system + "task_profiles.json", // base_system + "tc", // base_system + "telecom", // base_system + "tombstoned", // base_system + "traced", // base_system + "traced_probes", // base_system + "tradeinmode", // base_system + "tune2fs", // base_system + "uiautomator", // base_system + "uinput", // base_system + "uncrypt", // base_system + "update_engine", // generic_system + "update_engine_sideload", // recovery + "update_verifier", // generic_system + "usbd", // base_system + "vdc", // base_system + "virtual_camera", // handheld_system // release_package_virtual_camera + "vold", // base_system + "vr", // handheld_system + "watchdogd", // base_system + "wifi.rc", // base_system + "wificond", // base_system + "wm", // base_system + ] + select(release_flag("RELEASE_PLATFORM_VERSION_CODENAME"), { + "REL": [], + default: [ + "android.software.preview_sdk.prebuilt.xml", // media_system + ], + }) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), { + "true": [ + "trace_redactor", // base_system (RELEASE_PACKAGE_PROFILING_MODULE) + ], + default: [], + }) + select(release_flag("RELEASE_MEMORY_MANAGEMENT_DAEMON"), { + true: [ + "mm_daemon", // base_system 
(RELEASE_MEMORY_MANAGEMENT_DAEMON) + ], + default: [], + }) + select(product_variable("debuggable"), { + true: [ + "alloctop", + "adevice_fingerprint", + "arping", + "avbctl", + "bootctl", + "dmuserd", + "evemu-record", + "idlcli", + "init-debug.rc", + "iotop", + "iperf3", + "iw", + "layertracegenerator", + "logpersist.start", + "logtagd.rc", + "ot-cli-ftd", + "ot-ctl", + "overlay_remounter", + "procrank", + "profcollectctl", + "profcollectd", + "record_binder", + "sanitizer-status", + "servicedispatcher", + "showmap", + "snapshotctl", + "sqlite3", + "ss", + "start_with_lockagent", + "strace", + "su", + "tinycap", + "tinyhostless", + "tinymix", + "tinypcminfo", + "tinyplay", // host + "tracepath", + "tracepath6", + "traceroute6", + "unwind_info", + "unwind_reg_info", + "unwind_symbols", + "update_engine_client", + ], + default: [], + }), + multilib: { + common: { + deps: [ + "BackupRestoreConfirmation", // base_system + "BasicDreams", // handheld_system + "BlockedNumberProvider", // handheld_system + "BluetoothMidiService", // handheld_system + "BookmarkProvider", // handheld_system + "BuiltInPrintService", // handheld_system + "CalendarProvider", // handheld_system + "CallLogBackup", // telephony_system + "CameraExtensionsProxy", // handheld_system + "CaptivePortalLogin", // handheld_system + "CarrierDefaultApp", // telephony_system + "CellBroadcastLegacyApp", // telephony_system + "CertInstaller", // handheld_system + "CompanionDeviceManager", // media_system + "ContactsProvider", // base_system + "CredentialManager", // handheld_system + "DeviceAsWebcam", // handheld_system + "DeviceDiagnostics", // handheld_system - internal + "DocumentsUI", // handheld_system + "DownloadProvider", // base_system + "DownloadProviderUi", // handheld_system + "DynamicSystemInstallationService", // base_system + "E2eeContactKeysProvider", // base_system + "EasterEgg", // handheld_system + "ExtShared", // base_system + "ExternalStorageProvider", // handheld_system + "FusedLocation", // handheld_system + "HTMLViewer", // media_system + "InputDevices", // handheld_system + "IntentResolver", // base_system + "KeyChain", // handheld_system + "LiveWallpapersPicker", // generic_system, full_base + "LocalTransport", // base_system + "ManagedProvisioning", // handheld_system + "MediaProviderLegacy", // base_system + "MmsService", // handheld_system + "MtpService", // handheld_system + "MusicFX", // handheld_system + "NetworkStack", // base_system + "ONS", // telephony_system + "PacProcessor", // handheld_system + "PackageInstaller", // base_system + "PartnerBookmarksProvider", // generic_system + "PrintRecommendationService", // handheld_system + "PrintSpooler", // handheld_system + "ProxyHandler", // handheld_system + "SecureElement", // handheld_system + "SettingsProvider", // base_system + "SharedStorageBackup", // handheld_system + "Shell", // base_system + "SimAppDialog", // handheld_system + "SoundPicker", // not installed by anyone + "StatementService", // media_system + "Stk", // generic_system + "Tag", // generic_system + "TeleService", // handheld_system + "Telecom", // handheld_system + "TelephonyProvider", // handheld_system + "Traceur", // handheld_system + "UserDictionaryProvider", // handheld_system + "VpnDialogs", // handheld_system + "WallpaperBackup", // base_system + "adbd_system_api", // base_system + "android.hidl.base-V1.0-java", // base_system + "android.hidl.manager-V1.0-java", // base_system + "android.test.base", // from runtime_libart + "android.test.mock", // base_system + 
"android.test.runner", // base_system + "aosp_mainline_modules", // ok + "build_flag_system", // base_system + "charger_res_images", // generic_system + "com.android.apex.cts.shim.v1_prebuilt", // ok + "com.android.cellbroadcast", // telephony_system + "com.android.future.usb.accessory", // media_system + "com.android.location.provider", // base_system + "com.android.media.remotedisplay", // media_system + "com.android.media.remotedisplay.xml", // media_system + "com.android.mediadrm.signer", // media_system + "com.android.nfc_extras", // ok + "com.android.nfcservices", // base_system (RELEASE_PACKAGE_NFC_STACK != NfcNci) + "com.android.runtime", // ok + "dex_bootjars", + "ext", // from runtime_libart + "framework-graphics", // base_system + "framework-location", // base_system + "framework-minus-apex-install-dependencies", // base_system + "framework_compatibility_matrix.device.xml", + "generic_system_fonts", // ok + "hwservicemanager_compat_symlink_module", // base_system + "hyph-data", + "ims-common", // base_system + "init_system", // base_system + "javax.obex", // base_system + "llndk.libraries.txt", //ok + "org.apache.http.legacy", // base_system + "perfetto-extras", // system + "sanitizer.libraries.txt", // base_system + "selinux_policy_system_soong", // ok + "services", // base_system + "shell_and_utilities_system", // ok + "system-build.prop", + "system_compatibility_matrix.xml", //base_system + "telephony-common", // libs from TeleService + "voip-common", // base_system + ] + select(soong_config_variable("ANDROID", "release_crashrecovery_module"), { + "true": [ + "com.android.crashrecovery", // base_system (RELEASE_CRASHRECOVERY_MODULE) + ], + default: [], + }) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), { + "true": [ + "com.android.profiling", // base_system (RELEASE_PACKAGE_PROFILING_MODULE) + ], + default: [], + }) + select(release_flag("RELEASE_MOVE_VCN_TO_MAINLINE"), { + true: [], + default: [ + "framework-connectivity-b", // base_system + ], + }) + select(release_flag("RELEASE_AVATAR_PICKER_APP"), { + true: [ + "AvatarPicker", // generic_system (RELEASE_AVATAR_PICKER_APP) + ], + default: [], + }) + select(release_flag("RELEASE_UPROBESTATS_MODULE"), { + true: [ + "com.android.uprobestats", // base_system (RELEASE_UPROBESTATS_MODULE) + ], + default: [], + }), + }, + prefer32: { + deps: [ + "drmserver", // media_system + "mediaserver", // base_system + ], + }, + lib64: { + deps: [ + "android.system.virtualizationcommon-ndk", + "android.system.virtualizationservice-ndk", + "libgsi", + "servicemanager", + ] + select(release_flag("RELEASE_UPROBESTATS_MODULE"), { + true: [], + default: [ + "uprobestats", // base_system internal + ], + }), + }, + both: { + deps: [ + "android.hardware.biometrics.fingerprint@2.1", // generic_system + "android.hardware.radio.config@1.0", // generic_system + "android.hardware.radio.deprecated@1.0", // generic_system + "android.hardware.radio@1.0", // generic_system + "android.hardware.radio@1.1", // generic_system + "android.hardware.radio@1.2", // generic_system + "android.hardware.radio@1.3", // generic_system + "android.hardware.radio@1.4", // generic_system + "android.hardware.secure_element@1.0", // generic_system + "app_process", // base_system + "boringssl_self_test", // base_system + "heapprofd_client", // base_system + "libEGL", // base_system + "libEGL_angle", // base_system + "libETC1", // base_system + "libFFTEm", // base_system + "libGLESv1_CM", // base_system + "libGLESv1_CM_angle", // base_system + 
"libGLESv2", // base_system + "libGLESv2_angle", // base_system + "libGLESv3", // base_system + "libOpenMAXAL", // base_system + "libOpenSLES", // base_system + "libaaudio", // base_system + "libalarm_jni", // base_system + "libamidi", // base_system + "libandroid", + "libandroid_runtime", + "libandroid_servers", + "libandroidfw", + "libartpalette-system", + "libaudio-resampler", // generic-system + "libaudioeffect_jni", + "libaudiohal", // generic-system + "libaudiopolicyengineconfigurable", // generic-system + "libbinder", + "libbinder_ndk", + "libbinder_rpc_unstable", + "libcamera2ndk", + "libcgrouprc", // llndk library + "libclang_rt.asan", + "libcompiler_rt", + "libcutils", // used by many libs + "libdmabufheap", // used by many libs + "libdrm", // used by many libs // generic_system + "libdrmframework", // base_system + "libdrmframework_jni", // base_system + "libfdtrack", // base_system + "libfilterfw", // base_system + "libfilterpack_imageproc", // media_system + "libfwdlockengine", // generic_system + "libgatekeeper", // base_system + "libgui", // base_system + "libhardware", // base_system + "libhardware_legacy", // base_system + "libhidltransport", // generic_system + "libhwbinder", // generic_system + "libinput", // base_system + "libinputflinger", // base_system + "libiprouteutil", // base_system + "libjnigraphics", // base_system + "libjpeg", // base_system + "liblog", // base_system + "liblogwrap", // generic_system + "liblz4", // generic_system + "libmedia", // base_system + "libmedia_jni", // base_system + "libmediandk", // base_system + "libminui", // generic_system + "libmonkey_jni", // base_system - internal + "libmtp", // base_system + "libnetd_client", // base_system + "libnetlink", // base_system + "libnetutils", // base_system + "libneuralnetworks_packageinfo", // base_system + "libnl", // generic_system + "libpdfium", // base_system + "libpolicy-subsystem", // generic_system + "libpower", // base_system + "libpowermanager", // base_system + "libprotobuf-cpp-full", // generic_system + "libradio_metadata", // base_system + "librs_jni", // handheld_system + "librtp_jni", // base_system + "libsensorservice", // base_system + "libsfplugin_ccodec", // base_system + "libskia", // base_system + "libsonic", // base_system + "libsonivox", // base_system + "libsoundpool", // base_system + "libspeexresampler", // base_system + "libsqlite", // base_system + "libstagefright", // base_system + "libstagefright_foundation", // base_system + "libstagefright_omx", // base_system + "libstdc++", // base_system + "libsysutils", // base_system + "libui", // base_system + "libusbhost", // base_system + "libutils", // base_system + "libvendorsupport", // llndk library + "libvintf_jni", // base_system + "libvulkan", // base_system + "libwebviewchromium_loader", // media_system + "libwebviewchromium_plat_support", // media_system + "libwilhelm", // base_system + "linker", // base_system + ] + select(soong_config_variable("ANDROID", "TARGET_DYNAMIC_64_32_DRMSERVER"), { + "true": ["drmserver"], + default: [], + }) + select(soong_config_variable("ANDROID", "TARGET_DYNAMIC_64_32_MEDIASERVER"), { + "true": ["mediaserver"], + default: [], + }) + select(release_flag("RELEASE_UPROBESTATS_MODULE"), { + true: [], + default: [ + "libuprobestats_client", // base_system internal + ], + }), + }, + }, + arch: { + arm64: { + deps: [ + "libclang_rt.hwasan", + "libc_hwasan", + ], + }, + }, +} + +android_system_image { + name: "aosp_shared_system_image", + defaults: ["system_image_defaults"], + dirs: 
android_rootdirs, + symlinks: android_symlinks, + type: "erofs", + erofs: { + compressor: "lz4hc,9", + compress_hints: "erofs_compress_hints.txt", + }, + deps: [ + // DO NOT update this list. Instead, update the system_image_defaults to + // sync with the base_system.mk + "logpersist.start", // cf only + ], +} diff --git a/target/product/generic/OWNERS b/target/product/generic/OWNERS new file mode 100644 index 0000000000..6d1446f099 --- /dev/null +++ b/target/product/generic/OWNERS @@ -0,0 +1,6 @@ +# Bug component: 1322713 +inseob@google.com +jeongik@google.com +jiyong@google.com +justinyun@google.com +kiyoungkim@google.com diff --git a/target/product/generic/erofs_compress_hints.txt b/target/product/generic/erofs_compress_hints.txt new file mode 100644 index 0000000000..8b2a711b8f --- /dev/null +++ b/target/product/generic/erofs_compress_hints.txt @@ -0,0 +1 @@ +0 .*\.apex$
\ No newline at end of file diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk index ebac62fd6b..32277ece03 100644 --- a/target/product/generic_ramdisk.mk +++ b/target/product/generic_ramdisk.mk @@ -23,6 +23,8 @@ PRODUCT_PACKAGES += \ init_first_stage \ snapuserd_ramdisk \ + ramdisk-build.prop \ + toolbox_ramdisk \ # Debug ramdisk PRODUCT_PACKAGES += \ @@ -35,8 +37,6 @@ PRODUCT_PACKAGES += \ _my_paths := \ $(TARGET_COPY_OUT_RAMDISK)/ \ $(TARGET_COPY_OUT_DEBUG_RAMDISK)/ \ - system/usr/share/zoneinfo/tz_version \ - system/usr/share/zoneinfo/tzdata \ $(TARGET_COPY_OUT_RECOVERY)/root/first_stage_ramdisk/system \ diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk index 0a09eb11d4..2482afccc6 100644 --- a/target/product/generic_system.mk +++ b/target/product/generic_system.mk @@ -36,11 +36,6 @@ PRODUCT_PACKAGES += \ Stk \ Tag \ -ifeq ($(RELEASE_AVATAR_PICKER_APP),true) - PRODUCT_PACKAGES += \ - AvatarPicker -endif - # OTA support PRODUCT_PACKAGES += \ recovery-refresh \ @@ -152,4 +147,5 @@ _my_paths := \ $(call require-artifacts-in-path, $(_my_paths), $(_my_allowed_list)) # Product config map to toggle between sources and prebuilts of required mainline modules +PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard build/release/gms_mainline/required/release_config_map.textproto) PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline/required/release_config_map.textproto) diff --git a/target/product/go_defaults.mk b/target/product/go_defaults.mk index 4627fde189..ccc4f365e7 100644 --- a/target/product/go_defaults.mk +++ b/target/product/go_defaults.mk @@ -17,7 +17,9 @@ # Inherit common Android Go defaults. $(call inherit-product, build/make/target/product/go_defaults_common.mk) -PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/go_devices/release_config_map.textproto) +# Product config map to toggle between sources and prebuilts of required mainline modules +PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard build/release/gms_mainline_go/required/release_config_map.textproto) +PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline_go/required/release_config_map.textproto) # Add the system properties. TARGET_SYSTEM_PROP += \ diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk index fd4047a65b..0fcf16b753 100644 --- a/target/product/go_defaults_common.mk +++ b/target/product/go_defaults_common.mk @@ -24,11 +24,6 @@ PRODUCT_VENDOR_PROPERTIES += \ # Speed profile services and wifi-service to reduce RAM and storage. PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile -# Use a profile based boot image for this device. Note that this is currently a -# generic profile and not Android Go optimized. -PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := true -PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := frameworks/base/config/boot-image-profile.txt - # Do not generate libartd. PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := false diff --git a/target/product/gsi/Android.bp b/target/product/gsi/Android.bp index 45ba14331b..97b3895b17 100644 --- a/target/product/gsi/Android.bp +++ b/target/product/gsi/Android.bp @@ -46,3 +46,117 @@ install_symlink { installed_location: "etc/init/config", symlink_target: "/system/system_ext/etc/init/config", } + +// init.gsi.rc, GSI-specific init script. 
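Both the generic_system.mk and go_defaults.mk hunks above append their release config maps through $(wildcard ...), so the entries silently drop out on checkouts that do not contain the referenced directories. A minimal sketch of the same pattern for a hypothetical vendor tree (the path is made up for illustration):

    # Only adds the map if the file actually exists in this checkout.
    PRODUCT_RELEASE_CONFIG_MAPS += \
        $(wildcard vendor/acme/build/release/gms_mainline/required/release_config_map.textproto)

If the wildcard matches nothing, the += expands to nothing and the build behaves as if the line were absent.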
+prebuilt_etc { + name: "init.gsi.rc", + src: "init.gsi.rc", + system_ext_specific: true, + relative_install_path: "init", +} + +prebuilt_etc { + name: "init.vndk-nodef.rc", + src: "init.vndk-nodef.rc", + system_ext_specific: true, + relative_install_path: "gsi", +} + +gsi_symlinks = [ + { + target: "/system/system_ext", + name: "system_ext", + }, + { + target: "/system/product", + name: "product", + }, + { + target: "/odm/odm_dlkm/etc", + name: "odm_dlkm/etc", + }, + { + target: "/vendor/vendor_dlkm/etc", + name: "vendor_dlkm/etc", + }, +] + +android_system_image { + name: "android_gsi", + defaults: [ + "system_image_defaults", + "system_ext_image_defaults", + "product_image_defaults", + ], + symlinks: gsi_symlinks, + dirs: ["cache"], + deps: [ + /////////////////////////////////////////// + // gsi_system_ext + /////////////////////////////////////////// + + // handheld packages + "Launcher3QuickStep", + "Provision", + "Settings", + "StorageManager", + "SystemUI", + + // telephony packages + "CarrierConfig", + + // Install a copy of the debug policy to the system_ext partition, and allow + // init-second-stage to load debug policy from system_ext. + // This option is only meant to be set by compliance GSI targets. + "system_ext_userdebug_plat_sepolicy.cil", + + /////////////////////////////////////////// + // gsi_release + /////////////////////////////////////////// + "gsi_skip_mount.cfg", + "init.gsi.rc", + "init.vndk-nodef.rc", + // Overlay the GSI specific setting for framework and SystemUI + "gsi_overlay_framework", + "gsi_overlay_systemui", + + /////////////////////////////////////////// + // VNDK + /////////////////////////////////////////// + "com.android.vndk.v30", + "com.android.vndk.v31", + "com.android.vndk.v32", + "com.android.vndk.v33", + "com.android.vndk.v34", + + /////////////////////////////////////////// + // gsi_product + /////////////////////////////////////////// + "Browser2", + "Camera2", + "Dialer", + "LatinIME", + "apns-full-conf.xml", + "frameworks-base-overlays", + ], + multilib: { + lib64: { + deps: [ + /////////////////////////////////////////// + // AVF + /////////////////////////////////////////// + "com.android.compos", + "features_com.android.virt.xml", + ], + }, + both: { + // PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 + deps: ["android.hidl.memory@1.0-impl"], + }, + }, + enabled: select(soong_config_variable("ANDROID", "PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT"), { + "true": true, + default: false, + }), + type: "ext4", +} diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt index f771916f7a..cbb8a0e8cd 100644 --- a/target/product/gsi/current.txt +++ b/target/product/gsi/current.txt @@ -24,7 +24,7 @@ LLNDK: libvulkan.so VNDK-SP: android.hardware.common-V2-ndk.so VNDK-SP: android.hardware.common.fmq-V1-ndk.so VNDK-SP: android.hardware.graphics.allocator-V2-ndk.so -VNDK-SP: android.hardware.graphics.common-V5-ndk.so +VNDK-SP: android.hardware.graphics.common-V6-ndk.so VNDK-SP: android.hardware.graphics.common@1.0.so VNDK-SP: android.hardware.graphics.common@1.1.so VNDK-SP: android.hardware.graphics.common@1.2.so diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk index 39428d2cfe..115b355920 100644 --- a/target/product/gsi_release.mk +++ b/target/product/gsi_release.mk @@ -79,9 +79,15 @@ PRODUCT_BUILD_SUPER_EMPTY_IMAGE := false PRODUCT_BUILD_SYSTEM_DLKM_IMAGE := false PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST := true +# Build pvmfw with GSI: b/376363989, pvmfw currently only supports AArch64 +ifneq (,$(filter 
%_arm64,$(TARGET_PRODUCT))) +PRODUCT_BUILD_PVMFW_IMAGE := true +endif + # Additional settings used in all GSI builds PRODUCT_PRODUCT_PROPERTIES += \ ro.crypto.metadata_init_delete_all_keys.enabled=false \ + debug.codec2.bqpool_dealloc_after_stop=1 \ # Window Extensions ifneq ($(PRODUCT_IS_ATV),true) diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk index 3f3bd01aab..6799066e40 100644 --- a/target/product/handheld_system.mk +++ b/target/product/handheld_system.mk @@ -34,6 +34,7 @@ $(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk) PRODUCT_PACKAGES += \ android.software.window_magnification.prebuilt.xml \ + $(if $(RELEASE_AVATAR_PICKER_APP), AvatarPicker,) \ BasicDreams \ BlockedNumberProvider \ BluetoothMidiService \ @@ -46,6 +47,7 @@ PRODUCT_PACKAGES += \ CertInstaller \ CredentialManager \ DeviceAsWebcam \ + DeviceDiagnostics \ DocumentsUI \ DownloadProviderUi \ EasterEgg \ @@ -68,13 +70,19 @@ PRODUCT_PACKAGES += \ SharedStorageBackup \ SimAppDialog \ Telecom \ - TelephonyProvider \ TeleService \ Traceur \ UserDictionaryProvider \ VpnDialogs \ vr \ +# Choose the correct products based on HSUM status +ifeq ($(PRODUCT_USE_HSUM),true) + PRODUCT_PACKAGES += TelephonyProviderHsum +else + PRODUCT_PACKAGES += TelephonyProvider +endif + PRODUCT_PACKAGES += $(RELEASE_PACKAGE_VIRTUAL_CAMERA) # Set virtual_camera_service_enabled soong config variable based on the # RELEASE_PACKAGE_VIRTUAL_CAMERA build. virtual_camera_service_enabled soong config diff --git a/target/product/hsum_common.mk b/target/product/hsum_common.mk new file mode 100644 index 0000000000..b19bc65c90 --- /dev/null +++ b/target/product/hsum_common.mk @@ -0,0 +1,29 @@ +# +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Contains common default elements for devices running in Headless System User Mode. + +# Should generally be inherited first as using an HSUM configuration can affect downstream choices +# (such as ensuring that the HSUM-variants of packages are selected). + +PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \ + ro.fw.mu.headless_system_user=true + +# Variable for elsewhere choosing the appropriate products based on HSUM status. +PRODUCT_USE_HSUM := true + +PRODUCT_PACKAGES += \ + HsumDefaultConfigOverlay diff --git a/target/product/large_screen_common.mk b/target/product/large_screen_common.mk new file mode 100644 index 0000000000..3eb9ff05e5 --- /dev/null +++ b/target/product/large_screen_common.mk @@ -0,0 +1,21 @@ +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
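The new hsum_common.mk acts as the single switch for Headless System User Mode: it sets ro.fw.mu.headless_system_user=true, defines PRODUCT_USE_HSUM := true, and adds HsumDefaultConfigOverlay, while the handheld_system.mk hunk above uses PRODUCT_USE_HSUM to choose TelephonyProviderHsum over TelephonyProvider. A minimal sketch of the intended wiring, assuming a hypothetical device makefile (the device path is illustrative only):

    # hypothetical device/acme/auto/aosp_auto.mk
    # Inherit the HSUM defaults first so PRODUCT_USE_HSUM is already set when
    # handheld_system.mk picks between TelephonyProvider and TelephonyProviderHsum.
    $(call inherit-product, $(SRC_TARGET_DIR)/product/hsum_common.mk)
    $(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system.mk)

This mirrors the note in hsum_common.mk that it should generally be inherited first so downstream package choices see the HSUM setting.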
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Window Extensions +$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions.mk) + +# Enable Settings 2-pane optimization for large-screen +PRODUCT_SYSTEM_PROPERTIES += \ + persist.settings.large_screen_opt.enabled=true diff --git a/target/product/media_system_ext.mk b/target/product/media_system_ext.mk index 2e20af3d47..1179966777 100644 --- a/target/product/media_system_ext.mk +++ b/target/product/media_system_ext.mk @@ -20,6 +20,10 @@ # base_system_ext.mk. $(call inherit-product, $(SRC_TARGET_DIR)/product/base_system_ext.mk) -# /system_ext packages -PRODUCT_PACKAGES += \ - vndk_apex_snapshot_package \ +# Window Extensions +$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions_base.mk) + +# AppFunction Extensions +ifneq (,$(RELEASE_APPFUNCTION_SIDECAR)) + $(call inherit-product, $(SRC_TARGET_DIR)/product/app_function_extensions.mk) +endif diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk index 9e8afa85a4..71138ac560 100644 --- a/target/product/runtime_libart.mk +++ b/target/product/runtime_libart.mk @@ -142,6 +142,7 @@ ifneq (,$(filter true,$(OVERRIDE_DISABLE_DEXOPT_ALL))) # be too much of a problem for platform developers because a change to framework code should not # trigger dexpreopt for the ART boot image. WITH_DEXPREOPT_ART_BOOT_IMG_ONLY := true + $(call soong_config_set_bool,PrebuiltGmsCore,ExcludeExtractApk,true) endif # Enable resolution of startup const strings. @@ -157,15 +158,14 @@ PRODUCT_SYSTEM_PROPERTIES += \ dalvik.vm.minidebuginfo=true \ dalvik.vm.dex2oat-minidebuginfo=true -# Enable Madvising of the whole art, odex and vdex files to MADV_WILLNEED. +# Enable Madvising of the whole odex and vdex files to MADV_WILLNEED. # The size specified here is the size limit of how much of the file # (in bytes) is madvised. -# We madvise the whole .art file to MADV_WILLNEED with UINT_MAX limit. # For odex and vdex files, we limit madvising to 100MB. +# For art files, we defer to the runtime for default behavior. 
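The sizes in the madvise properties that follow are byte counts: 104857600 is 100 * 1024 * 1024, so at most the first 100 MiB of each odex and vdex file is madvised to MADV_WILLNEED, and with dalvik.vm.madvise.artfile.size removed the runtime's built-in default now governs .art files. A device that wanted a tighter cap could in principle supply its own value; a sketch with an illustrative 50 MiB figure (the property name is real, the override itself is only an assumption for illustration):

    # hypothetical device-level override: 50 MiB = 50 * 1024 * 1024 bytes
    PRODUCT_SYSTEM_PROPERTIES += \
        dalvik.vm.madvise.vdexfile.size=52428800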
PRODUCT_SYSTEM_PROPERTIES += \ dalvik.vm.madvise.vdexfile.size=104857600 \ - dalvik.vm.madvise.odexfile.size=104857600 \ - dalvik.vm.madvise.artfile.size=4294967295 + dalvik.vm.madvise.odexfile.size=104857600 # Properties for the Unspecialized App Process Pool PRODUCT_SYSTEM_PROPERTIES += \ diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp index 0d7b35e1c9..214c009ec8 100644 --- a/target/product/security/Android.bp +++ b/target/product/security/Android.bp @@ -33,7 +33,18 @@ prebuilt_etc { // image otacerts_zip { name: "otacerts", - recovery_available: true, relative_install_path: "security", filename: "otacerts.zip", } + +otacerts_zip { + name: "otacerts.recovery", + recovery: true, + relative_install_path: "security", + filename: "otacerts.zip", +} + +adb_keys { + name: "adb_keys", + product_specific: true, +} diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk deleted file mode 100644 index 91b272c1bd..0000000000 --- a/target/product/security/Android.mk +++ /dev/null @@ -1,17 +0,0 @@ -LOCAL_PATH:= $(call my-dir) - -####################################### -# adb key, if configured via PRODUCT_ADB_KEYS -ifdef PRODUCT_ADB_KEYS - ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),) - include $(CLEAR_VARS) - LOCAL_MODULE := adb_keys - LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 - LOCAL_LICENSE_CONDITIONS := notice - LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE - LOCAL_MODULE_CLASS := ETC - LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT) - LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS) - include $(BUILD_PREBUILT) - endif -endif diff --git a/target/product/security/BUILD.bazel b/target/product/security/BUILD.bazel deleted file mode 100644 index c12be79833..0000000000 --- a/target/product/security/BUILD.bazel +++ /dev/null @@ -1,8 +0,0 @@ -filegroup( - name = "android_certificate_directory", - srcs = glob([ - "*.pk8", - "*.pem", - ]), - visibility = ["//visibility:public"], -) diff --git a/target/product/userspace_reboot.mk b/target/product/userspace_reboot.mk index f235d146e3..51feb0721f 100644 --- a/target/product/userspace_reboot.mk +++ b/target/product/userspace_reboot.mk @@ -14,6 +14,4 @@ # limitations under the License. # -# Inherit this when the target supports userspace reboot - -PRODUCT_VENDOR_PROPERTIES := init.userspace_reboot.is_supported=true +# DEPRECATED! Do not inherit this. diff --git a/target/product/virtual_ab_ota/compression.mk b/target/product/virtual_ab_ota/compression.mk index dc1ee3e028..e77c36fb78 100644 --- a/target/product/virtual_ab_ota/compression.mk +++ b/target/product/virtual_ab_ota/compression.mk @@ -18,9 +18,12 @@ $(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/launch_with_ven PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true -PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.batch_writes=true +# Optional assignment. On low memory devices, disabling io_uring can relieve cpu and memory +# pressure during an OTA. 
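The comment above, and the matching vabc_features.mk hunk that follows, switch ro.virtual_ab.io_uring.enabled (and, in vabc_features.mk, ro.virtual_ab.compression.xor.enabled) to optional '?=' property assignments, which a device makefile can still override without touching these shared files. A sketch of a low-memory device doing so (the recommendation to disable these comes from the comments here; the explicit false values are the hypothetical override, not defaults from this change):

    # hypothetical low-memory device .mk: trade some OTA install speed for
    # lower cpu and memory pressure by turning off io_uring and XOR compression.
    PRODUCT_VENDOR_PROPERTIES += \
        ro.virtual_ab.io_uring.enabled=false \
        ro.virtual_ab.compression.xor.enabled=false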
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled?=true + # Enabling this property, will improve OTA install time # but will use an additional CPU core # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.threads=true diff --git a/target/product/virtual_ab_ota/vabc_features.mk b/target/product/virtual_ab_ota/vabc_features.mk index e2745a1356..d092699a47 100644 --- a/target/product/virtual_ab_ota/vabc_features.mk +++ b/target/product/virtual_ab_ota/vabc_features.mk @@ -31,14 +31,15 @@ PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.enabled=true PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true -PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true -PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.xor.enabled=true PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.batch_writes=true + +# Optional assignments, low memory devices may benefit from overriding these. +PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled?=true +PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.xor.enabled?=true + # Low memory device configurations. If memory usage and cpu utilization is # a bottleneck during OTA, the below configurations can be added to a -# device's .mk file improve performance for low mem devices. Disabling -# ro.virtual_ab.compression.xor.enabled and ro.virtual_ab.io_uring.enabled -# is also recommended +# device's .mk file improve performance for low mem devices. # # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.read_ahead_size=16 # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.o_direct.enabled=true diff --git a/target/product/window_extensions.mk b/target/product/window_extensions.mk index 5f5431f677..d27a613a61 100644 --- a/target/product/window_extensions.mk +++ b/target/product/window_extensions.mk @@ -14,11 +14,14 @@ # limitations under the License. # -# /system_ext packages -PRODUCT_PACKAGES += \ - androidx.window.extensions \ - androidx.window.sidecar - -# properties +# Extension of window_extensions_base.mk to enable the activity embedding +# feature for all apps by default. All large screen devices must inherit +# this in build. Optional for other form factors. +# +# Indicated whether the Activity Embedding feature should be guarded by +# Android 15 to avoid app compat impact. +# If true (or not set), the feature is only enabled for apps with target +# SDK of Android 15 or above. +# If false, the feature is enabled for all apps. PRODUCT_PRODUCT_PROPERTIES += \ - persist.wm.extensions.enabled=true + persist.wm.extensions.activity_embedding_guard_with_android_15=false diff --git a/target/product/window_extensions_base.mk b/target/product/window_extensions_base.mk new file mode 100644 index 0000000000..ee0e5e7c89 --- /dev/null +++ b/target/product/window_extensions_base.mk @@ -0,0 +1,33 @@ +# +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
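Per its new header, window_extensions.mk is now an extension of window_extensions_base.mk that only flips persist.wm.extensions.activity_embedding_guard_with_android_15 to false, so Activity Embedding applies to all apps rather than only apps targeting Android 15 or above; the androidx.window packages and persist.wm.extensions.enabled move into window_extensions_base.mk. Since the new large_screen_common.mk above already inherits window_extensions.mk and adds the Settings 2-pane property, a large-screen product can pick up the whole stack with a single inherit; a sketch (the device path is hypothetical):

    # hypothetical device/acme/tablet/aosp_tablet.mk
    $(call inherit-product, $(SRC_TARGET_DIR)/product/large_screen_common.mk)

This pulls in window_extensions.mk plus persist.settings.large_screen_opt.enabled=true for the large-screen Settings layout.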
+# + +# The base version of window_extensions.mk to be included on all non-wearable +# devices. Devices that don't support multi-window can choose to drop this. +# +# Note: by default the Activity Embedding feature is guarded by app's +# targetSDK on Android 15 to avoid app compat impact. +# +# Large screen devices must inherit window_extensions.mk to enable the +# Activity Embedding feature for all apps. + +# /system_ext packages +PRODUCT_PACKAGES += \ + androidx.window.extensions \ + androidx.window.sidecar + +# properties +PRODUCT_PRODUCT_PROPERTIES += \ + persist.wm.extensions.enabled=true diff --git a/teams/Android.bp b/teams/Android.bp index a9699d26f8..7e0795f492 100644 --- a/teams/Android.bp +++ b/teams/Android.bp @@ -13,6 +13,9 @@ // See the License for the specific language governing permissions and // limitations under the License. +// DON'T ADD NEW RULES HERE. For more details refer to +// go/new-android-ownership-model + package { default_applicable_licenses: ["Android-Apache-2.0"], } @@ -67,13 +70,6 @@ team { } team { - name: "trendy_team_wear_wear_cloud_platform", - - // go/trendy/manage/engineers/5917762526281728 - trendy_team_id: "5917762526281728", -} - -team { name: "trendy_team_pixel_system_software", // go/trendy/manage/engineers/4856005120622592 @@ -515,13 +511,6 @@ team { } team { - name: "trendy_team_wear_wear_notifications_alerts_attention_management", - - // go/trendy/manage/engineers/6267643681996800 - trendy_team_id: "6267643681996800", -} - -team { name: "trendy_team_fwk_nfc", // go/trendy/manage/engineers/5962312512864256 @@ -529,13 +518,6 @@ team { } team { - name: "trendy_team_wear_personalization_developer_surfaces", - - // go/trendy/manage/engineers/4819890988810240 - trendy_team_id: "4819890988810240", -} - -team { name: "trendy_team_srajkumar_team", // go/trendy/manage/engineers/5170053894012928 @@ -690,13 +672,6 @@ team { } team { - name: "trendy_team_test_eng_android_wear", - - // go/trendy/manage/engineers/4979150422933504 - trendy_team_id: "4979150422933504", -} - -team { name: "trendy_team_mesch_team", // go/trendy/manage/engineers/5205465899368448 @@ -718,13 +693,6 @@ team { } team { - name: "trendy_team_wear_wear_developer_devx", - - // go/trendy/manage/engineers/4894890764697600 - trendy_team_id: "4894890764697600", -} - -team { name: "trendy_team_android_rust", // go/trendy/manage/engineers/4844600586305536 @@ -928,13 +896,6 @@ team { } team { - name: "trendy_team_wear_wallet_on_wear", - - // go/trendy/manage/engineers/5724960437731328 - trendy_team_id: "5724960437731328", -} - -team { name: "trendy_team_glanceables", // go/trendy/manage/engineers/4658222004600832 @@ -1068,13 +1029,6 @@ team { } team { - name: "trendy_team_wear_3xp", - - // go/trendy/manage/engineers/5692317612539904 - trendy_team_id: "5692317612539904", -} - -team { name: "trendy_team_clockwork", // go/trendy/manage/engineers/4908781678755840 @@ -1208,13 +1162,6 @@ team { } team { - name: "trendy_team_wear_software_nti", - - // go/trendy/manage/engineers/5164973558759424 - trendy_team_id: "5164973558759424", -} - -team { name: "trendy_team_machine_learning", // go/trendy/manage/engineers/5276568318246912 @@ -1306,13 +1253,6 @@ team { } team { - name: "trendy_team_wear_wear_power_emulator", - - // go/trendy/manage/engineers/5160338936725504 - trendy_team_id: "5160338936725504", -} - -team { name: "trendy_team_deprecated_framework_svetoslavganov", // go/trendy/manage/engineers/6404117492531200 @@ -1327,13 +1267,6 @@ team { } team { - name: "trendy_team_wear_opus", - - // 
go/trendy/manage/engineers/5098351636676608 - trendy_team_id: "5098351636676608", -} - -team { name: "trendy_team_text_to_speech", // go/trendy/manage/engineers/6368933120442368 @@ -1439,13 +1372,6 @@ team { } team { - name: "trendy_team_wear_developer_foundation", - - // go/trendy/manage/engineers/5239127108648960 - trendy_team_id: "5239127108648960", -} - -team { name: "trendy_team_tpm_tvc", // go/trendy/manage/engineers/5390683333230592 @@ -1453,13 +1379,6 @@ team { } team { - name: "trendy_team_wear_wear_ux", - - // go/trendy/manage/engineers/5782097411080192 - trendy_team_id: "5782097411080192", -} - -team { name: "trendy_team_lse_desktop_os_experience", // go/trendy/manage/engineers/5125234900434944 @@ -1670,13 +1589,6 @@ team { } team { - name: "trendy_team_wear_wear_assistant", - - // go/trendy/manage/engineers/5848075306172416 - trendy_team_id: "5848075306172416", -} - -team { name: "trendy_team_android_power_and_comms_infra", // go/trendy/manage/engineers/5325547653332992 @@ -3455,6 +3367,13 @@ team { } team { + name: "trendy_team_aaos_display_safety_triage", + + // go/trendy/manage/engineers/6522093663780864 + trendy_team_id: "6522093663780864", +} + +team { name: "trendy_team_camera_htc_lg_qualcomm", // go/trendy/manage/engineers/6332099480911872 @@ -4414,8 +4333,81 @@ team { } team { + name: "trendy_team_android_media_solutions_playback", + + // go/trendy/manage/engineers/6742515252559872 + trendy_team_id: "6742515252559872", +} + +team { name: "trendy_team_android_telemetry_client_infra", // go/trendy/manage/engineers/5403245077430272 trendy_team_id: "5403245077430272", } + +team { + name: "trendy_team_pte_sysui", + + // go/trendy/manage/engineers/5185897463382016 + trendy_team_id: "5185897463382016", +} + +team { + name: "trendy_team_pixel_troubleshooting_app", + + // go/trendy/manage/engineers/5097003746426880 + trendy_team_id: "5097003746426880", +} + +team { + name: "trendy_team_desktop_firmware", + + // go/trendy/manage/engineers/5787938454863872 + trendy_team_id: "5787938454863872", +} + +team { + name: "trendy_team_art_cloud", + + // go/trendy/manage/engineers/5121440647577600 + trendy_team_id: "5121440647577600", +} + +team { + name: "trendy_team_ravenwood", + + // go/trendy/manage/engineers/6027181500497920 + trendy_team_id: "6027181500497920", +} + +team { + name: "trendy_team_automotive_cast", + + // go/trendy/manage/engineers/5293683026264064 + trendy_team_id: "5293683026264064", +} + +team { + name: "trendy_team_wear_standalone_kids", + + // go/trendy/manage/engineers/6303298703949824 + trendy_team_id: "6303298703949824", +} + +team { + name: "trendy_team_desktop_stats", + + // go/trendy/manage/engineers/5440764114206720 + trendy_team_id: "5440764114206720", +} + +team { + name: "trendy_team_desktop_wifi", + + // go/trendy/manage/engineers/6463689697099776 + trendy_team_id: "6463689697099776", +} + +// DON'T ADD NEW RULES HERE. 
For more details refer to +// go/new-android-ownership-model diff --git a/tools/Android.bp b/tools/Android.bp index 59831a61ec..243cb5647b 100644 --- a/tools/Android.bp +++ b/tools/Android.bp @@ -123,3 +123,11 @@ python_binary_host { "merge-event-log-tags.py", ], } + +python_binary_host { + name: "java-event-log-tags", + srcs: [ + "event_log_tags.py", + "java-event-log-tags.py", + ], +} diff --git a/tools/BUILD.bazel b/tools/BUILD.bazel deleted file mode 100644 index 9ec0dcef85..0000000000 --- a/tools/BUILD.bazel +++ /dev/null @@ -1,35 +0,0 @@ -py_library( - name = "event_log_tags", - srcs = ["event_log_tags.py"], - imports = ["."], -) - -py_binary( - name = "java-event-log-tags", - srcs = ["java-event-log-tags.py"], - python_version = "PY3", - visibility = ["//visibility:public"], - deps = [":event_log_tags"], -) - -py_binary( - name = "merge-event-log-tags", - srcs = ["merge-event-log-tags.py"], - python_version = "PY3", - visibility = ["//visibility:public"], - deps = [":event_log_tags"], -) - -py_binary( - name = "check_elf_file", - srcs = ["check_elf_file.py"], - python_version = "PY3", - visibility = ["//visibility:public"], -) - -py_binary( - name = "auto_gen_test_config", - srcs = ["auto_gen_test_config.py"], - python_version = "PY3", - visibility = ["//visibility:public"], -) diff --git a/tools/aconfig/OWNERS b/tools/aconfig/OWNERS index 9a76279cce..c92fc7cda3 100644 --- a/tools/aconfig/OWNERS +++ b/tools/aconfig/OWNERS @@ -1,7 +1,8 @@ -amhk@google.com dzshen@google.com -jham@google.com -joeo@google.com opg@google.com tedbauer@google.com zhidou@google.com + +amhk@google.com #{LAST_RESORT_SUGGESTION} +jham@google.com #{LAST_RESORT_SUGGESTION} +joeo@google.com #{LAST_RESORT_SUGGESTION} diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING index 15e41876cf..6e53018a41 100644 --- a/tools/aconfig/TEST_MAPPING +++ b/tools/aconfig/TEST_MAPPING @@ -102,12 +102,10 @@ { // aconfig_storage file java integration tests "name": "aconfig_storage_file.test.java" - } - ], - "postsubmit": [ + }, { - // aconfig_storage read api java integration tests - "name": "aconfig_storage_read_api.test.java" + // aconfig_storage read functional test + "name": "aconfig_storage_read_functional" } ] } diff --git a/tools/aconfig/aconfig/Android.bp b/tools/aconfig/aconfig/Android.bp index 68521af91f..cce0ca9364 100644 --- a/tools/aconfig/aconfig/Android.bp +++ b/tools/aconfig/aconfig/Android.bp @@ -68,6 +68,14 @@ aconfig_values { ], } +aconfig_values { + name: "aconfig.test.flag.second_values", + package: "com.android.aconfig.test", + srcs: [ + "tests/third.values", + ], +} + aconfig_value_set { name: "aconfig.test.flag.value_set", values: [ @@ -234,6 +242,12 @@ rust_aconfig_library { name: "libaconfig_test_rust_library", crate_name: "aconfig_test_rust_library", aconfig_declarations: "aconfig.test.flags", + host_supported: true, + apex_available: [ + "//apex_available:platform", + "com.android.configinfrastructure", + ], + min_sdk_version: "34", } rust_test { diff --git a/tools/aconfig/aconfig/data/Android.bp b/tools/aconfig/aconfig/data/Android.bp new file mode 100644 index 0000000000..1b5eef09ed --- /dev/null +++ b/tools/aconfig/aconfig/data/Android.bp @@ -0,0 +1,14 @@ +package { + default_applicable_licenses: ["Android-Apache-2.0"], +} + +python_binary_host { + name: "convert_finalized_flags_to_proto", + srcs: ["convert_finalized_flags_to_proto.py"], + libs: ["aconfig_internal_proto_python"], + version: { + py3: { + embedded_launcher: true, + }, + }, +} diff --git 
a/tools/aconfig/aconfig/data/convert_finalized_flags_to_proto.py b/tools/aconfig/aconfig/data/convert_finalized_flags_to_proto.py new file mode 100644 index 0000000000..15ff03cc97 --- /dev/null +++ b/tools/aconfig/aconfig/data/convert_finalized_flags_to_proto.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 +# +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import sys +import os + +from io import TextIOWrapper +from protos import aconfig_internal_pb2 +from typing import Dict, List, Set + +def extract_finalized_flags(flag_file: TextIOWrapper): + finalized_flags_for_sdk = list() + + for line in f: + flag_name = line.strip() + if flag_name: + finalized_flags_for_sdk.append(flag_name) + + return finalized_flags_for_sdk + +def remove_duplicate_flags(all_flags_with_duplicates: Dict[int, List]): + result_flags = collections.defaultdict(set) + + for api_level in sorted(all_flags_with_duplicates.keys(), key=int): + for flag in all_flags_with_duplicates[api_level]: + if not any(flag in value_set for value_set in result_flags.values()): + result_flags[api_level].add(flag) + + return result_flags + +def build_proto(all_flags: Set): + finalized_flags = aconfig_internal_pb2.finalized_flags() + for api_level, qualified_name_list in all_flags.items(): + for qualified_name in qualified_name_list: + package_name, flag_name = qualified_name.rsplit('.', 1) + finalized_flag = aconfig_internal_pb2.finalized_flag() + finalized_flag.name = flag_name + finalized_flag.package = package_name + finalized_flag.min_sdk = api_level + finalized_flags.finalized_flag.append(finalized_flag) + return finalized_flags + +if __name__ == '__main__': + if len(sys.argv) == 1: + sys.exit('No prebuilts/sdk directory provided.') + all_api_info_dir = sys.argv[1] + + all_flags_with_duplicates = {} + for sdk_dir in os.listdir(all_api_info_dir): + api_level = sdk_dir.rsplit('/', 1)[0].rstrip('0').rstrip('.') + + # No support for minor versions yet. This also removes non-numeric dirs. + # Update once floats are acceptable. + if not api_level.isdigit(): + continue + + flag_file_path = os.path.join(all_api_info_dir, sdk_dir, 'finalized-flags.txt') + try: + with open(flag_file_path, 'r') as f: + finalized_flags_for_sdk = extract_finalized_flags(f) + all_flags_with_duplicates[int(api_level)] = finalized_flags_for_sdk + except FileNotFoundError: + # Either this version is not finalized yet or looking at a + # /prebuilts/sdk/version before finalized-flags.txt was introduced. 
+ continue + + all_flags = remove_duplicate_flags(all_flags_with_duplicates) + finalized_flags = build_proto(all_flags) + sys.stdout.buffer.write(finalized_flags.SerializeToString()) diff --git a/tools/aconfig/aconfig/src/codegen/cpp.rs b/tools/aconfig/aconfig/src/codegen/cpp.rs index 2c569da8f6..30e1a89684 100644 --- a/tools/aconfig/aconfig/src/codegen/cpp.rs +++ b/tools/aconfig/aconfig/src/codegen/cpp.rs @@ -24,14 +24,13 @@ use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag}; use crate::codegen; use crate::codegen::CodegenMode; -use crate::commands::OutputFile; +use crate::commands::{should_include_flag, OutputFile}; pub fn generate_cpp_code<I>( package: &str, parsed_flags_iter: I, codegen_mode: CodegenMode, flag_ids: HashMap<String, u16>, - allow_instrumentation: bool, ) -> Result<Vec<OutputFile>> where I: Iterator<Item = ProtoParsedFlag>, @@ -59,7 +58,6 @@ where is_test_mode: codegen_mode == CodegenMode::Test, class_elements, container, - allow_instrumentation, }; let files = [ @@ -104,7 +102,6 @@ pub struct Context<'a> { pub is_test_mode: bool, pub class_elements: Vec<ClassElement>, pub container: String, - pub allow_instrumentation: bool, } #[derive(Serialize)] @@ -127,6 +124,23 @@ fn create_class_element( flag_ids: HashMap<String, u16>, rw_count: &mut i32, ) -> ClassElement { + let no_assigned_offset = !should_include_flag(pf); + + let flag_offset = match flag_ids.get(pf.name()) { + Some(offset) => offset, + None => { + // System/vendor/product RO+disabled flags have no offset in storage files. + // Assign placeholder value. + if no_assigned_offset { + &0 + } + // All other flags _must_ have an offset. + else { + panic!("{}", format!("missing flag offset for {}", pf.name())); + } + } + }; + ClassElement { readwrite_idx: if pf.permission() == ProtoFlagPermission::READ_WRITE { let index = *rw_count; @@ -144,7 +158,7 @@ fn create_class_element( }, flag_name: pf.name().to_string(), flag_macro: pf.name().to_uppercase(), - flag_offset: *flag_ids.get(pf.name()).expect("values checked at flag parse time"), + flag_offset: *flag_offset, device_config_namespace: pf.namespace().to_string(), device_config_flag: codegen::create_device_config_ident(package, pf.name()) .expect("values checked at flag parse time"), @@ -283,39 +297,23 @@ public: virtual ~flag_provider_interface() = default; virtual bool disabled_ro() = 0; - - virtual void disabled_ro(bool val) = 0; - virtual bool disabled_rw() = 0; - - virtual void disabled_rw(bool val) = 0; - virtual bool disabled_rw_exported() = 0; - - virtual void disabled_rw_exported(bool val) = 0; - virtual bool disabled_rw_in_other_namespace() = 0; - - virtual void disabled_rw_in_other_namespace(bool val) = 0; - virtual bool enabled_fixed_ro() = 0; - - virtual void enabled_fixed_ro(bool val) = 0; - virtual bool enabled_fixed_ro_exported() = 0; - - virtual void enabled_fixed_ro_exported(bool val) = 0; - virtual bool enabled_ro() = 0; - - virtual void enabled_ro(bool val) = 0; - virtual bool enabled_ro_exported() = 0; - - virtual void enabled_ro_exported(bool val) = 0; - virtual bool enabled_rw() = 0; + virtual void disabled_ro(bool val) = 0; + virtual void disabled_rw(bool val) = 0; + virtual void disabled_rw_exported(bool val) = 0; + virtual void disabled_rw_in_other_namespace(bool val) = 0; + virtual void enabled_fixed_ro(bool val) = 0; + virtual void enabled_fixed_ro_exported(bool val) = 0; + virtual void enabled_ro(bool val) = 0; + virtual void enabled_ro_exported(bool val) = 0; virtual void enabled_rw(bool val) = 0; virtual void 
reset_flags() {} @@ -450,56 +448,6 @@ void com_android_aconfig_test_reset_flags(); "#; - const EXPORTED_EXPORTED_HEADER_EXPECTED: &str = r#" -#pragma once - -#ifdef __cplusplus - -#include <memory> - -namespace com::android::aconfig::test { - -class flag_provider_interface { -public: - virtual ~flag_provider_interface() = default; - - virtual bool disabled_rw_exported() = 0; - - virtual bool enabled_fixed_ro_exported() = 0; - - virtual bool enabled_ro_exported() = 0; -}; - -extern std::unique_ptr<flag_provider_interface> provider_; - -inline bool disabled_rw_exported() { - return provider_->disabled_rw_exported(); -} - -inline bool enabled_fixed_ro_exported() { - return provider_->enabled_fixed_ro_exported(); -} - -inline bool enabled_ro_exported() { - return provider_->enabled_ro_exported(); -} - -} - -extern "C" { -#endif // __cplusplus - -bool com_android_aconfig_test_disabled_rw_exported(); - -bool com_android_aconfig_test_enabled_fixed_ro_exported(); - -bool com_android_aconfig_test_enabled_ro_exported(); - -#ifdef __cplusplus -} // extern "C" -#endif -"#; - const EXPORTED_FORCE_READ_ONLY_HEADER_EXPECTED: &str = r#" #pragma once @@ -584,7 +532,13 @@ bool com_android_aconfig_test_enabled_rw(); const PROD_SOURCE_FILE_EXPECTED: &str = r#" #include "com_android_aconfig_test.h" -#include <server_configurable_flags/get_flags.h> + +#include <unistd.h> +#include "aconfig_storage/aconfig_storage_read_api.hpp" +#include <android/log.h> +#define LOG_TAG "aconfig_cpp_codegen" +#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__) + #include <vector> namespace com::android::aconfig::test { @@ -592,36 +546,116 @@ namespace com::android::aconfig::test { class flag_provider : public flag_provider_interface { public: + flag_provider() + : cache_(4, -1) + , boolean_start_index_() + , flag_value_file_(nullptr) + , package_exists_in_storage_(true) { + + auto package_map_file = aconfig_storage::get_mapped_file( + "system", + aconfig_storage::StorageFileType::package_map); + if (!package_map_file.ok()) { + ALOGE("error: failed to get package map file: %s", package_map_file.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + auto context = aconfig_storage::get_package_read_context( + **package_map_file, "com.android.aconfig.test"); + if (!context.ok()) { + ALOGE("error: failed to get package read context: %s", context.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + if (!(context->package_exists)) { + package_exists_in_storage_ = false; + return; + } + + // cache package boolean flag start index + boolean_start_index_ = context->boolean_start_index; + + // unmap package map file and free memory + delete *package_map_file; + + auto flag_value_file = aconfig_storage::get_mapped_file( + "system", + aconfig_storage::StorageFileType::flag_val); + if (!flag_value_file.ok()) { + ALOGE("error: failed to get flag value file: %s", flag_value_file.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + // cache flag value file + flag_value_file_ = std::unique_ptr<aconfig_storage::MappedStorageFile>( + *flag_value_file); + + } + + virtual bool disabled_ro() override { return false; } virtual bool disabled_rw() override { if (cache_[0] == -1) { - cache_[0] = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw", - "false") == "true"; + if (!package_exists_in_storage_) { + return false; + } + + auto value = aconfig_storage::get_boolean_flag_value( + 
*flag_value_file_, + boolean_start_index_ + 0); + + if (!value.ok()) { + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return false; + } + + cache_[0] = *value; } return cache_[0]; } virtual bool disabled_rw_exported() override { if (cache_[1] == -1) { - cache_[1] = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; + if (!package_exists_in_storage_) { + return false; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + 1); + + if (!value.ok()) { + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return false; + } + + cache_[1] = *value; } return cache_[1]; } virtual bool disabled_rw_in_other_namespace() override { if (cache_[2] == -1) { - cache_[2] = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.other_namespace", - "com.android.aconfig.test.disabled_rw_in_other_namespace", - "false") == "true"; + if (!package_exists_in_storage_) { + return false; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + 2); + + if (!value.ok()) { + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return false; + } + + cache_[2] = *value; } return cache_[2]; } @@ -644,16 +678,32 @@ namespace com::android::aconfig::test { virtual bool enabled_rw() override { if (cache_[3] == -1) { - cache_[3] = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_rw", - "true") == "true"; + if (!package_exists_in_storage_) { + return true; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + 7); + + if (!value.ok()) { + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return true; + } + + cache_[3] = *value; } return cache_[3]; } private: std::vector<int8_t> cache_ = std::vector<int8_t>(4, -1); + + uint32_t boolean_start_index_; + + std::unique_ptr<aconfig_storage::MappedStorageFile> flag_value_file_; + + bool package_exists_in_storage_; }; std::unique_ptr<flag_provider_interface> provider_ = @@ -700,7 +750,13 @@ bool com_android_aconfig_test_enabled_rw() { const TEST_SOURCE_FILE_EXPECTED: &str = r#" #include "com_android_aconfig_test.h" -#include <server_configurable_flags/get_flags.h> + +#include <unistd.h> +#include "aconfig_storage/aconfig_storage_read_api.hpp" +#include <android/log.h> +#define LOG_TAG "aconfig_cpp_codegen" +#define ALOGE(...) 
__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__) + #include <unordered_map> #include <string> @@ -710,10 +766,63 @@ namespace com::android::aconfig::test { private: std::unordered_map<std::string, bool> overrides_; + uint32_t boolean_start_index_; + + std::unique_ptr<aconfig_storage::MappedStorageFile> flag_value_file_; + + bool package_exists_in_storage_; + public: flag_provider() : overrides_() - {} + , boolean_start_index_() + , flag_value_file_(nullptr) + , package_exists_in_storage_(true) { + + auto package_map_file = aconfig_storage::get_mapped_file( + "system", + aconfig_storage::StorageFileType::package_map); + + if (!package_map_file.ok()) { + ALOGE("error: failed to get package map file: %s", package_map_file.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + auto context = aconfig_storage::get_package_read_context( + **package_map_file, "com.android.aconfig.test"); + + if (!context.ok()) { + ALOGE("error: failed to get package read context: %s", context.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + if (!(context->package_exists)) { + package_exists_in_storage_ = false; + return; + } + + // cache package boolean flag start index + boolean_start_index_ = context->boolean_start_index; + + // unmap package map file and free memory + delete *package_map_file; + + auto flag_value_file = aconfig_storage::get_mapped_file( + "system", + aconfig_storage::StorageFileType::flag_val); + if (!flag_value_file.ok()) { + ALOGE("error: failed to get flag value file: %s", flag_value_file.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + // cache flag value file + flag_value_file_ = std::unique_ptr<aconfig_storage::MappedStorageFile>( + *flag_value_file); + + } virtual bool disabled_ro() override { auto it = overrides_.find("disabled_ro"); @@ -733,10 +842,20 @@ namespace com::android::aconfig::test { if (it != overrides_.end()) { return it->second; } else { - return server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw", - "false") == "true"; + if (!package_exists_in_storage_) { + return false; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + 0); + + if (!value.ok()) { + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return false; + } else { + return *value; + } } } @@ -749,10 +868,20 @@ namespace com::android::aconfig::test { if (it != overrides_.end()) { return it->second; } else { - return server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; + if (!package_exists_in_storage_) { + return false; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + 1); + + if (!value.ok()) { + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return false; + } else { + return *value; + } } } @@ -765,10 +894,20 @@ namespace com::android::aconfig::test { if (it != overrides_.end()) { return it->second; } else { - return server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.other_namespace", - "com.android.aconfig.test.disabled_rw_in_other_namespace", - "false") == "true"; + if (!package_exists_in_storage_) { + return false; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + 2); + + if (!value.ok()) { + ALOGE("error: failed to 
read flag value: %s", value.error().c_str()); + return false; + } else { + return *value; + } } } @@ -833,10 +972,20 @@ namespace com::android::aconfig::test { if (it != overrides_.end()) { return it->second; } else { - return server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_rw", - "true") == "true"; + if (!package_exists_in_storage_) { + return true; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + 7); + + if (!value.ok()) { + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return true; + } else { + return *value; + } } } @@ -941,68 +1090,6 @@ void com_android_aconfig_test_reset_flags() { "#; - const EXPORTED_SOURCE_FILE_EXPECTED: &str = r#" -#include "com_android_aconfig_test.h" -#include <server_configurable_flags/get_flags.h> -#include <vector> - -namespace com::android::aconfig::test { - - class flag_provider : public flag_provider_interface { - public: - virtual bool disabled_rw_exported() override { - if (cache_[0] == -1) { - cache_[0] = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; - } - return cache_[0]; - } - - virtual bool enabled_fixed_ro_exported() override { - if (cache_[1] == -1) { - cache_[1] = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_fixed_ro_exported", - "false") == "true"; - } - return cache_[1]; - } - - virtual bool enabled_ro_exported() override { - if (cache_[2] == -1) { - cache_[2] = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_ro_exported", - "false") == "true"; - } - return cache_[2]; - } - - private: - std::vector<int8_t> cache_ = std::vector<int8_t>(3, -1); - }; - - std::unique_ptr<flag_provider_interface> provider_ = - std::make_unique<flag_provider>(); -} - -bool com_android_aconfig_test_disabled_rw_exported() { - return com::android::aconfig::test::disabled_rw_exported(); -} - -bool com_android_aconfig_test_enabled_fixed_ro_exported() { - return com::android::aconfig::test::enabled_fixed_ro_exported(); -} - -bool com_android_aconfig_test_enabled_ro_exported() { - return com::android::aconfig::test::enabled_ro_exported(); -} - - -"#; - const FORCE_READ_ONLY_SOURCE_FILE_EXPECTED: &str = r#" #include "com_android_aconfig_test.h" @@ -1187,7 +1274,6 @@ bool com_android_aconfig_test_enabled_ro() { mode: CodegenMode, expected_header: &str, expected_src: &str, - allow_instrumentation: bool, ) { let modified_parsed_flags = crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap(); @@ -1198,7 +1284,6 @@ bool com_android_aconfig_test_enabled_ro() { modified_parsed_flags.into_iter(), mode, flag_ids, - allow_instrumentation, ) .unwrap(); let mut generated_files_map = HashMap::new(); @@ -1238,7 +1323,6 @@ bool com_android_aconfig_test_enabled_ro() { CodegenMode::Production, EXPORTED_PROD_HEADER_EXPECTED, PROD_SOURCE_FILE_EXPECTED, - false, ); } @@ -1250,19 +1334,6 @@ bool com_android_aconfig_test_enabled_ro() { CodegenMode::Test, EXPORTED_TEST_HEADER_EXPECTED, TEST_SOURCE_FILE_EXPECTED, - false, - ); - } - - #[test] - fn test_generate_cpp_code_for_exported() { - let parsed_flags = crate::test::parse_test_flags(); - test_generate_cpp_code( - parsed_flags, - CodegenMode::Exported, - EXPORTED_EXPORTED_HEADER_EXPECTED, - 
EXPORTED_SOURCE_FILE_EXPECTED, - false, ); } @@ -1274,7 +1345,6 @@ bool com_android_aconfig_test_enabled_ro() { CodegenMode::ForceReadOnly, EXPORTED_FORCE_READ_ONLY_HEADER_EXPECTED, FORCE_READ_ONLY_SOURCE_FILE_EXPECTED, - false, ); } @@ -1286,7 +1356,6 @@ bool com_android_aconfig_test_enabled_ro() { CodegenMode::Production, READ_ONLY_EXPORTED_PROD_HEADER_EXPECTED, READ_ONLY_PROD_SOURCE_FILE_EXPECTED, - false, ); } } diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs index a74ef854d6..8588a720cf 100644 --- a/tools/aconfig/aconfig/src/codegen/java.rs +++ b/tools/aconfig/aconfig/src/codegen/java.rs @@ -22,30 +22,42 @@ use tinytemplate::TinyTemplate; use crate::codegen; use crate::codegen::CodegenMode; -use crate::commands::OutputFile; +use crate::commands::{should_include_flag, OutputFile}; use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag}; use std::collections::HashMap; +// Arguments to configure codegen for generate_java_code. +pub struct JavaCodegenConfig { + pub codegen_mode: CodegenMode, + pub flag_ids: HashMap<String, u16>, + pub allow_instrumentation: bool, + pub package_fingerprint: u64, + pub new_exported: bool, + pub check_api_level: bool, +} + pub fn generate_java_code<I>( package: &str, parsed_flags_iter: I, - codegen_mode: CodegenMode, - flag_ids: HashMap<String, u16>, - allow_instrumentation: bool, + config: JavaCodegenConfig, ) -> Result<Vec<OutputFile>> where I: Iterator<Item = ProtoParsedFlag>, { - let flag_elements: Vec<FlagElement> = - parsed_flags_iter.map(|pf| create_flag_element(package, &pf, flag_ids.clone())).collect(); + let flag_elements: Vec<FlagElement> = parsed_flags_iter + .map(|pf| { + create_flag_element(package, &pf, config.flag_ids.clone(), config.check_api_level) + }) + .collect(); let namespace_flags = gen_flags_by_namespace(&flag_elements); let properties_set: BTreeSet<String> = flag_elements.iter().map(|fe| format_property_name(&fe.device_config_namespace)).collect(); - let is_test_mode = codegen_mode == CodegenMode::Test; - let library_exported = codegen_mode == CodegenMode::Exported; + let is_test_mode = config.codegen_mode == CodegenMode::Test; + let library_exported = config.codegen_mode == CodegenMode::Exported; let runtime_lookup_required = flag_elements.iter().any(|elem| elem.is_read_write) || library_exported; let container = (flag_elements.first().expect("zero template flags").container).to_string(); + let is_platform_container = matches!(container.as_str(), "system" | "product" | "vendor"); let context = Context { flag_elements, namespace_flags, @@ -54,15 +66,15 @@ where properties_set, package_name: package.to_string(), library_exported, - allow_instrumentation, + allow_instrumentation: config.allow_instrumentation, container, + is_platform_container, + package_fingerprint: format!("0x{:X}L", config.package_fingerprint), + new_exported: config.new_exported, }; let mut template = TinyTemplate::new(); template.add_template("Flags.java", include_str!("../../templates/Flags.java.template"))?; - template.add_template( - "FeatureFlagsImpl.java", - include_str!("../../templates/FeatureFlagsImpl.java.template"), - )?; + add_feature_flags_impl_template(&context, &mut template)?; template.add_template( "FeatureFlags.java", include_str!("../../templates/FeatureFlags.java.template"), @@ -123,6 +135,9 @@ struct Context { pub library_exported: bool, pub allow_instrumentation: bool, pub container: String, + pub is_platform_container: bool, + pub package_fingerprint: String, + pub 
new_exported: bool, } #[derive(Serialize, Debug)] @@ -137,30 +152,55 @@ struct FlagElement { pub default_value: bool, pub device_config_namespace: String, pub device_config_flag: String, + pub flag_name: String, pub flag_name_constant_suffix: String, pub flag_offset: u16, pub is_read_write: bool, pub method_name: String, pub properties: String, + pub finalized_sdk_present: bool, + pub finalized_sdk_value: i32, } fn create_flag_element( package: &str, pf: &ProtoParsedFlag, flag_offsets: HashMap<String, u16>, + check_api_level: bool, ) -> FlagElement { let device_config_flag = codegen::create_device_config_ident(package, pf.name()) .expect("values checked at flag parse time"); + + let no_assigned_offset = !should_include_flag(pf); + + let flag_offset = match flag_offsets.get(pf.name()) { + Some(offset) => offset, + None => { + // System/vendor/product RO+disabled flags have no offset in storage files. + // Assign placeholder value. + if no_assigned_offset { + &0 + } + // All other flags _must_ have an offset. + else { + panic!("{}", format!("missing flag offset for {}", pf.name())); + } + } + }; + FlagElement { container: pf.container().to_string(), default_value: pf.state() == ProtoFlagState::ENABLED, device_config_namespace: pf.namespace().to_string(), device_config_flag, + flag_name: pf.name().to_string(), flag_name_constant_suffix: pf.name().to_ascii_uppercase(), - flag_offset: *flag_offsets.get(pf.name()).expect("didnt find package offset :("), + flag_offset: *flag_offset, is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE, method_name: format_java_method_name(pf.name()), properties: format_property_name(pf.namespace()), + finalized_sdk_present: check_api_level, + finalized_sdk_value: i32::MAX, // TODO: b/378936061 - Read value from artifact. } } @@ -190,6 +230,58 @@ fn format_property_name(property_name: &str) -> String { format!("mProperties{}{}", &name[0..1].to_ascii_uppercase(), &name[1..]) } +fn add_feature_flags_impl_template( + context: &Context, + template: &mut TinyTemplate, +) -> Result<(), tinytemplate::error::Error> { + if context.is_test_mode { + // Test mode has its own template, so use regardless of any other settings. + template.add_template( + "FeatureFlagsImpl.java", + include_str!("../../templates/FeatureFlagsImpl.test_mode.java.template"), + )?; + return Ok(()); + } + + match (context.library_exported, context.new_exported, context.allow_instrumentation) { + // Exported library with new_exported enabled, use new storage exported template. + (true, true, _) => { + template.add_template( + "FeatureFlagsImpl.java", + include_str!("../../templates/FeatureFlagsImpl.exported.java.template"), + )?; + } + + // Exported library with new_exported NOT enabled, use legacy (device + // config) template, because regardless of allow_instrumentation, we use + // device config for exported libs if new_exported isn't enabled. + // Remove once new_exported is fully rolled out. + (true, false, _) => { + template.add_template( + "FeatureFlagsImpl.java", + include_str!("../../templates/FeatureFlagsImpl.java.template"), + )?; + } + + // New storage internal mode. + (false, _, true) => { + template.add_template( + "FeatureFlagsImpl.java", + include_str!("../../templates/FeatureFlagsImpl.new_storage.java.template"), + )?; + } + + // Device config internal mode. Use legacy (device config) template. 
+ (false, _, false) => { + template.add_template( + "FeatureFlagsImpl.java", + include_str!("../../templates/FeatureFlagsImpl.java.template"), + )?; + } + }; + Ok(()) +} + #[cfg(test)] mod tests { use super::*; @@ -494,12 +586,18 @@ mod tests { crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap(); let flag_ids = assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap(); + let config = JavaCodegenConfig { + codegen_mode: mode, + flag_ids, + allow_instrumentation: true, + package_fingerprint: 5801144784618221668, + new_exported: false, + check_api_level: false, + }; let generated_files = generate_java_code( crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), - mode, - flag_ids, - false, + config, ) .unwrap(); let expect_flags_content = EXPECTED_FLAG_COMMON_CONTENT.to_string() @@ -507,25 +605,38 @@ mod tests { private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl(); }"#; - let expected_featureflagsmpl_content_0 = r#" + let expected_featureflagsmpl_content = r#" package com.android.aconfig.test; // TODO(b/303773055): Remove the annotation after access issue is resolved. import android.compat.annotation.UnsupportedAppUsage; - import android.provider.DeviceConfig; - import android.provider.DeviceConfig.Properties; - "#; - - let expected_featureflagsmpl_content_1 = r#" + import android.os.Build; + import android.os.flagging.PlatformAconfigPackageInternal; + import android.util.Log; /** @hide */ public final class FeatureFlagsImpl implements FeatureFlags { - private static volatile boolean aconfig_test_is_cached = false; - private static volatile boolean other_namespace_is_cached = false; + private static final String TAG = "FeatureFlagsImpl"; + private static volatile boolean isCached = false; private static boolean disabledRw = false; private static boolean disabledRwExported = false; private static boolean disabledRwInOtherNamespace = false; private static boolean enabledRw = true; - "#; - let expected_featureflagsmpl_content_2 = r#" + private void init() { + try { + PlatformAconfigPackageInternal reader = PlatformAconfigPackageInternal.load("com.android.aconfig.test", 0x5081CE7221C77064L); + disabledRw = reader.getBooleanFlagValue(0); + disabledRwExported = reader.getBooleanFlagValue(1); + enabledRw = reader.getBooleanFlagValue(7); + disabledRwInOtherNamespace = reader.getBooleanFlagValue(2); + } catch (Exception e) { + Log.e(TAG, e.toString()); + } catch (LinkageError e) { + // for mainline module running on older devices. + // This should be replaces to version check, after the version bump. 
+ Log.e(TAG, e.toString()); + } + isCached = true; + } + @Override @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage @@ -536,8 +647,8 @@ mod tests { @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage public boolean disabledRw() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (!isCached) { + init(); } return disabledRw; } @@ -545,8 +656,8 @@ mod tests { @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage public boolean disabledRwExported() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (!isCached) { + init(); } return disabledRwExported; } @@ -554,8 +665,8 @@ mod tests { @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage public boolean disabledRwInOtherNamespace() { - if (!other_namespace_is_cached) { - load_overrides_other_namespace(); + if (!isCached) { + init(); } return disabledRwInOtherNamespace; } @@ -587,64 +698,17 @@ mod tests { @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage public boolean enabledRw() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (!isCached) { + init(); } return enabledRw; } } "#; - let expect_featureflagsimpl_content_old = expected_featureflagsmpl_content_0.to_owned() - + expected_featureflagsmpl_content_1 - + r#" - private void load_overrides_aconfig_test() { - try { - Properties properties = DeviceConfig.getProperties("aconfig_test"); - disabledRw = - properties.getBoolean(Flags.FLAG_DISABLED_RW, false); - disabledRwExported = - properties.getBoolean(Flags.FLAG_DISABLED_RW_EXPORTED, false); - enabledRw = - properties.getBoolean(Flags.FLAG_ENABLED_RW, true); - } catch (NullPointerException e) { - throw new RuntimeException( - "Cannot read value from namespace aconfig_test " - + "from DeviceConfig. It could be that the code using flag " - + "executed before SettingsProvider initialization. Please use " - + "fixed read-only flag by adding is_fixed_read_only: true in " - + "flag declaration.", - e - ); - } - aconfig_test_is_cached = true; - } - - private void load_overrides_other_namespace() { - try { - Properties properties = DeviceConfig.getProperties("other_namespace"); - disabledRwInOtherNamespace = - properties.getBoolean(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false); - } catch (NullPointerException e) { - throw new RuntimeException( - "Cannot read value from namespace other_namespace " - + "from DeviceConfig. It could be that the code using flag " - + "executed before SettingsProvider initialization. 
Please use " - + "fixed read-only flag by adding is_fixed_read_only: true in " - + "flag declaration.", - e - ); - } - other_namespace_is_cached = true; - }"# - + expected_featureflagsmpl_content_2; - let mut file_set = HashMap::from([ ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()), - ( - "com/android/aconfig/test/FeatureFlagsImpl.java", - &expect_featureflagsimpl_content_old, - ), + ("com/android/aconfig/test/FeatureFlagsImpl.java", expected_featureflagsmpl_content), ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT), ( "com/android/aconfig/test/CustomFeatureFlags.java", @@ -672,159 +736,187 @@ mod tests { } assert!(file_set.is_empty()); + } + #[test] + fn test_generate_java_code_exported() { let parsed_flags = crate::test::parse_test_flags(); - let mode = CodegenMode::Production; + let mode = CodegenMode::Exported; let modified_parsed_flags = crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap(); let flag_ids = assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap(); + let config = JavaCodegenConfig { + codegen_mode: mode, + flag_ids, + allow_instrumentation: true, + package_fingerprint: 5801144784618221668, + new_exported: false, + check_api_level: false, + }; let generated_files = generate_java_code( crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), - mode, - flag_ids, - true, + config, ) .unwrap(); - let expect_featureflagsimpl_content_new = expected_featureflagsmpl_content_0.to_owned() - + r#" - import android.aconfig.storage.StorageInternalReader; - import android.util.Log; - "# - + expected_featureflagsmpl_content_1 - + r#" - StorageInternalReader reader; - boolean readFromNewStorage; - - private final static String TAG = "AconfigJavaCodegen"; - private final static String SUCCESS_LOG = "success: %s value matches"; - private final static String MISMATCH_LOG = "error: %s value mismatch, new storage value is %s, old storage value is %s"; - private final static String ERROR_LOG = "error: failed to read flag value"; - - private void init() { - if (reader != null) return; - if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.storage_test_mission_1", false)) { - readFromNewStorage = true; - try { - reader = new StorageInternalReader("system", "com.android.aconfig.test"); - } catch (Exception e) { - reader = null; - } + let expect_flags_content = r#" + package com.android.aconfig.test; + /** @hide */ + public final class Flags { + /** @hide */ + public static final String FLAG_DISABLED_RW_EXPORTED = "com.android.aconfig.test.disabled_rw_exported"; + /** @hide */ + public static final String FLAG_ENABLED_FIXED_RO_EXPORTED = "com.android.aconfig.test.enabled_fixed_ro_exported"; + /** @hide */ + public static final String FLAG_ENABLED_RO_EXPORTED = "com.android.aconfig.test.enabled_ro_exported"; + public static boolean disabledRwExported() { + return FEATURE_FLAGS.disabledRwExported(); + } + public static boolean enabledFixedRoExported() { + return FEATURE_FLAGS.enabledFixedRoExported(); } + public static boolean enabledRoExported() { + return FEATURE_FLAGS.enabledRoExported(); + } + private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl(); } + "#; - private void load_overrides_aconfig_test() { - try { - Properties properties = DeviceConfig.getProperties("aconfig_test"); - disabledRw = - properties.getBoolean(Flags.FLAG_DISABLED_RW, false); - disabledRwExported = - properties.getBoolean(Flags.FLAG_DISABLED_RW_EXPORTED, 
false); - enabledRw = - properties.getBoolean(Flags.FLAG_ENABLED_RW, true); - } catch (NullPointerException e) { - throw new RuntimeException( - "Cannot read value from namespace aconfig_test " - + "from DeviceConfig. It could be that the code using flag " - + "executed before SettingsProvider initialization. Please use " - + "fixed read-only flag by adding is_fixed_read_only: true in " - + "flag declaration.", - e - ); - } - aconfig_test_is_cached = true; - init(); - if (readFromNewStorage && reader != null) { - boolean val; - try { - val = reader.getBooleanFlagValue(1); - if (val == disabledRw) { - Log.i(TAG, String.format(SUCCESS_LOG, "disabledRw")); - } else { - Log.i(TAG, String.format(MISMATCH_LOG, "disabledRw", val, disabledRw)); - } - - if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) { - disabledRw = val; - } - - val = reader.getBooleanFlagValue(2); - if (val == disabledRwExported) { - Log.i(TAG, String.format(SUCCESS_LOG, "disabledRwExported")); - } else { - Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwExported", val, disabledRwExported)); - } - - if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) { - disabledRwExported = val; - } - - val = reader.getBooleanFlagValue(8); - if (val == enabledRw) { - Log.i(TAG, String.format(SUCCESS_LOG, "enabledRw")); - } else { - Log.i(TAG, String.format(MISMATCH_LOG, "enabledRw", val, enabledRw)); - } - - if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) { - enabledRw = val; - } + let expect_feature_flags_content = r#" + package com.android.aconfig.test; + /** @hide */ + public interface FeatureFlags { + boolean disabledRwExported(); + boolean enabledFixedRoExported(); + boolean enabledRoExported(); + } + "#; - } catch (Exception e) { - Log.e(TAG, ERROR_LOG, e); + let expect_feature_flags_impl_content = r#" + package com.android.aconfig.test; + import android.os.Binder; + import android.provider.DeviceConfig; + import android.provider.DeviceConfig.Properties; + /** @hide */ + public final class FeatureFlagsImpl implements FeatureFlags { + private static volatile boolean aconfig_test_is_cached = false; + private static boolean disabledRwExported = false; + private static boolean enabledFixedRoExported = false; + private static boolean enabledRoExported = false; + + private void load_overrides_aconfig_test() { + final long ident = Binder.clearCallingIdentity(); + try { + Properties properties = DeviceConfig.getProperties("aconfig_test"); + disabledRwExported = + properties.getBoolean(Flags.FLAG_DISABLED_RW_EXPORTED, false); + enabledFixedRoExported = + properties.getBoolean(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED, false); + enabledRoExported = + properties.getBoolean(Flags.FLAG_ENABLED_RO_EXPORTED, false); + } catch (NullPointerException e) { + throw new RuntimeException( + "Cannot read value from namespace aconfig_test " + + "from DeviceConfig. It could be that the code using flag " + + "executed before SettingsProvider initialization. 
Please use " + + "fixed read-only flag by adding is_fixed_read_only: true in " + + "flag declaration.", + e + ); + } catch (SecurityException e) { + // for isolated process case, skip loading flag value from the storage, use the default + } finally { + Binder.restoreCallingIdentity(ident); } + aconfig_test_is_cached = true; } - } + @Override + public boolean disabledRwExported() { + if (!aconfig_test_is_cached) { + load_overrides_aconfig_test(); + } + return disabledRwExported; + } + @Override + public boolean enabledFixedRoExported() { + if (!aconfig_test_is_cached) { + load_overrides_aconfig_test(); + } + return enabledFixedRoExported; + } + @Override + public boolean enabledRoExported() { + if (!aconfig_test_is_cached) { + load_overrides_aconfig_test(); + } + return enabledRoExported; + } + }"#; - private void load_overrides_other_namespace() { - try { - Properties properties = DeviceConfig.getProperties("other_namespace"); - disabledRwInOtherNamespace = - properties.getBoolean(Flags.FLAG_DISABLED_RW_IN_OTHER_NAMESPACE, false); - } catch (NullPointerException e) { - throw new RuntimeException( - "Cannot read value from namespace other_namespace " - + "from DeviceConfig. It could be that the code using flag " - + "executed before SettingsProvider initialization. Please use " - + "fixed read-only flag by adding is_fixed_read_only: true in " - + "flag declaration.", - e - ); + let expect_custom_feature_flags_content = r#" + package com.android.aconfig.test; + + import java.util.Arrays; + import java.util.HashSet; + import java.util.List; + import java.util.Set; + import java.util.function.BiPredicate; + import java.util.function.Predicate; + + /** @hide */ + public class CustomFeatureFlags implements FeatureFlags { + + private BiPredicate<String, Predicate<FeatureFlags>> mGetValueImpl; + + public CustomFeatureFlags(BiPredicate<String, Predicate<FeatureFlags>> getValueImpl) { + mGetValueImpl = getValueImpl; } - other_namespace_is_cached = true; - init(); - if (readFromNewStorage && reader != null) { - boolean val; - try { - val = reader.getBooleanFlagValue(3); - if (val == disabledRwInOtherNamespace) { - Log.i(TAG, String.format(SUCCESS_LOG, "disabledRwInOtherNamespace")); - } else { - Log.i(TAG, String.format(MISMATCH_LOG, "disabledRwInOtherNamespace", val, disabledRwInOtherNamespace)); - } - if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) { - disabledRwInOtherNamespace = val; - } + @Override + public boolean disabledRwExported() { + return getValue(Flags.FLAG_DISABLED_RW_EXPORTED, + FeatureFlags::disabledRwExported); + } + @Override + public boolean enabledFixedRoExported() { + return getValue(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED, + FeatureFlags::enabledFixedRoExported); + } + @Override + public boolean enabledRoExported() { + return getValue(Flags.FLAG_ENABLED_RO_EXPORTED, + FeatureFlags::enabledRoExported); + } - } catch (Exception e) { - Log.e(TAG, ERROR_LOG, e); - } + protected boolean getValue(String flagName, Predicate<FeatureFlags> getter) { + return mGetValueImpl.test(flagName, getter); } - }"# + expected_featureflagsmpl_content_2; + + public List<String> getFlagNames() { + return Arrays.asList( + Flags.FLAG_DISABLED_RW_EXPORTED, + Flags.FLAG_ENABLED_FIXED_RO_EXPORTED, + Flags.FLAG_ENABLED_RO_EXPORTED + ); + } + + private Set<String> mReadOnlyFlagsSet = new HashSet<>( + Arrays.asList( + "" + ) + ); + } + "#; let mut file_set = HashMap::from([ - ("com/android/aconfig/test/Flags.java", 
expect_flags_content.as_str()), - ( - "com/android/aconfig/test/FeatureFlagsImpl.java", - &expect_featureflagsimpl_content_new, - ), - ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT), + ("com/android/aconfig/test/Flags.java", expect_flags_content), + ("com/android/aconfig/test/FeatureFlags.java", expect_feature_flags_content), + ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_feature_flags_impl_content), ( "com/android/aconfig/test/CustomFeatureFlags.java", - EXPECTED_CUSTOMFEATUREFLAGS_CONTENT, + expect_custom_feature_flags_content, ), ( "com/android/aconfig/test/FakeFeatureFlagsImpl.java", @@ -851,19 +943,25 @@ mod tests { } #[test] - fn test_generate_java_code_exported() { + fn test_generate_java_code_new_exported() { let parsed_flags = crate::test::parse_test_flags(); let mode = CodegenMode::Exported; let modified_parsed_flags = crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap(); let flag_ids = assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap(); + let config = JavaCodegenConfig { + codegen_mode: mode, + flag_ids, + allow_instrumentation: true, + package_fingerprint: 5801144784618221668, + new_exported: true, + check_api_level: false, + }; let generated_files = generate_java_code( crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), - mode, - flag_ids, - true, + config, ) .unwrap(); @@ -902,55 +1000,50 @@ mod tests { let expect_feature_flags_impl_content = r#" package com.android.aconfig.test; - import android.provider.DeviceConfig; - import android.provider.DeviceConfig.Properties; + import android.os.Build; + import android.os.flagging.AconfigPackage; + import android.util.Log; /** @hide */ public final class FeatureFlagsImpl implements FeatureFlags { - private static volatile boolean aconfig_test_is_cached = false; + private static final String TAG = "FeatureFlagsImplExport"; + private static volatile boolean isCached = false; private static boolean disabledRwExported = false; private static boolean enabledFixedRoExported = false; private static boolean enabledRoExported = false; - - - private void load_overrides_aconfig_test() { + private void init() { try { - Properties properties = DeviceConfig.getProperties("aconfig_test"); - disabledRwExported = - properties.getBoolean(Flags.FLAG_DISABLED_RW_EXPORTED, false); - enabledFixedRoExported = - properties.getBoolean(Flags.FLAG_ENABLED_FIXED_RO_EXPORTED, false); - enabledRoExported = - properties.getBoolean(Flags.FLAG_ENABLED_RO_EXPORTED, false); - } catch (NullPointerException e) { - throw new RuntimeException( - "Cannot read value from namespace aconfig_test " - + "from DeviceConfig. It could be that the code using flag " - + "executed before SettingsProvider initialization. Please use " - + "fixed read-only flag by adding is_fixed_read_only: true in " - + "flag declaration.", - e - ); + AconfigPackage reader = AconfigPackage.load("com.android.aconfig.test"); + disabledRwExported = reader.getBooleanFlagValue("disabled_rw_exported", false); + enabledFixedRoExported = reader.getBooleanFlagValue("enabled_fixed_ro_exported", false); + enabledRoExported = reader.getBooleanFlagValue("enabled_ro_exported", false); + } catch (Exception e) { + // pass + Log.e(TAG, e.toString()); + } catch (LinkageError e) { + // for mainline module running on older devices. + // This should be replaces to version check, after the version bump. 
+ Log.e(TAG, e.toString()); } - aconfig_test_is_cached = true; + isCached = true; } @Override public boolean disabledRwExported() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (!isCached) { + init(); } return disabledRwExported; } @Override public boolean enabledFixedRoExported() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (!isCached) { + init(); } return enabledFixedRoExported; } @Override public boolean enabledRoExported() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (!isCached) { + init(); } return enabledRoExported; } @@ -1051,12 +1144,18 @@ mod tests { crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap(); let flag_ids = assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap(); + let config = JavaCodegenConfig { + codegen_mode: mode, + flag_ids, + allow_instrumentation: true, + package_fingerprint: 5801144784618221668, + new_exported: false, + check_api_level: false, + }; let generated_files = generate_java_code( crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), - mode, - flag_ids, - true, + config, ) .unwrap(); @@ -1172,12 +1271,18 @@ mod tests { crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap(); let flag_ids = assign_flag_ids(crate::test::TEST_PACKAGE, modified_parsed_flags.iter()).unwrap(); + let config = JavaCodegenConfig { + codegen_mode: mode, + flag_ids, + allow_instrumentation: true, + package_fingerprint: 5801144784618221668, + new_exported: false, + check_api_level: false, + }; let generated_files = generate_java_code( crate::test::TEST_PACKAGE, modified_parsed_flags.into_iter(), - mode, - flag_ids, - true, + config, ) .unwrap(); let expect_featureflags_content = r#" diff --git a/tools/aconfig/aconfig/src/codegen/mod.rs b/tools/aconfig/aconfig/src/codegen/mod.rs index 1ea3b37849..9ed66dbd03 100644 --- a/tools/aconfig/aconfig/src/codegen/mod.rs +++ b/tools/aconfig/aconfig/src/codegen/mod.rs @@ -50,67 +50,6 @@ impl std::fmt::Display for CodegenMode { #[cfg(test)] mod tests { use super::*; - use aconfig_protos::is_valid_container_ident; - - #[test] - fn test_is_valid_name_ident() { - assert!(is_valid_name_ident("foo")); - assert!(is_valid_name_ident("foo_bar_123")); - assert!(is_valid_name_ident("foo_")); - - assert!(!is_valid_name_ident("")); - assert!(!is_valid_name_ident("123_foo")); - assert!(!is_valid_name_ident("foo-bar")); - assert!(!is_valid_name_ident("foo-b\u{00e5}r")); - assert!(!is_valid_name_ident("foo__bar")); - assert!(!is_valid_name_ident("_foo")); - } - - #[test] - fn test_is_valid_package_ident() { - assert!(is_valid_package_ident("foo.bar")); - assert!(is_valid_package_ident("foo.bar_baz")); - assert!(is_valid_package_ident("foo.bar.a123")); - - assert!(!is_valid_package_ident("foo_bar_123")); - assert!(!is_valid_package_ident("foo")); - assert!(!is_valid_package_ident("foo._bar")); - assert!(!is_valid_package_ident("")); - assert!(!is_valid_package_ident("123_foo")); - assert!(!is_valid_package_ident("foo-bar")); - assert!(!is_valid_package_ident("foo-b\u{00e5}r")); - assert!(!is_valid_package_ident("foo.bar.123")); - assert!(!is_valid_package_ident(".foo.bar")); - assert!(!is_valid_package_ident("foo.bar.")); - assert!(!is_valid_package_ident(".")); - assert!(!is_valid_package_ident("..")); - assert!(!is_valid_package_ident("foo..bar")); - assert!(!is_valid_package_ident("foo.__bar")); - } - - #[test] - fn test_is_valid_container_ident() { - 
assert!(is_valid_container_ident("foo.bar")); - assert!(is_valid_container_ident("foo.bar_baz")); - assert!(is_valid_container_ident("foo.bar.a123")); - assert!(is_valid_container_ident("foo")); - assert!(is_valid_container_ident("foo_bar_123")); - - assert!(!is_valid_container_ident("")); - assert!(!is_valid_container_ident("foo._bar")); - assert!(!is_valid_container_ident("_foo")); - assert!(!is_valid_container_ident("123_foo")); - assert!(!is_valid_container_ident("foo-bar")); - assert!(!is_valid_container_ident("foo-b\u{00e5}r")); - assert!(!is_valid_container_ident("foo.bar.123")); - assert!(!is_valid_container_ident(".foo.bar")); - assert!(!is_valid_container_ident("foo.bar.")); - assert!(!is_valid_container_ident(".")); - assert!(!is_valid_container_ident("..")); - assert!(!is_valid_container_ident("foo..bar")); - assert!(!is_valid_container_ident("foo.__bar")); - } - #[test] fn test_create_device_config_ident() { assert_eq!( diff --git a/tools/aconfig/aconfig/src/codegen/rust.rs b/tools/aconfig/aconfig/src/codegen/rust.rs index 1292e0adb5..2ee5f36822 100644 --- a/tools/aconfig/aconfig/src/codegen/rust.rs +++ b/tools/aconfig/aconfig/src/codegen/rust.rs @@ -24,14 +24,13 @@ use std::collections::HashMap; use crate::codegen; use crate::codegen::CodegenMode; -use crate::commands::OutputFile; +use crate::commands::{should_include_flag, OutputFile}; pub fn generate_rust_code<I>( package: &str, flag_ids: HashMap<String, u16>, parsed_flags_iter: I, codegen_mode: CodegenMode, - allow_instrumentation: bool, ) -> Result<OutputFile> where I: Iterator<Item = ProtoParsedFlag>, @@ -46,7 +45,6 @@ where template_flags, modules: package.split('.').map(|s| s.to_string()).collect::<Vec<_>>(), has_readwrite, - allow_instrumentation, container, }; let mut template = TinyTemplate::new(); @@ -70,7 +68,6 @@ struct TemplateContext { pub template_flags: Vec<TemplateParsedFlag>, pub modules: Vec<String>, pub has_readwrite: bool, - pub allow_instrumentation: bool, pub container: String, } @@ -88,6 +85,21 @@ struct TemplateParsedFlag { impl TemplateParsedFlag { #[allow(clippy::nonminimal_bool)] fn new(package: &str, flag_offsets: HashMap<String, u16>, pf: &ProtoParsedFlag) -> Self { + let flag_offset = match flag_offsets.get(pf.name()) { + Some(offset) => offset, + None => { + // System/vendor/product RO+disabled flags have no offset in storage files. + // Assign placeholder value. + if !should_include_flag(pf) { + &0 + } + // All other flags _must_ have an offset. 
+ else { + panic!("{}", format!("missing flag offset for {}", pf.name())); + } + } + }; + Self { readwrite: pf.permission() == ProtoFlagPermission::READ_WRITE, default_value: match pf.state() { @@ -96,7 +108,7 @@ impl TemplateParsedFlag { }, name: pf.name().to_string(), container: pf.container().to_string(), - flag_offset: *flag_offsets.get(pf.name()).expect("didnt find package offset :("), + flag_offset: *flag_offset, device_config_namespace: pf.namespace().to_string(), device_config_flag: codegen::create_device_config_ident(package, pf.name()) .expect("values checked at flag parse time"), @@ -113,43 +125,186 @@ mod tests { use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::path::Path; use std::io::Write; +use std::sync::LazyLock; use log::{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - /// flag provider pub struct FlagProvider; -lazy_static::lazy_static! { - /// flag value cache for disabled_rw - static ref CACHED_disabled_rw: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw", - "false") == "true"; - - /// flag value cache for disabled_rw_exported - static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; - - /// flag value cache for disabled_rw_in_other_namespace - static ref CACHED_disabled_rw_in_other_namespace: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.other_namespace", - "com.android.aconfig.test.disabled_rw_in_other_namespace", - "false") == "true"; - - /// flag value cache for enabled_rw - static ref CACHED_enabled_rw: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_rw", - "true") == "true"; +static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe { + get_mapped_storage_file("system", StorageFileType::PackageMap) + .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test")) + .map(|context| context.map(|c| c.boolean_start_index)) +}); -} +static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe { + get_mapped_storage_file("system", StorageFileType::FlagVal) +}); + +/// flag value cache for disabled_rw +static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| { + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 0) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Err(format!("failed to flag package com.android.aconfig.test")) + } + } + }) + }); + + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + return false; + } + } +}); + +/// flag value cache for disabled_rw_exported +static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| { + // This will be called multiple times. Subsequent calls after the first are noops. + logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 1) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Err(format!("failed to flag package com.android.aconfig.test")) + } + } + }) + }); + + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + return false; + } + } +}); + +/// flag value cache for disabled_rw_in_other_namespace +static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| { + // This will be called multiple times. Subsequent calls after the first are noops. + logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 2) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Err(format!("failed to flag package com.android.aconfig.test")) + } + } + }) + }); + + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + return false; + } + } +}); + + +/// flag value cache for enabled_rw +static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| { + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 7) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Err(format!("failed to flag package com.android.aconfig.test")) + } + } + }) + }); + + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + return true; + } + } +}); impl FlagProvider { + + /// query flag disabled_ro pub fn disabled_ro(&self) -> bool { false @@ -194,15 +349,18 @@ impl FlagProvider { pub fn enabled_rw(&self) -> bool { *CACHED_enabled_rw } + + } /// flag provider pub static PROVIDER: FlagProvider = FlagProvider; + /// query flag disabled_ro #[inline(always)] pub fn disabled_ro() -> bool { - false + false } /// query flag disabled_rw @@ -254,707 +412,190 @@ pub fn enabled_rw() -> bool { } "#; - const PROD_INSTRUMENTED_EXPECTED: &str = r#" + const TEST_EXPECTED: &str = r#" //! codegenerated rust flag lib use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; +use std::collections::BTreeMap; use std::path::Path; use std::io::Write; +use std::sync::{LazyLock, Mutex}; use log::{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - -/// flag provider -pub struct FlagProvider; - -lazy_static::lazy_static! { - - static ref PACKAGE_OFFSET: Result<Option<u32>, AconfigStorageError> = unsafe { - get_mapped_storage_file("system", StorageFileType::PackageMap) - .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test")) - .map(|context| context.map(|c| c.boolean_start_index)) - }; - - static ref FLAG_VAL_MAP: Result<Mmap, AconfigStorageError> = unsafe { - get_mapped_storage_file("system", StorageFileType::FlagVal) - }; - /// flag value cache for disabled_rw - - static ref CACHED_disabled_rw: bool = { - let result = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw", - "false") == "true"; - - let use_new_storage_value = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.core_experiments_team_internal", - "com.android.providers.settings.use_new_storage_value", - "false") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - // This will be called multiple times. Subsequent calls after the first are noops. 
- logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: {err}")) - .and_then(|flag_val_map| { - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: {err}")) - .and_then(|package_offset| { - match package_offset { - Some(offset) => { - get_boolean_flag_value(&flag_val_map, offset + 1) - .map_err(|err| format!("failed to get flag: {err}")) - }, - None => Err("no context found for package 'com.android.aconfig.test'".to_string()) - } - }) - }); - - match aconfig_storage_result { - Ok(storage_result) if storage_result == result => { - if use_new_storage_value { - return storage_result; - } else { - return result; - } - }, - Ok(storage_result) => { - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw'. Legacy storage was {result}, new storage was {storage_result}"); - if use_new_storage_value { - return storage_result; - } else { - return result; - } - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: {err}"); - if use_new_storage_value { - panic!("failed to read flag value: {err}"); - } - } - } - } - - result - }; - - /// flag value cache for disabled_rw_exported - - static ref CACHED_disabled_rw_exported: bool = { - let result = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; - - let use_new_storage_value = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.core_experiments_team_internal", - "com.android.providers.settings.use_new_storage_value", - "false") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - // This will be called multiple times. Subsequent calls after the first are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: {err}")) - .and_then(|flag_val_map| { - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: {err}")) - .and_then(|package_offset| { - match package_offset { - Some(offset) => { - get_boolean_flag_value(&flag_val_map, offset + 2) - .map_err(|err| format!("failed to get flag: {err}")) - }, - None => Err("no context found for package 'com.android.aconfig.test'".to_string()) - } - }) - }); - - match aconfig_storage_result { - Ok(storage_result) if storage_result == result => { - if use_new_storage_value { - return storage_result; - } else { - return result; - } - }, - Ok(storage_result) => { - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_exported'. 
Legacy storage was {result}, new storage was {storage_result}"); - if use_new_storage_value { - return storage_result; - } else { - return result; - } - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: {err}"); - if use_new_storage_value { - panic!("failed to read flag value: {err}"); - } - } - } - } - - result - }; - - /// flag value cache for disabled_rw_in_other_namespace - - static ref CACHED_disabled_rw_in_other_namespace: bool = { - let result = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.other_namespace", - "com.android.aconfig.test.disabled_rw_in_other_namespace", - "false") == "true"; - - let use_new_storage_value = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.core_experiments_team_internal", - "com.android.providers.settings.use_new_storage_value", - "false") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - // This will be called multiple times. Subsequent calls after the first are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: {err}")) - .and_then(|flag_val_map| { - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: {err}")) - .and_then(|package_offset| { - match package_offset { - Some(offset) => { - get_boolean_flag_value(&flag_val_map, offset + 3) - .map_err(|err| format!("failed to get flag: {err}")) - }, - None => Err("no context found for package 'com.android.aconfig.test'".to_string()) - } - }) - }); - - match aconfig_storage_result { - Ok(storage_result) if storage_result == result => { - if use_new_storage_value { - return storage_result; - } else { - return result; - } - }, - Ok(storage_result) => { - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_in_other_namespace'. Legacy storage was {result}, new storage was {storage_result}"); - if use_new_storage_value { - return storage_result; - } else { - return result; - } - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: {err}"); - if use_new_storage_value { - panic!("failed to read flag value: {err}"); - } - } - } - } - - result - }; - - /// flag value cache for enabled_rw - - static ref CACHED_enabled_rw: bool = { - let result = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_rw", - "true") == "true"; - - let use_new_storage_value = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.core_experiments_team_internal", - "com.android.providers.settings.use_new_storage_value", - "false") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - // This will be called multiple times. Subsequent calls after the first are noops. 
- logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: {err}")) - .and_then(|flag_val_map| { - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: {err}")) - .and_then(|package_offset| { - match package_offset { - Some(offset) => { - get_boolean_flag_value(&flag_val_map, offset + 8) - .map_err(|err| format!("failed to get flag: {err}")) - }, - None => Err("no context found for package 'com.android.aconfig.test'".to_string()) - } - }) - }); - - match aconfig_storage_result { - Ok(storage_result) if storage_result == result => { - if use_new_storage_value { - return storage_result; - } else { - return result; - } - }, - Ok(storage_result) => { - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'enabled_rw'. Legacy storage was {result}, new storage was {storage_result}"); - if use_new_storage_value { - return storage_result; - } else { - return result; - } - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: {err}"); - if use_new_storage_value { - panic!("failed to read flag value: {err}"); - } - } - } - } - - result - }; - -} - -impl FlagProvider { - - - /// query flag disabled_ro - pub fn disabled_ro(&self) -> bool { - false - } - - /// query flag disabled_rw - pub fn disabled_rw(&self) -> bool { - *CACHED_disabled_rw - } - - /// query flag disabled_rw_exported - pub fn disabled_rw_exported(&self) -> bool { - *CACHED_disabled_rw_exported - } - - /// query flag disabled_rw_in_other_namespace - pub fn disabled_rw_in_other_namespace(&self) -> bool { - *CACHED_disabled_rw_in_other_namespace - } - - /// query flag enabled_fixed_ro - pub fn enabled_fixed_ro(&self) -> bool { - true - } - - /// query flag enabled_fixed_ro_exported - pub fn enabled_fixed_ro_exported(&self) -> bool { - true - } - - /// query flag enabled_ro - pub fn enabled_ro(&self) -> bool { - true - } - - /// query flag enabled_ro_exported - pub fn enabled_ro_exported(&self) -> bool { - true - } - - /// query flag enabled_rw - pub fn enabled_rw(&self) -> bool { - *CACHED_enabled_rw - } - - -} - /// flag provider -pub static PROVIDER: FlagProvider = FlagProvider; - - -/// query flag disabled_ro -#[inline(always)] -pub fn disabled_ro() -> bool { - - - let result = false; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. 
- logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': did not get context"); - return result; - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 0 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}"); - return result; - } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'disabled_ro'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = false; - } - } - - result - -} - -/// query flag disabled_rw -#[inline(always)] -pub fn disabled_rw() -> bool { - PROVIDER.disabled_rw() -} - -/// query flag disabled_rw_exported -#[inline(always)] -pub fn disabled_rw_exported() -> bool { - PROVIDER.disabled_rw_exported() -} - -/// query flag disabled_rw_in_other_namespace -#[inline(always)] -pub fn disabled_rw_in_other_namespace() -> bool { - PROVIDER.disabled_rw_in_other_namespace() +pub struct FlagProvider { + overrides: BTreeMap<&'static str, bool>, } -/// query flag enabled_fixed_ro -#[inline(always)] -pub fn enabled_fixed_ro() -> bool { - +static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe { + get_mapped_storage_file("system", StorageFileType::PackageMap) + .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test")) + .map(|context| context.map(|c| c.boolean_start_index)) +}); - let result = true; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } +static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe { + get_mapped_storage_file("system", StorageFileType::FlagVal) +}); - // This will be called multiple times. Subsequent calls after the first - // are noops. +/// flag value cache for disabled_rw +static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| { + // This will be called multiple times. Subsequent calls after the first are noops. 
logger::init( logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': did not get context"); - return result; - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 4 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}"); - return result; - } - }; + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 0) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Err(format!("failed to flag package com.android.aconfig.test")) + } + } + }) + }); - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = true; + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + return false; } } +}); + +/// flag value cache for disabled_rw_exported +static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| { + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 1) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Err(format!("failed to flag package com.android.aconfig.test")) + } + } + }) + }); - result - -} - -/// query flag enabled_fixed_ro_exported -#[inline(always)] -pub fn enabled_fixed_ro_exported() -> bool { - - - let result = true; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': did not get context"); - return result; + match flag_value_result { + Ok(flag_value) => { + return flag_value; }, Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 5 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}"); - return result; + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + return false; } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro_exported'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = true; } - } - - result - -} - -/// query flag enabled_ro -#[inline(always)] -pub fn enabled_ro() -> bool { - - - let result = true; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. 
- logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}"); - return result; - } - }; +}); + +/// flag value cache for disabled_rw_in_other_namespace +static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| { + // This will be called multiple times. Subsequent calls after the first are noops. + logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 2) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Err(format!("failed to flag package com.android.aconfig.test")) + } + } + }) + }); - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': did not get context"); - return result; + match flag_value_result { + Ok(flag_value) => { + return flag_value; }, Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 6 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}"); - return result; + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + return false; } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = true; } - } - - result - -} - -/// query flag enabled_ro_exported -#[inline(always)] -pub fn enabled_ro_exported() -> bool { - - - let result = true; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}"); - return result; - } - }; +}); + + +/// flag value cache for enabled_rw +static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| { + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 7) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Err(format!("failed to flag package com.android.aconfig.test")) + } + } + }) + }); - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': did not get context"); - return result; + match flag_value_result { + Ok(flag_value) => { + return flag_value; }, Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}"); - return result; + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + return true; } - }; - let value = match get_boolean_flag_value(&flag_val_map, 7 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}"); - return result; - } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro_exported'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = true; } - } - - result - -} - -/// query flag enabled_rw -#[inline(always)] -pub fn enabled_rw() -> bool { - PROVIDER.enabled_rw() -} -"#; - - const TEST_EXPECTED: &str = r#" -//! 
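// --- Illustrative sketch (editorial aside, not part of the patch): a condensed view of the
// caching pattern the new expected output above exercises. std::sync::LazyLock replaces
// lazy_static, and each read-write flag is resolved once from the mapped aconfig storage
// files. The static name CACHED_example_flag, the offset 9, and the `false` fallback are
// placeholders for illustration; real offsets and defaults are filled in by the codegen
// templates.
use aconfig_storage_read_api::{
    get_boolean_flag_value, get_mapped_storage_file, get_package_read_context,
    AconfigStorageError, Mmap, StorageFileType,
};
use std::sync::LazyLock;

// Boolean start index of the package in the flag value file, resolved lazily.
static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> =
    LazyLock::new(|| unsafe {
        get_mapped_storage_file("system", StorageFileType::PackageMap)
            .and_then(|package_map| {
                get_package_read_context(&package_map, "com.android.aconfig.test")
            })
            .map(|context| context.map(|c| c.boolean_start_index))
    });

// Memory-mapped flag value file, mapped lazily on first use.
static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> =
    LazyLock::new(|| unsafe { get_mapped_storage_file("system", StorageFileType::FlagVal) });

// Cached value for one hypothetical flag; falls back to its codegen-time default on any error.
static CACHED_example_flag: LazyLock<bool> = LazyLock::new(|| {
    match (FLAG_VAL_MAP.as_ref(), PACKAGE_OFFSET.as_ref()) {
        (Ok(flag_val_map), Ok(Some(offset))) => {
            get_boolean_flag_value(flag_val_map, *offset + 9).unwrap_or(false)
        }
        _ => false,
    }
});
// --- end of editorial aside ---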
codegenerated rust flag lib - -use std::collections::BTreeMap; -use std::sync::Mutex; - -/// flag provider -pub struct FlagProvider { - overrides: BTreeMap<&'static str, bool>, -} +}); impl FlagProvider { /// query flag disabled_ro @@ -972,10 +613,7 @@ impl FlagProvider { /// query flag disabled_rw pub fn disabled_rw(&self) -> bool { self.overrides.get("disabled_rw").copied().unwrap_or( - flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw", - "false") == "true" + *CACHED_disabled_rw ) } @@ -987,10 +625,7 @@ impl FlagProvider { /// query flag disabled_rw_exported pub fn disabled_rw_exported(&self) -> bool { self.overrides.get("disabled_rw_exported").copied().unwrap_or( - flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw_exported", - "false") == "true" + *CACHED_disabled_rw_exported ) } @@ -1002,10 +637,7 @@ impl FlagProvider { /// query flag disabled_rw_in_other_namespace pub fn disabled_rw_in_other_namespace(&self) -> bool { self.overrides.get("disabled_rw_in_other_namespace").copied().unwrap_or( - flags_rust::GetServerConfigurableFlag( - "aconfig_flags.other_namespace", - "com.android.aconfig.test.disabled_rw_in_other_namespace", - "false") == "true" + *CACHED_disabled_rw_in_other_namespace ) } @@ -1065,10 +697,7 @@ impl FlagProvider { /// query flag enabled_rw pub fn enabled_rw(&self) -> bool { self.overrides.get("enabled_rw").copied().unwrap_or( - flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_rw", - "true") == "true" + *CACHED_enabled_rw ) } @@ -1202,91 +831,14 @@ pub fn reset_flags() { } "#; - const EXPORTED_EXPECTED: &str = r#" -//! codegenerated rust flag lib -use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; -use std::path::Path; -use std::io::Write; -use log::{log, LevelFilter, Level}; - -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - -/// flag provider -pub struct FlagProvider; - -lazy_static::lazy_static! 
{ - /// flag value cache for disabled_rw_exported - static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; - - /// flag value cache for enabled_fixed_ro_exported - static ref CACHED_enabled_fixed_ro_exported: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_fixed_ro_exported", - "false") == "true"; - - /// flag value cache for enabled_ro_exported - static ref CACHED_enabled_ro_exported: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.aconfig_test", - "com.android.aconfig.test.enabled_ro_exported", - "false") == "true"; - -} - -impl FlagProvider { - /// query flag disabled_rw_exported - pub fn disabled_rw_exported(&self) -> bool { - *CACHED_disabled_rw_exported - } - - /// query flag enabled_fixed_ro_exported - pub fn enabled_fixed_ro_exported(&self) -> bool { - *CACHED_enabled_fixed_ro_exported - } - - /// query flag enabled_ro_exported - pub fn enabled_ro_exported(&self) -> bool { - *CACHED_enabled_ro_exported - } -} - -/// flag provider -pub static PROVIDER: FlagProvider = FlagProvider; - -/// query flag disabled_rw_exported -#[inline(always)] -pub fn disabled_rw_exported() -> bool { - PROVIDER.disabled_rw_exported() -} - -/// query flag enabled_fixed_ro_exported -#[inline(always)] -pub fn enabled_fixed_ro_exported() -> bool { - PROVIDER.enabled_fixed_ro_exported() -} - -/// query flag enabled_ro_exported -#[inline(always)] -pub fn enabled_ro_exported() -> bool { - PROVIDER.enabled_ro_exported() -} -"#; - const FORCE_READ_ONLY_EXPECTED: &str = r#" //! codegenerated rust flag lib use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::path::Path; use std::io::Write; +use std::sync::LazyLock; use log::{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - /// flag provider pub struct FlagProvider; @@ -1363,7 +915,7 @@ pub fn enabled_rw() -> bool { "#; use crate::commands::assign_flag_ids; - fn test_generate_rust_code(mode: CodegenMode, allow_instrumentation: bool, expected: &str) { + fn test_generate_rust_code(mode: CodegenMode, expected: &str) { let parsed_flags = crate::test::parse_test_flags(); let modified_parsed_flags = crate::commands::modify_parsed_flags_based_on_mode(parsed_flags, mode).unwrap(); @@ -1374,7 +926,6 @@ pub fn enabled_rw() -> bool { flag_ids, modified_parsed_flags.into_iter(), mode, - allow_instrumentation, ) .unwrap(); assert_eq!("src/lib.rs", format!("{}", generated.path.display())); @@ -1389,26 +940,16 @@ pub fn enabled_rw() -> bool { #[test] fn test_generate_rust_code_for_prod() { - test_generate_rust_code(CodegenMode::Production, false, PROD_EXPECTED); - } - - #[test] - fn test_generate_rust_code_for_prod_instrumented() { - test_generate_rust_code(CodegenMode::Production, true, PROD_INSTRUMENTED_EXPECTED); + test_generate_rust_code(CodegenMode::Production, PROD_EXPECTED); } #[test] fn test_generate_rust_code_for_test() { - test_generate_rust_code(CodegenMode::Test, false, TEST_EXPECTED); - } - - #[test] - fn test_generate_rust_code_for_exported() { - test_generate_rust_code(CodegenMode::Exported, false, EXPORTED_EXPECTED); + test_generate_rust_code(CodegenMode::Test, TEST_EXPECTED); } #[test] fn 
test_generate_rust_code_for_force_read_only() { - test_generate_rust_code(CodegenMode::ForceReadOnly, false, FORCE_READ_ONLY_EXPECTED); + test_generate_rust_code(CodegenMode::ForceReadOnly, FORCE_READ_ONLY_EXPECTED); } } diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs index 59f06627ee..ab726aada5 100644 --- a/tools/aconfig/aconfig/src/commands.rs +++ b/tools/aconfig/aconfig/src/commands.rs @@ -18,11 +18,12 @@ use anyhow::{bail, ensure, Context, Result}; use itertools::Itertools; use protobuf::Message; use std::collections::HashMap; +use std::hash::Hasher; use std::io::Read; use std::path::PathBuf; use crate::codegen::cpp::generate_cpp_code; -use crate::codegen::java::generate_java_code; +use crate::codegen::java::{generate_java_code, JavaCodegenConfig}; use crate::codegen::rust::generate_rust_code; use crate::codegen::CodegenMode; use crate::dump::{DumpFormat, DumpPredicate}; @@ -31,6 +32,7 @@ use aconfig_protos::{ ParsedFlagExt, ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag, ProtoParsedFlags, ProtoTracepoint, }; +use aconfig_storage_file::sip_hasher13::SipHasher13; use aconfig_storage_file::StorageFileType; pub struct Input { @@ -67,6 +69,7 @@ pub fn parse_flags( declarations: Vec<Input>, values: Vec<Input>, default_permission: ProtoFlagPermission, + allow_read_write: bool, ) -> Result<Vec<u8>> { let mut parsed_flags = ProtoParsedFlags::new(); @@ -77,8 +80,18 @@ pub fn parse_flags( .read_to_string(&mut contents) .with_context(|| format!("failed to read {}", input.source))?; - let flag_declarations = aconfig_protos::flag_declarations::try_from_text_proto(&contents) - .with_context(|| input.error_context())?; + let mut flag_declarations = + aconfig_protos::flag_declarations::try_from_text_proto(&contents) + .with_context(|| input.error_context())?; + + // system_ext flags should be treated as system flags as we are combining /system_ext + // and /system as one container + // TODO: remove this logic when we start enforcing that system_ext cannot be set as + // container in aconfig declaration files. 
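// --- Illustrative sketch (editorial aside, not part of the patch): the hunk continues below
// with the in-place remap inside parse_flags. Stated as a standalone rule it amounts to the
// helper sketched here; the name normalize_container is hypothetical and used only for
// illustration.
fn normalize_container(container: &str) -> &str {
    // system_ext flags are built into the same flag container as system, so fold them together.
    if container == "system_ext" {
        "system"
    } else {
        container
    }
}
// e.g. normalize_container("system_ext") == "system", while every other container is unchanged.
// --- end of editorial aside ---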
+ if flag_declarations.container() == "system_ext" { + flag_declarations.set_container(String::from("system")); + } + ensure!( package == flag_declarations.package(), "failed to parse {}: expected package {}, got {}", @@ -183,6 +196,16 @@ pub fn parse_flags( } } + if !allow_read_write { + if let Some(pf) = parsed_flags + .parsed_flag + .iter() + .find(|pf| pf.permission() == ProtoFlagPermission::READ_WRITE) + { + bail!("flag {} has permission READ_WRITE, but allow_read_write is false", pf.name()); + } + } + // Create a sorted parsed_flags aconfig_protos::parsed_flags::sort_parsed_flags(&mut parsed_flags); aconfig_protos::parsed_flags::verify_fields(&parsed_flags)?; @@ -195,28 +218,31 @@ pub fn create_java_lib( mut input: Input, codegen_mode: CodegenMode, allow_instrumentation: bool, + new_exported: bool, + check_api_level: bool, ) -> Result<Vec<OutputFile>> { let parsed_flags = input.try_parse_flags()?; - let modified_parsed_flags = modify_parsed_flags_based_on_mode(parsed_flags, codegen_mode)?; + let modified_parsed_flags = + modify_parsed_flags_based_on_mode(parsed_flags.clone(), codegen_mode)?; let Some(package) = find_unique_package(&modified_parsed_flags) else { bail!("no parsed flags, or the parsed flags use different packages"); }; let package = package.to_string(); + let mut flag_names = extract_flag_names(parsed_flags)?; + let package_fingerprint = compute_flags_fingerprint(&mut flag_names); let flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?; - generate_java_code( - &package, - modified_parsed_flags.into_iter(), + let config = JavaCodegenConfig { codegen_mode, flag_ids, allow_instrumentation, - ) + package_fingerprint, + new_exported, + check_api_level, + }; + generate_java_code(&package, modified_parsed_flags.into_iter(), config) } -pub fn create_cpp_lib( - mut input: Input, - codegen_mode: CodegenMode, - allow_instrumentation: bool, -) -> Result<Vec<OutputFile>> { +pub fn create_cpp_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<Vec<OutputFile>> { // TODO(327420679): Enable export mode for native flag library ensure!( codegen_mode != CodegenMode::Exported, @@ -229,20 +255,10 @@ pub fn create_cpp_lib( }; let package = package.to_string(); let flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?; - generate_cpp_code( - &package, - modified_parsed_flags.into_iter(), - codegen_mode, - flag_ids, - allow_instrumentation, - ) + generate_cpp_code(&package, modified_parsed_flags.into_iter(), codegen_mode, flag_ids) } -pub fn create_rust_lib( - mut input: Input, - codegen_mode: CodegenMode, - allow_instrumentation: bool, -) -> Result<OutputFile> { +pub fn create_rust_lib(mut input: Input, codegen_mode: CodegenMode) -> Result<OutputFile> { // // TODO(327420679): Enable export mode for native flag library ensure!( codegen_mode != CodegenMode::Exported, @@ -255,23 +271,18 @@ pub fn create_rust_lib( }; let package = package.to_string(); let flag_ids = assign_flag_ids(&package, modified_parsed_flags.iter())?; - generate_rust_code( - &package, - flag_ids, - modified_parsed_flags.into_iter(), - codegen_mode, - allow_instrumentation, - ) + generate_rust_code(&package, flag_ids, modified_parsed_flags.into_iter(), codegen_mode) } pub fn create_storage( caches: Vec<Input>, container: &str, file: &StorageFileType, + version: u32, ) -> Result<Vec<u8>> { let parsed_flags_vec: Vec<ProtoParsedFlags> = caches.into_iter().map(|mut input| input.try_parse_flags()).collect::<Result<Vec<_>>>()?; - generate_storage_file(container, parsed_flags_vec.iter(), file) + 
generate_storage_file(container, parsed_flags_vec.iter(), file, version) } pub fn create_device_config_defaults(mut input: Input) -> Result<Vec<u8>> { @@ -395,27 +406,121 @@ where { assert!(parsed_flags_iter.clone().tuple_windows().all(|(a, b)| a.name() <= b.name())); let mut flag_ids = HashMap::new(); - for (id_to_assign, pf) in (0_u32..).zip(parsed_flags_iter) { + let mut flag_idx = 0; + for pf in parsed_flags_iter { if package != pf.package() { return Err(anyhow::anyhow!("encountered a flag not in current package")); } // put a cap on how many flags a package can contain to 65535 - if id_to_assign > u16::MAX as u32 { + if flag_idx > u16::MAX as u32 { return Err(anyhow::anyhow!("the number of flags in a package cannot exceed 65535")); } - flag_ids.insert(pf.name().to_string(), id_to_assign as u16); + if should_include_flag(pf) { + flag_ids.insert(pf.name().to_string(), flag_idx as u16); + flag_idx += 1; + } } Ok(flag_ids) } +// Creates a fingerprint of the flag names (which requires sorting the vector). +// Fingerprint is used by both codegen and storage files. +pub fn compute_flags_fingerprint(flag_names: &mut Vec<String>) -> u64 { + flag_names.sort(); + + let mut hasher = SipHasher13::new(); + for flag in flag_names { + hasher.write(flag.as_bytes()); + } + hasher.finish() +} + +// Converts ProtoParsedFlags into a vector of strings containing all of the flag +// names. Helper fn for creating fingerprint for codegen files. Flags must all +// belong to the same package. +fn extract_flag_names(flags: ProtoParsedFlags) -> Result<Vec<String>> { + let separated_flags: Vec<ProtoParsedFlag> = flags.parsed_flag.into_iter().collect::<Vec<_>>(); + + // All flags must belong to the same package as the fingerprint is per-package. + let Some(_package) = find_unique_package(&separated_flags) else { + bail!("No parsed flags, or the parsed flags use different packages."); + }; + + Ok(separated_flags + .into_iter() + .filter(should_include_flag) + .map(|flag| flag.name.unwrap()) + .collect::<Vec<_>>()) +} + +// Exclude system/vendor/product flags that are RO+disabled. +pub fn should_include_flag(pf: &ProtoParsedFlag) -> bool { + let should_filter_container = pf.container == Some("vendor".to_string()) + || pf.container == Some("system".to_string()) + || pf.container == Some("product".to_string()); + + let disabled_ro = pf.state == Some(ProtoFlagState::DISABLED.into()) + && pf.permission == Some(ProtoFlagPermission::READ_ONLY.into()); + + !should_filter_container || !disabled_ro +} + #[cfg(test)] mod tests { use super::*; use aconfig_protos::ProtoFlagPurpose; #[test] + fn test_offset_fingerprint() { + let parsed_flags = crate::test::parse_test_flags(); + let expected_fingerprint: u64 = 11551379960324242360; + + let mut extracted_flags = extract_flag_names(parsed_flags).unwrap(); + let hash_result = compute_flags_fingerprint(&mut extracted_flags); + + assert_eq!(hash_result, expected_fingerprint); + } + + #[test] + fn test_offset_fingerprint_matches_from_package() { + let parsed_flags: ProtoParsedFlags = crate::test::parse_test_flags(); + + // All test flags are in the same package, so fingerprint from all of them. 
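// --- Illustrative sketch (editorial aside, not part of the patch): compute_flags_fingerprint
// above is a SipHash-1-3 over the sorted flag names, so the result does not depend on the
// order in which the names are collected, which is the property these fingerprint tests rely
// on. A minimal standalone sketch of that computation, using only calls already imported at
// the top of this file (SipHasher13 and std::hash::Hasher):
fn fingerprint_sketch(mut names: Vec<String>) -> u64 {
    // Sorting first is what makes the hash order-insensitive.
    names.sort();
    let mut hasher = SipHasher13::new();
    for name in names {
        hasher.write(name.as_bytes());
    }
    hasher.finish()
}
// e.g. fingerprint_sketch(vec!["b".into(), "a".into()]) == fingerprint_sketch(vec!["a".into(), "b".into()])
// --- end of editorial aside ---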
+ let mut extracted_flags = extract_flag_names(parsed_flags.clone()).unwrap(); + let result_from_parsed_flags = compute_flags_fingerprint(&mut extracted_flags); + + let mut flag_names_vec = parsed_flags + .parsed_flag + .clone() + .into_iter() + .filter(should_include_flag) + .map(|flag| flag.name.unwrap()) + .map(String::from) + .collect::<Vec<_>>(); + let result_from_names = compute_flags_fingerprint(&mut flag_names_vec); + + // Assert the same hash is generated for each case. + assert_eq!(result_from_parsed_flags, result_from_names); + } + + #[test] + fn test_offset_fingerprint_different_packages_does_not_match() { + // Parse flags from two packages. + let parsed_flags: ProtoParsedFlags = crate::test::parse_test_flags(); + let second_parsed_flags = crate::test::parse_second_package_flags(); + + let mut extracted_flags = extract_flag_names(parsed_flags).unwrap(); + let result_from_parsed_flags = compute_flags_fingerprint(&mut extracted_flags); + let mut second_extracted_flags = extract_flag_names(second_parsed_flags).unwrap(); + let second_result = compute_flags_fingerprint(&mut second_extracted_flags); + + // Different flags should have a different fingerprint. + assert_ne!(result_from_parsed_flags, second_result); + } + + #[test] fn test_parse_flags() { let parsed_flags = crate::test::parse_test_flags(); // calls parse_flags aconfig_protos::parsed_flags::verify_fields(&parsed_flags).unwrap(); @@ -486,6 +591,7 @@ mod tests { declaration, value, ProtoFlagPermission::READ_ONLY, + true, ) .unwrap(); let parsed_flags = @@ -519,6 +625,7 @@ mod tests { declaration, value, ProtoFlagPermission::READ_WRITE, + true, ) .unwrap_err(); assert_eq!( @@ -550,6 +657,7 @@ mod tests { declaration, value, ProtoFlagPermission::READ_WRITE, + true, ) .unwrap_err(); assert_eq!( @@ -557,6 +665,121 @@ mod tests { "failed to parse memory: expected container argument.container, got declaration.container" ); } + #[test] + fn test_parse_flags_no_allow_read_write_default_error() { + let first_flag = r#" + package: "com.first" + container: "com.first.container" + flag { + name: "first" + namespace: "first_ns" + description: "This is the description of the first flag." + bug: "123" + } + "#; + let declaration = + vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }]; + + let error = crate::commands::parse_flags( + "com.first", + Some("com.first.container"), + declaration, + vec![], + ProtoFlagPermission::READ_WRITE, + false, + ) + .unwrap_err(); + assert_eq!( + format!("{:?}", error), + "flag first has permission READ_WRITE, but allow_read_write is false" + ); + } + + #[test] + fn test_parse_flags_no_allow_read_write_value_error() { + let first_flag = r#" + package: "com.first" + container: "com.first.container" + flag { + name: "first" + namespace: "first_ns" + description: "This is the description of the first flag." 
+ bug: "123" + } + "#; + let declaration = + vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }]; + + let first_flag_value = r#" + flag_value { + package: "com.first" + name: "first" + state: DISABLED + permission: READ_WRITE + } + "#; + let value = vec![Input { + source: "memory".to_string(), + reader: Box::new(first_flag_value.as_bytes()), + }]; + let error = crate::commands::parse_flags( + "com.first", + Some("com.first.container"), + declaration, + value, + ProtoFlagPermission::READ_ONLY, + false, + ) + .unwrap_err(); + assert_eq!( + format!("{:?}", error), + "flag first has permission READ_WRITE, but allow_read_write is false" + ); + } + + #[test] + fn test_parse_flags_no_allow_read_write_success() { + let first_flag = r#" + package: "com.first" + container: "com.first.container" + flag { + name: "first" + namespace: "first_ns" + description: "This is the description of the first flag." + bug: "123" + } + "#; + let declaration = + vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }]; + + let first_flag_value = r#" + flag_value { + package: "com.first" + name: "first" + state: DISABLED + permission: READ_ONLY + } + "#; + let value = vec![Input { + source: "memory".to_string(), + reader: Box::new(first_flag_value.as_bytes()), + }]; + let flags_bytes = crate::commands::parse_flags( + "com.first", + Some("com.first.container"), + declaration, + value, + ProtoFlagPermission::READ_ONLY, + false, + ) + .unwrap(); + let parsed_flags = + aconfig_protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap(); + assert_eq!(1, parsed_flags.parsed_flag.len()); + let parsed_flag = parsed_flags.parsed_flag.first().unwrap(); + assert_eq!(ProtoFlagState::DISABLED, parsed_flag.state()); + assert_eq!(ProtoFlagPermission::READ_ONLY, parsed_flag.permission()); + } #[test] fn test_parse_flags_override_fixed_read_only() { @@ -592,6 +815,7 @@ mod tests { declaration, value, ProtoFlagPermission::READ_WRITE, + true, ) .unwrap_err(); assert_eq!( @@ -626,6 +850,7 @@ mod tests { declaration, value, ProtoFlagPermission::READ_ONLY, + true, ) .unwrap(); let parsed_flags = @@ -666,6 +891,30 @@ mod tests { } #[test] + fn test_dump_multiple_filters() { + let input = parse_test_flags_as_input(); + let bytes = dump_parsed_flags( + vec![input], + DumpFormat::Custom("{fully_qualified_name}".to_string()), + &["container:system+state:ENABLED", "container:system+permission:READ_WRITE"], + false, + ) + .unwrap(); + let text = std::str::from_utf8(&bytes).unwrap(); + let expected_flag_list = &[ + "com.android.aconfig.test.disabled_rw", + "com.android.aconfig.test.disabled_rw_exported", + "com.android.aconfig.test.disabled_rw_in_other_namespace", + "com.android.aconfig.test.enabled_fixed_ro", + "com.android.aconfig.test.enabled_fixed_ro_exported", + "com.android.aconfig.test.enabled_ro", + "com.android.aconfig.test.enabled_ro_exported", + "com.android.aconfig.test.enabled_rw", + ]; + assert_eq!(expected_flag_list.map(|s| format!("{}\n", s)).join(""), text); + } + + #[test] fn test_dump_textproto_format_dedup() { let input = parse_test_flags_as_input(); let input2 = parse_test_flags_as_input(); @@ -727,15 +976,14 @@ mod tests { let package = find_unique_package(&parsed_flags.parsed_flag).unwrap().to_string(); let flag_ids = assign_flag_ids(&package, parsed_flags.parsed_flag.iter()).unwrap(); let expected_flag_ids = HashMap::from([ - (String::from("disabled_ro"), 0_u16), - (String::from("disabled_rw"), 1_u16), - (String::from("disabled_rw_exported"), 2_u16), - 
(String::from("disabled_rw_in_other_namespace"), 3_u16), - (String::from("enabled_fixed_ro"), 4_u16), - (String::from("enabled_fixed_ro_exported"), 5_u16), - (String::from("enabled_ro"), 6_u16), - (String::from("enabled_ro_exported"), 7_u16), - (String::from("enabled_rw"), 8_u16), + (String::from("disabled_rw"), 0_u16), + (String::from("disabled_rw_exported"), 1_u16), + (String::from("disabled_rw_in_other_namespace"), 2_u16), + (String::from("enabled_fixed_ro"), 3_u16), + (String::from("enabled_fixed_ro_exported"), 4_u16), + (String::from("enabled_ro"), 5_u16), + (String::from("enabled_ro_exported"), 6_u16), + (String::from("enabled_rw"), 7_u16), ]); assert_eq!(flag_ids, expected_flag_ids); } diff --git a/tools/aconfig/aconfig/src/main.rs b/tools/aconfig/aconfig/src/main.rs index 1fb64f9c56..ef3b7abdc4 100644 --- a/tools/aconfig/aconfig/src/main.rs +++ b/tools/aconfig/aconfig/src/main.rs @@ -16,6 +16,8 @@ //! `aconfig` is a build time tool to manage build time configurations, such as feature flags. +use aconfig_storage_file::DEFAULT_FILE_VERSION; +use aconfig_storage_file::MAX_SUPPORTED_FILE_VERSION; use anyhow::{anyhow, bail, Context, Result}; use clap::{builder::ArgAction, builder::EnumValueParser, Arg, ArgMatches, Command}; use core::any::Any; @@ -38,9 +40,86 @@ mod test; use commands::{Input, OutputFile}; +const HELP_DUMP_CACHE: &str = r#" +An aconfig cache file, created via `aconfig create-cache`. +"#; + +const HELP_DUMP_FORMAT: &str = r#" +Change the output format for each flag. + +The argument to --format is a format string. Each flag will be a copy of this string, with certain +placeholders replaced by attributes of the flag. The placeholders are + + {package} + {name} + {namespace} + {description} + {bug} + {state} + {state:bool} + {permission} + {trace} + {trace:paths} + {is_fixed_read_only} + {is_exported} + {container} + {metadata} + {fully_qualified_name} + +Note: the format strings "textproto" and "protobuf" are handled in a special way: they output all +flag attributes in text or binary protobuf format. + +Examples: + + # See which files were read to determine the value of a flag; the files were read in the order + # listed. + --format='{fully_qualified_name} {trace}' + + # Trace the files read for a specific flag. Useful during debugging. + --filter=fully_qualified_name:com.foo.flag_name --format='{trace}' + + # Print a somewhat human readable description of each flag. + --format='The flag {name} in package {package} is {state} and has permission {permission}.' +"#; + const HELP_DUMP_FILTER: &str = r#" -Limit which flags to output. If multiple --filter arguments are provided, the output will be -limited to flags that match any of the filters. +Limit which flags to output. If --filter is omitted, all flags will be printed. If multiple +--filter options are provided, the output will be limited to flags that match any of the filters. + +The argument to --filter is a search query. Multiple queries can be AND-ed together by +concatenating them with a plus sign. + +Valid queries are: + + package:<string> + name:<string> + namespace:<string> + bug:<string> + state:ENABLED|DISABLED + permission:READ_ONLY|READ_WRITE + is_fixed_read_only:true|false + is_exported:true|false + container:<string> + fully_qualified_name:<string> + +Note: there is currently no support for filtering based on these flag attributes: description, +trace, metadata. 
+ +Examples: + + # Print a single flag: + --filter=fully_qualified_name:com.foo.flag_name + + # Print all known information about a single flag: + --filter=fully_qualified_name:com.foo.flag_name --format=textproto + + # Print all flags in the com.foo package, and all enabled flags in the com.bar package: + --filter=package:com.foo --filter=package.com.bar+state:ENABLED +"#; + +const HELP_DUMP_DEDUP: &str = r#" +Allow the same flag to be present in multiple cache files; if duplicates are found, collapse into +a single instance. "#; fn cli() -> Command { @@ -49,8 +128,7 @@ fn cli() -> Command { .subcommand( Command::new("create-cache") .arg(Arg::new("package").long("package").required(true)) - // TODO(b/312769710): Make this argument required. - .arg(Arg::new("container").long("container")) + .arg(Arg::new("container").long("container").required(true)) .arg(Arg::new("declarations").long("declarations").action(ArgAction::Append)) .arg(Arg::new("values").long("values").action(ArgAction::Append)) .arg( @@ -61,6 +139,12 @@ fn cli() -> Command { &commands::DEFAULT_FLAG_PERMISSION, )), ) + .arg( + Arg::new("allow-read-write") + .long("allow-read-write") + .value_parser(clap::value_parser!(bool)) + .default_value("true"), + ) .arg(Arg::new("cache").long("cache").required(true)), ) .subcommand( @@ -78,6 +162,22 @@ fn cli() -> Command { .long("allow-instrumentation") .value_parser(clap::value_parser!(bool)) .default_value("false"), + ) + .arg( + Arg::new("new-exported") + .long("new-exported") + .value_parser(clap::value_parser!(bool)) + .default_value("false"), + ) + // Allows build flag toggling of checking API level in exported + // flag lib for finalized API flags. + // TODO: b/378936061 - Remove once build flag for API level + // check is fully enabled. + .arg( + Arg::new("check-api-level") + .long("check-api-level") + .value_parser(clap::value_parser!(bool)) + .default_value("false"), ), ) .subcommand( @@ -127,22 +227,34 @@ fn cli() -> Command { .subcommand( Command::new("dump-cache") .alias("dump") - .arg(Arg::new("cache").long("cache").action(ArgAction::Append)) + .arg( + Arg::new("cache") + .long("cache") + .action(ArgAction::Append) + .long_help(HELP_DUMP_CACHE.trim()), + ) .arg( Arg::new("format") .long("format") .value_parser(|s: &str| DumpFormat::try_from(s)) .default_value( "{fully_qualified_name} [{container}]: {permission} + {state}", - ), + ) + .long_help(HELP_DUMP_FORMAT.trim()), ) .arg( Arg::new("filter") .long("filter") .action(ArgAction::Append) - .help(HELP_DUMP_FILTER.trim()), + .long_help(HELP_DUMP_FILTER.trim()), + ) + .arg( + Arg::new("dedup") + .long("dedup") + .num_args(0) + .action(ArgAction::SetTrue) + .long_help(HELP_DUMP_DEDUP.trim()), ) - .arg(Arg::new("dedup").long("dedup").num_args(0).action(ArgAction::SetTrue)) .arg(Arg::new("out").long("out").default_value("-")), ) .subcommand( @@ -159,7 +271,13 @@ fn cli() -> Command { .value_parser(|s: &str| StorageFileType::try_from(s)), ) .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true)) - .arg(Arg::new("out").long("out").required(true)), + .arg(Arg::new("out").long("out").required(true)) + .arg( + Arg::new("version") + .long("version") + .required(false) + .value_parser(|s: &str| s.parse::<u32>()), + ), ) } @@ -235,12 +353,15 @@ fn main() -> Result<()> { sub_matches, "default-permission", )?; + let allow_read_write = get_optional_arg::<bool>(sub_matches, "allow-read-write") + .expect("failed to parse allow-read-write"); let output = commands::parse_flags( package, container, declarations, values, 
*default_permission, + *allow_read_write, ) .context("failed to create cache")?; let path = get_required_arg::<String>(sub_matches, "cache")?; @@ -251,8 +372,16 @@ fn main() -> Result<()> { let mode = get_required_arg::<CodegenMode>(sub_matches, "mode")?; let allow_instrumentation = get_required_arg::<bool>(sub_matches, "allow-instrumentation")?; - let generated_files = commands::create_java_lib(cache, *mode, *allow_instrumentation) - .context("failed to create java lib")?; + let new_exported = get_required_arg::<bool>(sub_matches, "new-exported")?; + let check_api_level = get_required_arg::<bool>(sub_matches, "check-api-level")?; + let generated_files = commands::create_java_lib( + cache, + *mode, + *allow_instrumentation, + *new_exported, + *check_api_level, + ) + .context("failed to create java lib")?; let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?); generated_files .iter() @@ -261,10 +390,8 @@ fn main() -> Result<()> { Some(("create-cpp-lib", sub_matches)) => { let cache = open_single_file(sub_matches, "cache")?; let mode = get_required_arg::<CodegenMode>(sub_matches, "mode")?; - let allow_instrumentation = - get_required_arg::<bool>(sub_matches, "allow-instrumentation")?; - let generated_files = commands::create_cpp_lib(cache, *mode, *allow_instrumentation) - .context("failed to create cpp lib")?; + let generated_files = + commands::create_cpp_lib(cache, *mode).context("failed to create cpp lib")?; let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?); generated_files .iter() @@ -273,10 +400,8 @@ fn main() -> Result<()> { Some(("create-rust-lib", sub_matches)) => { let cache = open_single_file(sub_matches, "cache")?; let mode = get_required_arg::<CodegenMode>(sub_matches, "mode")?; - let allow_instrumentation = - get_required_arg::<bool>(sub_matches, "allow-instrumentation")?; - let generated_file = commands::create_rust_lib(cache, *mode, *allow_instrumentation) - .context("failed to create rust lib")?; + let generated_file = + commands::create_rust_lib(cache, *mode).context("failed to create rust lib")?; let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?); write_output_file_realtive_to_dir(&dir, &generated_file)?; } @@ -309,12 +434,18 @@ fn main() -> Result<()> { write_output_to_file_or_stdout(path, &output)?; } Some(("create-storage", sub_matches)) => { + let version = + get_optional_arg::<u32>(sub_matches, "version").unwrap_or(&DEFAULT_FILE_VERSION); + if *version > MAX_SUPPORTED_FILE_VERSION { + bail!("Invalid version selected ({})", version); + } let file = get_required_arg::<StorageFileType>(sub_matches, "file") .context("Invalid storage file selection")?; let cache = open_zero_or_more_files(sub_matches, "cache")?; let container = get_required_arg::<String>(sub_matches, "container")?; let path = get_required_arg::<String>(sub_matches, "out")?; - let output = commands::create_storage(cache, container, file) + + let output = commands::create_storage(cache, container, file, *version) .context("failed to create storage files")?; write_output_to_file_or_stdout(path, &output)?; } diff --git a/tools/aconfig/aconfig/src/storage/flag_info.rs b/tools/aconfig/aconfig/src/storage/flag_info.rs new file mode 100644 index 0000000000..0943daa86c --- /dev/null +++ b/tools/aconfig/aconfig/src/storage/flag_info.rs @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use crate::commands::assign_flag_ids; +use crate::storage::FlagPackage; +use aconfig_protos::{ProtoFlagPermission, ProtoFlagState}; +use aconfig_storage_file::{FlagInfoHeader, FlagInfoList, FlagInfoNode, StorageFileType}; +use anyhow::{anyhow, Result}; + +fn new_header(container: &str, num_flags: u32, version: u32) -> FlagInfoHeader { + FlagInfoHeader { + version, + container: String::from(container), + file_type: StorageFileType::FlagInfo as u8, + file_size: 0, + num_flags, + boolean_flag_offset: 0, + } +} + +pub fn create_flag_info( + container: &str, + packages: &[FlagPackage], + version: u32, +) -> Result<FlagInfoList> { + // Exclude system/vendor/product flags that are RO+disabled. + let mut filtered_packages = packages.to_vec(); + if container == "system" || container == "vendor" || container == "product" { + for package in filtered_packages.iter_mut() { + package.boolean_flags.retain(|b| { + !(b.state == Some(ProtoFlagState::DISABLED.into()) + && b.permission == Some(ProtoFlagPermission::READ_ONLY.into())) + }); + } + } + + let num_flags = filtered_packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum(); + + let mut is_flag_rw = vec![false; num_flags as usize]; + for pkg in filtered_packages { + let start_index = pkg.boolean_start_index as usize; + let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?; + for pf in pkg.boolean_flags { + let fid = flag_ids + .get(pf.name()) + .ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?; + is_flag_rw[start_index + (*fid as usize)] = + pf.permission() == ProtoFlagPermission::READ_WRITE; + } + } + + let mut list = FlagInfoList { + header: new_header(container, num_flags, version), + nodes: is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect(), + }; + + // initialize all header fields + list.header.boolean_flag_offset = list.header.into_bytes().len() as u32; + let bytes_per_node = FlagInfoNode::create(false).into_bytes().len() as u32; + list.header.file_size = list.header.boolean_flag_offset + num_flags * bytes_per_node; + + Ok(list) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::storage::{group_flags_by_package, tests::parse_all_test_flags}; + use aconfig_storage_file::DEFAULT_FILE_VERSION; + + pub fn create_test_flag_info_list_from_source() -> Result<FlagInfoList> { + let caches = parse_all_test_flags(); + let packages = group_flags_by_package(caches.iter(), DEFAULT_FILE_VERSION); + create_flag_info("mockup", &packages, DEFAULT_FILE_VERSION) + } + + #[test] + // this test point locks down the flag info creation and each field + fn test_list_contents() { + let flag_info_list = create_test_flag_info_list_from_source(); + assert!(flag_info_list.is_ok()); + let expected_flag_info_list = + aconfig_storage_file::test_utils::create_test_flag_info_list(DEFAULT_FILE_VERSION); + assert_eq!(flag_info_list.unwrap(), expected_flag_info_list); + } +} diff --git a/tools/aconfig/aconfig/src/storage/flag_table.rs b/tools/aconfig/aconfig/src/storage/flag_table.rs index a9712119bf..a3b4e8fe1e 100644 --- a/tools/aconfig/aconfig/src/storage/flag_table.rs +++ 
b/tools/aconfig/aconfig/src/storage/flag_table.rs @@ -14,18 +14,17 @@ * limitations under the License. */ -use crate::commands::assign_flag_ids; +use crate::commands::{assign_flag_ids, should_include_flag}; use crate::storage::FlagPackage; use aconfig_protos::ProtoFlagPermission; use aconfig_storage_file::{ get_table_size, FlagTable, FlagTableHeader, FlagTableNode, StorageFileType, StoredFlagType, - FILE_VERSION, }; use anyhow::{anyhow, Result}; -fn new_header(container: &str, num_flags: u32) -> FlagTableHeader { +fn new_header(container: &str, num_flags: u32, version: u32) -> FlagTableHeader { FlagTableHeader { - version: FILE_VERSION, + version, container: String::from(container), file_type: StorageFileType::FlagMap as u8, file_size: 0, @@ -63,9 +62,13 @@ impl FlagTableNodeWrapper { } fn create_nodes(package: &FlagPackage, num_buckets: u32) -> Result<Vec<Self>> { + // Exclude system/vendor/product flags that are RO+disabled. + let mut filtered_package = package.clone(); + filtered_package.boolean_flags.retain(|pf| should_include_flag(pf)); + let flag_ids = - assign_flag_ids(package.package_name, package.boolean_flags.iter().copied())?; - package + assign_flag_ids(package.package_name, filtered_package.boolean_flags.iter().copied())?; + filtered_package .boolean_flags .iter() .map(|&pf| { @@ -86,12 +89,16 @@ impl FlagTableNodeWrapper { } } -pub fn create_flag_table(container: &str, packages: &[FlagPackage]) -> Result<FlagTable> { +pub fn create_flag_table( + container: &str, + packages: &[FlagPackage], + version: u32, +) -> Result<FlagTable> { // create table let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum(); let num_buckets = get_table_size(num_flags)?; - let mut header = new_header(container, num_flags); + let mut header = new_header(container, num_flags, version); let mut buckets = vec![None; num_buckets as usize]; let mut node_wrappers = packages .iter() @@ -138,13 +145,15 @@ pub fn create_flag_table(container: &str, packages: &[FlagPackage]) -> Result<Fl #[cfg(test)] mod tests { + use aconfig_storage_file::DEFAULT_FILE_VERSION; + use super::*; use crate::storage::{group_flags_by_package, tests::parse_all_test_flags}; fn create_test_flag_table_from_source() -> Result<FlagTable> { let caches = parse_all_test_flags(); - let packages = group_flags_by_package(caches.iter()); - create_flag_table("mockup", &packages) + let packages = group_flags_by_package(caches.iter(), DEFAULT_FILE_VERSION); + create_flag_table("mockup", &packages, DEFAULT_FILE_VERSION) } #[test] @@ -152,7 +161,8 @@ mod tests { fn test_table_contents() { let flag_table = create_test_flag_table_from_source(); assert!(flag_table.is_ok()); - let expected_flag_table = aconfig_storage_file::test_utils::create_test_flag_table(); + let expected_flag_table = + aconfig_storage_file::test_utils::create_test_flag_table(DEFAULT_FILE_VERSION); assert_eq!(flag_table.unwrap(), expected_flag_table); } } diff --git a/tools/aconfig/aconfig/src/storage/flag_value.rs b/tools/aconfig/aconfig/src/storage/flag_value.rs index c15ba54112..3cfa447098 100644 --- a/tools/aconfig/aconfig/src/storage/flag_value.rs +++ b/tools/aconfig/aconfig/src/storage/flag_value.rs @@ -16,13 +16,13 @@ use crate::commands::assign_flag_ids; use crate::storage::FlagPackage; -use aconfig_protos::ProtoFlagState; -use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType, FILE_VERSION}; +use aconfig_protos::{ProtoFlagPermission, ProtoFlagState}; +use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType}; use 
anyhow::{anyhow, Result}; -fn new_header(container: &str, num_flags: u32) -> FlagValueHeader { +fn new_header(container: &str, num_flags: u32, version: u32) -> FlagValueHeader { FlagValueHeader { - version: FILE_VERSION, + version, container: String::from(container), file_type: StorageFileType::FlagVal as u8, file_size: 0, @@ -31,16 +31,27 @@ fn new_header(container: &str, num_flags: u32) -> FlagValueHeader { } } -pub fn create_flag_value(container: &str, packages: &[FlagPackage]) -> Result<FlagValueList> { - // create list - let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum(); - +pub fn create_flag_value( + container: &str, + packages: &[FlagPackage], + version: u32, +) -> Result<FlagValueList> { + // Exclude system/vendor/product flags that are RO+disabled. + let mut filtered_packages = packages.to_vec(); + if container == "system" || container == "vendor" || container == "product" { + for package in filtered_packages.iter_mut() { + package.boolean_flags.retain(|b| { + !(b.state == Some(ProtoFlagState::DISABLED.into()) + && b.permission == Some(ProtoFlagPermission::READ_ONLY.into())) + }); + } + } + let num_flags = filtered_packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum(); let mut list = FlagValueList { - header: new_header(container, num_flags), + header: new_header(container, num_flags, version), booleans: vec![false; num_flags as usize], }; - - for pkg in packages.iter() { + for pkg in filtered_packages { let start_index = pkg.boolean_start_index as usize; let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?; for pf in pkg.boolean_flags.iter() { @@ -61,13 +72,15 @@ pub fn create_flag_value(container: &str, packages: &[FlagPackage]) -> Result<Fl #[cfg(test)] mod tests { + use aconfig_storage_file::DEFAULT_FILE_VERSION; + use super::*; use crate::storage::{group_flags_by_package, tests::parse_all_test_flags}; pub fn create_test_flag_value_list_from_source() -> Result<FlagValueList> { let caches = parse_all_test_flags(); - let packages = group_flags_by_package(caches.iter()); - create_flag_value("mockup", &packages) + let packages = group_flags_by_package(caches.iter(), DEFAULT_FILE_VERSION); + create_flag_value("mockup", &packages, DEFAULT_FILE_VERSION) } #[test] @@ -76,7 +89,7 @@ mod tests { let flag_value_list = create_test_flag_value_list_from_source(); assert!(flag_value_list.is_ok()); let expected_flag_value_list = - aconfig_storage_file::test_utils::create_test_flag_value_list(); + aconfig_storage_file::test_utils::create_test_flag_value_list(DEFAULT_FILE_VERSION); assert_eq!(flag_value_list.unwrap(), expected_flag_value_list); } } diff --git a/tools/aconfig/aconfig/src/storage/mod.rs b/tools/aconfig/aconfig/src/storage/mod.rs index 73339f24b3..61e65d1dfc 100644 --- a/tools/aconfig/aconfig/src/storage/mod.rs +++ b/tools/aconfig/aconfig/src/storage/mod.rs @@ -14,23 +14,27 @@ * limitations under the License. 
*/ +pub mod flag_info; pub mod flag_table; pub mod flag_value; pub mod package_table; -use anyhow::{anyhow, Result}; +use anyhow::Result; use std::collections::{HashMap, HashSet}; +use crate::commands::compute_flags_fingerprint; use crate::storage::{ - flag_table::create_flag_table, flag_value::create_flag_value, + flag_info::create_flag_info, flag_table::create_flag_table, flag_value::create_flag_value, package_table::create_package_table, }; -use aconfig_protos::{ProtoParsedFlag, ProtoParsedFlags}; +use aconfig_protos::{ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag, ProtoParsedFlags}; use aconfig_storage_file::StorageFileType; +#[derive(Clone)] pub struct FlagPackage<'a> { pub package_name: &'a str, pub package_id: u32, + pub fingerprint: u64, pub flag_names: HashSet<&'a str>, pub boolean_flags: Vec<&'a ProtoParsedFlag>, // The index of the first boolean flag in this aconfig package among all boolean @@ -43,6 +47,7 @@ impl<'a> FlagPackage<'a> { FlagPackage { package_name, package_id, + fingerprint: 0, flag_names: HashSet::new(), boolean_flags: vec![], boolean_start_index: 0, @@ -56,7 +61,7 @@ impl<'a> FlagPackage<'a> { } } -pub fn group_flags_by_package<'a, I>(parsed_flags_vec_iter: I) -> Vec<FlagPackage<'a>> +pub fn group_flags_by_package<'a, I>(parsed_flags_vec_iter: I, version: u32) -> Vec<FlagPackage<'a>> where I: Iterator<Item = &'a ProtoParsedFlags>, { @@ -69,15 +74,33 @@ where if index == packages.len() { packages.push(FlagPackage::new(parsed_flag.package(), index as u32)); } + + // Exclude system/vendor/product flags that are RO+disabled. + if (parsed_flag.container == Some("system".to_string()) + || parsed_flag.container == Some("vendor".to_string()) + || parsed_flag.container == Some("product".to_string())) + && parsed_flag.permission == Some(ProtoFlagPermission::READ_ONLY.into()) + && parsed_flag.state == Some(ProtoFlagState::DISABLED.into()) + { + continue; + } + packages[index].insert(parsed_flag); } } - // cacluate boolean flag start index for each package + // Calculate boolean flag start index for each package let mut boolean_start_index = 0; for p in packages.iter_mut() { p.boolean_start_index = boolean_start_index; boolean_start_index += p.boolean_flags.len() as u32; + + if version >= 2 { + let mut flag_names_vec = + p.flag_names.clone().into_iter().map(String::from).collect::<Vec<_>>(); + let fingerprint = compute_flags_fingerprint(&mut flag_names_vec); + p.fingerprint = fingerprint; + } } packages @@ -87,31 +110,37 @@ pub fn generate_storage_file<'a, I>( container: &str, parsed_flags_vec_iter: I, file: &StorageFileType, + version: u32, ) -> Result<Vec<u8>> where I: Iterator<Item = &'a ProtoParsedFlags>, { - let packages = group_flags_by_package(parsed_flags_vec_iter); + let packages = group_flags_by_package(parsed_flags_vec_iter, version); match file { StorageFileType::PackageMap => { - let package_table = create_package_table(container, &packages)?; + let package_table = create_package_table(container, &packages, version)?; Ok(package_table.into_bytes()) } StorageFileType::FlagMap => { - let flag_table = create_flag_table(container, &packages)?; + let flag_table = create_flag_table(container, &packages, version)?; Ok(flag_table.into_bytes()) } StorageFileType::FlagVal => { - let flag_value = create_flag_value(container, &packages)?; + let flag_value = create_flag_value(container, &packages, version)?; Ok(flag_value.into_bytes()) } - _ => Err(anyhow!("aconfig does not support the creation of this storage file type")), + StorageFileType::FlagInfo => { + let 
flag_info = create_flag_info(container, &packages, version)?; + Ok(flag_info.into_bytes()) + } } } #[cfg(test)] mod tests { + use aconfig_storage_file::DEFAULT_FILE_VERSION; + use super::*; use crate::Input; @@ -154,6 +183,7 @@ mod tests { reader: Box::new(value_content), }], crate::commands::DEFAULT_FLAG_PERMISSION, + true, ) .unwrap(); aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap() @@ -164,7 +194,50 @@ mod tests { #[test] fn test_flag_package() { let caches = parse_all_test_flags(); - let packages = group_flags_by_package(caches.iter()); + let packages = group_flags_by_package(caches.iter(), DEFAULT_FILE_VERSION); + + for pkg in packages.iter() { + let pkg_name = pkg.package_name; + assert_eq!(pkg.flag_names.len(), pkg.boolean_flags.len()); + for pf in pkg.boolean_flags.iter() { + assert!(pkg.flag_names.contains(pf.name())); + assert_eq!(pf.package(), pkg_name); + } + } + + assert_eq!(packages.len(), 3); + + assert_eq!(packages[0].package_name, "com.android.aconfig.storage.test_1"); + assert_eq!(packages[0].package_id, 0); + assert_eq!(packages[0].flag_names.len(), 3); + assert!(packages[0].flag_names.contains("enabled_rw")); + assert!(packages[0].flag_names.contains("disabled_rw")); + assert!(packages[0].flag_names.contains("enabled_ro")); + assert_eq!(packages[0].boolean_start_index, 0); + assert_eq!(packages[0].fingerprint, 0); + + assert_eq!(packages[1].package_name, "com.android.aconfig.storage.test_2"); + assert_eq!(packages[1].package_id, 1); + assert_eq!(packages[1].flag_names.len(), 3); + assert!(packages[1].flag_names.contains("enabled_ro")); + assert!(packages[1].flag_names.contains("disabled_rw")); + assert!(packages[1].flag_names.contains("enabled_fixed_ro")); + assert_eq!(packages[1].boolean_start_index, 3); + assert_eq!(packages[0].fingerprint, 0); + + assert_eq!(packages[2].package_name, "com.android.aconfig.storage.test_4"); + assert_eq!(packages[2].package_id, 2); + assert_eq!(packages[2].flag_names.len(), 2); + assert!(packages[2].flag_names.contains("enabled_rw")); + assert!(packages[2].flag_names.contains("enabled_fixed_ro")); + assert_eq!(packages[2].boolean_start_index, 6); + assert_eq!(packages[2].fingerprint, 0); + } + + #[test] + fn test_flag_package_with_fingerprint() { + let caches = parse_all_test_flags(); + let packages = group_flags_by_package(caches.iter(), 2); for pkg in packages.iter() { let pkg_name = pkg.package_name; @@ -184,6 +257,7 @@ mod tests { assert!(packages[0].flag_names.contains("disabled_rw")); assert!(packages[0].flag_names.contains("enabled_ro")); assert_eq!(packages[0].boolean_start_index, 0); + assert_eq!(packages[0].fingerprint, 15248948510590158086u64); assert_eq!(packages[1].package_name, "com.android.aconfig.storage.test_2"); assert_eq!(packages[1].package_id, 1); @@ -192,6 +266,7 @@ mod tests { assert!(packages[1].flag_names.contains("disabled_rw")); assert!(packages[1].flag_names.contains("enabled_fixed_ro")); assert_eq!(packages[1].boolean_start_index, 3); + assert_eq!(packages[1].fingerprint, 4431940502274857964u64); assert_eq!(packages[2].package_name, "com.android.aconfig.storage.test_4"); assert_eq!(packages[2].package_id, 2); @@ -199,5 +274,6 @@ mod tests { assert!(packages[2].flag_names.contains("enabled_rw")); assert!(packages[2].flag_names.contains("enabled_fixed_ro")); assert_eq!(packages[2].boolean_start_index, 6); + assert_eq!(packages[2].fingerprint, 16233229917711622375u64); } } diff --git a/tools/aconfig/aconfig/src/storage/package_table.rs b/tools/aconfig/aconfig/src/storage/package_table.rs 
index c53602f9cb..53daa7ff2a 100644 --- a/tools/aconfig/aconfig/src/storage/package_table.rs +++ b/tools/aconfig/aconfig/src/storage/package_table.rs @@ -18,14 +18,13 @@ use anyhow::Result; use aconfig_storage_file::{ get_table_size, PackageTable, PackageTableHeader, PackageTableNode, StorageFileType, - FILE_VERSION, }; use crate::storage::FlagPackage; -fn new_header(container: &str, num_packages: u32) -> PackageTableHeader { +fn new_header(container: &str, num_packages: u32, version: u32) -> PackageTableHeader { PackageTableHeader { - version: FILE_VERSION, + version, container: String::from(container), file_type: StorageFileType::PackageMap as u8, file_size: 0, @@ -48,6 +47,7 @@ impl PackageTableNodeWrapper { let node = PackageTableNode { package_name: String::from(package.package_name), package_id: package.package_id, + fingerprint: package.fingerprint, boolean_start_index: package.boolean_start_index, next_offset: None, }; @@ -56,20 +56,26 @@ impl PackageTableNodeWrapper { } } -pub fn create_package_table(container: &str, packages: &[FlagPackage]) -> Result<PackageTable> { +pub fn create_package_table( + container: &str, + packages: &[FlagPackage], + version: u32, +) -> Result<PackageTable> { // create table let num_packages = packages.len() as u32; let num_buckets = get_table_size(num_packages)?; - let mut header = new_header(container, num_packages); + let mut header = new_header(container, num_packages, version); let mut buckets = vec![None; num_buckets as usize]; - let mut node_wrappers: Vec<_> = - packages.iter().map(|pkg| PackageTableNodeWrapper::new(pkg, num_buckets)).collect(); + let mut node_wrappers: Vec<_> = packages + .iter() + .map(|pkg: &FlagPackage<'_>| PackageTableNodeWrapper::new(pkg, num_buckets)) + .collect(); // initialize all header fields header.bucket_offset = header.into_bytes().len() as u32; header.node_offset = header.bucket_offset + num_buckets * 4; header.file_size = header.node_offset - + node_wrappers.iter().map(|x| x.node.into_bytes().len()).sum::<usize>() as u32; + + node_wrappers.iter().map(|x| x.node.into_bytes(version).len()).sum::<usize>() as u32; // sort node_wrappers by bucket index for efficiency node_wrappers.sort_by(|a, b| a.bucket_index.cmp(&b.bucket_index)); @@ -87,7 +93,7 @@ pub fn create_package_table(container: &str, packages: &[FlagPackage]) -> Result if buckets[node_bucket_idx as usize].is_none() { buckets[node_bucket_idx as usize] = Some(offset); } - offset += node_wrappers[i].node.into_bytes().len() as u32; + offset += node_wrappers[i].node.into_bytes(version).len() as u32; if let Some(index) = next_node_bucket_idx { if index == node_bucket_idx { @@ -106,21 +112,59 @@ pub fn create_package_table(container: &str, packages: &[FlagPackage]) -> Result #[cfg(test)] mod tests { + use aconfig_storage_file::{DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION}; + use super::*; use crate::storage::{group_flags_by_package, tests::parse_all_test_flags}; - pub fn create_test_package_table_from_source() -> Result<PackageTable> { + pub fn create_test_package_table_from_source(version: u32) -> Result<PackageTable> { let caches = parse_all_test_flags(); - let packages = group_flags_by_package(caches.iter()); - create_package_table("mockup", &packages) + let packages = group_flags_by_package(caches.iter(), version); + create_package_table("mockup", &packages, version) } #[test] // this test point locks down the table creation and each field - fn test_table_contents() { - let package_table = create_test_package_table_from_source(); - 
assert!(package_table.is_ok()); - let expected_package_table = aconfig_storage_file::test_utils::create_test_package_table(); - assert_eq!(package_table.unwrap(), expected_package_table); + fn test_table_contents_default_version() { + let package_table_result = create_test_package_table_from_source(DEFAULT_FILE_VERSION); + assert!(package_table_result.is_ok()); + let package_table = package_table_result.unwrap(); + + let expected_package_table = + aconfig_storage_file::test_utils::create_test_package_table(DEFAULT_FILE_VERSION); + + assert_eq!(package_table.header, expected_package_table.header); + assert_eq!(package_table.buckets, expected_package_table.buckets); + for (node, expected_node) in + package_table.nodes.iter().zip(expected_package_table.nodes.iter()) + { + assert_eq!(node.package_name, expected_node.package_name); + assert_eq!(node.package_id, expected_node.package_id); + assert_eq!(node.boolean_start_index, expected_node.boolean_start_index); + assert_eq!(node.next_offset, expected_node.next_offset); + } + } + + #[test] + // this test point locks down the table creation and each field + fn test_table_contents_max_version() { + let package_table_result = + create_test_package_table_from_source(MAX_SUPPORTED_FILE_VERSION); + assert!(package_table_result.is_ok()); + let package_table = package_table_result.unwrap(); + + let expected_package_table = + aconfig_storage_file::test_utils::create_test_package_table(MAX_SUPPORTED_FILE_VERSION); + + assert_eq!(package_table.header, expected_package_table.header); + assert_eq!(package_table.buckets, expected_package_table.buckets); + for (node, expected_node) in + package_table.nodes.iter().zip(expected_package_table.nodes.iter()) + { + assert_eq!(node.package_name, expected_node.package_name); + assert_eq!(node.package_id, expected_node.package_id); + assert_eq!(node.boolean_start_index, expected_node.boolean_start_index); + assert_eq!(node.next_offset, expected_node.next_offset); + } } } diff --git a/tools/aconfig/aconfig/src/test.rs b/tools/aconfig/aconfig/src/test.rs index 7409cda6e8..10da252ceb 100644 --- a/tools/aconfig/aconfig/src/test.rs +++ b/tools/aconfig/aconfig/src/test.rs @@ -266,6 +266,7 @@ parsed_flag { reader: Box::new(include_bytes!("../tests/read_only_test.values").as_slice()), }], crate::commands::DEFAULT_FLAG_PERMISSION, + true, ) .unwrap(); aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap() @@ -290,6 +291,26 @@ parsed_flag { }, ], crate::commands::DEFAULT_FLAG_PERMISSION, + true, + ) + .unwrap(); + aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap() + } + + pub fn parse_second_package_flags() -> ProtoParsedFlags { + let bytes = crate::commands::parse_flags( + "com.android.aconfig.second_test", + Some("system"), + vec![Input { + source: "tests/test_second_package.aconfig".to_string(), + reader: Box::new(include_bytes!("../tests/test_second_package.aconfig").as_slice()), + }], + vec![Input { + source: "tests/third.values".to_string(), + reader: Box::new(include_bytes!("../tests/third.values").as_slice()), + }], + crate::commands::DEFAULT_FLAG_PERMISSION, + true, ) .unwrap(); aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap() diff --git a/tools/aconfig/aconfig/templates/FeatureFlags.java.template b/tools/aconfig/aconfig/templates/FeatureFlags.java.template index 38c8f13aaf..d2799b2474 100644 --- a/tools/aconfig/aconfig/templates/FeatureFlags.java.template +++ b/tools/aconfig/aconfig/templates/FeatureFlags.java.template @@ -19,4 +19,4 @@ public interface FeatureFlags 
\{ {{ -endif }} boolean {item.method_name}(); {{ -endfor }} -}
\ No newline at end of file +} diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.exported.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.exported.java.template new file mode 100644 index 0000000000..8b6082432e --- /dev/null +++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.exported.java.template @@ -0,0 +1,44 @@ +package {package_name}; {#- CODEGEN FOR EXPORTED MODE FOR NEW STORAGE #} + +import android.os.Build; +import android.os.flagging.AconfigPackage; +import android.util.Log; +/** @hide */ +public final class FeatureFlagsImpl implements FeatureFlags \{ + private static final String TAG = "FeatureFlagsImplExport"; + private static volatile boolean isCached = false; +{{ for flag in flag_elements }} + private static boolean {flag.method_name} = false; +{{ -endfor }} {#- end flag_elements #} + private void init() \{ + try \{ + AconfigPackage reader = AconfigPackage.load("{package_name}"); + {{ -for namespace_with_flags in namespace_flags }} + {{ -for flag in namespace_with_flags.flags }} + {{ -if flag.finalized_sdk_present }} + {flag.method_name} = Build.VERSION.SDK_INT >= {flag.finalized_sdk_value} ? true : reader.getBooleanFlagValue("{flag.flag_name}", {flag.default_value}); + {{ - else }} {#- else finalized_sdk_present #} + {flag.method_name} = reader.getBooleanFlagValue("{flag.flag_name}", {flag.default_value}); + {{ -endif}} {#- end finalized_sdk_present#} + {{ -endfor }} {#- end namespace_with_flags.flags #} + {{ -endfor }} {#- end namespace_flags #} + } catch (Exception e) \{ + // pass + Log.e(TAG, e.toString()); + } catch (LinkageError e) \{ + // for mainline module running on older devices. + // This should be replaces to version check, after the version bump. + Log.e(TAG, e.toString()); + } + isCached = true; + } +{{ -for flag in flag_elements }} + @Override + public boolean {flag.method_name}() \{ + if (!isCached) \{ + init(); + } + return {flag.method_name}; + } +{{ endfor }} {#- end flag_elements #} +} diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template index 96e762391a..ea2a2ee11a 100644 --- a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template +++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template @@ -1,68 +1,137 @@ package {package_name}; {{ -if not is_test_mode }} -{{ if not library_exported- }} +{{ -if allow_instrumentation }} +{{ if not library_exported- }}{#- only new storage for prod mode #} // TODO(b/303773055): Remove the annotation after access issue is resolved. 
import android.compat.annotation.UnsupportedAppUsage; -{{ -endif }} - {{ -if runtime_lookup_required }} -import android.provider.DeviceConfig; -import android.provider.DeviceConfig.Properties; - - -{{ -if not library_exported }} -{{ -if allow_instrumentation }} -import android.aconfig.storage.StorageInternalReader; +import android.os.Build; +{{ if is_platform_container }} +import android.os.flagging.PlatformAconfigPackageInternal; +{{ -else }} {#- else is_platform_container #} +import android.os.flagging.AconfigPackageInternal; +{{ -endif }} {#- end of is_platform_container#} import android.util.Log; -{{ -endif }} -{{ -endif }} - -{{ -endif }} +{{ -endif }} {#- end of runtime_lookup_required#} /** @hide */ public final class FeatureFlagsImpl implements FeatureFlags \{ {{ -if runtime_lookup_required }} -{{ -for namespace_with_flags in namespace_flags }} - private static volatile boolean {namespace_with_flags.namespace}_is_cached = false; -{{ -endfor- }} - + private static final String TAG = "FeatureFlagsImpl"; + private static volatile boolean isCached = false; {{ for flag in flag_elements }} -{{- if flag.is_read_write }} +{{ -if flag.is_read_write }} private static boolean {flag.method_name} = {flag.default_value}; -{{ -endif }} +{{ -endif }} {#- end of is_read_write#} {{ -endfor }} -{{ -if not library_exported }} -{{ -if allow_instrumentation }} - StorageInternalReader reader; - boolean readFromNewStorage; - - private final static String TAG = "AconfigJavaCodegen"; - private final static String SUCCESS_LOG = "success: %s value matches"; - private final static String MISMATCH_LOG = "error: %s value mismatch, new storage value is %s, old storage value is %s"; - private final static String ERROR_LOG = "error: failed to read flag value"; private void init() \{ - if (reader != null) return; - if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.storage_test_mission_1", false)) \{ - readFromNewStorage = true; - try \{ - reader = new StorageInternalReader("{container}", "{package_name}"); - } catch (Exception e) \{ - reader = null; - } + try \{ +{{ if is_platform_container }} + PlatformAconfigPackageInternal reader = PlatformAconfigPackageInternal.load("{package_name}", {package_fingerprint}); +{{ -else }} {#- else is_platform_container #} + AconfigPackageInternal reader = AconfigPackageInternal.load("{package_name}", {package_fingerprint}); +{{ -endif }} {#- end of is_platform_container#} + {{ -for namespace_with_flags in namespace_flags }} + {{ -for flag in namespace_with_flags.flags }} + {{ -if flag.is_read_write }} + {flag.method_name} = reader.getBooleanFlagValue({flag.flag_offset}); + {{ -endif }} {#- is_read_write#} + {{ -endfor }} + {{ -endfor }} + } catch (Exception e) \{ + Log.e(TAG, e.toString()); + } catch (LinkageError e) \{ + // for mainline module running on older devices. + // This should be replaces to version check, after the version bump. 
+ Log.e(TAG, e.toString()); } + isCached = true; } - -{{ -endif }} -{{ -endif }} +{{ -endif }}{#- end of runtime_lookup_required #} +{{ -for flag in flag_elements }} + @Override + @com.android.aconfig.annotations.AconfigFlagAccessor + @UnsupportedAppUsage + public boolean {flag.method_name}() \{ +{{ -if flag.is_read_write }} + if (!isCached) \{ + init(); + } + return {flag.method_name}; +{{ -else }}{#- else is_read_write #} + return {flag.default_value}; +{{ -endif }} {#- end of is_read_write#} + } +{{ endfor }} +} +{{ -else- }}{#- device config for exproted mode #} +{{ -if new_exported }} +import android.os.Build; +import android.os.flagging.AconfigPackage; +import android.util.Log; +/** @hide */ +public final class FeatureFlagsImpl implements FeatureFlags \{ + private static final String TAG = "FeatureFlagsImplExport"; + private static volatile boolean isCached = false; +{{ for flag in flag_elements }} + private static boolean {flag.method_name} = false; +{{ -endfor }} + private void init() \{ + try \{ + AconfigPackage reader = AconfigPackage.load("{package_name}"); + {{ -for namespace_with_flags in namespace_flags }} + {{ -for flag in namespace_with_flags.flags }} + {{ -if flag.finalized_sdk_present }} + {flag.method_name} = Build.VERSION.SDK_INT >= {flag.finalized_sdk_value} ? true : reader.getBooleanFlagValue("{flag.flag_name}", {flag.default_value}); + {{ - else }} {#- else finalized_sdk_present #} + {flag.method_name} = reader.getBooleanFlagValue("{flag.flag_name}", {flag.default_value}); + {{ -endif}} {#- end of finalized_sdk_present#} + {{ -endfor }} + {{ -endfor }} + } catch (Exception e) \{ + // pass + Log.e(TAG, e.toString()); + } catch (LinkageError e) \{ + // for mainline module running on older devices. + // This should be replaces to version check, after the version bump. 
+ Log.e(TAG, e.toString()); + } + isCached = true; + } +{{ -for flag in flag_elements }} + @Override + public boolean {flag.method_name}() \{ + if (!isCached) \{ + init(); + } + return {flag.method_name}; + } +{{ endfor }} +} +{{ else }} +import android.os.Binder; +import android.provider.DeviceConfig; +import android.provider.DeviceConfig.Properties; +/** @hide */ +public final class FeatureFlagsImpl implements FeatureFlags \{ +{{ -for namespace_with_flags in namespace_flags }} + private static volatile boolean {namespace_with_flags.namespace}_is_cached = false; +{{ -endfor- }} +{{ for flag in flag_elements }} +{{ -if flag.is_read_write }} + private static boolean {flag.method_name} = {flag.default_value}; +{{ -endif }} {#- end of is_read_write#} +{{ -endfor }} {{ for namespace_with_flags in namespace_flags }} private void load_overrides_{namespace_with_flags.namespace}() \{ + final long ident = Binder.clearCallingIdentity(); try \{ Properties properties = DeviceConfig.getProperties("{namespace_with_flags.namespace}"); {{ -for flag in namespace_with_flags.flags }} {{ -if flag.is_read_write }} {flag.method_name} = properties.getBoolean(Flags.FLAG_{flag.flag_name_constant_suffix}, {flag.default_value}); -{{ -endif }} +{{ -endif }} {#- end of is_read_write#} {{ -endfor }} } catch (NullPointerException e) \{ throw new RuntimeException( @@ -73,37 +142,74 @@ public final class FeatureFlagsImpl implements FeatureFlags \{ + "flag declaration.", e ); + } catch (SecurityException e) \{ + // for isolated process case, skip loading flag value from the storage, use the default + } finally \{ + Binder.restoreCallingIdentity(ident); } {namespace_with_flags.namespace}_is_cached = true; -{{ -if not library_exported }} -{{ -if allow_instrumentation }} - init(); - if (readFromNewStorage && reader != null) \{ - boolean val; - try \{ -{{ -for flag in namespace_with_flags.flags }} -{{ -if flag.is_read_write }} - - val = reader.getBooleanFlagValue({flag.flag_offset}); - if (val == {flag.method_name}) \{ - Log.i(TAG, String.format(SUCCESS_LOG, "{flag.method_name}")); - } else \{ - Log.i(TAG, String.format(MISMATCH_LOG, "{flag.method_name}", val, {flag.method_name})); - } + } +{{ endfor- }} +{{ -for flag in flag_elements }} + @Override + public boolean {flag.method_name}() \{ + if (!{flag.device_config_namespace}_is_cached) \{ + load_overrides_{flag.device_config_namespace}(); + } + return {flag.method_name}; + } +{{ endfor }} +} +{{ -endif- }} {#- end new_exported mode #} +{{ -endif- }} {#- end exported mode #} +{{ else }} {#- else for allow_instrumentation is not enabled #} +{{ if not library_exported- }} +// TODO(b/303773055): Remove the annotation after access issue is resolved. 
+import android.compat.annotation.UnsupportedAppUsage; +{{ -endif }} {#- end of not library_exported#} - if (DeviceConfig.getBoolean("core_experiments_team_internal", "com.android.providers.settings.use_new_storage_value", false)) \{ - {flag.method_name} = val; - } +{{ -if runtime_lookup_required }} +import android.os.Binder; +import android.provider.DeviceConfig; +import android.provider.DeviceConfig.Properties; +{{ -endif }} {#- end of runtime_lookup_required#} +/** @hide */ +public final class FeatureFlagsImpl implements FeatureFlags \{ +{{ -if runtime_lookup_required }} +{{ -for namespace_with_flags in namespace_flags }} + private static volatile boolean {namespace_with_flags.namespace}_is_cached = false; +{{ -endfor- }} -{{ -endif }} +{{ for flag in flag_elements }} +{{- if flag.is_read_write }} + private static boolean {flag.method_name} = {flag.default_value}; +{{ -endif }} {#- end of is_read_write#} +{{ -endfor }} +{{ for namespace_with_flags in namespace_flags }} + private void load_overrides_{namespace_with_flags.namespace}() \{ + final long ident = Binder.clearCallingIdentity(); + try \{ + Properties properties = DeviceConfig.getProperties("{namespace_with_flags.namespace}"); +{{ -for flag in namespace_with_flags.flags }} +{{ -if flag.is_read_write }} + {flag.method_name} = + properties.getBoolean(Flags.FLAG_{flag.flag_name_constant_suffix}, {flag.default_value}); +{{ -endif }} {#- end of is_read_write#} {{ -endfor }} - } catch (Exception e) \{ - Log.e(TAG, ERROR_LOG, e); - } + } catch (NullPointerException e) \{ + throw new RuntimeException( + "Cannot read value from namespace {namespace_with_flags.namespace} " + + "from DeviceConfig. It could be that the code using flag " + + "executed before SettingsProvider initialization. Please use " + + "fixed read-only flag by adding is_fixed_read_only: true in " + + "flag declaration.", + e + ); + } finally \{ + Binder.restoreCallingIdentity(ident); } -{{ -endif }} -{{ -endif }} - } + {namespace_with_flags.namespace}_is_cached = true; +} {{ endfor- }} {{ -endif }}{#- end of runtime_lookup_required #} {{ -for flag in flag_elements }} @@ -111,21 +217,21 @@ public final class FeatureFlagsImpl implements FeatureFlags \{ {{ -if not library_exported }} @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage -{{ -endif }} +{{ -endif }}{#- end of not library_exported #} public boolean {flag.method_name}() \{ {{ -if flag.is_read_write }} if (!{flag.device_config_namespace}_is_cached) \{ load_overrides_{flag.device_config_namespace}(); } return {flag.method_name}; -{{ -else }} +{{ -else }} {#- else is_read_write #} return {flag.default_value}; -{{ -endif }} +{{ -endif }}{#- end of is_read_write #} } {{ endfor }} } -{{ else }} -{#- Generate only stub if in test mode #} +{{ endif}} {#- endif for allow_instrumentation #} +{{ else }} {#- Generate only stub if in test mode #} /** @hide */ public final class FeatureFlagsImpl implements FeatureFlags \{ {{ for flag in flag_elements }} diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.new_storage.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.new_storage.java.template new file mode 100644 index 0000000000..9492a8335a --- /dev/null +++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.new_storage.java.template @@ -0,0 +1,63 @@ +package {package_name}; {#- CODEGEN FOR INTERNAL MODE FOR NEW STORAGE #} +// TODO(b/303773055): Remove the annotation after access issue is resolved. 
+import android.compat.annotation.UnsupportedAppUsage; +{{ -if runtime_lookup_required }} +import android.os.Build; +{{ if is_platform_container }} +import android.os.flagging.PlatformAconfigPackageInternal; +{{ -else }} {#- else is_platform_container #} +import android.os.flagging.AconfigPackageInternal; +{{ -endif }} {#- end of is_platform_container#} +import android.util.Log; +{{ -endif }} {#- end of runtime_lookup_required#} +/** @hide */ +public final class FeatureFlagsImpl implements FeatureFlags \{ +{{ -if runtime_lookup_required }} + private static final String TAG = "FeatureFlagsImpl"; + private static volatile boolean isCached = false; +{{ for flag in flag_elements }} +{{ -if flag.is_read_write }} + private static boolean {flag.method_name} = {flag.default_value}; +{{ -endif }} {#- end of is_read_write#} +{{ -endfor }} {#- else flag_elements #} + + private void init() \{ + try \{ +{{ if is_platform_container }} + PlatformAconfigPackageInternal reader = PlatformAconfigPackageInternal.load("{package_name}", {package_fingerprint}); +{{ -else }} {#- else is_platform_container #} + AconfigPackageInternal reader = AconfigPackageInternal.load("{package_name}", {package_fingerprint}); +{{ -endif }} {#- end of is_platform_container#} + {{ -for namespace_with_flags in namespace_flags }} + {{ -for flag in namespace_with_flags.flags }} + {{ -if flag.is_read_write }} + {flag.method_name} = reader.getBooleanFlagValue({flag.flag_offset}); + {{ -endif }} {#- is_read_write#} + {{ -endfor }} {#- else namespace_with_flags.flags #} + {{ -endfor }} {#- else namespace_flags #} + } catch (Exception e) \{ + Log.e(TAG, e.toString()); + } catch (LinkageError e) \{ + // for mainline module running on older devices. + // This should be replaces to version check, after the version bump. 
+ Log.e(TAG, e.toString()); + } + isCached = true; + } +{{ -endif }}{#- end of runtime_lookup_required #} +{{ -for flag in flag_elements }} + @Override + @com.android.aconfig.annotations.AconfigFlagAccessor + @UnsupportedAppUsage + public boolean {flag.method_name}() \{ +{{ -if flag.is_read_write }} + if (!isCached) \{ + init(); + } + return {flag.method_name}; +{{ -else }}{#- else is_read_write #} + return {flag.default_value}; +{{ -endif }} {#- end of is_read_write#} + } +{{ endfor }} {#- else flag_elements #} +} diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.test_mode.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.test_mode.java.template new file mode 100644 index 0000000000..8eda26310e --- /dev/null +++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.test_mode.java.template @@ -0,0 +1,14 @@ +package {package_name}; {#- CODEGEN FOR TEST MODE #} +/** @hide */ +public final class FeatureFlagsImpl implements FeatureFlags \{ +{{ for flag in flag_elements }} + @Override +{{ -if not library_exported }} + @com.android.aconfig.annotations.AconfigFlagAccessor +{{ -endif }} + public boolean {flag.method_name}() \{ + throw new UnsupportedOperationException( + "Method is not implemented."); + } +{{ endfor- }} +} diff --git a/tools/aconfig/aconfig/templates/cpp_exported_header.template b/tools/aconfig/aconfig/templates/cpp_exported_header.template index 0f7853e405..4643c9775c 100644 --- a/tools/aconfig/aconfig/templates/cpp_exported_header.template +++ b/tools/aconfig/aconfig/templates/cpp_exported_header.template @@ -27,12 +27,13 @@ public: {{ -for item in class_elements}} virtual bool {item.flag_name}() = 0; + {{ -endfor }} + {{ -if is_test_mode }} + {{ -for item in class_elements}} virtual void {item.flag_name}(bool val) = 0; - {{ -endif }} {{ -endfor }} - {{ -if is_test_mode }} virtual void reset_flags() \{} {{ -endif }} }; diff --git a/tools/aconfig/aconfig/templates/cpp_source_file.template b/tools/aconfig/aconfig/templates/cpp_source_file.template index b6012e7a0d..36ab774f54 100644 --- a/tools/aconfig/aconfig/templates/cpp_source_file.template +++ b/tools/aconfig/aconfig/templates/cpp_source_file.template @@ -1,18 +1,13 @@ #include "{header}.h" -{{ if allow_instrumentation }} {{ if readwrite- }} -#include <sys/stat.h> +#include <unistd.h> #include "aconfig_storage/aconfig_storage_read_api.hpp" #include <android/log.h> #define LOG_TAG "aconfig_cpp_codegen" -#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__) +#define ALOGE(...) 
__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__) {{ -endif }} -{{ endif }} -{{ if readwrite- }} -#include <server_configurable_flags/get_flags.h> -{{ endif }} {{ if is_test_mode }} #include <unordered_map> #include <string> @@ -29,32 +24,103 @@ namespace {cpp_namespace} \{ private: std::unordered_map<std::string, bool> overrides_; + {{ if readwrite- }} + uint32_t boolean_start_index_; + + std::unique_ptr<aconfig_storage::MappedStorageFile> flag_value_file_; + + bool package_exists_in_storage_; + {{ -endif }} + public: + {{ if readwrite- }} + flag_provider() + : overrides_() + , boolean_start_index_() + , flag_value_file_(nullptr) + , package_exists_in_storage_(true) \{ + + auto package_map_file = aconfig_storage::get_mapped_file( + "{container}", + aconfig_storage::StorageFileType::package_map); + + if (!package_map_file.ok()) \{ + ALOGE("error: failed to get package map file: %s", package_map_file.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + auto context = aconfig_storage::get_package_read_context( + **package_map_file, "{package}"); + + if (!context.ok()) \{ + ALOGE("error: failed to get package read context: %s", context.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + if (!(context->package_exists)) \{ + package_exists_in_storage_ = false; + return; + } + + // cache package boolean flag start index + boolean_start_index_ = context->boolean_start_index; + + // unmap package map file and free memory + delete *package_map_file; + + auto flag_value_file = aconfig_storage::get_mapped_file( + "{container}", + aconfig_storage::StorageFileType::flag_val); + if (!flag_value_file.ok()) \{ + ALOGE("error: failed to get flag value file: %s", flag_value_file.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + // cache flag value file + flag_value_file_ = std::unique_ptr<aconfig_storage::MappedStorageFile>( + *flag_value_file); + + } + {{ -else }} flag_provider() : overrides_() \{} + {{ -endif }} -{{ for item in class_elements }} + {{ for item in class_elements }} virtual bool {item.flag_name}() override \{ auto it = overrides_.find("{item.flag_name}"); - if (it != overrides_.end()) \{ - return it->second; + if (it != overrides_.end()) \{ + return it->second; } else \{ - {{ if item.readwrite- }} - return server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.{item.device_config_namespace}", - "{item.device_config_flag}", - "{item.default_value}") == "true"; - {{ -else }} - return {item.default_value}; - {{ -endif }} + {{ if item.readwrite- }} + if (!package_exists_in_storage_) \{ + return {item.default_value}; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + {item.flag_offset}); + + if (!value.ok()) \{ + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return {item.default_value}; + } else \{ + return *value; + } + {{ -else }} + return {item.default_value}; + {{ -endif }} } } virtual void {item.flag_name}(bool val) override \{ overrides_["{item.flag_name}"] = val; } -{{ endfor }} + {{ endfor }} virtual void reset_flags() override \{ overrides_.clear(); @@ -66,38 +132,32 @@ namespace {cpp_namespace} \{ class flag_provider : public flag_provider_interface \{ public: - {{ if allow_instrumentation- }} {{ if readwrite- }} flag_provider() - {{ if readwrite- }} : cache_({readwrite_count}, -1) , boolean_start_index_() - {{ -else- }} - : boolean_start_index_() - {{ -endif }} , flag_value_file_(nullptr) - , 
read_from_new_storage_(false) - , use_new_storage_value(false) \{ - - struct stat buffer; - if (stat("/metadata/aconfig_test_missions/mission_1", &buffer) == 0) \{ - read_from_new_storage_ = true; - } else \{ - return; - } + , package_exists_in_storage_(true) \{ auto package_map_file = aconfig_storage::get_mapped_file( "{container}", aconfig_storage::StorageFileType::package_map); if (!package_map_file.ok()) \{ - ALOGI("error: failed to get package map file: %s", package_map_file.error().c_str()); + ALOGE("error: failed to get package map file: %s", package_map_file.error().c_str()); + package_exists_in_storage_ = false; return; } auto context = aconfig_storage::get_package_read_context( **package_map_file, "{package}"); if (!context.ok()) \{ - ALOGI("error: failed to get package read context: %s", context.error().c_str()); + ALOGE("error: failed to get package read context: %s", context.error().c_str()); + package_exists_in_storage_ = false; + return; + } + + if (!(context->package_exists)) \{ + package_exists_in_storage_ = false; return; } @@ -111,7 +171,8 @@ namespace {cpp_namespace} \{ "{container}", aconfig_storage::StorageFileType::flag_val); if (!flag_value_file.ok()) \{ - ALOGI("error: failed to get flag value file: %s", flag_value_file.error().c_str()); + ALOGE("error: failed to get flag value file: %s", flag_value_file.error().c_str()); + package_exists_in_storage_ = false; return; } @@ -119,30 +180,15 @@ namespace {cpp_namespace} \{ flag_value_file_ = std::unique_ptr<aconfig_storage::MappedStorageFile>( *flag_value_file); - use_new_storage_value = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.core_experiments_team_internal", - "com.android.providers.settings.use_new_storage_value", - "false") == "true"; } {{ -endif }} - {{ -endif }} {{ -for item in class_elements }} virtual bool {item.flag_name}() override \{ {{ -if item.readwrite }} if (cache_[{item.readwrite_idx}] == -1) \{ - cache_[{item.readwrite_idx}] = server_configurable_flags::GetServerConfigurableFlag( - "aconfig_flags.{item.device_config_namespace}", - "{item.device_config_flag}", - "{item.default_value}") == "true"; - } - - - {{ if allow_instrumentation- }} - if (read_from_new_storage_) \{ - if (!flag_value_file_) \{ - ALOGI("error: failed to get flag {item.flag_name}: flag value file is null"); - return cache_[{item.readwrite_idx}]; + if (!package_exists_in_storage_) \{ + return {item.default_value}; } auto value = aconfig_storage::get_boolean_flag_value( @@ -150,25 +196,12 @@ namespace {cpp_namespace} \{ boolean_start_index_ + {item.flag_offset}); if (!value.ok()) \{ - ALOGI("error: failed to read flag value: %s", value.error().c_str()); - return cache_[{item.readwrite_idx}]; + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + return {item.default_value}; } - bool expected_value = cache_[{item.readwrite_idx}]; - if (*value != expected_value) \{ - ALOGI("error: {item.flag_name} value mismatch, new storage value is %s, old storage value is %s", - *value ? "true" : "false", expected_value ? 
"true" : "false"); - } - - if (use_new_storage_value) \{ - return *value; - } else \{ - return expected_value; - } + cache_[{item.readwrite_idx}] = *value; } - {{ -endif }} - - return cache_[{item.readwrite_idx}]; {{ -else }} {{ -if item.is_fixed_read_only }} @@ -183,14 +216,12 @@ namespace {cpp_namespace} \{ {{ if readwrite- }} private: std::vector<int8_t> cache_ = std::vector<int8_t>({readwrite_count}, -1); - {{ if allow_instrumentation- }} + uint32_t boolean_start_index_; std::unique_ptr<aconfig_storage::MappedStorageFile> flag_value_file_; - bool read_from_new_storage_; - bool use_new_storage_value; - {{ -endif }} + bool package_exists_in_storage_; {{ -endif }} }; diff --git a/tools/aconfig/aconfig/templates/rust.template b/tools/aconfig/aconfig/templates/rust.template index 77a9984baa..56323e25ca 100644 --- a/tools/aconfig/aconfig/templates/rust.template +++ b/tools/aconfig/aconfig/templates/rust.template @@ -2,105 +2,68 @@ use aconfig_storage_read_api::\{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::path::Path; use std::io::Write; +use std::sync::LazyLock; use log::\{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - /// flag provider pub struct FlagProvider; {{ if has_readwrite- }} -lazy_static::lazy_static! \{ - {{ if allow_instrumentation }} - static ref PACKAGE_OFFSET: Result<Option<u32>, AconfigStorageError> = unsafe \{ - get_mapped_storage_file("{container}", StorageFileType::PackageMap) - .and_then(|package_map| get_package_read_context(&package_map, "{package}")) - .map(|context| context.map(|c| c.boolean_start_index)) - }; - - static ref FLAG_VAL_MAP: Result<Mmap, AconfigStorageError> = unsafe \{ - get_mapped_storage_file("{container}", StorageFileType::FlagVal) - }; - {{ -endif }} - +static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe \{ + get_mapped_storage_file("{container}", StorageFileType::PackageMap) + .and_then(|package_map| get_package_read_context(&package_map, "{package}")) + .map(|context| context.map(|c| c.boolean_start_index)) +}); + +static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe \{ + get_mapped_storage_file("{container}", StorageFileType::FlagVal) +}); {{ -for flag in template_flags }} - {{ -if flag.readwrite }} - /// flag value cache for {flag.name} - {{ if allow_instrumentation }} - static ref CACHED_{flag.name}: bool = \{ - let result = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.{flag.device_config_namespace}", - "{flag.device_config_flag}", - "{flag.default_value}") == "true"; - - let use_new_storage_value = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.core_experiments_team_internal", - "com.android.providers.settings.use_new_storage_value", - "false") == "true"; - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{ - // This will be called multiple times. Subsequent calls after the first are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); +{{ -if flag.readwrite }} +/// flag value cache for {flag.name} +static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| \{ - let aconfig_storage_result = FLAG_VAL_MAP + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: \{err}")) + .and_then(|flag_val_map| \{ + PACKAGE_OFFSET .as_ref() - .map_err(|err| format!("failed to get flag val map: \{err}")) - .and_then(|flag_val_map| \{ - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: \{err}")) - .and_then(|package_offset| \{ - match package_offset \{ - Some(offset) => \{ - get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset}) - .map_err(|err| format!("failed to get flag: \{err}")) - }, - None => Err("no context found for package '{package}'".to_string()) - } - }) - }); - - match aconfig_storage_result \{ - Ok(storage_result) if storage_result == result => \{ - if use_new_storage_value \{ - return storage_result; - } else \{ - return result; - } - }, - Ok(storage_result) => \{ - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag '{flag.name}'. Legacy storage was \{result}, new storage was \{storage_result}"); - if use_new_storage_value \{ - return storage_result; - } else \{ - return result; + .map_err(|err| format!("failed to get package read offset: \{err}")) + .and_then(|package_offset| \{ + match package_offset \{ + Some(offset) => \{ + get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset}) + .map_err(|err| format!("failed to get flag: \{err}")) + }, + None => \{ + log!(Level::Error, "no context found for package {package}"); + Err(format!("failed to flag package {package}")) + } } - }, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: \{err}"); - if use_new_storage_value \{ - panic!("failed to read flag value: \{err}"); - } - } - } + }) + }); + + match flag_value_result \{ + Ok(flag_value) => \{ + return flag_value; + }, + Err(err) => \{ + log!(Level::Error, "aconfig_rust_codegen: error: \{err}"); + return {flag.default_value}; } + } - result - }; - {{ else }} - static ref CACHED_{flag.name}: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.{flag.device_config_namespace}", - "{flag.device_config_flag}", - "{flag.default_value}") == "true"; - {{ endif }} - {{ -endif }} +}); +{{ -endif }} {{ -endfor }} -} {{ -endif }} impl FlagProvider \{ @@ -124,72 +87,11 @@ pub static PROVIDER: FlagProvider = FlagProvider; {{ for flag in template_flags }} /// query flag {flag.name} #[inline(always)] -{{ -if flag.readwrite }} pub fn {flag.name}() -> bool \{ +{{ -if flag.readwrite }} PROVIDER.{flag.name}() {{ -else }} -pub fn {flag.name}() -> bool \{ - {{ if not allow_instrumentation }} {flag.default_value} - {{ else }} - - let result = {flag.default_value}; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{ - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. 
- logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe \{ - let package_map = match get_mapped_storage_file("{flag.container}", StorageFileType::PackageMap) \{ - Ok(file) => file, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "{package}") \{ - Ok(Some(context)) => context, - Ok(None) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': did not get context"); - return result; - }, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("{flag.container}", StorageFileType::FlagVal) \{ - Ok(val_map) => val_map, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, {flag.flag_offset} + package_read_context.boolean_start_index) \{ - Ok(val) => val, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}"); - return result; - } - }; - - if result != value \{ - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for '{flag.name}'. Legacy storage was \{result}, new storage was \{value}"); - } else \{ - let default_value = {flag.default_value}; - } - } - - result - {{ endif }} {{ -endif }} } {{ endfor }} diff --git a/tools/aconfig/aconfig/templates/rust_test.template b/tools/aconfig/aconfig/templates/rust_test.template index d01f40aab7..139a5ec62a 100644 --- a/tools/aconfig/aconfig/templates/rust_test.template +++ b/tools/aconfig/aconfig/templates/rust_test.template @@ -1,23 +1,81 @@ //! codegenerated rust flag lib - +use aconfig_storage_read_api::\{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::collections::BTreeMap; -use std::sync::Mutex; +use std::path::Path; +use std::io::Write; +use std::sync::\{LazyLock, Mutex}; +use log::\{log, LevelFilter, Level}; /// flag provider pub struct FlagProvider \{ overrides: BTreeMap<&'static str, bool>, } +{{ if has_readwrite- }} +static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe \{ + get_mapped_storage_file("{container}", StorageFileType::PackageMap) + .and_then(|package_map| get_package_read_context(&package_map, "{package}")) + .map(|context| context.map(|c| c.boolean_start_index)) +}); + +static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe \{ + get_mapped_storage_file("{container}", StorageFileType::FlagVal) +}); + +{{ -for flag in template_flags }} +{{ -if flag.readwrite }} +/// flag value cache for {flag.name} +static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| \{ + + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: \{err}")) + .and_then(|flag_val_map| \{ + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: \{err}")) + .and_then(|package_offset| \{ + match package_offset \{ + Some(offset) => \{ + get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset}) + .map_err(|err| format!("failed to get flag: \{err}")) + }, + None => \{ + log!(Level::Error, "no context found for package {package}"); + Err(format!("failed to flag package {package}")) + } + } + }) + }); + + match flag_value_result \{ + Ok(flag_value) => \{ + return flag_value; + }, + Err(err) => \{ + log!(Level::Error, "aconfig_rust_codegen: error: \{err}"); + return {flag.default_value}; + } + } + +}); +{{ -endif }} +{{ -endfor }} +{{ -endif }} + impl FlagProvider \{ {{ for flag in template_flags }} /// query flag {flag.name} pub fn {flag.name}(&self) -> bool \{ self.overrides.get("{flag.name}").copied().unwrap_or( {{ if flag.readwrite -}} - flags_rust::GetServerConfigurableFlag( - "aconfig_flags.{flag.device_config_namespace}", - "{flag.device_config_flag}", - "{flag.default_value}") == "true" + *CACHED_{flag.name} {{ -else- }} {flag.default_value} {{ -endif }} diff --git a/tools/aconfig/aconfig/tests/test_second_package.aconfig b/tools/aconfig/aconfig/tests/test_second_package.aconfig new file mode 100644 index 0000000000..188bc96cfb --- /dev/null +++ b/tools/aconfig/aconfig/tests/test_second_package.aconfig @@ -0,0 +1,10 @@ +package: "com.android.aconfig.second_test" +container: "system" + +flag { + name: "testing_flag" + namespace: "another_namespace" + description: "This is a flag for testing." 
+ bug: "123" +} + diff --git a/tools/aconfig/aconfig/tests/third.values b/tools/aconfig/aconfig/tests/third.values new file mode 100644 index 0000000000..675832a4bc --- /dev/null +++ b/tools/aconfig/aconfig/tests/third.values @@ -0,0 +1,6 @@ +flag_value { + package: "com.android.aconfig.second_test" + name: "testing_flag" + state: DISABLED + permission: READ_WRITE +} diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp index 2d943de672..3531450e49 100644 --- a/tools/aconfig/aconfig_device_paths/Android.bp +++ b/tools/aconfig/aconfig_device_paths/Android.bp @@ -26,7 +26,6 @@ rust_defaults { "libaconfig_protos", "libanyhow", "libprotobuf", - "libregex", ], } @@ -35,12 +34,17 @@ rust_library { crate_name: "aconfig_device_paths", host_supported: true, defaults: ["libaconfig_device_paths.defaults"], + apex_available: [ + "//apex_available:platform", + "com.android.configinfrastructure", + ], + min_sdk_version: "34", } genrule { name: "libaconfig_java_device_paths_src", - srcs: ["src/DevicePathsTemplate.java"], - out: ["DevicePaths.java"], + srcs: ["src/DeviceProtosTemplate.java"], + out: ["DeviceProtos.java"], tool_files: ["partition_aconfig_flags_paths.txt"], cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out)", } @@ -48,5 +52,50 @@ genrule { java_library { name: "aconfig_device_paths_java", srcs: [":libaconfig_java_device_paths_src"], - sdk_version: "core_current", + static_libs: [ + "libaconfig_java_proto_nano", + ], + sdk_version: "core_platform", + apex_available: [ + "//apex_available:platform", + "com.android.configinfrastructure", + ], + min_sdk_version: "34", +} + +genrule { + name: "libaconfig_java_host_device_paths_src", + srcs: ["src/HostDeviceProtosTemplate.java"], + out: ["HostDeviceProtos.java"], + tool_files: [ + "partition_aconfig_flags_paths.txt", + "mainline_aconfig_flags_paths.txt", + ], + cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out).tmp && " + + "sed -e '/MAINLINE_T/{r$(location mainline_aconfig_flags_paths.txt)' -e 'd}' $(out).tmp > $(out)", +} + +java_library_host { + name: "aconfig_host_device_paths_java", + srcs: [":libaconfig_java_host_device_paths_src"], +} + +genrule { + name: "java_device_paths_test_util_src", + srcs: ["src/DeviceProtosTestUtilTemplate.java"], + out: ["DeviceProtosTestUtil.java"], + tool_files: ["partition_aconfig_flags_paths.txt"], + cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out)", +} + +java_library { + name: "aconfig_device_paths_java_util", + srcs: [":java_device_paths_test_util_src"], + static_libs: [ + "libaconfig_java_proto_nano", + ], + sdk_version: "core_platform", + apex_available: [ + "//apex_available:platform", + ], } diff --git a/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt new file mode 100644 index 0000000000..aad2b23896 --- /dev/null +++ b/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt @@ -0,0 +1,20 @@ +"/apex/com.android.adservices/etc/aconfig_flags.pb", +"/apex/com.android.appsearch/etc/aconfig_flags.pb", +"/apex/com.android.art/etc/aconfig_flags.pb", +"/apex/com.android.bt/etc/aconfig_flags.pb", +"/apex/com.android.cellbroadcast/etc/aconfig_flags.pb", +"/apex/com.android.configinfrastructure/etc/aconfig_flags.pb", +"/apex/com.android.conscrypt/etc/aconfig_flags.pb", +"/apex/com.android.devicelock/etc/aconfig_flags.pb", 
+"/apex/com.android.healthfitness/etc/aconfig_flags.pb", +"/apex/com.android.ipsec/etc/aconfig_flags.pb", +"/apex/com.android.media/etc/aconfig_flags.pb", +"/apex/com.android.mediaprovider/etc/aconfig_flags.pb", +"/apex/com.android.ondevicepersonalization/etc/aconfig_flags.pb", +"/apex/com.android.os.statsd/etc/aconfig_flags.pb", +"/apex/com.android.permission/etc/aconfig_flags.pb", +"/apex/com.android.profiling/etc/aconfig_flags.pb", +"/apex/com.android.tethering/etc/aconfig_flags.pb", +"/apex/com.android.uwb/etc/aconfig_flags.pb", +"/apex/com.android.virt/etc/aconfig_flags.pb", +"/apex/com.android.wifi/etc/aconfig_flags.pb", diff --git a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt index 140cd21ac8..e997e3ddfa 100644 --- a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt +++ b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt @@ -1,4 +1,3 @@ "/system/etc/aconfig_flags.pb", -"/system_ext/etc/aconfig_flags.pb", "/product/etc/aconfig_flags.pb", "/vendor/etc/aconfig_flags.pb", diff --git a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java index 16355a33f2..4d4119947f 100644 --- a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java +++ b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java @@ -15,7 +15,12 @@ */ package android.aconfig; +import android.aconfig.nano.Aconfig.parsed_flag; +import android.aconfig.nano.Aconfig.parsed_flags; + import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -23,14 +28,38 @@ import java.util.List; /** * @hide */ -public class DevicePaths { - static final String[] PATHS = { +public class DeviceProtos { + public static final String[] PATHS = { TEMPLATE }; private static final String APEX_DIR = "/apex"; private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb"; + /** + * Returns a list of all on-device aconfig protos. + * + * May throw an exception if the protos can't be read at the call site. For + * example, some of the protos are in the apex/ partition, which is mounted + * somewhat late in the boot process. + * + * @throws IOException if we can't read one of the protos yet + * @return a list of all on-device aconfig protos + */ + public static List<parsed_flag> loadAndParseFlagProtos() throws IOException { + ArrayList<parsed_flag> result = new ArrayList(); + + for (String path : parsedFlagsProtoPaths()) { + try (FileInputStream inputStream = new FileInputStream(path)) { + parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes()); + for (parsed_flag flag : parsedFlags.parsedFlag) { + result.add(flag); + } + } + } + + return result; + } /** * Returns the list of all on-device aconfig protos paths. diff --git a/tools/aconfig/aconfig_device_paths/src/DeviceProtosTestUtilTemplate.java b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTestUtilTemplate.java new file mode 100644 index 0000000000..45d67663ef --- /dev/null +++ b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTestUtilTemplate.java @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package android.aconfig; + +import android.aconfig.nano.Aconfig.parsed_flag; +import android.aconfig.nano.Aconfig.parsed_flags; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** @hide */ +public class DeviceProtosTestUtil { + public static final String[] PATHS = { + TEMPLATE + }; + + private static final String APEX_DIR = "/apex/"; + private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb"; + private static final String SYSTEM_APEX_DIR = "/system/apex"; + + /** + * Returns a list of all on-device aconfig protos. + * + * <p>May throw an exception if the protos can't be read at the call site. For example, some of + * the protos are in the apex/ partition, which is mounted somewhat late in the boot process. + * + * @throws IOException if we can't read one of the protos yet + * @return a list of all on-device aconfig protos + */ + public static List<parsed_flag> loadAndParseFlagProtos() throws IOException { + ArrayList<parsed_flag> result = new ArrayList(); + + for (String path : parsedFlagsProtoPaths()) { + try (FileInputStream inputStream = new FileInputStream(path)) { + parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes()); + for (parsed_flag flag : parsedFlags.parsedFlag) { + result.add(flag); + } + } + } + + return result; + } + + /** + * Returns the list of all on-device aconfig protos paths. + * + * @hide + */ + public static List<String> parsedFlagsProtoPaths() { + ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS)); + + File apexDirectory = new File(SYSTEM_APEX_DIR); + if (!apexDirectory.isDirectory()) { + return paths; + } + + File[] subdirs = apexDirectory.listFiles(); + if (subdirs == null) { + return paths; + } + + for (File prefix : subdirs) { + String apexName = prefix.getName().replace("com.google", "com"); + apexName = apexName.substring(0, apexName.lastIndexOf('.')); + + File protoPath = new File(APEX_DIR + apexName + APEX_ACONFIG_PATH_SUFFIX); + if (!protoPath.exists()) { + continue; + } + + paths.add(protoPath.getAbsolutePath()); + } + return paths; + } +} diff --git a/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java b/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java new file mode 100644 index 0000000000..e7d0a76a8a --- /dev/null +++ b/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package android.aconfig; + +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * A host lib that can read all aconfig proto file paths on a given device. + * This lib is only available on device with root access (userdebug/eng). + */ +public class HostDeviceProtos { + /** + * An interface that executes ADB command and return the result. + */ + public static interface AdbCommandExecutor { + /** Executes the ADB command. */ + String executeAdbCommand(String command); + } + + static final String[] PATHS = { + TEMPLATE + }; + + static final String[] MAINLINE_PATHS = { + MAINLINE_T + }; + + private static final String APEX_DIR = "/apex"; + private static final String RECURSIVELY_LIST_APEX_DIR_COMMAND = + "shell su 0 find /apex | grep aconfig_flags"; + private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb"; + + + /** + * Returns the list of all on-device aconfig proto paths from host side. + */ + public static List<String> parsedFlagsProtoPaths(AdbCommandExecutor adbCommandExecutor) { + ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS)); + + String adbCommandOutput = adbCommandExecutor.executeAdbCommand( + RECURSIVELY_LIST_APEX_DIR_COMMAND); + + if (adbCommandOutput == null || adbCommandOutput.isEmpty()) { + paths.addAll(Arrays.asList(MAINLINE_PATHS)); + return paths; + } + + Set<String> allFiles = new HashSet<>(Arrays.asList(adbCommandOutput.split("\n"))); + + Set<String> subdirs = allFiles.stream().map(file -> { + String[] filePaths = file.split("/"); + // The first element is "", the second element is "apex". + return filePaths.length > 2 ? filePaths[2] : ""; + }).collect(Collectors.toSet()); + + for (String prefix : subdirs) { + // For each mainline modules, there are two directories, one <modulepackage>/, + // and one <modulepackage>@<versioncode>/. Just read the former. + if (prefix.contains("@")) { + continue; + } + + String protoPath = APEX_DIR + "/" + prefix + APEX_ACONFIG_PATH_SUFFIX; + if (allFiles.contains(protoPath)) { + paths.add(protoPath); + } + } + return paths; + } +} diff --git a/tools/aconfig/aconfig_device_paths/src/lib.rs b/tools/aconfig/aconfig_device_paths/src/lib.rs index 9ab9cea267..8871b4f8ac 100644 --- a/tools/aconfig/aconfig_device_paths/src/lib.rs +++ b/tools/aconfig/aconfig_device_paths/src/lib.rs @@ -62,13 +62,12 @@ mod tests { #[test] fn test_read_partition_paths() { - assert_eq!(read_partition_paths().len(), 4); + assert_eq!(read_partition_paths().len(), 3); assert_eq!( read_partition_paths(), vec![ PathBuf::from("/system/etc/aconfig_flags.pb"), - PathBuf::from("/system_ext/etc/aconfig_flags.pb"), PathBuf::from("/product/etc/aconfig_flags.pb"), PathBuf::from("/vendor/etc/aconfig_flags.pb") ] diff --git a/tools/aconfig/aconfig_device_paths/test/Android.bp b/tools/aconfig/aconfig_device_paths/test/Android.bp new file mode 100644 index 0000000000..37f561ff81 --- /dev/null +++ b/tools/aconfig/aconfig_device_paths/test/Android.bp @@ -0,0 +1,35 @@ +// Copyright (C) 2024 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package { + default_team: "trendy_team_android_core_experiments", + default_applicable_licenses: ["Android-Apache-2.0"], +} + +android_test { + name: "aconfig_device_paths_java_test", + srcs: [ + "src/**/*.java", + ], + static_libs: [ + "androidx.test.runner", + "junit", + "aconfig_device_paths_java_util", + ], + test_suites: [ + "general-tests", + ], + platform_apis: true, + certificate: "platform", +} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidManifest.xml b/tools/aconfig/aconfig_device_paths/test/AndroidManifest.xml index 78bfb37dc9..5e01879157 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidManifest.xml +++ b/tools/aconfig/aconfig_device_paths/test/AndroidManifest.xml @@ -15,12 +15,13 @@ ~ limitations under the License. --> -<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="android.aconfig_storage.test"> +<manifest xmlns:android="http://schemas.android.com/apk/res/android" + package="android.aconfig.storage.test"> <application> <uses-library android:name="android.test.runner" /> </application> <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner" - android:targetPackage="android.aconfig_storage.test" /> + android:targetPackage="android.aconfig.storage.test" /> </manifest> diff --git a/tools/aconfig/aconfig_device_paths/test/src/DeviceProtosTestUtilTest.java b/tools/aconfig/aconfig_device_paths/test/src/DeviceProtosTestUtilTest.java new file mode 100644 index 0000000000..8dd0fd0065 --- /dev/null +++ b/tools/aconfig/aconfig_device_paths/test/src/DeviceProtosTestUtilTest.java @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.aconfig.test; + +import static org.junit.Assert.assertTrue; + +import android.aconfig.DeviceProtosTestUtil; +import android.aconfig.nano.Aconfig.parsed_flag; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.util.List; +import java.util.Set; + +@RunWith(JUnit4.class) +public class DeviceProtosTestUtilTest { + + private static final Set<String> PLATFORM_CONTAINERS = Set.of("system", "vendor", "product"); + + @Test + public void testDeviceProtos_loadAndParseFlagProtos() throws Exception { + List<parsed_flag> flags = DeviceProtosTestUtil.loadAndParseFlagProtos(); + int platformFlags = 0; + int mainlineFlags = 0; + for (parsed_flag pf : flags) { + if (PLATFORM_CONTAINERS.contains(pf.container)) { + platformFlags++; + } else { + mainlineFlags++; + } + } + + assertTrue(platformFlags > 3); + assertTrue(mainlineFlags > 3); + } +} diff --git a/tools/aconfig/aconfig_flags/Android.bp b/tools/aconfig/aconfig_flags/Android.bp new file mode 100644 index 0000000000..1b4e148ce3 --- /dev/null +++ b/tools/aconfig/aconfig_flags/Android.bp @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +rust_library { + name: "libaconfig_flags", + crate_name: "aconfig_flags", + srcs: [ + "src/lib.rs", + ], + rustlibs: [ + "libaconfig_flags_rust", + ], + host_supported: true, + apex_available: [ + "//apex_available:platform", + "com.android.configinfrastructure", + ], + min_sdk_version: "34", +} + +aconfig_declarations { + name: "aconfig_flags", + package: "com.android.aconfig.flags", + container: "system", + srcs: ["flags.aconfig"], +} + +rust_aconfig_library { + name: "libaconfig_flags_rust", + crate_name: "aconfig_flags_rust", + aconfig_declarations: "aconfig_flags", + host_supported: true, + apex_available: [ + "//apex_available:platform", + "com.android.configinfrastructure", + ], + min_sdk_version: "34", +} + +cc_aconfig_library { + name: "libaconfig_flags_cc", + aconfig_declarations: "aconfig_flags", +} + +java_aconfig_library { + name: "aconfig_flags_java", + aconfig_declarations: "aconfig_flags", +} diff --git a/tools/aconfig/aconfig_flags/Cargo.toml b/tools/aconfig/aconfig_flags/Cargo.toml new file mode 100644 index 0000000000..6eb9f14058 --- /dev/null +++ b/tools/aconfig/aconfig_flags/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "aconfig_flags" +version = "0.1.0" +edition = "2021" + +[features] +default = ["cargo"] +cargo = [] + +[dependencies]
\ No newline at end of file diff --git a/tools/aconfig/aconfig_flags/flags.aconfig b/tools/aconfig/aconfig_flags/flags.aconfig new file mode 100644 index 0000000000..2488b5c8ab --- /dev/null +++ b/tools/aconfig/aconfig_flags/flags.aconfig @@ -0,0 +1,43 @@ +package: "com.android.aconfig.flags" +container: "system" + +flag { + name: "enable_only_new_storage" + namespace: "core_experiments_team_internal" + bug: "312235596" + description: "When enabled, aconfig flags are read from the new aconfig storage only." +} + +flag { + name: "enable_aconfigd_from_mainline" + namespace: "core_experiments_team_internal" + bug: "369808805" + description: "When enabled, launch aconfigd from config infra module." +} + +flag { + name: "tools_read_from_new_storage" + namespace: "core_experiments_team_internal" + bug: "370499640" + description: "When enabled, tools read directly from the new aconfig storage." +} + +flag { + name: "tools_read_from_new_storage_bugfix" + namespace: "core_experiments_team_internal" + bug: "370499640" + description: "When enabled, tools read directly from the new aconfig storage." + metadata { + purpose: PURPOSE_BUGFIX + } +} + +flag { + name: "invoke_updatable_aflags" + namespace: "core_experiments_team_internal" + bug: "385383899" + description: "When enabled, the system aflags binary invokes the updatable aflags." + metadata { + purpose: PURPOSE_BUGFIX + } +} diff --git a/tools/aconfig/aconfig_flags/src/lib.rs b/tools/aconfig/aconfig_flags/src/lib.rs new file mode 100644 index 0000000000..dc507aef6f --- /dev/null +++ b/tools/aconfig/aconfig_flags/src/lib.rs @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//! `aconfig_flags` is a crate for reading aconfig flags from Rust +// When building with the Android tool-chain +// +// - the flag functions will read from aconfig_flags_inner +// - the feature "cargo" will be disabled +// +// When building with cargo +// +// - the flag functions will all return some trivial value, like true +// - the feature "cargo" will be enabled +// +// This module hides these differences from the rest of aconfig. + +/// Module used when building with the Android tool-chain +#[cfg(not(feature = "cargo"))] +pub mod auto_generated { + /// Returns the value for the enable_only_new_storage flag. + pub fn enable_only_new_storage() -> bool { + aconfig_flags_rust::enable_only_new_storage() + } + + /// Returns the value for the enable_aconfigd_from_mainline flag. + pub fn enable_aconfigd_from_mainline() -> bool { + aconfig_flags_rust::enable_only_new_storage() + } + + /// Returns the value for the invoke_updatable_aflags flag. + pub fn invoke_updatable_aflags() -> bool { + aconfig_flags_rust::invoke_updatable_aflags() + } +} + +/// Module used when building with cargo +#[cfg(feature = "cargo")] +pub mod auto_generated { + /// Returns a placeholder value for the enable_only_new_storage flag. 
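// Illustrative sketch of a call site in another crate, not part of the patch.
// It assumes only what this change itself declares: a crate named `aconfig_flags`
// exposing `auto_generated::enable_only_new_storage()`. The function name here is
// hypothetical. The same call compiles under both cfg variants; the Android build
// reads the real flag value, while the cargo build gets the placeholder `true`.
fn storage_backend_label() -> &'static str {
    if aconfig_flags::auto_generated::enable_only_new_storage() {
        "new aconfig storage"
    } else {
        "legacy storage"
    }
}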
+ pub fn enable_only_new_storage() -> bool { + // Used only to enable typechecking and testing with cargo + true + } + + /// Returns a placeholder value for the enable_aconfigd_from_mainline flag. + pub fn enable_aconfigd_from_mainline() -> bool { + // Used only to enable typechecking and testing with cargo + true + } + + /// Returns the value for the invoke_updatable_aflags flag. + pub fn invoke_updatable_aflags() -> bool { + // Used only to enable typechecking and testing with cargo + true + } +} diff --git a/tools/aconfig/aconfig_protos/Android.bp b/tools/aconfig/aconfig_protos/Android.bp index d24199443c..080688ebbc 100644 --- a/tools/aconfig/aconfig_protos/Android.bp +++ b/tools/aconfig/aconfig_protos/Android.bp @@ -58,6 +58,11 @@ rust_protobuf { crate_name: "aconfig_rust_proto", source_stem: "aconfig_rust_proto", host_supported: true, + apex_available: [ + "//apex_available:platform", + "com.android.configinfrastructure", + ], + min_sdk_version: "34", } rust_defaults { @@ -81,6 +86,11 @@ rust_library { crate_name: "aconfig_protos", host_supported: true, defaults: ["aconfig_protos.defaults"], + apex_available: [ + "//apex_available:platform", + "com.android.configinfrastructure", + ], + min_sdk_version: "34", } rust_test_host { @@ -88,3 +98,13 @@ rust_test_host { test_suites: ["general-tests"], defaults: ["aconfig_protos.defaults"], } + +// Internal protos + +python_library_host { + name: "aconfig_internal_proto_python", + srcs: ["protos/aconfig_internal.proto"], + proto: { + canonical_path_from_root: false, + }, +} diff --git a/tools/aconfig/aconfig_protos/protos/aconfig_internal.proto b/tools/aconfig/aconfig_protos/protos/aconfig_internal.proto new file mode 100644 index 0000000000..7930f568fc --- /dev/null +++ b/tools/aconfig/aconfig_protos/protos/aconfig_internal.proto @@ -0,0 +1,42 @@ +// Copyright (C) 2023 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License + +// This is the schema definition for protos intended for internal aconfig +// use ONLY. There are no guarantees regarding backwards compatibility. +// Do not put protos here intended for storage or communication. + +syntax = "proto2"; + +package android.aconfig_internal; + + +// This protobuf defines messages used to store data about flags used to guard +// APIs which are finalized for a given SDK. +message finalized_flag { + // Name of the flag (required). Does not include package name. + // Must match flag name in the aconfig declaration header. + optional string name = 1; + + // Package the flag belongs to (required). Must match package in the aconfig declaration header. + optional string package = 2; + + // SDK level in which the flag was finalized. + optional int32 min_sdk = 3; + + // TODO - b/378936061: Add support for minor SDK version & SDK extension. 
+}; + +message finalized_flags { + repeated finalized_flag finalized_flag = 1; +} diff --git a/tools/aconfig/aconfig_protos/src/lib.rs b/tools/aconfig/aconfig_protos/src/lib.rs index 81bbd7e130..64b82d6796 100644 --- a/tools/aconfig/aconfig_protos/src/lib.rs +++ b/tools/aconfig/aconfig_protos/src/lib.rs @@ -1073,4 +1073,63 @@ parsed_flag { // two identical flags with dedup enabled assert_eq!(first, parsed_flags::merge(vec![first.clone(), first.clone()], true).unwrap()); } + + #[test] + fn test_is_valid_name_ident() { + assert!(is_valid_name_ident("foo")); + assert!(is_valid_name_ident("foo_bar_123")); + assert!(is_valid_name_ident("foo_")); + + assert!(!is_valid_name_ident("")); + assert!(!is_valid_name_ident("123_foo")); + assert!(!is_valid_name_ident("foo-bar")); + assert!(!is_valid_name_ident("foo-b\u{00e5}r")); + assert!(!is_valid_name_ident("foo__bar")); + assert!(!is_valid_name_ident("_foo")); + } + + #[test] + fn test_is_valid_package_ident() { + assert!(is_valid_package_ident("foo.bar")); + assert!(is_valid_package_ident("foo.bar_baz")); + assert!(is_valid_package_ident("foo.bar.a123")); + + assert!(!is_valid_package_ident("foo_bar_123")); + assert!(!is_valid_package_ident("foo")); + assert!(!is_valid_package_ident("foo._bar")); + assert!(!is_valid_package_ident("")); + assert!(!is_valid_package_ident("123_foo")); + assert!(!is_valid_package_ident("foo-bar")); + assert!(!is_valid_package_ident("foo-b\u{00e5}r")); + assert!(!is_valid_package_ident("foo.bar.123")); + assert!(!is_valid_package_ident(".foo.bar")); + assert!(!is_valid_package_ident("foo.bar.")); + assert!(!is_valid_package_ident(".")); + assert!(!is_valid_package_ident("..")); + assert!(!is_valid_package_ident("foo..bar")); + assert!(!is_valid_package_ident("foo.__bar")); + } + + #[test] + fn test_is_valid_container_ident() { + assert!(is_valid_container_ident("foo.bar")); + assert!(is_valid_container_ident("foo.bar_baz")); + assert!(is_valid_container_ident("foo.bar.a123")); + assert!(is_valid_container_ident("foo")); + assert!(is_valid_container_ident("foo_bar_123")); + + assert!(!is_valid_container_ident("")); + assert!(!is_valid_container_ident("foo._bar")); + assert!(!is_valid_container_ident("_foo")); + assert!(!is_valid_container_ident("123_foo")); + assert!(!is_valid_container_ident("foo-bar")); + assert!(!is_valid_container_ident("foo-b\u{00e5}r")); + assert!(!is_valid_container_ident("foo.bar.123")); + assert!(!is_valid_container_ident(".foo.bar")); + assert!(!is_valid_container_ident("foo.bar.")); + assert!(!is_valid_container_ident(".")); + assert!(!is_valid_container_ident("..")); + assert!(!is_valid_container_ident("foo..bar")); + assert!(!is_valid_container_ident("foo.__bar")); + } } diff --git a/tools/aconfig/aconfig_storage_file/Android.bp b/tools/aconfig/aconfig_storage_file/Android.bp index 40b4464167..e875c7be6a 100644 --- a/tools/aconfig/aconfig_storage_file/Android.bp +++ b/tools/aconfig/aconfig_storage_file/Android.bp @@ -14,6 +14,7 @@ rust_defaults { "libclap", "libcxx", "libaconfig_storage_protos", + "libserde", ], } @@ -36,7 +37,10 @@ rust_binary_host { name: "aconfig-storage", defaults: ["aconfig_storage_file.defaults"], srcs: ["src/main.rs"], - rustlibs: ["libaconfig_storage_file"], + rustlibs: [ + "libaconfig_storage_file", + "libserde_json", + ], } rust_test_host { diff --git a/tools/aconfig/aconfig_storage_file/Cargo.toml b/tools/aconfig/aconfig_storage_file/Cargo.toml index 192dfad40a..a40557803f 100644 --- a/tools/aconfig/aconfig_storage_file/Cargo.toml +++ 
b/tools/aconfig/aconfig_storage_file/Cargo.toml @@ -14,6 +14,8 @@ tempfile = "3.9.0" thiserror = "1.0.56" clap = { version = "4.1.8", features = ["derive"] } cxx = "1.0" +serde = { version = "1.0.152", features = ["derive"] } +serde_json = "1.0.93" [[bin]] name = "aconfig-storage" diff --git a/tools/aconfig/aconfig_storage_file/src/flag_info.rs b/tools/aconfig/aconfig_storage_file/src/flag_info.rs index beac38d156..cf16834be2 100644 --- a/tools/aconfig/aconfig_storage_file/src/flag_info.rs +++ b/tools/aconfig/aconfig_storage_file/src/flag_info.rs @@ -20,10 +20,11 @@ use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes}; use crate::{AconfigStorageError, StorageFileType}; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::fmt; /// Flag info header struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagInfoHeader { pub version: u32, pub container: String, @@ -89,7 +90,7 @@ impl FlagInfoHeader { } /// bit field for flag info -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum FlagInfoBit { HasServerOverride = 1 << 0, IsReadWrite = 1 << 1, @@ -97,7 +98,7 @@ pub enum FlagInfoBit { } /// Flag info node struct -#[derive(PartialEq, Clone)] +#[derive(PartialEq, Clone, Serialize, Deserialize)] pub struct FlagInfoNode { pub attributes: u8, } @@ -138,7 +139,7 @@ impl FlagInfoNode { } /// Flag info list struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagInfoList { pub header: FlagInfoHeader, pub nodes: Vec<FlagInfoNode>, @@ -193,12 +194,15 @@ impl FlagInfoList { #[cfg(test)] mod tests { use super::*; - use crate::test_utils::create_test_flag_info_list; + use crate::{ + test_utils::create_test_flag_info_list, DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION, + }; - #[test] // this test point locks down the value list serialization - fn test_serialization() { - let flag_info_list = create_test_flag_info_list(); + // TODO: b/376108268 - Use parameterized tests. 
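// One possible shape for the parameterized tests the TODO above asks for: an
// illustrative sketch only, not part of the patch. It reuses the test helper and
// the version constants imported a few lines up; the function names are hypothetical.
fn roundtrip_flag_info_for_version(version: u32) {
    let flag_info_list = create_test_flag_info_list(version);
    let bytes = flag_info_list.into_bytes();
    let reinterpreted = FlagInfoList::from_bytes(&bytes).unwrap();
    assert_eq!(flag_info_list, reinterpreted);
    assert_eq!(bytes.len() as u32, flag_info_list.header.file_size);
}

#[test]
fn test_serialization_roundtrip_all_versions() {
    for version in [DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION] {
        roundtrip_flag_info_for_version(version);
    }
}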
+ #[test] + fn test_serialization_default() { + let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION); let header: &FlagInfoHeader = &flag_info_list.header; let reinterpreted_header = FlagInfoHeader::from_bytes(&header.into_bytes()); @@ -219,20 +223,42 @@ mod tests { } #[test] + fn test_serialization_max() { + let flag_info_list = create_test_flag_info_list(MAX_SUPPORTED_FILE_VERSION); + + let header: &FlagInfoHeader = &flag_info_list.header; + let reinterpreted_header = FlagInfoHeader::from_bytes(&header.into_bytes()); + assert!(reinterpreted_header.is_ok()); + assert_eq!(header, &reinterpreted_header.unwrap()); + + let nodes: &Vec<FlagInfoNode> = &flag_info_list.nodes; + for node in nodes.iter() { + let reinterpreted_node = FlagInfoNode::from_bytes(&node.into_bytes()).unwrap(); + assert_eq!(node, &reinterpreted_node); + } + + let flag_info_bytes = flag_info_list.into_bytes(); + let reinterpreted_info_list = FlagInfoList::from_bytes(&flag_info_bytes); + assert!(reinterpreted_info_list.is_ok()); + assert_eq!(&flag_info_list, &reinterpreted_info_list.unwrap()); + assert_eq!(flag_info_bytes.len() as u32, header.file_size); + } + // this test point locks down that version number should be at the top of serialized // bytes + #[test] fn test_version_number() { - let flag_info_list = create_test_flag_info_list(); + let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION); let bytes = &flag_info_list.into_bytes(); let mut head = 0; - let version = read_u32_from_bytes(bytes, &mut head).unwrap(); - assert_eq!(version, 1); + let version_from_file = read_u32_from_bytes(bytes, &mut head).unwrap(); + assert_eq!(version_from_file, DEFAULT_FILE_VERSION); } - #[test] // this test point locks down file type check + #[test] fn test_file_type_check() { - let mut flag_info_list = create_test_flag_info_list(); + let mut flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION); flag_info_list.header.file_type = 123u8; let error = FlagInfoList::from_bytes(&flag_info_list.into_bytes()).unwrap_err(); assert_eq!( diff --git a/tools/aconfig/aconfig_storage_file/src/flag_table.rs b/tools/aconfig/aconfig_storage_file/src/flag_table.rs index 64b90eabfa..6fbee023ce 100644 --- a/tools/aconfig/aconfig_storage_file/src/flag_table.rs +++ b/tools/aconfig/aconfig_storage_file/src/flag_table.rs @@ -23,10 +23,11 @@ use crate::{ }; use crate::{AconfigStorageError, StorageFileType, StoredFlagType}; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::fmt; /// Flag table header struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagTableHeader { pub version: u32, pub container: String, @@ -95,7 +96,7 @@ impl FlagTableHeader { } /// Flag table node struct -#[derive(PartialEq, Clone)] +#[derive(PartialEq, Clone, Serialize, Deserialize)] pub struct FlagTableNode { pub package_id: u32, pub flag_name: String, @@ -150,11 +151,11 @@ impl FlagTableNode { /// Calculate node bucket index pub fn find_bucket_index(package_id: u32, flag_name: &str, num_buckets: u32) -> u32 { let full_flag_name = package_id.to_string() + "/" + flag_name; - get_bucket_index(&full_flag_name, num_buckets) + get_bucket_index(full_flag_name.as_bytes(), num_buckets) } } -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagTable { pub header: FlagTableHeader, pub buckets: Vec<Option<u32>>, @@ -219,12 +220,15 @@ impl FlagTable { #[cfg(test)] mod tests { use super::*; - use crate::test_utils::create_test_flag_table; + use crate::{ + 
test_utils::create_test_flag_table, DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION, + }; - #[test] // this test point locks down the table serialization - fn test_serialization() { - let flag_table = create_test_flag_table(); + // TODO: b/376108268 - Use parameterized tests. + #[test] + fn test_serialization_default() { + let flag_table = create_test_flag_table(DEFAULT_FILE_VERSION); let header: &FlagTableHeader = &flag_table.header; let reinterpreted_header = FlagTableHeader::from_bytes(&header.into_bytes()); @@ -245,20 +249,42 @@ mod tests { } #[test] + fn test_serialization_max() { + let flag_table = create_test_flag_table(MAX_SUPPORTED_FILE_VERSION); + + let header: &FlagTableHeader = &flag_table.header; + let reinterpreted_header = FlagTableHeader::from_bytes(&header.into_bytes()); + assert!(reinterpreted_header.is_ok()); + assert_eq!(header, &reinterpreted_header.unwrap()); + + let nodes: &Vec<FlagTableNode> = &flag_table.nodes; + for node in nodes.iter() { + let reinterpreted_node = FlagTableNode::from_bytes(&node.into_bytes()).unwrap(); + assert_eq!(node, &reinterpreted_node); + } + + let flag_table_bytes = flag_table.into_bytes(); + let reinterpreted_table = FlagTable::from_bytes(&flag_table_bytes); + assert!(reinterpreted_table.is_ok()); + assert_eq!(&flag_table, &reinterpreted_table.unwrap()); + assert_eq!(flag_table_bytes.len() as u32, header.file_size); + } + // this test point locks down that version number should be at the top of serialized // bytes + #[test] fn test_version_number() { - let flag_table = create_test_flag_table(); + let flag_table = create_test_flag_table(DEFAULT_FILE_VERSION); let bytes = &flag_table.into_bytes(); let mut head = 0; - let version = read_u32_from_bytes(bytes, &mut head).unwrap(); - assert_eq!(version, 1); + let version_from_file = read_u32_from_bytes(bytes, &mut head).unwrap(); + assert_eq!(version_from_file, DEFAULT_FILE_VERSION); } - #[test] // this test point locks down file type check + #[test] fn test_file_type_check() { - let mut flag_table = create_test_flag_table(); + let mut flag_table = create_test_flag_table(DEFAULT_FILE_VERSION); flag_table.header.file_type = 123u8; let error = FlagTable::from_bytes(&flag_table.into_bytes()).unwrap_err(); assert_eq!( diff --git a/tools/aconfig/aconfig_storage_file/src/flag_value.rs b/tools/aconfig/aconfig_storage_file/src/flag_value.rs index 506924b339..9a14bec7de 100644 --- a/tools/aconfig/aconfig_storage_file/src/flag_value.rs +++ b/tools/aconfig/aconfig_storage_file/src/flag_value.rs @@ -20,10 +20,11 @@ use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes}; use crate::{AconfigStorageError, StorageFileType}; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::fmt; /// Flag value header struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagValueHeader { pub version: u32, pub container: String, @@ -89,7 +90,7 @@ impl FlagValueHeader { } /// Flag value list struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagValueList { pub header: FlagValueHeader, pub booleans: Vec<bool>, @@ -131,12 +132,32 @@ impl FlagValueList { #[cfg(test)] mod tests { use super::*; - use crate::test_utils::create_test_flag_value_list; + use crate::{ + test_utils::create_test_flag_value_list, DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION, + }; #[test] // this test point locks down the value list serialization - fn test_serialization() { - let flag_value_list = create_test_flag_value_list(); + // TODO: b/376108268 - 
Use parameterized tests. + fn test_serialization_default() { + let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION); + + let header: &FlagValueHeader = &flag_value_list.header; + let reinterpreted_header = FlagValueHeader::from_bytes(&header.into_bytes()); + assert!(reinterpreted_header.is_ok()); + assert_eq!(header, &reinterpreted_header.unwrap()); + + let flag_value_bytes = flag_value_list.into_bytes(); + let reinterpreted_value_list = FlagValueList::from_bytes(&flag_value_bytes); + assert!(reinterpreted_value_list.is_ok()); + assert_eq!(&flag_value_list, &reinterpreted_value_list.unwrap()); + assert_eq!(flag_value_bytes.len() as u32, header.file_size); + } + + #[test] + // this test point locks down the value list serialization + fn test_serialization_max() { + let flag_value_list = create_test_flag_value_list(MAX_SUPPORTED_FILE_VERSION); let header: &FlagValueHeader = &flag_value_list.header; let reinterpreted_header = FlagValueHeader::from_bytes(&header.into_bytes()); @@ -154,17 +175,17 @@ mod tests { // this test point locks down that version number should be at the top of serialized // bytes fn test_version_number() { - let flag_value_list = create_test_flag_value_list(); + let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION); let bytes = &flag_value_list.into_bytes(); let mut head = 0; - let version = read_u32_from_bytes(bytes, &mut head).unwrap(); - assert_eq!(version, 1); + let version_from_file = read_u32_from_bytes(bytes, &mut head).unwrap(); + assert_eq!(version_from_file, DEFAULT_FILE_VERSION); } #[test] // this test point locks down file type check fn test_file_type_check() { - let mut flag_value_list = create_test_flag_value_list(); + let mut flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION); flag_value_list.header.file_type = 123u8; let error = FlagValueList::from_bytes(&flag_value_list.into_bytes()).unwrap_err(); assert_eq!( diff --git a/tools/aconfig/aconfig_storage_file/src/lib.rs b/tools/aconfig/aconfig_storage_file/src/lib.rs index 26e9c1a3be..e99132092d 100644 --- a/tools/aconfig/aconfig_storage_file/src/lib.rs +++ b/tools/aconfig/aconfig_storage_file/src/lib.rs @@ -37,26 +37,33 @@ pub mod flag_table; pub mod flag_value; pub mod package_table; pub mod protos; +pub mod sip_hasher13; pub mod test_utils; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::cmp::Ordering; -use std::collections::hash_map::DefaultHasher; use std::fs::File; -use std::hash::{Hash, Hasher}; +use std::hash::Hasher; use std::io::Read; pub use crate::flag_info::{FlagInfoBit, FlagInfoHeader, FlagInfoList, FlagInfoNode}; pub use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode}; pub use crate::flag_value::{FlagValueHeader, FlagValueList}; pub use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode}; +pub use crate::sip_hasher13::SipHasher13; use crate::AconfigStorageError::{ BytesParseFail, HashTableSizeLimit, InvalidFlagValueType, InvalidStoredFlagType, }; -/// Storage file version -pub const FILE_VERSION: u32 = 1; +/// The max storage file version from which we can safely read/write. May be +/// experimental. +pub const MAX_SUPPORTED_FILE_VERSION: u32 = 2; + +/// The newest fully-released version. Unless otherwise specified, this is the +/// version we will write. 
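// Illustrative sketch (not part of the patch; the function name is hypothetical)
// of how the two version constants are meant to work together: a reader peeks at
// the leading version word with read_u32_from_start_of_bytes and rejects anything
// newer than MAX_SUPPORTED_FILE_VERSION before parsing the rest of the file.
pub fn is_file_version_supported(bytes: &[u8]) -> Result<bool, AconfigStorageError> {
    let version = read_u32_from_start_of_bytes(bytes)?;
    Ok(version <= MAX_SUPPORTED_FILE_VERSION)
}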
+pub const DEFAULT_FILE_VERSION: u32 = 1; /// Good hash table prime number pub(crate) const HASH_PRIMES: [u32; 29] = [ @@ -106,7 +113,7 @@ impl TryFrom<u8> for StorageFileType { /// Flag type enum as stored by storage file /// ONLY APPEND, NEVER REMOVE FOR BACKWARD COMPATIBILITY. THE MAX IS U16. -#[derive(Clone, Copy, Debug, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum StoredFlagType { ReadWriteBoolean = 0, ReadOnlyBoolean = 1, @@ -211,10 +218,12 @@ pub fn get_table_size(entries: u32) -> Result<u32, AconfigStorageError> { } /// Get the corresponding bucket index given the key and number of buckets -pub(crate) fn get_bucket_index<T: Hash>(val: &T, num_buckets: u32) -> u32 { - let mut s = DefaultHasher::new(); - val.hash(&mut s); - (s.finish() % num_buckets as u64) as u32 +pub(crate) fn get_bucket_index(val: &[u8], num_buckets: u32) -> u32 { + let mut s = SipHasher13::new(); + s.write(val); + s.write_u8(0xff); + let ret = (s.finish() % num_buckets as u64) as u32; + ret } /// Read and parse bytes as u8 @@ -240,6 +249,11 @@ pub(crate) fn read_u16_from_bytes( Ok(val) } +/// Read and parse the first 4 bytes of buf as u32. +pub fn read_u32_from_start_of_bytes(buf: &[u8]) -> Result<u32, AconfigStorageError> { + read_u32_from_bytes(buf, &mut 0) +} + /// Read and parse bytes as u32 pub fn read_u32_from_bytes(buf: &[u8], head: &mut usize) -> Result<u32, AconfigStorageError> { let val = @@ -250,6 +264,16 @@ pub fn read_u32_from_bytes(buf: &[u8], head: &mut usize) -> Result<u32, AconfigS Ok(val) } +// Read and parse bytes as u64 +pub fn read_u64_from_bytes(buf: &[u8], head: &mut usize) -> Result<u64, AconfigStorageError> { + let val = + u64::from_le_bytes(buf[*head..*head + 8].try_into().map_err(|errmsg| { + BytesParseFail(anyhow!("fail to parse u64 from bytes: {}", errmsg)) + })?); + *head += 8; + Ok(val) +} + /// Read and parse bytes as string pub(crate) fn read_str_from_bytes( buf: &[u8], @@ -512,10 +536,15 @@ mod tests { // this test point locks down the flag list api fn test_list_flag() { let package_table = - write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap(); - let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap(); - let flag_value_list = - write_bytes_to_temp_file(&create_test_flag_value_list().into_bytes()).unwrap(); + write_bytes_to_temp_file(&create_test_package_table(DEFAULT_FILE_VERSION).into_bytes()) + .unwrap(); + let flag_table = + write_bytes_to_temp_file(&create_test_flag_table(DEFAULT_FILE_VERSION).into_bytes()) + .unwrap(); + let flag_value_list = write_bytes_to_temp_file( + &create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes(), + ) + .unwrap(); let package_table_path = package_table.path().display().to_string(); let flag_table_path = flag_table.path().display().to_string(); @@ -580,12 +609,19 @@ mod tests { // this test point locks down the flag list with info api fn test_list_flag_with_info() { let package_table = - write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap(); - let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap(); - let flag_value_list = - write_bytes_to_temp_file(&create_test_flag_value_list().into_bytes()).unwrap(); - let flag_info_list = - write_bytes_to_temp_file(&create_test_flag_info_list().into_bytes()).unwrap(); + write_bytes_to_temp_file(&create_test_package_table(DEFAULT_FILE_VERSION).into_bytes()) + .unwrap(); + let flag_table = + 
write_bytes_to_temp_file(&create_test_flag_table(DEFAULT_FILE_VERSION).into_bytes()) + .unwrap(); + let flag_value_list = write_bytes_to_temp_file( + &create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes(), + ) + .unwrap(); + let flag_info_list = write_bytes_to_temp_file( + &create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes(), + ) + .unwrap(); let package_table_path = package_table.path().display().to_string(); let flag_table_path = flag_table.path().display().to_string(); diff --git a/tools/aconfig/aconfig_storage_file/src/main.rs b/tools/aconfig/aconfig_storage_file/src/main.rs index 8b9e38da02..a9cfd19066 100644 --- a/tools/aconfig/aconfig_storage_file/src/main.rs +++ b/tools/aconfig/aconfig_storage_file/src/main.rs @@ -20,9 +20,29 @@ use aconfig_storage_file::{ list_flags, list_flags_with_info, read_file_to_bytes, AconfigStorageError, FlagInfoList, FlagTable, FlagValueList, PackageTable, StorageFileType, }; - use clap::{builder::ArgAction, Arg, Command}; +use serde::Serialize; +use serde_json; +use std::fmt; +use std::fs; +use std::fs::File; +use std::io::Write; +/** + * Usage Examples + * + * Print file: + * $ aconfig-storage print --file=path/to/flag.map --type=flag_map + * + * List flags: + * $ aconfig-storage list --flag-map=path/to/flag.map \ + * --flag-val=path/to/flag.val --package-map=path/to/package.map + * + * Write binary file for testing: + * $ aconfig-storage print --file=path/to/flag.map --type=flag_map --format=json > flag_map.json + * $ vim flag_map.json // Manually make updates + * $ aconfig-storage write-bytes --input-file=flag_map.json --output-file=path/to/flag.map --type=flag_map + */ fn cli() -> Command { Command::new("aconfig-storage") .subcommand_required(true) @@ -34,7 +54,8 @@ fn cli() -> Command { .long("type") .required(true) .value_parser(|s: &str| StorageFileType::try_from(s)), - ), + ) + .arg(Arg::new("format").long("format").required(false).action(ArgAction::Set)), ) .subcommand( Command::new("list") @@ -50,41 +71,75 @@ fn cli() -> Command { Arg::new("flag-info").long("flag-info").required(false).action(ArgAction::Set), ), ) + .subcommand( + Command::new("write-bytes") + // Where to write the output bytes. Suggest to use the StorageFileType names (e.g. flag.map). + .arg( + Arg::new("output-file") + .long("output-file") + .required(true) + .action(ArgAction::Set), + ) + // Input file should be json. 
+ .arg( + Arg::new("input-file").long("input-file").required(true).action(ArgAction::Set), + ) + .arg( + Arg::new("type") + .long("type") + .required(true) + .value_parser(|s: &str| StorageFileType::try_from(s)), + ), + ) } fn print_storage_file( file_path: &str, file_type: &StorageFileType, + as_json: bool, ) -> Result<(), AconfigStorageError> { let bytes = read_file_to_bytes(file_path)?; match file_type { StorageFileType::PackageMap => { let package_table = PackageTable::from_bytes(&bytes)?; - println!("{:?}", package_table); + println!("{}", to_print_format(package_table, as_json)); } StorageFileType::FlagMap => { let flag_table = FlagTable::from_bytes(&bytes)?; - println!("{:?}", flag_table); + println!("{}", to_print_format(flag_table, as_json)); } StorageFileType::FlagVal => { let flag_value = FlagValueList::from_bytes(&bytes)?; - println!("{:?}", flag_value); + println!("{}", to_print_format(flag_value, as_json)); } StorageFileType::FlagInfo => { let flag_info = FlagInfoList::from_bytes(&bytes)?; - println!("{:?}", flag_info); + println!("{}", to_print_format(flag_info, as_json)); } } Ok(()) } +fn to_print_format<T>(file_contents: T, as_json: bool) -> String +where + T: Serialize + fmt::Debug, +{ + if as_json { + serde_json::to_string(&file_contents).unwrap() + } else { + format!("{:?}", file_contents) + } +} + fn main() -> Result<(), AconfigStorageError> { let matches = cli().get_matches(); match matches.subcommand() { Some(("print", sub_matches)) => { let file_path = sub_matches.get_one::<String>("file").unwrap(); let file_type = sub_matches.get_one::<StorageFileType>("type").unwrap(); - print_storage_file(file_path, file_type)? + let format = sub_matches.get_one::<String>("format"); + let as_json: bool = format == Some(&"json".to_string()); + print_storage_file(file_path, file_type, as_json)? } Some(("list", sub_matches)) => { let package_map = sub_matches.get_one::<String>("package-map").unwrap(); @@ -96,10 +151,10 @@ fn main() -> Result<(), AconfigStorageError> { let flags = list_flags_with_info(package_map, flag_map, flag_val, info_file)?; for flag in flags.iter() { println!( - "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}", - flag.package_name, flag.flag_name, flag.flag_value, flag.value_type, - flag.is_readwrite, flag.has_server_override, flag.has_local_override, - ); + "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}", + flag.package_name, flag.flag_name, flag.flag_value, flag.value_type, + flag.is_readwrite, flag.has_server_override, flag.has_local_override, + ); } } None => { @@ -113,6 +168,40 @@ fn main() -> Result<(), AconfigStorageError> { } } } + // Converts JSON of the file into raw bytes (as is used on-device). + // Intended to generate/easily update these files for testing. 
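// Illustrative sketch (not part of the patch; the function name is hypothetical)
// of the conversion the write-bytes arm below performs for a flag value file:
// JSON produced by `aconfig-storage print --format=json` is deserialized through
// the serde derives added in this change, then re-encoded into the on-device layout.
fn flag_val_json_to_bytes(input_json: &str) -> Vec<u8> {
    let list: FlagValueList =
        serde_json::from_str(input_json).expect("malformed flag.val JSON");
    list.into_bytes()
}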
+ Some(("write-bytes", sub_matches)) => { + let input_file_path = sub_matches.get_one::<String>("input-file").unwrap(); + let input_json = fs::read_to_string(input_file_path).unwrap(); + + let file_type = sub_matches.get_one::<StorageFileType>("type").unwrap(); + let output_bytes: Vec<u8>; + match file_type { + StorageFileType::FlagVal => { + let list: FlagValueList = serde_json::from_str(&input_json).unwrap(); + output_bytes = list.into_bytes(); + } + StorageFileType::FlagInfo => { + let list: FlagInfoList = serde_json::from_str(&input_json).unwrap(); + output_bytes = list.into_bytes(); + } + StorageFileType::FlagMap => { + let table: FlagTable = serde_json::from_str(&input_json).unwrap(); + output_bytes = table.into_bytes(); + } + StorageFileType::PackageMap => { + let table: PackageTable = serde_json::from_str(&input_json).unwrap(); + output_bytes = table.into_bytes(); + } + } + + let output_file_path = sub_matches.get_one::<String>("output-file").unwrap(); + let file = File::create(output_file_path); + if file.is_err() { + panic!("can't make file"); + } + let _ = file.unwrap().write_all(&output_bytes); + } _ => unreachable!(), } Ok(()) diff --git a/tools/aconfig/aconfig_storage_file/src/package_table.rs b/tools/aconfig/aconfig_storage_file/src/package_table.rs index b734972f33..21357c7e4a 100644 --- a/tools/aconfig/aconfig_storage_file/src/package_table.rs +++ b/tools/aconfig/aconfig_storage_file/src/package_table.rs @@ -17,13 +17,17 @@ //! package table module defines the package table file format and methods for serialization //! and deserialization -use crate::{get_bucket_index, read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes}; +use crate::{ + get_bucket_index, read_str_from_bytes, read_u32_from_bytes, read_u64_from_bytes, + read_u8_from_bytes, +}; use crate::{AconfigStorageError, StorageFileType}; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::fmt; /// Package table header struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct PackageTableHeader { pub version: u32, pub container: String, @@ -92,10 +96,11 @@ impl PackageTableHeader { } /// Package table node struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct PackageTableNode { pub package_name: String, pub package_id: u32, + pub fingerprint: u64, // The index of the first boolean flag in this aconfig package among all boolean // flags in this container. pub boolean_start_index: u32, @@ -107,8 +112,12 @@ impl fmt::Debug for PackageTableNode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!( f, - "Package: {}, Id: {}, Boolean flag start index: {}, Next: {:?}", - self.package_name, self.package_id, self.boolean_start_index, self.next_offset + "Package: {}, Id: {}, Fingerprint: {}, Boolean flag start index: {}, Next: {:?}", + self.package_name, + self.package_id, + self.fingerprint, + self.boolean_start_index, + self.next_offset )?; Ok(()) } @@ -116,7 +125,16 @@ impl fmt::Debug for PackageTableNode { impl PackageTableNode { /// Serialize to bytes - pub fn into_bytes(&self) -> Vec<u8> { + pub fn into_bytes(&self, version: u32) -> Vec<u8> { + match version { + 1 => Self::into_bytes_v1(self), + 2 => Self::into_bytes_v2(self), + // TODO(b/316357686): into_bytes should return a Result. 
+ _ => Self::into_bytes_v2(&self), + } + } + + fn into_bytes_v1(&self) -> Vec<u8> { let mut result = Vec::new(); let name_bytes = self.package_name.as_bytes(); result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes()); @@ -127,18 +145,60 @@ impl PackageTableNode { result } - /// Deserialize from bytes - pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> { + fn into_bytes_v2(&self) -> Vec<u8> { + let mut result = Vec::new(); + let name_bytes = self.package_name.as_bytes(); + result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes()); + result.extend_from_slice(name_bytes); + result.extend_from_slice(&self.package_id.to_le_bytes()); + result.extend_from_slice(&self.fingerprint.to_le_bytes()); + result.extend_from_slice(&self.boolean_start_index.to_le_bytes()); + result.extend_from_slice(&self.next_offset.unwrap_or(0).to_le_bytes()); + result + } + + /// Deserialize from bytes based on file version. + pub fn from_bytes(bytes: &[u8], version: u32) -> Result<Self, AconfigStorageError> { + match version { + 1 => Self::from_bytes_v1(bytes), + 2 => Self::from_bytes_v2(bytes), + _ => { + return Err(AconfigStorageError::BytesParseFail(anyhow!( + "Binary file is an unsupported version: {}", + version + ))) + } + } + } + + fn from_bytes_v1(bytes: &[u8]) -> Result<Self, AconfigStorageError> { let mut head = 0; - let node = Self { - package_name: read_str_from_bytes(bytes, &mut head)?, - package_id: read_u32_from_bytes(bytes, &mut head)?, - boolean_start_index: read_u32_from_bytes(bytes, &mut head)?, - next_offset: match read_u32_from_bytes(bytes, &mut head)? { - 0 => None, - val => Some(val), - }, + let package_name = read_str_from_bytes(bytes, &mut head)?; + let package_id = read_u32_from_bytes(bytes, &mut head)?; + // v1 does not have fingerprint, so just set to 0. + let fingerprint: u64 = 0; + let boolean_start_index = read_u32_from_bytes(bytes, &mut head)?; + let next_offset = match read_u32_from_bytes(bytes, &mut head)? { + 0 => None, + val => Some(val), + }; + + let node = Self { package_name, package_id, fingerprint, boolean_start_index, next_offset }; + Ok(node) + } + + fn from_bytes_v2(bytes: &[u8]) -> Result<Self, AconfigStorageError> { + let mut head = 0; + let package_name = read_str_from_bytes(bytes, &mut head)?; + let package_id = read_u32_from_bytes(bytes, &mut head)?; + let fingerprint = read_u64_from_bytes(bytes, &mut head)?; + let boolean_start_index = read_u32_from_bytes(bytes, &mut head)?; + let next_offset = match read_u32_from_bytes(bytes, &mut head)? 
{ + 0 => None, + val => Some(val), }; + + let node = Self { package_name, package_id, fingerprint, boolean_start_index, next_offset }; Ok(node) } @@ -146,12 +206,12 @@ impl PackageTableNode { /// construction side (aconfig binary) and consumption side (flag read lib) /// use the same method of hashing pub fn find_bucket_index(package: &str, num_buckets: u32) -> u32 { - get_bucket_index(&package, num_buckets) + get_bucket_index(package.as_bytes(), num_buckets) } } /// Package table struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct PackageTable { pub header: PackageTableHeader, pub buckets: Vec<Option<u32>>, @@ -179,7 +239,11 @@ impl PackageTable { [ self.header.into_bytes(), self.buckets.iter().map(|v| v.unwrap_or(0).to_le_bytes()).collect::<Vec<_>>().concat(), - self.nodes.iter().map(|v| v.into_bytes()).collect::<Vec<_>>().concat(), + self.nodes + .iter() + .map(|v| v.into_bytes(self.header.version)) + .collect::<Vec<_>>() + .concat(), ] .concat() } @@ -198,8 +262,8 @@ impl PackageTable { .collect(); let nodes = (0..num_packages) .map(|_| { - let node = PackageTableNode::from_bytes(&bytes[head..])?; - head += node.into_bytes().len(); + let node = PackageTableNode::from_bytes(&bytes[head..], header.version)?; + head += node.into_bytes(header.version).len(); Ok(node) }) .collect::<Result<Vec<_>, AconfigStorageError>>() @@ -219,11 +283,13 @@ impl PackageTable { mod tests { use super::*; use crate::test_utils::create_test_package_table; + use crate::{read_u32_from_start_of_bytes, DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION}; #[test] // this test point locks down the table serialization - fn test_serialization() { - let package_table = create_test_package_table(); + // TODO: b/376108268 - Use parameterized tests. 
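// Illustrative sketch (not part of the patch; the function name and field values
// are arbitrary): the only difference between the v1 and v2 node encodings above
// is the 8-byte fingerprint, so the v2 form of any node is exactly eight bytes longer.
fn illustrate_v2_node_overhead() {
    let node = PackageTableNode {
        package_name: String::from("com.android.aconfig.storage.test_1"),
        package_id: 0,
        fingerprint: 0x1234_5678_9abc_def0, // dropped by the v1 encoder
        boolean_start_index: 0,
        next_offset: None,
    };
    assert_eq!(node.into_bytes(2).len(), node.into_bytes(1).len() + 8);
}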
+ fn test_serialization_default() { + let package_table = create_test_package_table(DEFAULT_FILE_VERSION); let header: &PackageTableHeader = &package_table.header; let reinterpreted_header = PackageTableHeader::from_bytes(&header.into_bytes()); assert!(reinterpreted_header.is_ok()); @@ -231,7 +297,32 @@ mod tests { let nodes: &Vec<PackageTableNode> = &package_table.nodes; for node in nodes.iter() { - let reinterpreted_node = PackageTableNode::from_bytes(&node.into_bytes()).unwrap(); + let reinterpreted_node = + PackageTableNode::from_bytes(&node.into_bytes(header.version), header.version) + .unwrap(); + assert_eq!(node, &reinterpreted_node); + } + + let package_table_bytes = package_table.into_bytes(); + let reinterpreted_table = PackageTable::from_bytes(&package_table_bytes); + assert!(reinterpreted_table.is_ok()); + assert_eq!(&package_table, &reinterpreted_table.unwrap()); + assert_eq!(package_table_bytes.len() as u32, header.file_size); + } + + #[test] + fn test_serialization_max() { + let package_table = create_test_package_table(MAX_SUPPORTED_FILE_VERSION); + let header: &PackageTableHeader = &package_table.header; + let reinterpreted_header = PackageTableHeader::from_bytes(&header.into_bytes()); + assert!(reinterpreted_header.is_ok()); + assert_eq!(header, &reinterpreted_header.unwrap()); + + let nodes: &Vec<PackageTableNode> = &package_table.nodes; + for node in nodes.iter() { + let reinterpreted_node = + PackageTableNode::from_bytes(&node.into_bytes(header.version), header.version) + .unwrap(); assert_eq!(node, &reinterpreted_node); } @@ -246,17 +337,36 @@ mod tests { // this test point locks down that version number should be at the top of serialized // bytes fn test_version_number() { - let package_table = create_test_package_table(); + let package_table = create_test_package_table(DEFAULT_FILE_VERSION); let bytes = &package_table.into_bytes(); - let mut head = 0; - let version = read_u32_from_bytes(bytes, &mut head).unwrap(); - assert_eq!(version, 1); + let unpacked_version = read_u32_from_start_of_bytes(bytes).unwrap(); + assert_eq!(unpacked_version, DEFAULT_FILE_VERSION); + } + + #[test] + fn test_round_trip_default() { + let table: PackageTable = create_test_package_table(DEFAULT_FILE_VERSION); + let table_bytes = table.into_bytes(); + + let reinterpreted_table = PackageTable::from_bytes(&table_bytes).unwrap(); + + assert_eq!(table, reinterpreted_table); + } + + #[test] + fn test_round_trip_max() { + let table: PackageTable = create_test_package_table(MAX_SUPPORTED_FILE_VERSION); + let table_bytes = table.into_bytes(); + + let reinterpreted_table = PackageTable::from_bytes(&table_bytes).unwrap(); + + assert_eq!(table, reinterpreted_table); } #[test] // this test point locks down file type check fn test_file_type_check() { - let mut package_table = create_test_package_table(); + let mut package_table = create_test_package_table(DEFAULT_FILE_VERSION); package_table.header.file_type = 123u8; let error = PackageTable::from_bytes(&package_table.into_bytes()).unwrap_err(); assert_eq!( diff --git a/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs new file mode 100644 index 0000000000..9be3175e18 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs @@ -0,0 +1,327 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//! An implementation of SipHash13 + +use std::cmp; +use std::mem; +use std::ptr; +use std::slice; + +use std::hash::Hasher; + +/// An implementation of SipHash 2-4. +/// +#[derive(Debug, Clone, Default)] +pub struct SipHasher13 { + k0: u64, + k1: u64, + length: usize, // how many bytes we've processed + state: State, // hash State + tail: u64, // unprocessed bytes le + ntail: usize, // how many bytes in tail are valid +} + +#[derive(Debug, Clone, Copy, Default)] +#[repr(C)] +struct State { + // v0, v2 and v1, v3 show up in pairs in the algorithm, + // and simd implementations of SipHash will use vectors + // of v02 and v13. By placing them in this order in the struct, + // the compiler can pick up on just a few simd optimizations by itself. + v0: u64, + v2: u64, + v1: u64, + v3: u64, +} + +macro_rules! compress { + ($state:expr) => {{ + compress!($state.v0, $state.v1, $state.v2, $state.v3) + }}; + ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => {{ + $v0 = $v0.wrapping_add($v1); + $v1 = $v1.rotate_left(13); + $v1 ^= $v0; + $v0 = $v0.rotate_left(32); + $v2 = $v2.wrapping_add($v3); + $v3 = $v3.rotate_left(16); + $v3 ^= $v2; + $v0 = $v0.wrapping_add($v3); + $v3 = $v3.rotate_left(21); + $v3 ^= $v0; + $v2 = $v2.wrapping_add($v1); + $v1 = $v1.rotate_left(17); + $v1 ^= $v2; + $v2 = $v2.rotate_left(32); + }}; +} + +/// Load an integer of the desired type from a byte stream, in LE order. Uses +/// `copy_nonoverlapping` to let the compiler generate the most efficient way +/// to load it from a possibly unaligned address. +/// +/// Unsafe because: unchecked indexing at i..i+size_of(int_ty) +macro_rules! load_int_le { + ($buf:expr, $i:expr, $int_ty:ident) => {{ + debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len()); + let mut data = 0 as $int_ty; + ptr::copy_nonoverlapping( + $buf.get_unchecked($i), + &mut data as *mut _ as *mut u8, + mem::size_of::<$int_ty>(), + ); + data.to_le() + }}; +} + +/// Load an u64 using up to 7 bytes of a byte slice. +/// +/// Unsafe because: unchecked indexing at start..start+len +#[inline] +unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 { + debug_assert!(len < 8); + let mut i = 0; // current byte index (from LSB) in the output u64 + let mut out = 0; + if i + 3 < len { + out = load_int_le!(buf, start + i, u32) as u64; + i += 4; + } + if i + 1 < len { + out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8); + i += 2 + } + if i < len { + out |= (*buf.get_unchecked(start + i) as u64) << (i * 8); + i += 1; + } + debug_assert_eq!(i, len); + out +} + +impl SipHasher13 { + /// Creates a new `SipHasher13` with the two initial keys set to 0. + #[inline] + pub fn new() -> SipHasher13 { + SipHasher13::new_with_keys(0, 0) + } + + /// Creates a `SipHasher13` that is keyed off the provided keys. 
+ #[inline] + pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 { + let mut sip_hasher = SipHasher13 { + k0: key0, + k1: key1, + length: 0, + state: State { v0: 0, v1: 0, v2: 0, v3: 0 }, + tail: 0, + ntail: 0, + }; + sip_hasher.reset(); + sip_hasher + } + + #[inline] + fn c_rounds(state: &mut State) { + compress!(state); + } + + #[inline] + fn d_rounds(state: &mut State) { + compress!(state); + compress!(state); + compress!(state); + } + + #[inline] + fn reset(&mut self) { + self.length = 0; + self.state.v0 = self.k0 ^ 0x736f6d6570736575; + self.state.v1 = self.k1 ^ 0x646f72616e646f6d; + self.state.v2 = self.k0 ^ 0x6c7967656e657261; + self.state.v3 = self.k1 ^ 0x7465646279746573; + self.ntail = 0; + } + + // Specialized write function that is only valid for buffers with len <= 8. + // It's used to force inlining of write_u8 and write_usize, those would normally be inlined + // except for composite types (that includes slices and str hashing because of delimiter). + // Without this extra push the compiler is very reluctant to inline delimiter writes, + // degrading performance substantially for the most common use cases. + #[inline] + fn short_write(&mut self, msg: &[u8]) { + debug_assert!(msg.len() <= 8); + let length = msg.len(); + self.length += length; + + let needed = 8 - self.ntail; + let fill = cmp::min(length, needed); + if fill == 8 { + // safe to call since msg hasn't been loaded + self.tail = unsafe { load_int_le!(msg, 0, u64) }; + } else { + // safe to call since msg hasn't been loaded, and fill <= msg.len() + self.tail |= unsafe { u8to64_le(msg, 0, fill) } << (8 * self.ntail); + if length < needed { + self.ntail += length; + return; + } + } + self.state.v3 ^= self.tail; + Self::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + + // Buffered tail is now flushed, process new input. + self.ntail = length - needed; + // safe to call since number of `needed` bytes has been loaded + // and self.ntail + needed == msg.len() + self.tail = unsafe { u8to64_le(msg, needed, self.ntail) }; + } +} + +impl Hasher for SipHasher13 { + // see short_write comment for explanation + #[inline] + fn write_usize(&mut self, i: usize) { + // safe to call, since convert the pointer to u8 + let bytes = unsafe { + slice::from_raw_parts(&i as *const usize as *const u8, mem::size_of::<usize>()) + }; + self.short_write(bytes); + } + + // see short_write comment for explanation + #[inline] + fn write_u8(&mut self, i: u8) { + self.short_write(&[i]); + } + + #[inline] + fn write(&mut self, msg: &[u8]) { + let length = msg.len(); + self.length += length; + + let mut needed = 0; + + // loading unprocessed byte from last write + if self.ntail != 0 { + needed = 8 - self.ntail; + // safe to call, since msg hasn't been processed + // and cmp::min(length, needed) < 8 + self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail; + if length < needed { + self.ntail += length; + return; + } else { + self.state.v3 ^= self.tail; + Self::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + self.ntail = 0; + } + } + + // Buffered tail is now flushed, process new input. 
+ let len = length - needed; + let left = len & 0x7; + + let mut i = needed; + while i < len - left { + // safe to call since if i < len - left, it means msg has at least 1 byte to load + let mi = unsafe { load_int_le!(msg, i, u64) }; + + self.state.v3 ^= mi; + Self::c_rounds(&mut self.state); + self.state.v0 ^= mi; + + i += 8; + } + + // safe to call since if left == 0, since this call will load nothing + // if left > 0, it means there are number of `left` bytes in msg + self.tail = unsafe { u8to64_le(msg, i, left) }; + self.ntail = left; + } + + #[inline] + fn finish(&self) -> u64 { + let mut state = self.state; + + let b: u64 = ((self.length as u64 & 0xff) << 56) | self.tail; + + state.v3 ^= b; + Self::c_rounds(&mut state); + state.v0 ^= b; + + state.v2 ^= 0xff; + Self::d_rounds(&mut state); + + state.v0 ^ state.v1 ^ state.v2 ^ state.v3 + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use std::hash::{Hash, Hasher}; + use std::string::String; + + #[test] + // this test point locks down the value list serialization + fn test_sip_hash13_string_hash() { + let mut sip_hash13 = SipHasher13::new(); + let test_str1 = String::from("com.google.android.test"); + test_str1.hash(&mut sip_hash13); + assert_eq!(17898838669067067585, sip_hash13.finish()); + + let test_str2 = String::from("adfadfadf adfafadadf 1231241241"); + test_str2.hash(&mut sip_hash13); + assert_eq!(13543518987672889310, sip_hash13.finish()); + } + + #[test] + fn test_sip_hash13_write() { + let mut sip_hash13 = SipHasher13::new(); + let test_str1 = String::from("com.google.android.test"); + sip_hash13.write(test_str1.as_bytes()); + sip_hash13.write_u8(0xff); + assert_eq!(17898838669067067585, sip_hash13.finish()); + + let mut sip_hash132 = SipHasher13::new(); + let test_str1 = String::from("com.google.android.test"); + sip_hash132.write(test_str1.as_bytes()); + assert_eq!(9685440969685209025, sip_hash132.finish()); + sip_hash132.write(test_str1.as_bytes()); + assert_eq!(6719694176662736568, sip_hash132.finish()); + + let mut sip_hash133 = SipHasher13::new(); + let test_str2 = String::from("abcdefg"); + test_str2.hash(&mut sip_hash133); + assert_eq!(2492161047327640297, sip_hash133.finish()); + + let mut sip_hash134 = SipHasher13::new(); + let test_str3 = String::from("abcdefgh"); + test_str3.hash(&mut sip_hash134); + assert_eq!(6689927370435554326, sip_hash134.finish()); + } + + #[test] + fn test_sip_hash13_write_short() { + let mut sip_hash13 = SipHasher13::new(); + sip_hash13.write_u8(0x61); + assert_eq!(4644417185603328019, sip_hash13.finish()); + } +} diff --git a/tools/aconfig/aconfig_storage_file/src/test_utils.rs b/tools/aconfig/aconfig_storage_file/src/test_utils.rs index 106666c47f..7c603df40e 100644 --- a/tools/aconfig/aconfig_storage_file/src/test_utils.rs +++ b/tools/aconfig/aconfig_storage_file/src/test_utils.rs @@ -24,32 +24,59 @@ use anyhow::anyhow; use std::io::Write; use tempfile::NamedTempFile; -pub fn create_test_package_table() -> PackageTable { +pub fn create_test_package_table(version: u32) -> PackageTable { let header = PackageTableHeader { - version: 1, + version: version, container: String::from("mockup"), file_type: StorageFileType::PackageMap as u8, - file_size: 209, + file_size: match version { + 1 => 209, + 2 => 233, + _ => panic!("Unsupported version."), + }, num_packages: 3, bucket_offset: 31, node_offset: 59, }; - let buckets: Vec<Option<u32>> = vec![Some(59), None, None, Some(109), None, None, None]; + let buckets: Vec<Option<u32>> = match version { + 1 => vec![Some(59), None, None, 
Some(109), None, None, None], + 2 => vec![Some(59), None, None, Some(117), None, None, None], + _ => panic!("Unsupported version."), + }; let first_node = PackageTableNode { package_name: String::from("com.android.aconfig.storage.test_2"), package_id: 1, + fingerprint: match version { + 1 => 0, + 2 => 4431940502274857964u64, + _ => panic!("Unsupported version."), + }, boolean_start_index: 3, next_offset: None, }; let second_node = PackageTableNode { package_name: String::from("com.android.aconfig.storage.test_1"), package_id: 0, + fingerprint: match version { + 1 => 0, + 2 => 15248948510590158086u64, + _ => panic!("Unsupported version."), + }, boolean_start_index: 0, - next_offset: Some(159), + next_offset: match version { + 1 => Some(159), + 2 => Some(175), + _ => panic!("Unsupported version."), + }, }; let third_node = PackageTableNode { package_name: String::from("com.android.aconfig.storage.test_4"), package_id: 2, + fingerprint: match version { + 1 => 0, + 2 => 16233229917711622375u64, + _ => panic!("Unsupported version."), + }, boolean_start_index: 6, next_offset: None, }; @@ -76,9 +103,9 @@ impl FlagTableNode { } } -pub fn create_test_flag_table() -> FlagTable { +pub fn create_test_flag_table(version: u32) -> FlagTable { let header = FlagTableHeader { - version: 1, + version: version, container: String::from("mockup"), file_type: StorageFileType::FlagMap as u8, file_size: 321, @@ -118,9 +145,9 @@ pub fn create_test_flag_table() -> FlagTable { FlagTable { header, buckets, nodes } } -pub fn create_test_flag_value_list() -> FlagValueList { +pub fn create_test_flag_value_list(version: u32) -> FlagValueList { let header = FlagValueHeader { - version: 1, + version: version, container: String::from("mockup"), file_type: StorageFileType::FlagVal as u8, file_size: 35, @@ -131,9 +158,9 @@ pub fn create_test_flag_value_list() -> FlagValueList { FlagValueList { header, booleans } } -pub fn create_test_flag_info_list() -> FlagInfoList { +pub fn create_test_flag_info_list(version: u32) -> FlagInfoList { let header = FlagInfoHeader { - version: 1, + version: version, container: String::from("mockup"), file_type: StorageFileType::FlagInfo as u8, file_size: 35, diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java index 86a75f2f65..324c55d57d 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java @@ -16,12 +16,122 @@ package android.aconfig.storage; +/** + * Exception thrown when an error occurs while accessing Aconfig Storage. + * + * <p>This exception indicates a general problem with Aconfig Storage, such as an inability to read + * or write data. + */ public class AconfigStorageException extends RuntimeException { + + /** Generic error code indicating an unspecified Aconfig Storage error. */ + public static final int ERROR_GENERIC = 0; + + /** Error code indicating that the Aconfig Storage system is not found on the device. */ + public static final int ERROR_STORAGE_SYSTEM_NOT_FOUND = 1; + + /** Error code indicating that the requested configuration package is not found. */ + public static final int ERROR_PACKAGE_NOT_FOUND = 2; + + /** Error code indicating that the specified container is not found. 
*/ + public static final int ERROR_CONTAINER_NOT_FOUND = 3; + + /** Error code indicating that there was an error reading the Aconfig Storage file. */ + public static final int ERROR_CANNOT_READ_STORAGE_FILE = 4; + + public static final int ERROR_FILE_FINGERPRINT_MISMATCH = 5; + + private final int mErrorCode; + + /** + * Constructs a new {@code AconfigStorageException} with a generic error code and the specified + * detail message. + * + * @param msg The detail message for this exception. + */ public AconfigStorageException(String msg) { super(msg); + mErrorCode = ERROR_GENERIC; } + /** + * Constructs a new {@code AconfigStorageException} with a generic error code, the specified + * detail message, and cause. + * + * @param msg The detail message for this exception. + * @param cause The cause of this exception. + */ public AconfigStorageException(String msg, Throwable cause) { super(msg, cause); + mErrorCode = ERROR_GENERIC; + } + + /** + * Constructs a new {@code AconfigStorageException} with the specified error code and detail + * message. + * + * @param errorCode The error code for this exception. + * @param msg The detail message for this exception. + */ + public AconfigStorageException(int errorCode, String msg) { + super(msg); + mErrorCode = errorCode; + } + + /** + * Constructs a new {@code AconfigStorageException} with the specified error code, detail + * message, and cause. + * + * @param errorCode The error code for this exception. + * @param msg The detail message for this exception. + * @param cause The cause of this exception. + */ + public AconfigStorageException(int errorCode, String msg, Throwable cause) { + super(msg, cause); + mErrorCode = errorCode; + } + + /** + * Returns the error code associated with this exception. + * + * @return The error code. + */ + public int getErrorCode() { + return mErrorCode; + } + + /** + * Returns the error message for this exception, including the error code and the original + * message. + * + * @return The error message. + */ + @Override + public String getMessage() { + return errorString() + ": " + super.getMessage(); + } + + /** + * Returns a string representation of the error code. + * + * @return The error code string. 
+ */ + private String errorString() { + switch (mErrorCode) { + case ERROR_GENERIC: + return "ERROR_GENERIC"; + case ERROR_STORAGE_SYSTEM_NOT_FOUND: + return "ERROR_STORAGE_SYSTEM_NOT_FOUND"; + case ERROR_PACKAGE_NOT_FOUND: + return "ERROR_PACKAGE_NOT_FOUND"; + case ERROR_CONTAINER_NOT_FOUND: + return "ERROR_CONTAINER_NOT_FOUND"; + case ERROR_CANNOT_READ_STORAGE_FILE: + return "ERROR_CANNOT_READ_STORAGE_FILE"; + case ERROR_FILE_FINGERPRINT_MISMATCH: + return "ERROR_FILE_FINGERPRINT_MISMATCH"; + default: + return "<Unknown error code " + mErrorCode + ">"; + } } } diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java index 1c72364e6b..1fbcb859cd 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java @@ -41,14 +41,30 @@ public class ByteBufferReader { return this.mByteBuffer.getInt(); } + public long readLong() { + return this.mByteBuffer.getLong(); + } + public String readString() { int length = readInt(); + if (length > 1024) { + throw new AconfigStorageException( + "String length exceeds maximum allowed size (1024 bytes): " + length); + } byte[] bytes = new byte[length]; mByteBuffer.get(bytes, 0, length); return new String(bytes, StandardCharsets.UTF_8); } + public int readByte(int i) { + return Byte.toUnsignedInt(mByteBuffer.get(i)); + } + public void position(int newPosition) { mByteBuffer.position(newPosition); } + + public int position() { + return mByteBuffer.position(); + } } diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java index b0b1b9b186..c35487358d 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java @@ -42,4 +42,20 @@ public enum FileType { return null; } } + + @Override + public String toString() { + switch (type) { + case 0: + return "PACKAGE_MAP"; + case 1: + return "FLAG_MAP"; + case 2: + return "FLAG_VAL"; + case 3: + return "FLAG_INFO"; + default: + return "unrecognized type"; + } + } } diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java index e85fdee20f..757844a603 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java @@ -16,41 +16,57 @@ package android.aconfig.storage; +import static java.nio.charset.StandardCharsets.UTF_8; + import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; import java.util.Objects; public class FlagTable { private Header mHeader; - private Map<String, Node> mNodeMap; + private ByteBufferReader mReader; public static FlagTable fromBytes(ByteBuffer bytes) { FlagTable flagTable = new FlagTable(); - ByteBufferReader reader = new ByteBufferReader(bytes); - Header header = Header.fromBytes(reader); - flagTable.mHeader = header; - flagTable.mNodeMap = new HashMap(TableUtils.getTableSize(header.mNumFlags)); - reader.position(header.mNodeOffset); - for (int i = 0; i < header.mNumFlags; i++) { - Node node = Node.fromBytes(reader); - flagTable.mNodeMap.put(makeKey(node.mPackageId, 
node.mFlagName), node); - } + flagTable.mReader = new ByteBufferReader(bytes); + flagTable.mHeader = Header.fromBytes(flagTable.mReader); + return flagTable; } public Node get(int packageId, String flagName) { - return mNodeMap.get(makeKey(packageId, flagName)); + int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4; + int bucketIndex = TableUtils.getBucketIndex(makeKey(packageId, flagName), numBuckets); + int newPosition = mHeader.mBucketOffset + bucketIndex * 4; + if (newPosition >= mHeader.mNodeOffset) { + return null; + } + + mReader.position(newPosition); + int nodeIndex = mReader.readInt(); + if (nodeIndex < mHeader.mNodeOffset || nodeIndex >= mHeader.mFileSize) { + return null; + } + + while (nodeIndex != -1) { + mReader.position(nodeIndex); + Node node = Node.fromBytes(mReader); + if (Objects.equals(flagName, node.mFlagName) && packageId == node.mPackageId) { + return node; + } + nodeIndex = node.mNextOffset; + } + + return null; } public Header getHeader() { return mHeader; } - private static String makeKey(int packageId, String flagName) { + private static byte[] makeKey(int packageId, String flagName) { StringBuilder ret = new StringBuilder(); - return ret.append(packageId).append('/').append(flagName).toString(); + return ret.append(packageId).append('/').append(flagName).toString().getBytes(UTF_8); } public static class Header { diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java index 0ddc147e82..493436d2a2 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java @@ -17,33 +17,21 @@ package android.aconfig.storage; import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; public class FlagValueList { private Header mHeader; - private List<Boolean> mList; - - private int mSize; + private ByteBufferReader mReader; public static FlagValueList fromBytes(ByteBuffer bytes) { FlagValueList flagValueList = new FlagValueList(); - ByteBufferReader reader = new ByteBufferReader(bytes); - Header header = Header.fromBytes(reader); - flagValueList.mHeader = header; - flagValueList.mList = new ArrayList(header.mNumFlags); - reader.position(header.mBooleanValueOffset); - for (int i = 0; i < header.mNumFlags; i++) { - boolean val = reader.readByte() == 1; - flagValueList.mList.add(val); - } - flagValueList.mSize = flagValueList.mList.size(); + flagValueList.mReader = new ByteBufferReader(bytes); + flagValueList.mHeader = Header.fromBytes(flagValueList.mReader); return flagValueList; } - public boolean get(int index) { - return mList.get(index); + public boolean getBoolean(int index) { + return mReader.readByte(mHeader.mBooleanValueOffset + index) == 1; } public Header getHeader() { @@ -51,7 +39,7 @@ public class FlagValueList { } public int size() { - return mSize; + return mHeader.mNumFlags; } public static class Header { diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java index d04e1ac391..1e7c2cae1e 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java @@ -16,32 +16,66 @@ package android.aconfig.storage; +import static 
java.nio.charset.StandardCharsets.UTF_8; + import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; public class PackageTable { + private static final int FINGERPRINT_BYTES = 8; + // int: mPackageId + int: mBooleanStartIndex + int: mNextOffset + private static final int NODE_SKIP_BYTES = 12; + private Header mHeader; - private Map<String, Node> mNodeMap; + private ByteBufferReader mReader; public static PackageTable fromBytes(ByteBuffer bytes) { PackageTable packageTable = new PackageTable(); - ByteBufferReader reader = new ByteBufferReader(bytes); - Header header = Header.fromBytes(reader); - packageTable.mHeader = header; - packageTable.mNodeMap = new HashMap(TableUtils.getTableSize(header.mNumPackages)); - reader.position(header.mNodeOffset); - for (int i = 0; i < header.mNumPackages; i++) { - Node node = Node.fromBytes(reader); - packageTable.mNodeMap.put(node.mPackageName, node); - } + packageTable.mReader = new ByteBufferReader(bytes); + packageTable.mHeader = Header.fromBytes(packageTable.mReader); + return packageTable; } public Node get(String packageName) { - return mNodeMap.get(packageName); + int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4; + int bucketIndex = TableUtils.getBucketIndex(packageName.getBytes(UTF_8), numBuckets); + int newPosition = mHeader.mBucketOffset + bucketIndex * 4; + if (newPosition >= mHeader.mNodeOffset) { + return null; + } + mReader.position(newPosition); + int nodeIndex = mReader.readInt(); + + if (nodeIndex < mHeader.mNodeOffset || nodeIndex >= mHeader.mFileSize) { + return null; + } + + while (nodeIndex != -1) { + mReader.position(nodeIndex); + Node node = Node.fromBytes(mReader, mHeader.mVersion); + if (Objects.equals(packageName, node.mPackageName)) { + return node; + } + nodeIndex = node.mNextOffset; + } + + return null; + } + + public List<String> getPackageList() { + List<String> list = new ArrayList<>(mHeader.mNumPackages); + mReader.position(mHeader.mNodeOffset); + int fingerprintBytes = mHeader.mVersion == 1 ? 0 : FINGERPRINT_BYTES; + int skipBytes = fingerprintBytes + NODE_SKIP_BYTES; + for (int i = 0; i < mHeader.mNumPackages; i++) { + list.add(mReader.readString()); + mReader.position(mReader.position() + skipBytes); + } + return list; } public Header getHeader() { @@ -58,7 +92,7 @@ public class PackageTable { private int mBucketOffset; private int mNodeOffset; - public static Header fromBytes(ByteBufferReader reader) { + private static Header fromBytes(ByteBufferReader reader) { Header header = new Header(); header.mVersion = reader.readInt(); header.mContainer = reader.readString(); @@ -108,16 +142,42 @@ public class PackageTable { private String mPackageName; private int mPackageId; + private long mPackageFingerprint; private int mBooleanStartIndex; private int mNextOffset; + private boolean mHasPackageFingerprint; + + private static Node fromBytes(ByteBufferReader reader, int version) { + switch (version) { + case 1: + return fromBytesV1(reader); + case 2: + return fromBytesV2(reader); + default: + // Do we want to throw here? + return new Node(); + } + } + + private static Node fromBytesV1(ByteBufferReader reader) { + Node node = new Node(); + node.mPackageName = reader.readString(); + node.mPackageId = reader.readInt(); + node.mBooleanStartIndex = reader.readInt(); + node.mNextOffset = reader.readInt(); + node.mNextOffset = node.mNextOffset == 0 ? 
-1 : node.mNextOffset; + return node; + } - public static Node fromBytes(ByteBufferReader reader) { + private static Node fromBytesV2(ByteBufferReader reader) { Node node = new Node(); node.mPackageName = reader.readString(); node.mPackageId = reader.readInt(); + node.mPackageFingerprint = reader.readLong(); node.mBooleanStartIndex = reader.readInt(); node.mNextOffset = reader.readInt(); node.mNextOffset = node.mNextOffset == 0 ? -1 : node.mNextOffset; + node.mHasPackageFingerprint = true; return node; } @@ -151,6 +211,10 @@ public class PackageTable { return mPackageId; } + public long getPackageFingerprint() { + return mPackageFingerprint; + } + public int getBooleanStartIndex() { return mBooleanStartIndex; } @@ -158,5 +222,9 @@ public class PackageTable { public int getNextOffset() { return mNextOffset; } + + public boolean hasPackageFingerprint() { + return mHasPackageFingerprint; + } } } diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java new file mode 100644 index 0000000000..64714ee5f8 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package android.aconfig.storage;
+
+public class SipHasher13 {
+    static class State {
+        private long v0;
+        private long v2;
+        private long v1;
+        private long v3;
+
+        public State(long k0, long k1) {
+            v0 = k0 ^ 0x736f6d6570736575L;
+            v1 = k1 ^ 0x646f72616e646f6dL;
+            v2 = k0 ^ 0x6c7967656e657261L;
+            v3 = k1 ^ 0x7465646279746573L;
+        }
+
+        public void compress(long m) {
+            v3 ^= m;
+            cRounds();
+            v0 ^= m;
+        }
+
+        public long finish() {
+            v2 ^= 0xff;
+            dRounds();
+            return v0 ^ v1 ^ v2 ^ v3;
+        }
+
+        private void cRounds() {
+            v0 += v1;
+            v1 = Long.rotateLeft(v1, 13);
+            v1 ^= v0;
+            v0 = Long.rotateLeft(v0, 32);
+            v2 += v3;
+            v3 = Long.rotateLeft(v3, 16);
+            v3 ^= v2;
+            v0 += v3;
+            v3 = Long.rotateLeft(v3, 21);
+            v3 ^= v0;
+            v2 += v1;
+            v1 = Long.rotateLeft(v1, 17);
+            v1 ^= v2;
+            v2 = Long.rotateLeft(v2, 32);
+        }
+
+        private void dRounds() {
+            for (int i = 0; i < 3; i++) {
+                v0 += v1;
+                v1 = Long.rotateLeft(v1, 13);
+                v1 ^= v0;
+                v0 = Long.rotateLeft(v0, 32);
+                v2 += v3;
+                v3 = Long.rotateLeft(v3, 16);
+                v3 ^= v2;
+                v0 += v3;
+                v3 = Long.rotateLeft(v3, 21);
+                v3 ^= v0;
+                v2 += v1;
+                v1 = Long.rotateLeft(v1, 17);
+                v1 ^= v2;
+                v2 = Long.rotateLeft(v2, 32);
+            }
+        }
+    }
+
+    public static long hash(byte[] data) {
+        State state = new State(0, 0);
+        int len = data.length;
+        int left = len & 0x7;
+        int index = 0;
+
+        while (index < len - left) {
+            long mi = loadLe(data, index, 8);
+            index += 8;
+            state.compress(mi);
+        }
+
+        // Pad the end with 0xff to match the trailing 0xff byte written when a
+        // string is hashed on the Rust side.
+        long m = (0xffL << (left * 8)) | loadLe(data, index, left);
+        if (left == 0x7) {
+            // With 7 tail bytes the 0xff pad fills this word; compress it now
+            // and start a fresh word for the length byte.
+            state.compress(m);
+            m = 0L;
+        }
+        // The length byte counts the 0xff pad as well, hence len + 1.
+        m |= (((len + 1) & 0xffL) << 56);
+        state.compress(m);
+
+        return state.finish();
+    }
+
+    private static long loadLe(byte[] data, int offset, int size) {
+        long m = 0;
+        for (int i = 0; i < size; i++) {
+            m |= (data[i + offset] & 0xffL) << (i * 8);
+        }
+        return m;
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/StorageFileProvider.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/StorageFileProvider.java
new file mode 100644
index 0000000000..f75ac36f7d
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/StorageFileProvider.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package android.aconfig.storage; + +import java.io.Closeable; +import java.nio.MappedByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.NoSuchFileException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +/** @hide */ +public class StorageFileProvider { + + private static final String DEFAULT_MAP_PATH = "/metadata/aconfig/maps/"; + private static final String DEFAULT_BOOT_PATH = "/metadata/aconfig/boot/"; + private static final String PMAP_FILE_EXT = ".package.map"; + private static final String FMAP_FILE_EXT = ".flag.map"; + private static final String VAL_FILE_EXT = ".val"; + private static final StorageFileProvider DEFAULT_INSTANCE = + new StorageFileProvider(DEFAULT_MAP_PATH, DEFAULT_BOOT_PATH); + + private final String mMapPath; + private final String mBootPath; + + /** @hide */ + public static StorageFileProvider getDefaultProvider() { + return DEFAULT_INSTANCE; + } + + /** @hide */ + public StorageFileProvider(String mapPath, String bootPath) { + mMapPath = mapPath; + mBootPath = bootPath; + } + + /** @hide */ + public List<String> listContainers(String[] excludes) { + List<String> result = new ArrayList<>(); + Set<String> set = new HashSet<>(Arrays.asList(excludes)); + + try { + DirectoryStream<Path> stream = + Files.newDirectoryStream(Paths.get(mMapPath), "*" + PMAP_FILE_EXT); + for (Path entry : stream) { + String fileName = entry.getFileName().toString(); + String container = + fileName.substring(0, fileName.length() - PMAP_FILE_EXT.length()); + if (!set.contains(container)) { + result.add(container); + } + } + } catch (NoSuchFileException e) { + return result; + } catch (Exception e) { + throw new AconfigStorageException( + String.format("Fail to list map files in path %s", mMapPath), e); + } + + return result; + } + + /** @hide */ + public PackageTable getPackageTable(String container) { + return PackageTable.fromBytes( + mapStorageFile( + Paths.get(mMapPath, container + PMAP_FILE_EXT), FileType.PACKAGE_MAP)); + } + + /** @hide */ + public FlagTable getFlagTable(String container) { + return FlagTable.fromBytes( + mapStorageFile(Paths.get(mMapPath, container + FMAP_FILE_EXT), FileType.FLAG_MAP)); + } + + /** @hide */ + public FlagValueList getFlagValueList(String container) { + return FlagValueList.fromBytes( + mapStorageFile(Paths.get(mBootPath, container + VAL_FILE_EXT), FileType.FLAG_VAL)); + } + + // Map a storage file given file path + private static MappedByteBuffer mapStorageFile(Path file, FileType type) { + FileChannel channel = null; + try { + channel = FileChannel.open(file, StandardOpenOption.READ); + return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size()); + } catch (Exception e) { + throw new AconfigStorageException( + AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE, + String.format("Fail to mmap storage %s file %s", type.toString(), file), + e); + } finally { + quietlyDispose(channel); + } + } + + private static void quietlyDispose(Closeable closable) { + try { + if (closable != null) { + closable.close(); + } + } catch (Exception e) { + // no need to care, at least as of now + } + } +} diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java index 
714b53bf31..d4269dac3f 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java @@ -58,4 +58,21 @@ public class TableUtils { } throw new AconfigStorageException("Number of items in a hash table exceeds limit"); } + + public static int getBucketIndex(byte[] val, int numBuckets) { + long hashVal = SipHasher13.hash(val); + return (int) Long.remainderUnsigned(hashVal, numBuckets); + } + + public static class StorageFilesBundle { + public final PackageTable packageTable; + public final FlagTable flagTable; + public final FlagValueList flagValueList; + + public StorageFilesBundle (PackageTable pTable, FlagTable fTable, FlagValueList fValueList) { + this.packageTable = pTable; + this.flagTable = fTable; + this.flagValueList = fValueList; + } + } } diff --git a/tools/aconfig/aconfig_storage_file/tests/Android.bp b/tools/aconfig/aconfig_storage_file/tests/Android.bp index e2e225d08b..bd46d5f0ab 100644 --- a/tools/aconfig/aconfig_storage_file/tests/Android.bp +++ b/tools/aconfig/aconfig_storage_file/tests/Android.bp @@ -10,10 +10,14 @@ cc_test { "libbase", ], data: [ - "package.map", - "flag.map", - "flag.val", - "flag.info", + "data/v1/package_v1.map", + "data/v1/flag_v1.map", + "data/v1/flag_v1.val", + "data/v1/flag_v1.info", + "data/v2/package_v2.map", + "data/v2/flag_v2.map", + "data/v2/flag_v2.val", + "data/v2/flag_v2.info", ], test_suites: [ "device-tests", @@ -30,16 +34,22 @@ android_test { static_libs: [ "androidx.test.runner", "junit", + "aconfig_storage_file_java", ], test_config: "AndroidStorageJaveTest.xml", - certificate: "platform", + sdk_version: "test_current", data: [ - "package.map", - "flag.map", - "flag.val", - "flag.info", + "data/v1/package_v1.map", + "data/v1/flag_v1.map", + "data/v1/flag_v1.val", + "data/v1/flag_v1.info", + "data/v2/package_v2.map", + "data/v2/flag_v2.map", + "data/v2/flag_v2.val", + "data/v2/flag_v2.info", ], test_suites: [ "general-tests", ], + jarjar_rules: "jarjar.txt", } diff --git a/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml b/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml index 2d52d44c57..bfc238e320 100644 --- a/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml +++ b/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml @@ -21,13 +21,18 @@ </target_preparer> <target_preparer class="com.android.compatibility.common.tradefed.targetprep.FilePusher"> <option name="cleanup" value="true" /> - <option name="push" value="package.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/package.map" /> - <option name="push" value="flag.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.map" /> - <option name="push" value="flag.val->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.val" /> - <option name="push" value="flag.info->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.info" /> + <option name="push" value="package_v1.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v1.package.map" /> + <option name="push" value="flag_v1.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v1.flag.map" /> + <option name="push" value="flag_v1.val->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v1.val" /> + <option name="push" value="flag_v1.info->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v1.info" /> + <option name="push" 
value="package_v2.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v2.package.map" /> + <option name="push" value="flag_v2.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v2.flag.map" /> + <option name="push" value="flag_v2.val->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v2.val" /> + <option name="push" value="flag_v2.info->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v2.info" /> + <option name="post-push" value="chmod +r /data/local/tmp/aconfig_storage_file_test_java/testdata/" /> </target_preparer> <test class="com.android.tradefed.testtype.AndroidJUnitTest" > <option name="package" value="android.aconfig.storage.test" /> <option name="runtime-hint" value="1m" /> </test> -</configuration>
\ No newline at end of file +</configuration> diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.info b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.info Binary files differindex 6223edf369..6223edf369 100644 --- a/tools/aconfig/aconfig_storage_file/tests/flag.info +++ b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.info diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.map b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.map Binary files differindex e868f53d7e..e868f53d7e 100644 --- a/tools/aconfig/aconfig_storage_file/tests/flag.map +++ b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.map diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.val b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.val Binary files differindex ed203d4d13..ed203d4d13 100644 --- a/tools/aconfig/aconfig_storage_file/tests/flag.val +++ b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.val diff --git a/tools/aconfig/aconfig_storage_file/tests/package.map b/tools/aconfig/aconfig_storage_file/tests/data/v1/package_v1.map Binary files differindex 6c46a0339c..6c46a0339c 100644 --- a/tools/aconfig/aconfig_storage_file/tests/package.map +++ b/tools/aconfig/aconfig_storage_file/tests/data/v1/package_v1.map diff --git a/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.info b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.info Binary files differnew file mode 100644 index 0000000000..9db7fde7ae --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.info diff --git a/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.map b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.map Binary files differnew file mode 100644 index 0000000000..cf4685ceb4 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.map diff --git a/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.val b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.val Binary files differnew file mode 100644 index 0000000000..37d4750206 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.val diff --git a/tools/aconfig/aconfig_storage_file/tests/data/v2/package_v2.map b/tools/aconfig/aconfig_storage_file/tests/data/v2/package_v2.map Binary files differnew file mode 100644 index 0000000000..0a9f95ec85 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/tests/data/v2/package_v2.map diff --git a/tools/aconfig/aconfig_storage_file/tests/jarjar.txt b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt new file mode 100644 index 0000000000..24952ecfdf --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt @@ -0,0 +1,17 @@ +rule android.aconfig.storage.AconfigStorageException android.aconfig.storage.test.AconfigStorageException +rule android.aconfig.storage.FlagTable android.aconfig.storage.test.FlagTable +rule android.aconfig.storage.PackageTable android.aconfig.storage.test.PackageTable +rule android.aconfig.storage.ByteBufferReader android.aconfig.storage.test.ByteBufferReader +rule android.aconfig.storage.FlagType android.aconfig.storage.test.FlagType +rule android.aconfig.storage.SipHasher13 android.aconfig.storage.test.SipHasher13 +rule android.aconfig.storage.FileType android.aconfig.storage.test.FileType +rule android.aconfig.storage.FlagValueList android.aconfig.storage.test.FlagValueList +rule android.aconfig.storage.TableUtils android.aconfig.storage.test.TableUtils +rule android.aconfig.storage.AconfigPackageImpl 
android.aconfig.storage.test.AconfigPackageImpl +rule android.aconfig.storage.StorageFileProvider android.aconfig.storage.test.StorageFileProvider + + +rule android.aconfig.storage.FlagTable$* android.aconfig.storage.test.FlagTable$@1 +rule android.aconfig.storage.PackageTable$* android.aconfig.storage.test.PackageTable$@1 +rule android.aconfig.storage.FlagValueList$* android.aconfig.storage.test.FlagValueList@1 +rule android.aconfig.storage.SipHasher13$* android.aconfig.storage.test.SipHasher13@1 diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java index fd40d4c4ef..dc465b658d 100644 --- a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java +++ b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java @@ -31,7 +31,7 @@ public class FlagTableTest { @Test public void testFlagTable_rightHeader() throws Exception { - FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer()); + FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer(1)); FlagTable.Header header = flagTable.getHeader(); assertEquals(1, header.getVersion()); assertEquals("mockup", header.getContainer()); @@ -44,7 +44,7 @@ public class FlagTableTest { @Test public void testFlagTable_rightNode() throws Exception { - FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer()); + FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer(1)); FlagTable.Node node1 = flagTable.get(0, "enabled_ro"); FlagTable.Node node2 = flagTable.get(0, "enabled_rw"); diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java index c18590accc..306df7da5f 100644 --- a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java +++ b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java @@ -34,7 +34,7 @@ public class FlagValueListTest { @Test public void testFlagValueList_rightHeader() throws Exception { FlagValueList flagValueList = - FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer()); + FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer(1)); FlagValueList.Header header = flagValueList.getHeader(); assertEquals(1, header.getVersion()); assertEquals("mockup", header.getContainer()); @@ -47,31 +47,31 @@ public class FlagValueListTest { @Test public void testFlagValueList_rightNode() throws Exception { FlagValueList flagValueList = - FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer()); + FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer(1)); boolean[] expected = new boolean[] {false, true, true, false, true, true, true, true}; assertEquals(expected.length, flagValueList.size()); for (int i = 0; i < flagValueList.size(); i++) { - assertEquals(expected[i], flagValueList.get(i)); + assertEquals(expected[i], flagValueList.getBoolean(i)); } } @Test public void testFlagValueList_getValue() throws Exception { PackageTable packageTable = - PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer()); - FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer()); + PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(1)); + FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer(1)); FlagValueList flagValueList = - FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer()); + 
FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer(1)); PackageTable.Node pNode = packageTable.get("com.android.aconfig.storage.test_1"); FlagTable.Node fNode = flagTable.get(pNode.getPackageId(), "enabled_rw"); - assertTrue(flagValueList.get(pNode.getBooleanStartIndex() + fNode.getFlagIndex())); + assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex())); pNode = packageTable.get("com.android.aconfig.storage.test_4"); fNode = flagTable.get(pNode.getPackageId(), "enabled_fixed_ro"); - assertTrue(flagValueList.get(pNode.getBooleanStartIndex() + fNode.getFlagIndex())); + assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex())); } } diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java index e7e19d8d51..812ce3512e 100644 --- a/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java +++ b/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java @@ -17,6 +17,8 @@ package android.aconfig.storage.test; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import android.aconfig.storage.FileType; import android.aconfig.storage.PackageTable; @@ -25,13 +27,16 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; +import java.util.HashSet; +import java.util.Set; + @RunWith(JUnit4.class) public class PackageTableTest { @Test public void testPackageTable_rightHeader() throws Exception { PackageTable packageTable = - PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer()); + PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(1)); PackageTable.Header header = packageTable.getHeader(); assertEquals(1, header.getVersion()); assertEquals("mockup", header.getContainer()); @@ -43,9 +48,23 @@ public class PackageTableTest { } @Test + public void testPackageTable_rightHeader_v2() throws Exception { + PackageTable packageTable = + PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(2)); + PackageTable.Header header = packageTable.getHeader(); + assertEquals(2, header.getVersion()); + assertEquals("mockup", header.getContainer()); + assertEquals(FileType.PACKAGE_MAP, header.getFileType()); + assertEquals(233, header.getFileSize()); + assertEquals(3, header.getNumPackages()); + assertEquals(31, header.getBucketOffset()); + assertEquals(59, header.getNodeOffset()); + } + + @Test public void testPackageTable_rightNode() throws Exception { PackageTable packageTable = - PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer()); + PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(1)); PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1"); PackageTable.Node node2 = packageTable.get("com.android.aconfig.storage.test_2"); @@ -66,5 +85,61 @@ public class PackageTableTest { assertEquals(159, node1.getNextOffset()); assertEquals(-1, node2.getNextOffset()); assertEquals(-1, node4.getNextOffset()); + + assertFalse(node1.hasPackageFingerprint()); + assertFalse(node2.hasPackageFingerprint()); + assertFalse(node4.hasPackageFingerprint()); + } + + @Test + public void testPackageTable_rightNode_v2() throws Exception { + PackageTable packageTable = + PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(2)); + + PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1"); + PackageTable.Node node2 = 
packageTable.get("com.android.aconfig.storage.test_2"); + PackageTable.Node node4 = packageTable.get("com.android.aconfig.storage.test_4"); + + assertEquals("com.android.aconfig.storage.test_1", node1.getPackageName()); + assertEquals("com.android.aconfig.storage.test_2", node2.getPackageName()); + assertEquals("com.android.aconfig.storage.test_4", node4.getPackageName()); + + assertEquals(0, node1.getPackageId()); + assertEquals(1, node2.getPackageId()); + assertEquals(2, node4.getPackageId()); + + assertEquals(0, node1.getBooleanStartIndex()); + assertEquals(3, node2.getBooleanStartIndex()); + assertEquals(6, node4.getBooleanStartIndex()); + + assertEquals(175, node1.getNextOffset()); + assertEquals(-1, node2.getNextOffset()); + assertEquals(-1, node4.getNextOffset()); + + assertTrue(node1.hasPackageFingerprint()); + assertTrue(node2.hasPackageFingerprint()); + assertTrue(node4.hasPackageFingerprint()); + + assertEquals(-3197795563119393530L, node1.getPackageFingerprint()); + assertEquals(4431940502274857964L, node2.getPackageFingerprint()); + assertEquals(-2213514155997929241L, node4.getPackageFingerprint()); + } + + @Test + public void testPackageTable_getPackageList() throws Exception { + PackageTable packageTable = + PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(2)); + Set<String> packages = new HashSet<>(packageTable.getPackageList()); + assertEquals(3, packages.size()); + assertTrue(packages.contains("com.android.aconfig.storage.test_1")); + assertTrue(packages.contains("com.android.aconfig.storage.test_2")); + assertTrue(packages.contains("com.android.aconfig.storage.test_4")); + + packageTable = PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(1)); + packages = new HashSet<>(packageTable.getPackageList()); + assertEquals(3, packages.size()); + assertTrue(packages.contains("com.android.aconfig.storage.test_1")); + assertTrue(packages.contains("com.android.aconfig.storage.test_2")); + assertTrue(packages.contains("com.android.aconfig.storage.test_4")); } } diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/StorageInternalReaderTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java index 3a1bba0fad..10620d272b 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/java/StorageInternalReaderTest.java +++ b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java @@ -16,30 +16,29 @@ package android.aconfig.storage.test; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static java.nio.charset.StandardCharsets.UTF_8; -import android.aconfig.storage.StorageInternalReader; +import android.aconfig.storage.SipHasher13; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @RunWith(JUnit4.class) -public class StorageInternalReaderTest { - - private String mStorageDir = "/data/local/tmp/aconfig_java_api_test"; - +public class SipHasher13Test { @Test - public void testStorageInternalReader_getFlag() { - - String packageMapFile = mStorageDir + "/maps/mockup.package.map"; - String flagValueFile = mStorageDir + "/boot/mockup.val"; - - StorageInternalReader reader = - new StorageInternalReader( - "com.android.aconfig.storage.test_1", packageMapFile, flagValueFile); - assertFalse(reader.getBooleanFlagValue(0)); - assertTrue(reader.getBooleanFlagValue(1)); + public void testSipHash_hashString() throws Exception { + String testStr = "com.google.android.test"; + long result = 
SipHasher13.hash(testStr.getBytes(UTF_8)); + assertEquals(0xF86572EFF9C4A0C1L, result); + + testStr = "abcdefg"; + result = SipHasher13.hash(testStr.getBytes(UTF_8)); + assertEquals(0x2295EF44BD078AE9L, result); + + testStr = "abcdefgh"; + result = SipHasher13.hash(testStr.getBytes(UTF_8)); + assertEquals(0x5CD7657FA7F96C16L, result); } } diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java new file mode 100644 index 0000000000..c2720f9544 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.aconfig.storage.test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import android.aconfig.storage.FlagTable; +import android.aconfig.storage.FlagValueList; +import android.aconfig.storage.PackageTable; +import android.aconfig.storage.StorageFileProvider; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.nio.file.Paths; +import java.util.List; + +@RunWith(JUnit4.class) +public class StorageFileProviderTest { + + @Test + public void testlistContainers() throws Exception { + StorageFileProvider p = + new StorageFileProvider(TestDataUtils.TESTDATA_PATH, TestDataUtils.TESTDATA_PATH); + String[] excludes = {}; + List<String> containers = p.listContainers(excludes); + assertEquals(2, containers.size()); + + excludes = new String[] {"mock.v1"}; + containers = p.listContainers(excludes); + assertEquals(1, containers.size()); + + p = new StorageFileProvider("fake/path/", "fake/path/"); + containers = p.listContainers(excludes); + assertTrue(containers.isEmpty()); + } + + @Test + public void testLoadFiles() throws Exception { + StorageFileProvider p = + new StorageFileProvider(TestDataUtils.TESTDATA_PATH, TestDataUtils.TESTDATA_PATH); + PackageTable pt = p.getPackageTable("mock.v1"); + assertNotNull(pt); + FlagTable f = p.getFlagTable("mock.v1"); + assertNotNull(f); + FlagValueList v = p.getFlagValueList("mock.v1"); + assertNotNull(v); + } +} diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java b/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java index f35952d392..388971e28b 100644 --- a/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java +++ b/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java @@ -21,28 +21,27 @@ import java.io.InputStream; import java.nio.ByteBuffer; public final class TestDataUtils { - private static final String TEST_PACKAGE_MAP_PATH = "package.map"; - private static final String TEST_FLAG_MAP_PATH = "flag.map"; - private static final String TEST_FLAG_VAL_PATH = "flag.val"; - private static final String TEST_FLAG_INFO_PATH = "flag.info"; + private static final String 
TEST_PACKAGE_MAP_PATH = "mock.v%d.package.map"; + private static final String TEST_FLAG_MAP_PATH = "mock.v%d.flag.map"; + private static final String TEST_FLAG_VAL_PATH = "mock.v%d.val"; + private static final String TEST_FLAG_INFO_PATH = "mock.v%d.info"; - private static final String TESTDATA_PATH = - "/data/local/tmp/aconfig_storage_file_test_java/testdata/"; + public static final String TESTDATA_PATH = "/data/local/tmp/aconfig_storage_file_test_java/testdata/"; - public static ByteBuffer getTestPackageMapByteBuffer() throws Exception { - return readFile(TESTDATA_PATH + TEST_PACKAGE_MAP_PATH); + public static ByteBuffer getTestPackageMapByteBuffer(int version) throws Exception { + return readFile(TESTDATA_PATH + String.format(TEST_PACKAGE_MAP_PATH, version)); } - public static ByteBuffer getTestFlagMapByteBuffer() throws Exception { - return readFile(TESTDATA_PATH + TEST_FLAG_MAP_PATH); + public static ByteBuffer getTestFlagMapByteBuffer(int version) throws Exception { + return readFile(TESTDATA_PATH + String.format(TEST_FLAG_MAP_PATH, version)); } - public static ByteBuffer getTestFlagValByteBuffer() throws Exception { - return readFile(TESTDATA_PATH + TEST_FLAG_VAL_PATH); + public static ByteBuffer getTestFlagValByteBuffer(int version) throws Exception { + return readFile(TESTDATA_PATH + String.format(TEST_FLAG_VAL_PATH, version)); } - public static ByteBuffer getTestFlagInfoByteBuffer() throws Exception { - return readFile(TESTDATA_PATH + TEST_FLAG_INFO_PATH); + public static ByteBuffer getTestFlagInfoByteBuffer(int version) throws Exception { + return readFile(TESTDATA_PATH + String.format(TEST_FLAG_INFO_PATH, version)); } private static ByteBuffer readFile(String fileName) throws Exception { diff --git a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp index ebd1dd89bd..5c008afbf1 100644 --- a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp +++ b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp @@ -24,10 +24,8 @@ using namespace android::base; using namespace aconfig_storage; -void verify_value(const FlagValueSummary& flag, - const std::string& package_name, - const std::string& flag_name, - const std::string& flag_val, +void verify_value(const FlagValueSummary& flag, const std::string& package_name, + const std::string& flag_name, const std::string& flag_val, const std::string& value_type) { ASSERT_EQ(flag.package_name, package_name); ASSERT_EQ(flag.flag_name, flag_name); @@ -39,10 +37,8 @@ void verify_value_info(const FlagValueAndInfoSummary& flag, const std::string& package_name, const std::string& flag_name, const std::string& flag_val, - const std::string& value_type, - bool is_readwrite, - bool has_server_override, - bool has_local_override) { + const std::string& value_type, bool is_readwrite, + bool has_server_override, bool has_local_override) { ASSERT_EQ(flag.package_name, package_name); ASSERT_EQ(flag.flag_name, flag_name); ASSERT_EQ(flag.flag_value, flag_val); @@ -52,61 +48,137 @@ void verify_value_info(const FlagValueAndInfoSummary& flag, ASSERT_EQ(flag.has_local_override, has_local_override); } +Result<std::vector<FlagValueSummary>> get_flag_list_result( + const std::string version) { + auto const test_base_dir = GetExecutableDirectory(); + auto const test_dir = test_base_dir + "/data/v" + version; + auto const package_map = test_dir + "/package_v" + version + ".map"; + auto const flag_map = test_dir + "/flag_v" + version + ".map"; + auto const flag_val = 
test_dir + "/flag_v" + version + ".val"; + return aconfig_storage::list_flags(package_map, flag_map, flag_val); +} + +Result<std::vector<FlagValueAndInfoSummary>> get_flag_list_result_with_info( + const std::string version) { + auto const test_base_dir = GetExecutableDirectory(); + auto const test_dir = test_base_dir + "/data/v" + version; + auto const package_map = test_dir + "/package_v" + version + ".map"; + auto const flag_map = test_dir + "/flag_v" + version + ".map"; + auto const flag_val = test_dir + "/flag_v" + version + ".val"; + auto const flag_info = test_dir + "/flag_v" + version + ".info"; + return aconfig_storage::list_flags_with_info(package_map, flag_map, flag_val, + flag_info); +} + TEST(AconfigStorageFileTest, test_list_flag) { - auto const test_dir = GetExecutableDirectory(); - auto const package_map = test_dir + "/package.map"; - auto const flag_map = test_dir + "/flag.map"; - auto const flag_val = test_dir + "/flag.val"; - auto flag_list_result = aconfig_storage::list_flags( - package_map, flag_map, flag_val); + auto flag_list_result = get_flag_list_result("1"); + ASSERT_TRUE(flag_list_result.ok()); + + auto const& flag_list = *flag_list_result; + ASSERT_EQ(flag_list.size(), 8); + verify_value(flag_list[0], "com.android.aconfig.storage.test_1", + "disabled_rw", "false", "ReadWriteBoolean"); + verify_value(flag_list[1], "com.android.aconfig.storage.test_1", "enabled_ro", + "true", "ReadOnlyBoolean"); + verify_value(flag_list[2], "com.android.aconfig.storage.test_1", "enabled_rw", + "true", "ReadWriteBoolean"); + verify_value(flag_list[3], "com.android.aconfig.storage.test_2", + "disabled_rw", "false", "ReadWriteBoolean"); + verify_value(flag_list[4], "com.android.aconfig.storage.test_2", + "enabled_fixed_ro", "true", "FixedReadOnlyBoolean"); + verify_value(flag_list[5], "com.android.aconfig.storage.test_2", "enabled_ro", + "true", "ReadOnlyBoolean"); + verify_value(flag_list[6], "com.android.aconfig.storage.test_4", + "enabled_fixed_ro", "true", "FixedReadOnlyBoolean"); + verify_value(flag_list[7], "com.android.aconfig.storage.test_4", "enabled_rw", + "true", "ReadWriteBoolean"); +} + +// TODO: b/376256472 - Use parameterized tests. 
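// A sketch of the parameterized form the TODO above points at, assuming gtest
// value-parameterized tests and the get_flag_list_result helper defined earlier
// in this file; the fixture, suite, and test names below are illustrative only
// and are not part of this change:
class AconfigStorageFileVersionTest : public ::testing::TestWithParam<std::string> {};

INSTANTIATE_TEST_SUITE_P(PerVersion, AconfigStorageFileVersionTest,
                         ::testing::Values("1", "2"));

TEST_P(AconfigStorageFileVersionTest, ListFlagReturnsAllEightFlags) {
  auto flag_list_result = get_flag_list_result(GetParam());
  ASSERT_TRUE(flag_list_result.ok());
  auto const& flag_list = *flag_list_result;
  ASSERT_EQ(flag_list.size(), 8);
}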
+TEST(AconfigStorageFileTest, test_list_flag_v2) { + auto flag_list_result = get_flag_list_result("2"); ASSERT_TRUE(flag_list_result.ok()); auto const& flag_list = *flag_list_result; ASSERT_EQ(flag_list.size(), 8); - verify_value(flag_list[0], "com.android.aconfig.storage.test_1", "disabled_rw", - "false", "ReadWriteBoolean"); + verify_value(flag_list[0], "com.android.aconfig.storage.test_1", + "disabled_rw", "false", "ReadWriteBoolean"); verify_value(flag_list[1], "com.android.aconfig.storage.test_1", "enabled_ro", "true", "ReadOnlyBoolean"); verify_value(flag_list[2], "com.android.aconfig.storage.test_1", "enabled_rw", "true", "ReadWriteBoolean"); - verify_value(flag_list[3], "com.android.aconfig.storage.test_2", "disabled_rw", - "false", "ReadWriteBoolean"); - verify_value(flag_list[4], "com.android.aconfig.storage.test_2", "enabled_fixed_ro", - "true", "FixedReadOnlyBoolean"); + verify_value(flag_list[3], "com.android.aconfig.storage.test_2", + "disabled_rw", "false", "ReadWriteBoolean"); + verify_value(flag_list[4], "com.android.aconfig.storage.test_2", + "enabled_fixed_ro", "true", "FixedReadOnlyBoolean"); verify_value(flag_list[5], "com.android.aconfig.storage.test_2", "enabled_ro", "true", "ReadOnlyBoolean"); - verify_value(flag_list[6], "com.android.aconfig.storage.test_4", "enabled_fixed_ro", - "true", "FixedReadOnlyBoolean"); + verify_value(flag_list[6], "com.android.aconfig.storage.test_4", + "enabled_fixed_ro", "true", "FixedReadOnlyBoolean"); verify_value(flag_list[7], "com.android.aconfig.storage.test_4", "enabled_rw", "true", "ReadWriteBoolean"); } TEST(AconfigStorageFileTest, test_list_flag_with_info) { - auto const test_dir = GetExecutableDirectory(); - auto const package_map = test_dir + "/package.map"; - auto const flag_map = test_dir + "/flag.map"; - auto const flag_val = test_dir + "/flag.val"; - auto const flag_info = test_dir + "/flag.info"; - auto flag_list_result = aconfig_storage::list_flags_with_info( - package_map, flag_map, flag_val, flag_info); + auto flag_list_result = get_flag_list_result_with_info("1"); + ASSERT_TRUE(flag_list_result.ok()); + + auto const& flag_list = *flag_list_result; + ASSERT_EQ(flag_list.size(), 8); + verify_value_info(flag_list[0], "com.android.aconfig.storage.test_1", + "disabled_rw", "false", "ReadWriteBoolean", true, false, + false); + verify_value_info(flag_list[1], "com.android.aconfig.storage.test_1", + "enabled_ro", "true", "ReadOnlyBoolean", false, false, + false); + verify_value_info(flag_list[2], "com.android.aconfig.storage.test_1", + "enabled_rw", "true", "ReadWriteBoolean", true, false, + false); + verify_value_info(flag_list[3], "com.android.aconfig.storage.test_2", + "disabled_rw", "false", "ReadWriteBoolean", true, false, + false); + verify_value_info(flag_list[4], "com.android.aconfig.storage.test_2", + "enabled_fixed_ro", "true", "FixedReadOnlyBoolean", false, + false, false); + verify_value_info(flag_list[5], "com.android.aconfig.storage.test_2", + "enabled_ro", "true", "ReadOnlyBoolean", false, false, + false); + verify_value_info(flag_list[6], "com.android.aconfig.storage.test_4", + "enabled_fixed_ro", "true", "FixedReadOnlyBoolean", false, + false, false); + verify_value_info(flag_list[7], "com.android.aconfig.storage.test_4", + "enabled_rw", "true", "ReadWriteBoolean", true, false, + false); +} + +TEST(AconfigStorageFileTest, test_list_flag_with_info_v2) { + auto flag_list_result = get_flag_list_result_with_info("2"); ASSERT_TRUE(flag_list_result.ok()); auto const& flag_list = *flag_list_result; 
ASSERT_EQ(flag_list.size(), 8); - verify_value_info(flag_list[0], "com.android.aconfig.storage.test_1", "disabled_rw", - "false", "ReadWriteBoolean", true, false, false); - verify_value_info(flag_list[1], "com.android.aconfig.storage.test_1", "enabled_ro", - "true", "ReadOnlyBoolean", false, false, false); - verify_value_info(flag_list[2], "com.android.aconfig.storage.test_1", "enabled_rw", - "true", "ReadWriteBoolean", true, false, false); - verify_value_info(flag_list[3], "com.android.aconfig.storage.test_2", "disabled_rw", - "false", "ReadWriteBoolean", true, false, false); - verify_value_info(flag_list[4], "com.android.aconfig.storage.test_2", "enabled_fixed_ro", - "true", "FixedReadOnlyBoolean", false, false, false); - verify_value_info(flag_list[5], "com.android.aconfig.storage.test_2", "enabled_ro", - "true", "ReadOnlyBoolean", false, false, false); - verify_value_info(flag_list[6], "com.android.aconfig.storage.test_4", "enabled_fixed_ro", - "true", "FixedReadOnlyBoolean", false, false, false); - verify_value_info(flag_list[7], "com.android.aconfig.storage.test_4", "enabled_rw", - "true", "ReadWriteBoolean", true, false, false); + verify_value_info(flag_list[0], "com.android.aconfig.storage.test_1", + "disabled_rw", "false", "ReadWriteBoolean", true, false, + false); + verify_value_info(flag_list[1], "com.android.aconfig.storage.test_1", + "enabled_ro", "true", "ReadOnlyBoolean", false, false, + false); + verify_value_info(flag_list[2], "com.android.aconfig.storage.test_1", + "enabled_rw", "true", "ReadWriteBoolean", true, false, + false); + verify_value_info(flag_list[3], "com.android.aconfig.storage.test_2", + "disabled_rw", "false", "ReadWriteBoolean", true, false, + false); + verify_value_info(flag_list[4], "com.android.aconfig.storage.test_2", + "enabled_fixed_ro", "true", "FixedReadOnlyBoolean", false, + false, false); + verify_value_info(flag_list[5], "com.android.aconfig.storage.test_2", + "enabled_ro", "true", "ReadOnlyBoolean", false, false, + false); + verify_value_info(flag_list[6], "com.android.aconfig.storage.test_4", + "enabled_fixed_ro", "true", "FixedReadOnlyBoolean", false, + false, false); + verify_value_info(flag_list[7], "com.android.aconfig.storage.test_4", + "enabled_rw", "true", "ReadWriteBoolean", true, false, + false); } diff --git a/tools/aconfig/aconfig_storage_read_api/Android.bp b/tools/aconfig/aconfig_storage_read_api/Android.bp index 9e950a61e7..16341b9273 100644 --- a/tools/aconfig/aconfig_storage_read_api/Android.bp +++ b/tools/aconfig/aconfig_storage_read_api/Android.bp @@ -36,10 +36,10 @@ rust_test_host { "librand", ], data: [ - "tests/package.map", - "tests/flag.map", - "tests/flag.val", - "tests/flag.info", + "tests/data/v1/package_v1.map", + "tests/data/v1/flag_v1.map", + "tests/data/v1/flag_v1.val", + "tests/data/v1/flag_v1.info", ], } @@ -107,31 +107,12 @@ cc_library { afdo: true, } -soong_config_module_type { - name: "aconfig_lib_cc_shared_link_defaults", - module_type: "cc_defaults", - config_namespace: "Aconfig", - bool_variables: [ - "read_from_new_storage", - ], - properties: [ - "shared_libs", - ], -} - -soong_config_bool_variable { - name: "read_from_new_storage", -} - -aconfig_lib_cc_shared_link_defaults { +cc_defaults { name: "aconfig_lib_cc_shared_link.defaults", - soong_config_variables: { - read_from_new_storage: { - shared_libs: [ - "libaconfig_storage_read_api_cc", - ], - }, - }, + shared_libs: select(release_flag("RELEASE_READ_FROM_NEW_STORAGE"), { + true: ["libaconfig_storage_read_api_cc"], + default: [], + }), } cc_defaults { 
@@ -147,6 +128,7 @@ rust_ffi_shared { crate_name: "aconfig_storage_read_api_rust_jni", srcs: ["srcs/lib.rs"], rustlibs: [ + "libaconfig_storage_file", "libaconfig_storage_read_api", "libanyhow", "libjni", @@ -172,37 +154,17 @@ java_library { java_library { name: "aconfig_storage_reader_java", srcs: [ - "srcs/android/aconfig/storage/StorageInternalReader.java", + "srcs/android/os/flagging/*.java", ], libs: [ "unsupportedappusage", - "strict_mode_stub", ], static_libs: [ "aconfig_storage_file_java", ], - sdk_version: "core_current", - host_supported: true, - min_sdk_version: "29", - apex_available: [ - "//apex_available:platform", - "//apex_available:anyapex", - ], -} - -java_library { - name: "aconfig_storage_reader_java_none", - srcs: [ - "srcs/android/aconfig/storage/StorageInternalReader.java", - ], - libs: [ - "unsupportedappusage-sdk-none", - "fake_device_config", + sdk_version: "current", + visibility: [ + "//frameworks/base", + "//build/make/tools/aconfig/aconfig_storage_read_api/tests", ], - static_libs: [ - "aconfig_storage_file_java_none", - ], - sdk_version: "none", - system_modules: "core-all-system-modules", - host_supported: true, } diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs index 6d03377683..68b6193079 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs @@ -16,8 +16,10 @@ //! flag value query module defines the flag value file read from mapped bytes -use crate::{AconfigStorageError, FILE_VERSION}; -use aconfig_storage_file::{flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType}; +use crate::AconfigStorageError; +use aconfig_storage_file::{ + flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType, MAX_SUPPORTED_FILE_VERSION, +}; use anyhow::anyhow; /// Get flag attribute bitfield @@ -27,11 +29,11 @@ pub fn find_flag_attribute( flag_index: u32, ) -> Result<u8, AconfigStorageError> { let interpreted_header = FlagInfoHeader::from_bytes(buf)?; - if interpreted_header.version > crate::FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot read storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -53,12 +55,14 @@ pub fn find_flag_attribute( #[cfg(test)] mod tests { use super::*; - use aconfig_storage_file::{test_utils::create_test_flag_info_list, FlagInfoBit}; + use aconfig_storage_file::{ + test_utils::create_test_flag_info_list, FlagInfoBit, DEFAULT_FILE_VERSION, + }; #[test] // this test point locks down query if flag has server override fn test_is_flag_sticky() { - let flag_info_list = create_test_flag_info_list().into_bytes(); + let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes(); for offset in 0..8 { let attribute = find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, offset).unwrap(); @@ -69,7 +73,7 @@ mod tests { #[test] // this test point locks down query if flag is readwrite fn test_is_flag_readwrite() { - let flag_info_list = create_test_flag_info_list().into_bytes(); + let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes(); let baseline: Vec<bool> = vec![true, false, true, true, false, false, false, true]; for offset in 0..8 { let attribute = @@ -84,7 +88,7 @@ mod tests { #[test] // this test point locks down query if 
flag has local override fn test_flag_has_override() { - let flag_info_list = create_test_flag_info_list().into_bytes(); + let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes(); for offset in 0..8 { let attribute = find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, offset).unwrap(); @@ -95,7 +99,7 @@ mod tests { #[test] // this test point locks down query beyond the end of boolean section fn test_boolean_out_of_range() { - let flag_info_list = create_test_flag_info_list().into_bytes(); + let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes(); let error = find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, 8).unwrap_err(); assert_eq!( @@ -107,16 +111,16 @@ mod tests { #[test] // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { - let mut info_list = create_test_flag_info_list(); - info_list.header.version = crate::FILE_VERSION + 1; + let mut info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION); + info_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let flag_info = info_list.into_bytes(); let error = find_flag_attribute(&flag_info[..], FlagValueType::Boolean, 4).unwrap_err(); assert_eq!( format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})", - crate::FILE_VERSION + 1, - crate::FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs index a1a4793bc2..3e87acc43b 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs @@ -16,9 +16,10 @@ //! 
flag table query module defines the flag table file read from mapped bytes -use crate::{AconfigStorageError, FILE_VERSION}; +use crate::AconfigStorageError; use aconfig_storage_file::{ flag_table::FlagTableHeader, flag_table::FlagTableNode, read_u32_from_bytes, StoredFlagType, + MAX_SUPPORTED_FILE_VERSION, }; use anyhow::anyhow; @@ -36,11 +37,11 @@ pub fn find_flag_read_context( flag: &str, ) -> Result<Option<FlagReadContext>, AconfigStorageError> { let interpreted_header = FlagTableHeader::from_bytes(buf)?; - if interpreted_header.version > crate::FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot read storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -73,12 +74,12 @@ pub fn find_flag_read_context( #[cfg(test)] mod tests { use super::*; - use aconfig_storage_file::test_utils::create_test_flag_table; + use aconfig_storage_file::{test_utils::create_test_flag_table, DEFAULT_FILE_VERSION}; #[test] // this test point locks down table query fn test_flag_query() { - let flag_table = create_test_flag_table().into_bytes(); + let flag_table = create_test_flag_table(DEFAULT_FILE_VERSION).into_bytes(); let baseline = vec![ (0, "enabled_ro", StoredFlagType::ReadOnlyBoolean, 1u16), (0, "enabled_rw", StoredFlagType::ReadWriteBoolean, 2u16), @@ -100,7 +101,7 @@ mod tests { #[test] // this test point locks down table query of a non exist flag fn test_not_existed_flag_query() { - let flag_table = create_test_flag_table().into_bytes(); + let flag_table = create_test_flag_table(DEFAULT_FILE_VERSION).into_bytes(); let flag_context = find_flag_read_context(&flag_table[..], 1, "disabled_fixed_ro").unwrap(); assert_eq!(flag_context, None); let flag_context = find_flag_read_context(&flag_table[..], 2, "disabled_rw").unwrap(); @@ -110,16 +111,16 @@ mod tests { #[test] // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { - let mut table = create_test_flag_table(); - table.header.version = crate::FILE_VERSION + 1; + let mut table = create_test_flag_table(DEFAULT_FILE_VERSION); + table.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let flag_table = table.into_bytes(); let error = find_flag_read_context(&flag_table[..], 0, "enabled_ro").unwrap_err(); assert_eq!( format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})", - crate::FILE_VERSION + 1, - crate::FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs index 9d32a16ac8..35f56929a9 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs @@ -16,18 +16,20 @@ //! 
flag value query module defines the flag value file read from mapped bytes -use crate::{AconfigStorageError, FILE_VERSION}; -use aconfig_storage_file::{flag_value::FlagValueHeader, read_u8_from_bytes}; +use crate::AconfigStorageError; +use aconfig_storage_file::{ + flag_value::FlagValueHeader, read_u8_from_bytes, MAX_SUPPORTED_FILE_VERSION, +}; use anyhow::anyhow; /// Query flag value pub fn find_boolean_flag_value(buf: &[u8], flag_index: u32) -> Result<bool, AconfigStorageError> { let interpreted_header = FlagValueHeader::from_bytes(buf)?; - if interpreted_header.version > crate::FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot read storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -46,12 +48,12 @@ pub fn find_boolean_flag_value(buf: &[u8], flag_index: u32) -> Result<bool, Acon #[cfg(test)] mod tests { use super::*; - use aconfig_storage_file::test_utils::create_test_flag_value_list; + use aconfig_storage_file::{test_utils::create_test_flag_value_list, DEFAULT_FILE_VERSION}; #[test] // this test point locks down flag value query fn test_flag_value_query() { - let flag_value_list = create_test_flag_value_list().into_bytes(); + let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes(); let baseline: Vec<bool> = vec![false, true, true, false, true, true, true, true]; for (offset, expected_value) in baseline.into_iter().enumerate() { let flag_value = find_boolean_flag_value(&flag_value_list[..], offset as u32).unwrap(); @@ -62,7 +64,7 @@ mod tests { #[test] // this test point locks down query beyond the end of boolean section fn test_boolean_out_of_range() { - let flag_value_list = create_test_flag_value_list().into_bytes(); + let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes(); let error = find_boolean_flag_value(&flag_value_list[..], 8).unwrap_err(); assert_eq!( format!("{:?}", error), @@ -73,16 +75,16 @@ mod tests { #[test] // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { - let mut value_list = create_test_flag_value_list(); - value_list.header.version = crate::FILE_VERSION + 1; + let mut value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION); + value_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let flag_value = value_list.into_bytes(); let error = find_boolean_flag_value(&flag_value[..], 4).unwrap_err(); assert_eq!( format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})", - crate::FILE_VERSION + 1, - crate::FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_read_api/src/lib.rs b/tools/aconfig/aconfig_storage_read_api/src/lib.rs index d76cf3fe4e..d3cc9d427d 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/lib.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/lib.rs @@ -44,9 +44,10 @@ pub mod package_table_query; pub use aconfig_storage_file::{AconfigStorageError, FlagValueType, StorageFileType}; pub use flag_table_query::FlagReadContext; +pub use mapped_file::map_file; pub use package_table_query::PackageReadContext; -use aconfig_storage_file::{read_u32_from_bytes, FILE_VERSION}; +use aconfig_storage_file::read_u32_from_bytes; use flag_info_query::find_flag_attribute; use 
flag_table_query::find_flag_read_context; use flag_value_query::find_boolean_flag_value; @@ -114,13 +115,13 @@ pub fn get_flag_read_context( /// Get the boolean flag value. /// -/// \input file: mapped flag file +/// \input file: a byte slice, can be either &Mmap or &MapMut /// \input index: boolean flag offset /// /// \return /// If the provide offset is valid, it returns the boolean flag value, otherwise it /// returns the error message. -pub fn get_boolean_flag_value(file: &Mmap, index: u32) -> Result<bool, AconfigStorageError> { +pub fn get_boolean_flag_value(file: &[u8], index: u32) -> Result<bool, AconfigStorageError> { find_boolean_flag_value(file, index) } @@ -148,7 +149,7 @@ pub fn get_storage_file_version(file_path: &str) -> Result<u32, AconfigStorageEr /// Get the flag attribute. /// -/// \input file: mapped flag info file +/// \input file: a byte slice, can be either &Mmap or &MapMut /// \input flag_type: flag value type /// \input flag_index: flag index /// @@ -156,7 +157,7 @@ pub fn get_storage_file_version(file_path: &str) -> Result<u32, AconfigStorageEr /// If the provide offset is valid, it returns the flag attribute bitfiled, otherwise it /// returns the error message. pub fn get_flag_attribute( - file: &Mmap, + file: &[u8], flag_type: FlagValueType, flag_index: u32, ) -> Result<u8, AconfigStorageError> { @@ -412,10 +413,10 @@ mod tests { let flag_map = storage_dir.clone() + "/maps/mockup.flag.map"; let flag_val = storage_dir.clone() + "/boot/mockup.val"; let flag_info = storage_dir.clone() + "/boot/mockup.info"; - fs::copy("./tests/package.map", &package_map).unwrap(); - fs::copy("./tests/flag.map", &flag_map).unwrap(); - fs::copy("./tests/flag.val", &flag_val).unwrap(); - fs::copy("./tests/flag.info", &flag_info).unwrap(); + fs::copy("./tests/data/v1/package_v1.map", &package_map).unwrap(); + fs::copy("./tests/data/v1/flag_v1.map", &flag_map).unwrap(); + fs::copy("./tests/data/v1/flag_v1.val", &flag_val).unwrap(); + fs::copy("./tests/data/v1/flag_v1.info", &flag_info).unwrap(); return storage_dir; } @@ -432,21 +433,24 @@ mod tests { get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_1") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 0, boolean_start_index: 0 }; + let expected_package_context = + PackageReadContext { package_id: 0, boolean_start_index: 0, fingerprint: 0 }; assert_eq!(package_context, expected_package_context); let package_context = get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_2") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 1, boolean_start_index: 3 }; + let expected_package_context = + PackageReadContext { package_id: 1, boolean_start_index: 3, fingerprint: 0 }; assert_eq!(package_context, expected_package_context); let package_context = get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_4") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 2, boolean_start_index: 6 }; + let expected_package_context = + PackageReadContext { package_id: 2, boolean_start_index: 6, fingerprint: 0 }; assert_eq!(package_context, expected_package_context); } @@ -507,9 +511,9 @@ mod tests { #[test] // this test point locks down flag storage file version number query api fn test_storage_version_query() { - assert_eq!(get_storage_file_version("./tests/package.map").unwrap(), 1); - assert_eq!(get_storage_file_version("./tests/flag.map").unwrap(), 1); - 
assert_eq!(get_storage_file_version("./tests/flag.val").unwrap(), 1); - assert_eq!(get_storage_file_version("./tests/flag.info").unwrap(), 1); + assert_eq!(get_storage_file_version("./tests/data/v1/package_v1.map").unwrap(), 1); + assert_eq!(get_storage_file_version("./tests/data/v1/flag_v1.map").unwrap(), 1); + assert_eq!(get_storage_file_version("./tests/data/v1/flag_v1.val").unwrap(), 1); + assert_eq!(get_storage_file_version("./tests/data/v1/flag_v1.info").unwrap(), 1); } } diff --git a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs index 5a1664535f..f4e269e68b 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs @@ -28,7 +28,7 @@ use crate::StorageFileType; /// The memory mapped file may have undefined behavior if there are writes to this /// file after being mapped. Ensure no writes can happen to this file while this /// mapping stays alive. -unsafe fn map_file(file_path: &str) -> Result<Mmap, AconfigStorageError> { +pub unsafe fn map_file(file_path: &str) -> Result<Mmap, AconfigStorageError> { let file = File::open(file_path) .map_err(|errmsg| FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg)))?; unsafe { @@ -97,10 +97,10 @@ mod tests { let flag_map = storage_dir.clone() + "/maps/mockup.flag.map"; let flag_val = storage_dir.clone() + "/boot/mockup.val"; let flag_info = storage_dir.clone() + "/boot/mockup.info"; - fs::copy("./tests/package.map", &package_map).unwrap(); - fs::copy("./tests/flag.map", &flag_map).unwrap(); - fs::copy("./tests/flag.val", &flag_val).unwrap(); - fs::copy("./tests/flag.info", &flag_info).unwrap(); + fs::copy("./tests/data/v1/package_v1.map", &package_map).unwrap(); + fs::copy("./tests/data/v1/flag_v1.map", &flag_map).unwrap(); + fs::copy("./tests/data/v1/flag_v1.val", &flag_val).unwrap(); + fs::copy("./tests/data/v1/flag_v1.info", &flag_info).unwrap(); return storage_dir; } @@ -108,9 +108,9 @@ mod tests { #[test] fn test_mapped_file_contents() { let storage_dir = create_test_storage_files(); - map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/package.map"); - map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/flag.map"); - map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/flag.val"); - map_and_verify(&storage_dir, StorageFileType::FlagInfo, "./tests/flag.info"); + map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/data/v1/package_v1.map"); + map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/data/v1/flag_v1.map"); + map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/data/v1/flag_v1.val"); + map_and_verify(&storage_dir, StorageFileType::FlagInfo, "./tests/data/v1/flag_v1.info"); } } diff --git a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs index 2cb854b1b1..b20668f9c2 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs @@ -16,9 +16,10 @@ //! 
package table query module defines the package table file read from mapped bytes -use crate::{AconfigStorageError, FILE_VERSION}; +use crate::AconfigStorageError; use aconfig_storage_file::{ package_table::PackageTableHeader, package_table::PackageTableNode, read_u32_from_bytes, + MAX_SUPPORTED_FILE_VERSION, }; use anyhow::anyhow; @@ -27,6 +28,7 @@ use anyhow::anyhow; pub struct PackageReadContext { pub package_id: u32, pub boolean_start_index: u32, + pub fingerprint: u64, } /// Query package read context: package id and start index @@ -35,11 +37,11 @@ pub fn find_package_read_context( package: &str, ) -> Result<Option<PackageReadContext>, AconfigStorageError> { let interpreted_header = PackageTableHeader::from_bytes(buf)?; - if interpreted_header.version > FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot read storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -55,11 +57,13 @@ pub fn find_package_read_context( } loop { - let interpreted_node = PackageTableNode::from_bytes(&buf[package_node_offset..])?; + let interpreted_node = + PackageTableNode::from_bytes(&buf[package_node_offset..], interpreted_header.version)?; if interpreted_node.package_name == package { return Ok(Some(PackageReadContext { package_id: interpreted_node.package_id, boolean_start_index: interpreted_node.boolean_start_index, + fingerprint: interpreted_node.fingerprint, })); } match interpreted_node.next_offset { @@ -72,29 +76,68 @@ pub fn find_package_read_context( #[cfg(test)] mod tests { use super::*; - use aconfig_storage_file::test_utils::create_test_package_table; + use aconfig_storage_file::{test_utils::create_test_package_table, DEFAULT_FILE_VERSION}; #[test] // this test point locks down table query fn test_package_query() { - let package_table = create_test_package_table().into_bytes(); + let package_table = create_test_package_table(DEFAULT_FILE_VERSION).into_bytes(); let package_context = find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 0, boolean_start_index: 0 }; + let expected_package_context = + PackageReadContext { package_id: 0, boolean_start_index: 0, fingerprint: 0 }; assert_eq!(package_context, expected_package_context); let package_context = find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_2") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 1, boolean_start_index: 3 }; + let expected_package_context = + PackageReadContext { package_id: 1, boolean_start_index: 3, fingerprint: 0 }; assert_eq!(package_context, expected_package_context); let package_context = find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_4") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 2, boolean_start_index: 6 }; + let expected_package_context = + PackageReadContext { package_id: 2, boolean_start_index: 6, fingerprint: 0 }; + assert_eq!(package_context, expected_package_context); + } + + #[test] + // this test point locks down table query + fn test_package_query_v2() { + let package_table = create_test_package_table(2).into_bytes(); + let package_context = + find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1") + .unwrap() + .unwrap(); + let 
expected_package_context = PackageReadContext { + package_id: 0, + boolean_start_index: 0, + fingerprint: 15248948510590158086u64, + }; + assert_eq!(package_context, expected_package_context); + let package_context = + find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_2") + .unwrap() + .unwrap(); + let expected_package_context = PackageReadContext { + package_id: 1, + boolean_start_index: 3, + fingerprint: 4431940502274857964u64, + }; + assert_eq!(package_context, expected_package_context); + let package_context = + find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_4") + .unwrap() + .unwrap(); + let expected_package_context = PackageReadContext { + package_id: 2, + boolean_start_index: 6, + fingerprint: 16233229917711622375u64, + }; assert_eq!(package_context, expected_package_context); } @@ -102,7 +145,7 @@ mod tests { // this test point locks down table query of a non exist package fn test_not_existed_package_query() { // this will land at an empty bucket - let package_table = create_test_package_table().into_bytes(); + let package_table = create_test_package_table(DEFAULT_FILE_VERSION).into_bytes(); let package_context = find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_3") .unwrap(); @@ -117,8 +160,8 @@ mod tests { #[test] // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { - let mut table = create_test_package_table(); - table.header.version = crate::FILE_VERSION + 1; + let mut table = create_test_package_table(DEFAULT_FILE_VERSION); + table.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let package_table = table.into_bytes(); let error = find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1") @@ -127,8 +170,8 @@ mod tests { format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})", - crate::FILE_VERSION + 1, - crate::FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java index 406ff24dd3..850c2b8146 100644 --- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java +++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java @@ -16,18 +16,14 @@ package android.aconfig.storage; +import dalvik.annotation.optimization.FastNative; + import java.io.FileInputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; -import java.nio.channels.FileChannel.MapMode; - -import android.aconfig.storage.PackageReadContext; -import android.aconfig.storage.FlagReadContext; - -import dalvik.annotation.optimization.FastNative; public class AconfigStorageReadAPI { @@ -50,9 +46,8 @@ public class AconfigStorageReadAPI { } // Map a storage file given container and file type - public static MappedByteBuffer getMappedFile( - String container, - StorageFileType type) throws IOException{ + public static MappedByteBuffer getMappedFile(String container, StorageFileType type) + throws IOException { switch (type) { case PACKAGE_MAP: return mapStorageFile(STORAGEDIR + "/maps/" + container + ".package.map"); @@ -73,14 +68,14 @@ public class 
AconfigStorageReadAPI { // @throws IOException if the passed in file is not a valid package map file @FastNative private static native ByteBuffer getPackageReadContextImpl( - ByteBuffer mappedFile, String packageName) throws IOException; + ByteBuffer mappedFile, String packageName) throws IOException; // API to get package read context // @param mappedFile: memory mapped package map file // @param packageName: package name // @throws IOException if the passed in file is not a valid package map file - static public PackageReadContext getPackageReadContext ( - ByteBuffer mappedFile, String packageName) throws IOException { + public static PackageReadContext getPackageReadContext( + ByteBuffer mappedFile, String packageName) throws IOException { ByteBuffer buffer = getPackageReadContextImpl(mappedFile, packageName); buffer.order(ByteOrder.LITTLE_ENDIAN); return new PackageReadContext(buffer.getInt(), buffer.getInt(4)); @@ -94,7 +89,7 @@ public class AconfigStorageReadAPI { // @throws IOException if the passed in file is not a valid flag map file @FastNative private static native ByteBuffer getFlagReadContextImpl( - ByteBuffer mappedFile, int packageId, String flagName) throws IOException; + ByteBuffer mappedFile, int packageId, String flagName) throws IOException; // API to get flag read context // @param mappedFile: memory mapped flag map file @@ -103,7 +98,7 @@ public class AconfigStorageReadAPI { // @param flagName: flag name // @throws IOException if the passed in file is not a valid flag map file public static FlagReadContext getFlagReadContext( - ByteBuffer mappedFile, int packageId, String flagName) throws IOException { + ByteBuffer mappedFile, int packageId, String flagName) throws IOException { ByteBuffer buffer = getFlagReadContextImpl(mappedFile, packageId, flagName); buffer.order(ByteOrder.LITTLE_ENDIAN); return new FlagReadContext(buffer.getInt(), buffer.getInt(4)); @@ -115,8 +110,11 @@ public class AconfigStorageReadAPI { // @throws IOException if the passed in file is not a valid flag value file or the // flag index went over the file boundary. @FastNative - public static native boolean getBooleanFlagValue( - ByteBuffer mappedFile, int flagIndex) throws IOException; + public static native boolean getBooleanFlagValue(ByteBuffer mappedFile, int flagIndex) + throws IOException; + + @FastNative + public static native long hash(String packageName) throws IOException; static { System.loadLibrary("aconfig_storage_read_api_rust_jni"); diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java deleted file mode 100644 index 71c1c0dc09..0000000000 --- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright (C) 2024 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package android.aconfig.storage; - -import android.compat.annotation.UnsupportedAppUsage; -import android.os.StrictMode; - -import java.io.Closeable; -import java.nio.MappedByteBuffer; -import java.nio.channels.FileChannel; -import java.nio.file.Paths; -import java.nio.file.StandardOpenOption; - -/** @hide */ -public class StorageInternalReader { - - private static final String MAP_PATH = "/metadata/aconfig/maps/"; - private static final String BOOT_PATH = "/metadata/aconfig/boot/"; - - private PackageTable mPackageTable; - private FlagValueList mFlagValueList; - - private int mPackageBooleanStartOffset; - - @UnsupportedAppUsage - public StorageInternalReader(String container, String packageName) { - this(packageName, MAP_PATH + container + ".package.map", BOOT_PATH + container + ".val"); - } - - @UnsupportedAppUsage - public StorageInternalReader(String packageName, String packageMapFile, String flagValueFile) { - StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads(); - mPackageTable = PackageTable.fromBytes(mapStorageFile(packageMapFile)); - mFlagValueList = FlagValueList.fromBytes(mapStorageFile(flagValueFile)); - StrictMode.setThreadPolicy(oldPolicy); - mPackageBooleanStartOffset = getPackageBooleanStartOffset(packageName); - } - - @UnsupportedAppUsage - public boolean getBooleanFlagValue(int index) { - index += mPackageBooleanStartOffset; - if (index >= mFlagValueList.size()) { - throw new AconfigStorageException("Fail to get boolean flag value"); - } - return mFlagValueList.get(index); - } - - private int getPackageBooleanStartOffset(String packageName) { - PackageTable.Node pNode = mPackageTable.get(packageName); - if (pNode == null) { - PackageTable.Header header = mPackageTable.getHeader(); - throw new AconfigStorageException( - String.format( - "Fail to get package %s from container %s", - packageName, header.getContainer())); - } - return pNode.getBooleanStartIndex(); - } - - // Map a storage file given file path - private static MappedByteBuffer mapStorageFile(String file) { - FileChannel channel = null; - try { - channel = FileChannel.open(Paths.get(file), StandardOpenOption.READ); - return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size()); - } catch (Exception e) { - throw new AconfigStorageException( - String.format("Fail to mmap storage file %s", file), e); - } finally { - quietlyDispose(channel); - } - } - - private static void quietlyDispose(Closeable closable) { - try { - if (closable != null) { - closable.close(); - } - } catch (Exception e) { - // no need to care, at least as of now - } - } -} diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackage.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackage.java new file mode 100644 index 0000000000..ddad249fa0 --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackage.java @@ -0,0 +1,174 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.os.flagging; + +import static android.aconfig.storage.TableUtils.StorageFilesBundle; + +import android.aconfig.storage.AconfigStorageException; +import android.aconfig.storage.FlagTable; +import android.aconfig.storage.FlagValueList; +import android.aconfig.storage.PackageTable; +import android.compat.annotation.UnsupportedAppUsage; +import android.util.Log; + +import java.io.Closeable; +import java.nio.MappedByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +/** + * An {@code aconfig} package containing the enabled state of its flags. + * + * <p><strong>Note: this is intended only to be used by generated code. To determine if a given flag + * is enabled in app code, the generated android flags should be used.</strong> + * + * <p>This class is used to read the flag from platform Aconfig Package.Each instance of this class + * will cache information related to one package. To read flags from a different package, a new + * instance of this class should be {@link #load loaded}. + * + * @hide + */ +public class PlatformAconfigPackage { + private static final String TAG = "PlatformAconfigPackage"; + private static final String MAP_PATH = "/metadata/aconfig/maps/"; + private static final String BOOT_PATH = "/metadata/aconfig/boot/"; + + private FlagTable mFlagTable; + private FlagValueList mFlagValueList; + + private int mPackageBooleanStartOffset = -1; + private int mPackageId = -1; + + private PlatformAconfigPackage() {} + + /** @hide */ + static final Map<String, StorageFilesBundle> sStorageFilesCache = new HashMap<>(); + + /** @hide */ + @UnsupportedAppUsage + public static final Set<String> PLATFORM_PACKAGE_MAP_FILES = + Set.of("system.package.map", "vendor.package.map", "product.package.map"); + + static { + for (String pf : PLATFORM_PACKAGE_MAP_FILES) { + try { + PackageTable pTable = PackageTable.fromBytes(mapStorageFile(MAP_PATH + pf)); + String container = pTable.getHeader().getContainer(); + FlagTable fTable = + FlagTable.fromBytes(mapStorageFile(MAP_PATH + container + ".flag.map")); + FlagValueList fValueList = + FlagValueList.fromBytes(mapStorageFile(BOOT_PATH + container + ".val")); + StorageFilesBundle files = new StorageFilesBundle(pTable, fTable, fValueList); + for (String packageName : pTable.getPackageList()) { + sStorageFilesCache.put(packageName, files); + } + } catch (Exception e) { + // pass + Log.w(TAG, e.toString()); + } + } + } + + /** + * Loads a platform Aconfig Package from Aconfig Storage. + * + * <p>This method attempts to load the specified platform Aconfig package. + * + * @param packageName The name of the Aconfig package to load. + * @return An instance of {@link PlatformAconfigPackage}, which may be empty if the package is + * not found in the container. Null if the package is not found in platform partitions. + * @throws AconfigStorageReadException if there is an error reading from Aconfig Storage, such + * as if the storage system is not found, or there is an error reading the storage file. The + * specific error code can be got using {@link AconfigStorageReadException#getErrorCode()}. 
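A minimal usage sketch for the load() method documented above; the package and flag names are borrowed from the test data elsewhere in this change and are illustrative, not a prescribed call site:

// Illustrative only (assumes import android.os.flagging.PlatformAconfigPackage).
static boolean readEnabledRw() {
    // load() returns null when the package is not found on any platform partition,
    // and may throw AconfigStorageReadException on storage read errors.
    PlatformAconfigPackage pkg =
            PlatformAconfigPackage.load("com.android.aconfig.storage.test_1");
    return pkg != null && pkg.getBooleanFlagValue("enabled_rw", /* defaultValue= */ false);
}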
+ * @hide + */ + @UnsupportedAppUsage + public static PlatformAconfigPackage load(String packageName) { + try { + PlatformAconfigPackage aconfigPackage = new PlatformAconfigPackage(); + StorageFilesBundle files = sStorageFilesCache.get(packageName); + if (files == null) { + return null; + } + PackageTable.Node pNode = files.packageTable.get(packageName); + aconfigPackage.mFlagTable = files.flagTable; + aconfigPackage.mFlagValueList = files.flagValueList; + aconfigPackage.mPackageBooleanStartOffset = pNode.getBooleanStartIndex(); + aconfigPackage.mPackageId = pNode.getPackageId(); + return aconfigPackage; + } catch (AconfigStorageException e) { + throw new AconfigStorageReadException( + e.getErrorCode(), "Fail to create AconfigPackage", e); + } catch (Exception e) { + throw new AconfigStorageReadException( + AconfigStorageReadException.ERROR_GENERIC, + "Fail to create PlatformAconfigPackage", + e); + } + } + + /** + * Retrieves the value of a boolean flag. + * + * <p>This method retrieves the value of the specified flag. If the flag exists within the + * loaded Aconfig Package, its value is returned. Otherwise, the provided `defaultValue` is + * returned. + * + * @param flagName The name of the flag (excluding any package name prefix). + * @param defaultValue The value to return if the flag is not found. + * @return The boolean value of the flag, or `defaultValue` if the flag is not found. + * @hide + */ + @UnsupportedAppUsage + public boolean getBooleanFlagValue(String flagName, boolean defaultValue) { + FlagTable.Node fNode = mFlagTable.get(mPackageId, flagName); + if (fNode == null) { + return defaultValue; + } + return mFlagValueList.getBoolean(fNode.getFlagIndex() + mPackageBooleanStartOffset); + } + + // Map a storage file given file path + private static MappedByteBuffer mapStorageFile(String file) { + FileChannel channel = null; + try { + channel = FileChannel.open(Paths.get(file), StandardOpenOption.READ); + return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size()); + } catch (Exception e) { + throw new AconfigStorageReadException( + AconfigStorageReadException.ERROR_CANNOT_READ_STORAGE_FILE, + "Fail to mmap storage", + e); + } finally { + quietlyDispose(channel); + } + } + + private static void quietlyDispose(Closeable closable) { + try { + if (closable != null) { + closable.close(); + } + } catch (Exception e) { + // no need to care, at least as of now + } + } +} diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java new file mode 100644 index 0000000000..da18fb9fe0 --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java @@ -0,0 +1,101 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.os.flagging; + +import static android.aconfig.storage.TableUtils.StorageFilesBundle; + +import android.aconfig.storage.AconfigStorageException; +import android.aconfig.storage.FlagValueList; +import android.aconfig.storage.PackageTable; +import android.compat.annotation.UnsupportedAppUsage; + +/** + * An {@code aconfig} package containing the enabled state of its flags. + * + * <p><strong>Note: this is intended only to be used by generated code. To determine if a given flag + * is enabled in app code, the generated android flags should be used.</strong> + * + * <p>This class is not part of the public API and should be used by Acnofig Flag internally </b> It + * is intended for internal use only and will be changed or removed without notice. + * + * <p>This class is used to read the flag from Aconfig Package.Each instance of this class will + * cache information related to one package. To read flags from a different package, a new instance + * of this class should be {@link #load loaded}. + * + * @hide + */ +public class PlatformAconfigPackageInternal { + + private final FlagValueList mFlagValueList; + private final int mPackageBooleanStartOffset; + + private PlatformAconfigPackageInternal( + FlagValueList flagValueList, int packageBooleanStartOffset) { + this.mFlagValueList = flagValueList; + this.mPackageBooleanStartOffset = packageBooleanStartOffset; + } + + /** + * Loads an Aconfig package from the specified container and verifies its fingerprint. + * + * <p>This method is intended for internal use only and may be changed or removed without + * notice. + * + * @param packageName The name of the Aconfig package. + * @param packageFingerprint The expected fingerprint of the package. + * @return An instance of {@link PlatformAconfigPackageInternal} representing the loaded + * package. + * @hide + */ + @UnsupportedAppUsage + public static PlatformAconfigPackageInternal load(String packageName, long packageFingerprint) { + StorageFilesBundle files = PlatformAconfigPackage.sStorageFilesCache.get(packageName); + if (files == null) { + throw new AconfigStorageException( + AconfigStorageException.ERROR_PACKAGE_NOT_FOUND, + "package " + packageName + " cannot be found on the device"); + } + PackageTable.Node pNode = files.packageTable.get(packageName); + FlagValueList vList = files.flagValueList; + + if (pNode.hasPackageFingerprint() && packageFingerprint != pNode.getPackageFingerprint()) { + throw new AconfigStorageException( + AconfigStorageException.ERROR_FILE_FINGERPRINT_MISMATCH, + "package " + packageName + "fingerprint doesn't match the one on device"); + } + + return new PlatformAconfigPackageInternal(vList, pNode.getBooleanStartIndex()); + } + + /** + * Retrieves the value of a boolean flag using its index. + * + * <p>This method is intended for internal use only and may be changed or removed without + * notice. + * + * <p>This method retrieves the value of a flag within the loaded Aconfig package using its + * index. The index is generated at build time and may vary between builds. + * + * @param index The index of the flag within the package. + * @return The boolean value of the flag. 
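A rough sketch of the intended flow for this internal variant; the fingerprint and flag index below are placeholders for values that generated code supplies at build time:

// Illustrative only (assumes import android.os.flagging.PlatformAconfigPackageInternal).
static boolean readFlagByIndex() {
    long packageFingerprint = 0L; // placeholder; real value comes from codegen
    int flagIndex = 0; // placeholder; index of the flag within its package
    // load() throws AconfigStorageException if the package is missing or the
    // fingerprint does not match the one stored on the device.
    PlatformAconfigPackageInternal pkg =
            PlatformAconfigPackageInternal.load(
                    "com.android.aconfig.storage.test_1", packageFingerprint);
    return pkg.getBooleanFlagValue(flagIndex);
}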
+ * @hide + */ + @UnsupportedAppUsage + public boolean getBooleanFlagValue(int index) { + return mFlagValueList.getBoolean(index + mPackageBooleanStartOffset); + } +} diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs index 304a059c90..f5f12bb1fa 100644 --- a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs +++ b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs @@ -1,5 +1,6 @@ //! aconfig storage read api java rust interlop +use aconfig_storage_file::SipHasher13; use aconfig_storage_read_api::flag_table_query::find_flag_read_context; use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value; use aconfig_storage_read_api::package_table_query::find_package_read_context; @@ -7,8 +8,9 @@ use aconfig_storage_read_api::{FlagReadContext, PackageReadContext}; use anyhow::Result; use jni::objects::{JByteBuffer, JClass, JString}; -use jni::sys::{jboolean, jint}; +use jni::sys::{jboolean, jint, jlong}; use jni::JNIEnv; +use std::hash::Hasher; /// Call rust find package read context fn get_package_read_context_java( @@ -158,3 +160,30 @@ pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getBoo } } } + +/// Get flag value JNI +#[no_mangle] +#[allow(unused)] +pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_hash<'local>( + mut env: JNIEnv<'local>, + class: JClass<'local>, + package_name: JString<'local>, +) -> jlong { + match siphasher13_hash(&mut env, package_name) { + Ok(value) => value as jlong, + Err(errmsg) => { + env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw"); + 0i64 + } + } +} + +fn siphasher13_hash(env: &mut JNIEnv, package_name: JString) -> Result<u64> { + // SAFETY: + // The safety here is ensured as the flag name is guaranteed to be a java string + let flag_name: String = unsafe { env.get_string_unchecked(&package_name)?.into() }; + let mut s = SipHasher13::new(); + s.write(flag_name.as_bytes()); + s.write_u8(0xff); + Ok(s.finish()) +} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidTest.xml b/tools/aconfig/aconfig_storage_read_api/tests/AconfigStorageReadFunctionalTest.xml index 99c9e2566e..ee50060208 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidTest.xml +++ b/tools/aconfig/aconfig_storage_read_api/tests/AconfigStorageReadFunctionalTest.xml @@ -26,7 +26,7 @@ </target_preparer> <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup"> - <option name="test-file-name" value="aconfig_storage_read_api.test.java.apk" /> + <option name="test-file-name" value="aconfig_storage_read_functional.apk" /> </target_preparer> <target_preparer class="com.android.tradefed.targetprep.DisableSELinuxTargetPreparer" /> @@ -35,17 +35,17 @@ <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer"> <option name="cleanup" value="true" /> <option name="abort-on-push-failure" value="true" /> - <option name="push-file" key="package.map" + <option name="push-file" key="package_v1.map" value="/data/local/tmp/aconfig_java_api_test/maps/mockup.package.map" /> - <option name="push-file" key="flag.map" + <option name="push-file" key="flag_v1.map" value="/data/local/tmp/aconfig_java_api_test/maps/mockup.flag.map" /> - <option name="push-file" key="flag.val" + <option name="push-file" key="flag_v1.val" value="/data/local/tmp/aconfig_java_api_test/boot/mockup.val" /> </target_preparer> <test class="com.android.tradefed.testtype.AndroidJUnitTest" > <option name="runner" 
value="androidx.test.runner.AndroidJUnitRunner" /> - <option name="package" value="android.aconfig_storage.test" /> + <option name="package" value="android.aconfig.storage.test" /> <option name="runtime-hint" value="1m" /> </test> -</configuration> +</configuration>
\ No newline at end of file diff --git a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp index ed0c728215..c071f7cd88 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp +++ b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp @@ -1,9 +1,14 @@ filegroup { name: "read_api_test_storage_files", - srcs: ["package.map", - "flag.map", - "flag.val", - "flag.info" + srcs: [ + "data/v1/package_v1.map", + "data/v1/flag_v1.map", + "data/v1/flag_v1.val", + "data/v1/flag_v1.info", + "data/v2/package_v2.map", + "data/v2/flag_v2.map", + "data/v2/flag_v2.val", + "data/v2/flag_v2.info", ], } @@ -43,3 +48,30 @@ cc_test { "general-tests", ], } + +android_test { + name: "aconfig_storage_read_functional", + srcs: [ + "functional/srcs/**/*.java", + ], + static_libs: [ + "aconfig_device_paths_java_util", + "aconfig_storage_file_java", + "androidx.test.rules", + "libaconfig_storage_read_api_java", + "junit", + ], + jni_libs: [ + "libaconfig_storage_read_api_rust_jni", + ], + data: [ + ":read_api_test_storage_files", + ], + platform_apis: true, + certificate: "platform", + test_suites: [ + "general-tests", + ], + test_config: "AconfigStorageReadFunctionalTest.xml", + team: "trendy_team_android_core_experiments", +} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/AndroidManifest.xml b/tools/aconfig/aconfig_storage_read_api/tests/AndroidManifest.xml new file mode 100644 index 0000000000..5e01879157 --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/tests/AndroidManifest.xml @@ -0,0 +1,27 @@ +<?xml version="1.0" encoding="utf-8"?> +<!-- + ~ Copyright (C) 2024 The Android Open Source Project + ~ + ~ Licensed under the Apache License, Version 2.0 (the "License"); + ~ you may not use this file except in compliance with the License. + ~ You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. 
+ --> + +<manifest xmlns:android="http://schemas.android.com/apk/res/android" + package="android.aconfig.storage.test"> + <application> + <uses-library android:name="android.test.runner" /> + </application> + + <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner" + android:targetPackage="android.aconfig.storage.test" /> + +</manifest> diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.info b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.info Binary files differindex 6223edf369..6223edf369 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/flag.info +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.info diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.map Binary files differindex e868f53d7e..e868f53d7e 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/flag.map +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.map diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.val b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.val Binary files differindex ed203d4d13..ed203d4d13 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/flag.val +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.val diff --git a/tools/aconfig/aconfig_storage_read_api/tests/package.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/package_v1.map Binary files differindex 6c46a0339c..6c46a0339c 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/package.map +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/package_v1.map diff --git a/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.info b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.info Binary files differnew file mode 100644 index 0000000000..9db7fde7ae --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.info diff --git a/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.map Binary files differnew file mode 100644 index 0000000000..cf4685ceb4 --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.map diff --git a/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.val b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.val Binary files differnew file mode 100644 index 0000000000..37d4750206 --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.val diff --git a/tools/aconfig/aconfig_storage_read_api/tests/data/v2/package_v2.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/package_v2.map Binary files differnew file mode 100644 index 0000000000..0a9f95ec85 --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/package_v2.map diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/AconfigStorageReadAPITest.java index a26b25707d..0587e9d4c5 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java +++ b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/AconfigStorageReadAPITest.java @@ -16,28 +16,28 @@ package android.aconfig.storage.test; -import java.io.IOException; -import java.nio.MappedByteBuffer; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - import static 
org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; +import android.aconfig.DeviceProtosTestUtil; +import android.aconfig.nano.Aconfig.parsed_flag; import android.aconfig.storage.AconfigStorageReadAPI; -import android.aconfig.storage.PackageReadContext; import android.aconfig.storage.FlagReadContext; import android.aconfig.storage.FlagReadContext.StoredFlagType; +import android.aconfig.storage.PackageReadContext; +import android.aconfig.storage.SipHasher13; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.IOException; +import java.nio.MappedByteBuffer; +import java.util.ArrayList; +import java.util.List; @RunWith(JUnit4.class) -public class AconfigStorageReadAPITest{ +public class AconfigStorageReadAPITest { private String mStorageDir = "/data/local/tmp/aconfig_java_api_test"; @@ -45,26 +45,29 @@ public class AconfigStorageReadAPITest{ public void testPackageContextQuery() { MappedByteBuffer packageMap = null; try { - packageMap = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/maps/mockup.package.map"); - } catch(IOException ex){ + packageMap = + AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map"); + } catch (IOException ex) { assertTrue(ex.toString(), false); } assertTrue(packageMap != null); try { - PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext( - packageMap, "com.android.aconfig.storage.test_1"); + PackageReadContext context = + AconfigStorageReadAPI.getPackageReadContext( + packageMap, "com.android.aconfig.storage.test_1"); assertEquals(context.mPackageId, 0); assertEquals(context.mBooleanStartIndex, 0); - context = AconfigStorageReadAPI.getPackageReadContext( - packageMap, "com.android.aconfig.storage.test_2"); + context = + AconfigStorageReadAPI.getPackageReadContext( + packageMap, "com.android.aconfig.storage.test_2"); assertEquals(context.mPackageId, 1); assertEquals(context.mBooleanStartIndex, 3); - context = AconfigStorageReadAPI.getPackageReadContext( - packageMap, "com.android.aconfig.storage.test_4"); + context = + AconfigStorageReadAPI.getPackageReadContext( + packageMap, "com.android.aconfig.storage.test_4"); assertEquals(context.mPackageId, 2); assertEquals(context.mBooleanStartIndex, 6); } catch (IOException ex) { @@ -76,19 +79,19 @@ public class AconfigStorageReadAPITest{ public void testNonExistPackageContextQuery() { MappedByteBuffer packageMap = null; try { - packageMap = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/maps/mockup.package.map"); - } catch(IOException ex){ + packageMap = + AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map"); + } catch (IOException ex) { assertTrue(ex.toString(), false); } assertTrue(packageMap != null); try { - PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext( - packageMap, "unknown"); + PackageReadContext context = + AconfigStorageReadAPI.getPackageReadContext(packageMap, "unknown"); assertEquals(context.mPackageId, -1); assertEquals(context.mBooleanStartIndex, -1); - } catch(IOException ex){ + } catch (IOException ex) { assertTrue(ex.toString(), false); } } @@ -97,12 +100,11 @@ public class AconfigStorageReadAPITest{ public void testFlagContextQuery() { MappedByteBuffer flagMap = null; try { - flagMap = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/maps/mockup.flag.map"); - } 
catch(IOException ex){ + flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map"); + } catch (IOException ex) { assertTrue(ex.toString(), false); } - assertTrue(flagMap!= null); + assertTrue(flagMap != null); class Baseline { public int mPackageId; @@ -110,10 +112,8 @@ public class AconfigStorageReadAPITest{ public StoredFlagType mFlagType; public int mFlagIndex; - public Baseline(int packageId, - String flagName, - StoredFlagType flagType, - int flagIndex) { + public Baseline( + int packageId, String flagName, StoredFlagType flagType, int flagIndex) { mPackageId = packageId; mFlagName = flagName; mFlagType = flagType; @@ -133,8 +133,9 @@ public class AconfigStorageReadAPITest{ try { for (Baseline baseline : baselines) { - FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext( - flagMap, baseline.mPackageId, baseline.mFlagName); + FlagReadContext context = + AconfigStorageReadAPI.getFlagReadContext( + flagMap, baseline.mPackageId, baseline.mFlagName); assertEquals(context.mFlagType, baseline.mFlagType); assertEquals(context.mFlagIndex, baseline.mFlagIndex); } @@ -147,21 +148,19 @@ public class AconfigStorageReadAPITest{ public void testNonExistFlagContextQuery() { MappedByteBuffer flagMap = null; try { - flagMap = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/maps/mockup.flag.map"); - } catch(IOException ex){ + flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map"); + } catch (IOException ex) { assertTrue(ex.toString(), false); } - assertTrue(flagMap!= null); + assertTrue(flagMap != null); try { - FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext( - flagMap, 0, "unknown"); + FlagReadContext context = + AconfigStorageReadAPI.getFlagReadContext(flagMap, 0, "unknown"); assertEquals(context.mFlagType, null); assertEquals(context.mFlagIndex, -1); - context = AconfigStorageReadAPI.getFlagReadContext( - flagMap, 3, "enabled_ro"); + context = AconfigStorageReadAPI.getFlagReadContext(flagMap, 3, "enabled_ro"); assertEquals(context.mFlagType, null); assertEquals(context.mFlagIndex, -1); } catch (IOException ex) { @@ -173,12 +172,11 @@ public class AconfigStorageReadAPITest{ public void testBooleanFlagValueQuery() { MappedByteBuffer flagVal = null; try { - flagVal = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/boot/mockup.val"); + flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val"); } catch (IOException ex) { assertTrue(ex.toString(), false); } - assertTrue(flagVal!= null); + assertTrue(flagVal != null); boolean[] baselines = {false, true, true, false, true, true, true, true}; for (int i = 0; i < 8; ++i) { @@ -195,12 +193,11 @@ public class AconfigStorageReadAPITest{ public void testInvalidBooleanFlagValueQuery() { MappedByteBuffer flagVal = null; try { - flagVal = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/boot/mockup.val"); + flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val"); } catch (IOException ex) { assertTrue(ex.toString(), false); } - assertTrue(flagVal!= null); + assertTrue(flagVal != null); try { Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, 9); @@ -210,4 +207,21 @@ public class AconfigStorageReadAPITest{ assertTrue(ex.toString(), ex.toString().contains(expectedErrmsg)); } } - } + + @Test + public void testRustJavaEqualHash() throws IOException { + List<parsed_flag> flags = DeviceProtosTestUtil.loadAndParseFlagProtos(); + for (parsed_flag flag : flags) { + String packageName = 
flag.package_; + String flagName = flag.name; + long rHash = AconfigStorageReadAPI.hash(packageName); + long jHash = SipHasher13.hash(packageName.getBytes()); + assertEquals(rHash, jHash); + + String fullFlagName = packageName + "/" + flagName; + rHash = AconfigStorageReadAPI.hash(fullFlagName); + jHash = SipHasher13.hash(fullFlagName.getBytes()); + assertEquals(rHash, jHash); + } + } +} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java new file mode 100644 index 0000000000..9896baff87 --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.aconfig.storage.test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; + +import android.aconfig.DeviceProtosTestUtil; +import android.aconfig.nano.Aconfig; +import android.aconfig.nano.Aconfig.parsed_flag; +import android.aconfig.storage.FlagTable; +import android.aconfig.storage.FlagValueList; +import android.aconfig.storage.PackageTable; +import android.aconfig.storage.StorageFileProvider; +import android.internal.aconfig.storage.AconfigStorageException; +import android.os.flagging.PlatformAconfigPackageInternal; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +@RunWith(JUnit4.class) +public class PlatformAconfigPackageInternalTest { + + private static final Set<String> PLATFORM_CONTAINERS = Set.of("system", "vendor", "product"); + + @Test + public void testPlatformAconfigPackageInternal_load() throws IOException { + List<parsed_flag> flags = DeviceProtosTestUtil.loadAndParseFlagProtos(); + Map<String, PlatformAconfigPackageInternal> readerMap = new HashMap<>(); + StorageFileProvider fp = StorageFileProvider.getDefaultProvider(); + + for (parsed_flag flag : flags) { + if (flag.permission == Aconfig.READ_ONLY && flag.state == Aconfig.DISABLED) { + continue; + } + String container = flag.container; + String packageName = flag.package_; + String flagName = flag.name; + if (!PLATFORM_CONTAINERS.contains(container)) continue; + + PackageTable pTable = fp.getPackageTable(container); + PackageTable.Node pNode = pTable.get(packageName); + FlagTable fTable = fp.getFlagTable(container); + FlagTable.Node fNode = fTable.get(pNode.getPackageId(), flagName); + FlagValueList fList = fp.getFlagValueList(container); + + int index = pNode.getBooleanStartIndex() + fNode.getFlagIndex(); + boolean rVal = fList.getBoolean(index); + + long fingerprint = pNode.getPackageFingerprint(); + + PlatformAconfigPackageInternal reader = readerMap.get(packageName); + if (reader == 
null) { + reader = PlatformAconfigPackageInternal.load(packageName, fingerprint); + readerMap.put(packageName, reader); + } + boolean jVal = reader.getBooleanFlagValue(fNode.getFlagIndex()); + + assertEquals(rVal, jVal); + } + } + + @Test + public void testPlatformAconfigPackage_load_withError() throws IOException { + // package not found + AconfigStorageException e = + assertThrows( + AconfigStorageException.class, + () -> PlatformAconfigPackageInternal.load("fake_package", 0)); + assertEquals(AconfigStorageException.ERROR_PACKAGE_NOT_FOUND, e.getErrorCode()); + + // fingerprint doesn't match + List<parsed_flag> flags = DeviceProtosTestUtil.loadAndParseFlagProtos(); + StorageFileProvider fp = StorageFileProvider.getDefaultProvider(); + + parsed_flag flag = flags.get(0); + + String container = flag.container; + String packageName = flag.package_; + boolean value = flag.state == Aconfig.ENABLED; + + PackageTable pTable = fp.getPackageTable(container); + PackageTable.Node pNode = pTable.get(packageName); + + if (pNode.hasPackageFingerprint()) { + long fingerprint = pNode.getPackageFingerprint(); + e = + assertThrows( + AconfigStorageException.class, + () -> + PlatformAconfigPackageInternal.load( + packageName, fingerprint + 1)); + assertEquals(AconfigStorageException.ERROR_FILE_FINGERPRINT_MISMATCH, e.getErrorCode()); + } + } +} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageTest.java b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageTest.java new file mode 100644 index 0000000000..1c6c238542 --- /dev/null +++ b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageTest.java @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.aconfig.storage.test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import android.aconfig.DeviceProtosTestUtil; +import android.aconfig.nano.Aconfig; +import android.aconfig.nano.Aconfig.parsed_flag; +import android.aconfig.storage.FlagTable; +import android.aconfig.storage.FlagValueList; +import android.aconfig.storage.PackageTable; +import android.aconfig.storage.StorageFileProvider; +import android.os.flagging.PlatformAconfigPackage; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +@RunWith(JUnit4.class) +public class PlatformAconfigPackageTest { + + private static final Set<String> PLATFORM_CONTAINERS = Set.of("system", "vendor", "product"); + + @Test + public void testPlatformAconfigPackage_StorageFilesCache() throws IOException { + List<parsed_flag> flags = DeviceProtosTestUtil.loadAndParseFlagProtos(); + for (parsed_flag flag : flags) { + if (flag.permission == Aconfig.READ_ONLY && flag.state == Aconfig.DISABLED) { + continue; + } + String container = flag.container; + String packageName = flag.package_; + if (!PLATFORM_CONTAINERS.contains(container)) continue; + assertNotNull(PlatformAconfigPackage.load(packageName)); + } + } + + @Test + public void testPlatformAconfigPackage_load() throws IOException { + List<parsed_flag> flags = DeviceProtosTestUtil.loadAndParseFlagProtos(); + Map<String, PlatformAconfigPackage> readerMap = new HashMap<>(); + StorageFileProvider fp = StorageFileProvider.getDefaultProvider(); + + for (parsed_flag flag : flags) { + if (flag.permission == Aconfig.READ_ONLY && flag.state == Aconfig.DISABLED) { + continue; + } + String container = flag.container; + String packageName = flag.package_; + String flagName = flag.name; + if (!PLATFORM_CONTAINERS.contains(container)) continue; + + PackageTable pTable = fp.getPackageTable(container); + PackageTable.Node pNode = pTable.get(packageName); + FlagTable fTable = fp.getFlagTable(container); + FlagTable.Node fNode = fTable.get(pNode.getPackageId(), flagName); + FlagValueList fList = fp.getFlagValueList(container); + + int index = pNode.getBooleanStartIndex() + fNode.getFlagIndex(); + boolean rVal = fList.getBoolean(index); + + long fingerprint = pNode.getPackageFingerprint(); + + PlatformAconfigPackage reader = readerMap.get(packageName); + if (reader == null) { + reader = PlatformAconfigPackage.load(packageName); + readerMap.put(packageName, reader); + } + boolean jVal = reader.getBooleanFlagValue(flagName, !rVal); + + assertEquals(rVal, jVal); + } + } + + @Test + public void testPlatformAconfigPackage_load_withError() throws IOException { + // package not found + assertNull(PlatformAconfigPackage.load("fake_container")); + } +} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp deleted file mode 100644 index 11b3824e82..0000000000 --- a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp +++ /dev/null @@ -1,22 +0,0 @@ -android_test { - name: "aconfig_storage_read_api.test.java", - srcs: ["./**/*.java"], - static_libs: [ - "aconfig_storage_reader_java", - "androidx.test.rules", - "libaconfig_storage_read_api_java", - "junit", - ], - jni_libs: [ - "libaconfig_storage_read_api_rust_jni", - ], - data: [ - 
":read_api_test_storage_files", - ], - platform_apis: true, - certificate: "platform", - test_suites: [ - "general-tests", - ], - team: "trendy_team_android_core_experiments", -} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp index 6d29045efe..5289faa6de 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp +++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp @@ -45,7 +45,8 @@ class AconfigStorageTest : public ::testing::Test { } void SetUp() override { - auto const test_dir = android::base::GetExecutableDirectory(); + auto const test_base_dir = android::base::GetExecutableDirectory(); + auto const test_dir = test_base_dir + "/data/v1"; storage_dir = std::string(root_dir.path); auto maps_dir = storage_dir + "/maps"; auto boot_dir = storage_dir + "/boot"; @@ -55,10 +56,10 @@ class AconfigStorageTest : public ::testing::Test { flag_map = std::string(maps_dir) + "/mockup.flag.map"; flag_val = std::string(boot_dir) + "/mockup.val"; flag_info = std::string(boot_dir) + "/mockup.info"; - copy_file(test_dir + "/package.map", package_map); - copy_file(test_dir + "/flag.map", flag_map); - copy_file(test_dir + "/flag.val", flag_val); - copy_file(test_dir + "/flag.info", flag_info); + copy_file(test_dir + "/package_v1.map", package_map); + copy_file(test_dir + "/flag_v1.map", flag_map); + copy_file(test_dir + "/flag_v1.val", flag_val); + copy_file(test_dir + "/flag_v1.info", flag_info); } void TearDown() override { diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs index afc44d4d70..2a8edf3302 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs +++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs @@ -9,7 +9,7 @@ mod aconfig_storage_rust_test { use rand::Rng; use std::fs; - fn create_test_storage_files() -> String { + fn create_test_storage_files(version: u32) -> String { let mut rng = rand::thread_rng(); let number: u32 = rng.gen(); let storage_dir = String::from("/tmp/") + &number.to_string(); @@ -26,17 +26,17 @@ mod aconfig_storage_rust_test { let flag_map = storage_dir.clone() + "/maps/mockup.flag.map"; let flag_val = storage_dir.clone() + "/boot/mockup.val"; let flag_info = storage_dir.clone() + "/boot/mockup.info"; - fs::copy("./package.map", package_map).unwrap(); - fs::copy("./flag.map", flag_map).unwrap(); - fs::copy("./flag.val", flag_val).unwrap(); - fs::copy("./flag.info", flag_info).unwrap(); + fs::copy(format!("./data/v{0}/package_v{0}.map", version), package_map).unwrap(); + fs::copy(format!("./data/v{0}/flag_v{0}.map", version), flag_map).unwrap(); + fs::copy(format!("./data/v{}/flag_v{0}.val", version), flag_val).unwrap(); + fs::copy(format!("./data/v{}/flag_v{0}.info", version), flag_info).unwrap(); storage_dir } #[test] - fn test_unavailable_stoarge() { - let storage_dir = create_test_storage_files(); + fn test_unavailable_storage() { + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp storage file let err = unsafe { @@ -53,7 +53,7 @@ mod aconfig_storage_rust_test { #[test] fn test_package_context_query() { - let storage_dir = create_test_storage_files(); + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp 
storage file let package_mapped_file = unsafe { @@ -64,27 +64,73 @@ mod aconfig_storage_rust_test { get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_1") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 0, boolean_start_index: 0 }; + let expected_package_context = + PackageReadContext { package_id: 0, boolean_start_index: 0, fingerprint: 0 }; assert_eq!(package_context, expected_package_context); let package_context = get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_2") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 1, boolean_start_index: 3 }; + let expected_package_context = + PackageReadContext { package_id: 1, boolean_start_index: 3, fingerprint: 0 }; assert_eq!(package_context, expected_package_context); let package_context = get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_4") .unwrap() .unwrap(); - let expected_package_context = PackageReadContext { package_id: 2, boolean_start_index: 6 }; + let expected_package_context = + PackageReadContext { package_id: 2, boolean_start_index: 6, fingerprint: 0 }; + assert_eq!(package_context, expected_package_context); + } + + #[test] + fn test_package_context_query_with_fingerprint() { + let storage_dir = create_test_storage_files(2); + // SAFETY: + // The safety here is ensured as the test process will not write to temp storage file + let package_mapped_file = unsafe { + get_mapped_file(&storage_dir, "mockup", StorageFileType::PackageMap).unwrap() + }; + + let package_context = + get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_1") + .unwrap() + .unwrap(); + let expected_package_context = PackageReadContext { + package_id: 0, + boolean_start_index: 0, + fingerprint: 15248948510590158086u64, + }; + assert_eq!(package_context, expected_package_context); + + let package_context = + get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_2") + .unwrap() + .unwrap(); + let expected_package_context = PackageReadContext { + package_id: 1, + boolean_start_index: 3, + fingerprint: 4431940502274857964u64, + }; + assert_eq!(package_context, expected_package_context); + + let package_context = + get_package_read_context(&package_mapped_file, "com.android.aconfig.storage.test_4") + .unwrap() + .unwrap(); + let expected_package_context = PackageReadContext { + package_id: 2, + boolean_start_index: 6, + fingerprint: 16233229917711622375u64, + }; assert_eq!(package_context, expected_package_context); } #[test] fn test_none_exist_package_context_query() { - let storage_dir = create_test_storage_files(); + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp storage file let package_mapped_file = unsafe { @@ -99,7 +145,7 @@ mod aconfig_storage_rust_test { #[test] fn test_flag_context_query() { - let storage_dir = create_test_storage_files(); + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp storage file let flag_mapped_file = @@ -125,7 +171,7 @@ mod aconfig_storage_rust_test { #[test] fn test_none_exist_flag_context_query() { - let storage_dir = create_test_storage_files(); + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp storage file let flag_mapped_file = @@ -141,7 +187,7 @@ mod 
aconfig_storage_rust_test { #[test] fn test_boolean_flag_value_query() { - let storage_dir = create_test_storage_files(); + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp storage file let flag_value_file = @@ -155,7 +201,7 @@ mod aconfig_storage_rust_test { #[test] fn test_invalid_boolean_flag_value_query() { - let storage_dir = create_test_storage_files(); + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp storage file let flag_value_file = @@ -169,7 +215,7 @@ mod aconfig_storage_rust_test { #[test] fn test_flag_info_query() { - let storage_dir = create_test_storage_files(); + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp storage file let flag_info_file = @@ -186,7 +232,7 @@ mod aconfig_storage_rust_test { #[test] fn test_invalid_boolean_flag_info_query() { - let storage_dir = create_test_storage_files(); + let storage_dir = create_test_storage_files(1); // SAFETY: // The safety here is ensured as the test process will not write to temp storage file let flag_info_file = @@ -199,10 +245,18 @@ mod aconfig_storage_rust_test { } #[test] - fn test_storage_version_query() { - assert_eq!(get_storage_file_version("./package.map").unwrap(), 1); - assert_eq!(get_storage_file_version("./flag.map").unwrap(), 1); - assert_eq!(get_storage_file_version("./flag.val").unwrap(), 1); - assert_eq!(get_storage_file_version("./flag.info").unwrap(), 1); + fn test_storage_version_query_v1() { + assert_eq!(get_storage_file_version("./data/v1/package_v1.map").unwrap(), 1); + assert_eq!(get_storage_file_version("./data/v1/flag_v1.map").unwrap(), 1); + assert_eq!(get_storage_file_version("./data/v1/flag_v1.val").unwrap(), 1); + assert_eq!(get_storage_file_version("./data/v1/flag_v1.info").unwrap(), 1); + } + + #[test] + fn test_storage_version_query_v2() { + assert_eq!(get_storage_file_version("./data/v2/package_v2.map").unwrap(), 2); + assert_eq!(get_storage_file_version("./data/v2/flag_v2.map").unwrap(), 2); + assert_eq!(get_storage_file_version("./data/v2/flag_v2.val").unwrap(), 2); + assert_eq!(get_storage_file_version("./data/v2/flag_v2.info").unwrap(), 2); } } diff --git a/tools/aconfig/aconfig_storage_write_api/Android.bp b/tools/aconfig/aconfig_storage_write_api/Android.bp index 0f1962c3ac..4c882b4b9a 100644 --- a/tools/aconfig/aconfig_storage_write_api/Android.bp +++ b/tools/aconfig/aconfig_storage_write_api/Android.bp @@ -16,6 +16,11 @@ rust_defaults { "libaconfig_storage_file", "libaconfig_storage_read_api", ], + min_sdk_version: "34", + apex_available: [ + "//apex_available:anyapex", + "//apex_available:platform", + ], } rust_library { diff --git a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp index 7b435746da..03a8fa284a 100644 --- a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp +++ b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp @@ -100,18 +100,4 @@ android::base::Result<void> set_flag_has_local_override( return {}; } -android::base::Result<void> create_flag_info( - std::string const& package_map, - std::string const& flag_map, - std::string const& flag_info_out) { - auto creation_cxx = create_flag_info_cxx( - rust::Str(package_map.c_str()), - rust::Str(flag_map.c_str()), - rust::Str(flag_info_out.c_str())); - 
if (creation_cxx.success) { - return {}; - } else { - return android::base::Error() << creation_cxx.error_message; - } -} } // namespace aconfig_storage diff --git a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp index 0bba7ffcfc..50a51889b1 100644 --- a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp +++ b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp @@ -36,13 +36,4 @@ android::base::Result<void> set_flag_has_local_override( uint32_t offset, bool value); -/// Create flag info file based on package and flag map -/// \input package_map: package map file -/// \input flag_map: flag map file -/// \input flag_info_out: flag info file to be created -android::base::Result<void> create_flag_info( - std::string const& package_map, - std::string const& flag_map, - std::string const& flag_info_out); - } // namespace aconfig_storage diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs index 7e6071340c..5721105d86 100644 --- a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs +++ b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs @@ -18,7 +18,7 @@ use aconfig_storage_file::{ read_u8_from_bytes, AconfigStorageError, FlagInfoBit, FlagInfoHeader, FlagValueType, - FILE_VERSION, + MAX_SUPPORTED_FILE_VERSION, }; use anyhow::anyhow; @@ -28,11 +28,11 @@ fn get_flag_info_offset( flag_index: u32, ) -> Result<usize, AconfigStorageError> { let interpreted_header = FlagInfoHeader::from_bytes(buf)?; - if interpreted_header.version > FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot write to storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -94,13 +94,13 @@ pub fn update_flag_has_local_override( #[cfg(test)] mod tests { use super::*; - use aconfig_storage_file::test_utils::create_test_flag_info_list; + use aconfig_storage_file::{test_utils::create_test_flag_info_list, DEFAULT_FILE_VERSION}; use aconfig_storage_read_api::flag_info_query::find_flag_attribute; #[test] // this test point locks down has server override update fn test_update_flag_has_server_override() { - let flag_info_list = create_test_flag_info_list(); + let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION); let mut buf = flag_info_list.into_bytes(); for i in 0..flag_info_list.header.num_flags { update_flag_has_server_override(&mut buf, FlagValueType::Boolean, i, true).unwrap(); @@ -115,7 +115,7 @@ mod tests { #[test] // this test point locks down has local override update fn test_update_flag_has_local_override() { - let flag_info_list = create_test_flag_info_list(); + let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION); let mut buf = flag_info_list.into_bytes(); for i in 0..flag_info_list.header.num_flags { update_flag_has_local_override(&mut buf, FlagValueType::Boolean, i, true).unwrap(); diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs index dd15c996a6..9772db9ee8 100644 --- a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs +++ 
b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs @@ -16,7 +16,7 @@ //! flag value update module defines the flag value file write to mapped bytes -use aconfig_storage_file::{AconfigStorageError, FlagValueHeader, FILE_VERSION}; +use aconfig_storage_file::{AconfigStorageError, FlagValueHeader, MAX_SUPPORTED_FILE_VERSION}; use anyhow::anyhow; /// Set flag value @@ -26,11 +26,11 @@ pub fn update_boolean_flag_value( flag_value: bool, ) -> Result<usize, AconfigStorageError> { let interpreted_header = FlagValueHeader::from_bytes(buf)?; - if interpreted_header.version > FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot write to storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -49,12 +49,12 @@ pub fn update_boolean_flag_value( #[cfg(test)] mod tests { use super::*; - use aconfig_storage_file::test_utils::create_test_flag_value_list; + use aconfig_storage_file::{test_utils::create_test_flag_value_list, DEFAULT_FILE_VERSION}; #[test] // this test point locks down flag value update fn test_boolean_flag_value_update() { - let flag_value_list = create_test_flag_value_list(); + let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION); let value_offset = flag_value_list.header.boolean_value_offset; let mut content = flag_value_list.into_bytes(); let true_byte = u8::from(true).to_le_bytes()[0]; @@ -72,7 +72,7 @@ mod tests { #[test] // this test point locks down update beyond the end of boolean section fn test_boolean_out_of_range() { - let mut flag_value_list = create_test_flag_value_list().into_bytes(); + let mut flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes(); let error = update_boolean_flag_value(&mut flag_value_list[..], 8, true).unwrap_err(); assert_eq!( format!("{:?}", error), @@ -83,16 +83,16 @@ mod tests { #[test] // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { - let mut value_list = create_test_flag_value_list(); - value_list.header.version = FILE_VERSION + 1; + let mut value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION); + value_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let mut flag_value = value_list.into_bytes(); let error = update_boolean_flag_value(&mut flag_value[..], 4, true).unwrap_err(); assert_eq!( format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot write to storage file with a higher version of {} with lib version {})", - FILE_VERSION + 1, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_write_api/src/lib.rs b/tools/aconfig/aconfig_storage_write_api/src/lib.rs index 0396a63d4e..09bb41f54f 100644 --- a/tools/aconfig/aconfig_storage_write_api/src/lib.rs +++ b/tools/aconfig/aconfig_storage_write_api/src/lib.rs @@ -24,15 +24,10 @@ pub mod mapped_file; #[cfg(test)] mod test_utils; -use aconfig_storage_file::{ - AconfigStorageError, FlagInfoHeader, FlagInfoList, FlagInfoNode, FlagTable, FlagValueType, - PackageTable, StorageFileType, StoredFlagType, FILE_VERSION, -}; +use aconfig_storage_file::{AconfigStorageError, FlagValueType}; use anyhow::anyhow; use memmap2::MmapMut; -use std::fs::File; -use std::io::{Read, Write}; /// Get read write mapped storage files. 
/// @@ -104,86 +99,6 @@ pub fn set_flag_has_local_override( }) } -/// Read in storage file as bytes -fn read_file_to_bytes(file_path: &str) -> Result<Vec<u8>, AconfigStorageError> { - let mut file = File::open(file_path).map_err(|errmsg| { - AconfigStorageError::FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg)) - })?; - let mut buffer = Vec::new(); - file.read_to_end(&mut buffer).map_err(|errmsg| { - AconfigStorageError::FileReadFail(anyhow!( - "Failed to read bytes from file {}: {}", - file_path, - errmsg - )) - })?; - Ok(buffer) -} - -/// Create flag info file given package map file and flag map file -/// \input package_map: package map file -/// \input flag_map: flag map file -/// \output flag_info_out: created flag info file -pub fn create_flag_info( - package_map: &str, - flag_map: &str, - flag_info_out: &str, -) -> Result<(), AconfigStorageError> { - let package_table = PackageTable::from_bytes(&read_file_to_bytes(package_map)?)?; - let flag_table = FlagTable::from_bytes(&read_file_to_bytes(flag_map)?)?; - - if package_table.header.container != flag_table.header.container { - return Err(AconfigStorageError::FileCreationFail(anyhow!( - "container for package map {} and flag map {} does not match", - package_table.header.container, - flag_table.header.container, - ))); - } - - let mut package_start_index = vec![0; package_table.header.num_packages as usize]; - for node in package_table.nodes.iter() { - package_start_index[node.package_id as usize] = node.boolean_start_index; - } - - let mut is_flag_rw = vec![false; flag_table.header.num_flags as usize]; - for node in flag_table.nodes.iter() { - let flag_index = package_start_index[node.package_id as usize] + node.flag_index as u32; - is_flag_rw[flag_index as usize] = node.flag_type == StoredFlagType::ReadWriteBoolean; - } - - let mut list = FlagInfoList { - header: FlagInfoHeader { - version: FILE_VERSION, - container: flag_table.header.container, - file_type: StorageFileType::FlagInfo as u8, - file_size: 0, - num_flags: flag_table.header.num_flags, - boolean_flag_offset: 0, - }, - nodes: is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect(), - }; - - list.header.boolean_flag_offset = list.header.into_bytes().len() as u32; - list.header.file_size = list.into_bytes().len() as u32; - - let mut file = File::create(flag_info_out).map_err(|errmsg| { - AconfigStorageError::FileCreationFail(anyhow!( - "fail to create file {}: {}", - flag_info_out, - errmsg - )) - })?; - file.write_all(&list.into_bytes()).map_err(|errmsg| { - AconfigStorageError::FileCreationFail(anyhow!( - "fail to write to file {}: {}", - flag_info_out, - errmsg - )) - })?; - - Ok(()) -} - // *************************************** // // CC INTERLOP // *************************************** // @@ -212,12 +127,6 @@ mod ffi { pub error_message: String, } - // Flag info file creation return for cc interlop - pub struct FlagInfoCreationCXX { - pub success: bool, - pub error_message: String, - } - // Rust export to c++ extern "Rust" { pub fn update_boolean_flag_value_cxx( @@ -239,12 +148,6 @@ mod ffi { offset: u32, value: bool, ) -> FlagHasLocalOverrideUpdateCXX; - - pub fn create_flag_info_cxx( - package_map: &str, - flag_map: &str, - flag_info_out: &str, - ) -> FlagInfoCreationCXX; } } @@ -329,34 +232,15 @@ pub(crate) fn update_flag_has_local_override_cxx( } } -/// Create flag info file cc interlop -pub(crate) fn create_flag_info_cxx( - package_map: &str, - flag_map: &str, - flag_info_out: &str, -) -> ffi::FlagInfoCreationCXX { - match 
create_flag_info(package_map, flag_map, flag_info_out) { - Ok(()) => ffi::FlagInfoCreationCXX { success: true, error_message: String::from("") }, - Err(errmsg) => { - ffi::FlagInfoCreationCXX { success: false, error_message: format!("{:?}", errmsg) } - } - } -} - #[cfg(test)] mod tests { use super::*; use crate::test_utils::copy_to_temp_file; - use aconfig_storage_file::test_utils::{ - create_test_flag_info_list, create_test_flag_table, create_test_package_table, - write_bytes_to_temp_file, - }; use aconfig_storage_file::FlagInfoBit; use aconfig_storage_read_api::flag_info_query::find_flag_attribute; use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value; use std::fs::File; use std::io::Read; - use tempfile::NamedTempFile; fn get_boolean_flag_value_at_offset(file: &str, offset: u32) -> bool { let mut f = File::open(&file).unwrap(); @@ -439,31 +323,4 @@ mod tests { } } } - - fn create_empty_temp_file() -> Result<NamedTempFile, AconfigStorageError> { - let file = NamedTempFile::new().map_err(|_| { - AconfigStorageError::FileCreationFail(anyhow!("Failed to create temp file")) - })?; - Ok(file) - } - - #[test] - // this test point locks down the flag info creation - fn test_create_flag_info() { - let package_table = - write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap(); - let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap(); - let flag_info = create_empty_temp_file().unwrap(); - - let package_table_path = package_table.path().display().to_string(); - let flag_table_path = flag_table.path().display().to_string(); - let flag_info_path = flag_info.path().display().to_string(); - - assert!(create_flag_info(&package_table_path, &flag_table_path, &flag_info_path).is_ok()); - - let flag_info = - FlagInfoList::from_bytes(&read_file_to_bytes(&flag_info_path).unwrap()).unwrap(); - let expected_flag_info = create_test_flag_info_list(); - assert_eq!(flag_info, expected_flag_info); - } } diff --git a/tools/aconfig/aflags/Android.bp b/tools/aconfig/aflags/Android.bp index c48585aed9..341975daa4 100644 --- a/tools/aconfig/aflags/Android.bp +++ b/tools/aconfig/aflags/Android.bp @@ -10,8 +10,9 @@ rust_defaults { srcs: ["src/main.rs"], rustlibs: [ "libaconfig_device_paths", + "libaconfig_flags", "libaconfig_protos", - "libaconfigd_protos", + "libaconfigd_protos_rust", "libaconfig_storage_read_api", "libaconfig_storage_file", "libanyhow", @@ -19,12 +20,20 @@ rust_defaults { "libnix", "libprotobuf", "libregex", + // TODO: b/371021174 remove this fake dependency once we find a proper strategy to + // deal with test aconfig libs are not present in storage because they are never used + // by the actual build + "libaconfig_test_rust_library", ], } rust_binary { name: "aflags", + host_supported: true, defaults: ["aflags.defaults"], + apex_available: [ + "//apex_available:platform", + ], } rust_test_host { diff --git a/tools/aconfig/aflags/Cargo.toml b/tools/aconfig/aflags/Cargo.toml index 7dc343668d..d31e232975 100644 --- a/tools/aconfig/aflags/Cargo.toml +++ b/tools/aconfig/aflags/Cargo.toml @@ -9,9 +9,10 @@ paste = "1.0.11" protobuf = "3.2.0" regex = "1.10.3" aconfig_protos = { path = "../aconfig_protos" } -aconfigd_protos = { version = "0.1.0", path = "../../../../../system/server_configurable_flags/aconfigd"} +aconfigd_protos = { version = "0.1.0", path = "../../../../../packages/modules/ConfigInfrastructure/aconfigd/proto"} nix = { version = "0.28.0", features = ["user"] } aconfig_storage_file = { version = "0.1.0", path = 
"../aconfig_storage_file" } aconfig_storage_read_api = { version = "0.1.0", path = "../aconfig_storage_read_api" } clap = {version = "4.5.2" } aconfig_device_paths = { version = "0.1.0", path = "../aconfig_device_paths" } +aconfig_flags = { version = "0.1.0", path = "../aconfig_flags" } diff --git a/tools/aconfig/aflags/src/aconfig_storage_source.rs b/tools/aconfig/aflags/src/aconfig_storage_source.rs index a2c60128b5..766807acef 100644 --- a/tools/aconfig/aflags/src/aconfig_storage_source.rs +++ b/tools/aconfig/aflags/src/aconfig_storage_source.rs @@ -1,3 +1,4 @@ +use crate::load_protos; use crate::{Flag, FlagSource}; use crate::{FlagPermission, FlagValue, ValuePickedFrom}; use aconfigd_protos::{ @@ -9,13 +10,35 @@ use anyhow::anyhow; use anyhow::Result; use protobuf::Message; use protobuf::SpecialFields; +use std::collections::HashMap; use std::io::{Read, Write}; use std::net::Shutdown; use std::os::unix::net::UnixStream; pub struct AconfigStorageSource {} -fn convert(msg: ProtoFlagQueryReturnMessage) -> Result<Flag> { +static ACONFIGD_SYSTEM_SOCKET_NAME: &str = "/dev/socket/aconfigd_system"; +static ACONFIGD_MAINLINE_SOCKET_NAME: &str = "/dev/socket/aconfigd_mainline"; + +enum AconfigdSocket { + System, + Mainline, +} + +impl AconfigdSocket { + pub fn name(&self) -> &str { + match self { + AconfigdSocket::System => ACONFIGD_SYSTEM_SOCKET_NAME, + AconfigdSocket::Mainline => ACONFIGD_MAINLINE_SOCKET_NAME, + } + } +} + +fn load_flag_to_container() -> Result<HashMap<String, String>> { + Ok(load_protos::load()?.into_iter().map(|p| (p.qualified_name(), p.container)).collect()) +} + +fn convert(msg: ProtoFlagQueryReturnMessage, containers: &HashMap<String, String>) -> Result<Flag> { let (value, value_picked_from) = match ( &msg.boot_flag_value, msg.default_flag_value, @@ -55,21 +78,27 @@ fn convert(msg: ProtoFlagQueryReturnMessage) -> Result<Flag> { None => return Err(anyhow!("missing permission")), }; + let name = msg.flag_name.ok_or(anyhow!("missing flag name"))?; + let package = msg.package_name.ok_or(anyhow!("missing package name"))?; + let qualified_name = format!("{package}.{name}"); Ok(Flag { - name: msg.flag_name.ok_or(anyhow!("missing flag name"))?, - package: msg.package_name.ok_or(anyhow!("missing package name"))?, + name, + package, value, permission, value_picked_from, staged_value, - container: msg.container.ok_or(anyhow!("missing container"))?, - + container: containers + .get(&qualified_name) + .cloned() + .unwrap_or_else(|| "<no container>".to_string()) + .to_string(), // TODO: remove once DeviceConfig is not in the CLI. 
namespace: "-".to_string(), }) } -fn read_from_socket() -> Result<Vec<ProtoFlagQueryReturnMessage>> { +fn read_from_socket(socket: AconfigdSocket) -> Result<Vec<ProtoFlagQueryReturnMessage>> { let messages = ProtoStorageRequestMessages { msgs: vec![ProtoStorageRequestMessage { msg: Some(ProtoStorageRequestMessageMsg::ListStorageMessage(ProtoListStorageMessage { @@ -81,7 +110,7 @@ fn read_from_socket() -> Result<Vec<ProtoFlagQueryReturnMessage>> { special_fields: SpecialFields::new(), }; - let mut socket = UnixStream::connect("/dev/socket/aconfigd")?; + let mut socket = UnixStream::connect(socket.name())?; let message_buffer = messages.write_to_bytes()?; let mut message_length_buffer: [u8; 4] = [0; 4]; @@ -114,11 +143,21 @@ fn read_from_socket() -> Result<Vec<ProtoFlagQueryReturnMessage>> { impl FlagSource for AconfigStorageSource { fn list_flags() -> Result<Vec<Flag>> { - read_from_socket() - .map(|query_messages| { - query_messages.iter().map(|message| convert(message.clone())).collect::<Vec<_>>() - })? + let containers = load_flag_to_container()?; + let system_messages = read_from_socket(AconfigdSocket::System); + let mainline_messages = read_from_socket(AconfigdSocket::Mainline); + + let mut all_messages = vec![]; + if let Ok(system_messages) = system_messages { + all_messages.extend_from_slice(&system_messages); + } + if let Ok(mainline_messages) = mainline_messages { + all_messages.extend_from_slice(&mainline_messages); + } + + all_messages .into_iter() + .map(|query_message| convert(query_message.clone(), &containers)) .collect() } diff --git a/tools/aconfig/aflags/src/load_protos.rs b/tools/aconfig/aflags/src/load_protos.rs index 90d8599145..c5ac8ff9dc 100644 --- a/tools/aconfig/aflags/src/load_protos.rs +++ b/tools/aconfig/aflags/src/load_protos.rs @@ -51,7 +51,10 @@ pub(crate) fn load() -> Result<Vec<Flag>> { let paths = aconfig_device_paths::parsed_flags_proto_paths()?; for path in paths { - let bytes = fs::read(path.clone())?; + let Ok(bytes) = fs::read(&path) else { + eprintln!("warning: failed to read {:?}", path); + continue; + }; let parsed_flags: ProtoParsedFlags = protobuf::Message::parse_from_bytes(&bytes)?; for flag in parsed_flags.parsed_flag { // TODO(b/334954748): enforce one-container-per-flag invariant. @@ -60,3 +63,10 @@ pub(crate) fn load() -> Result<Vec<Flag>> { } Ok(result) } + +pub(crate) fn list_containers() -> Result<Vec<String>> { + Ok(aconfig_device_paths::parsed_flags_proto_paths()? + .into_iter() + .map(|p| infer_container(&p)) + .collect()) +} diff --git a/tools/aconfig/aflags/src/main.rs b/tools/aconfig/aflags/src/main.rs index d8912a946e..568ad999e0 100644 --- a/tools/aconfig/aflags/src/main.rs +++ b/tools/aconfig/aflags/src/main.rs @@ -16,6 +16,9 @@ //! `aflags` is a device binary to read and write aconfig flags. +use std::env; +use std::process::{Command as OsCommand, Stdio}; + use anyhow::{anyhow, ensure, Result}; use clap::Parser; @@ -116,9 +119,10 @@ impl Flag { } fn display_staged_value(&self) -> String { - match self.staged_value { - Some(v) => format!("(->{})", v), - None => "-".to_string(), + match (&self.permission, self.staged_value) { + (FlagPermission::ReadOnly, _) => "-".to_string(), + (FlagPermission::ReadWrite, None) => "-".to_string(), + (FlagPermission::ReadWrite, Some(v)) => format!("(->{})", v), } } } @@ -164,10 +168,6 @@ struct Cli { enum Command { /// List all aconfig flags on this device. List { - /// Read from the new flag storage. - #[clap(long)] - use_new_storage: bool, - /// Optionally filter by container name. 
#[clap(short = 'c', long = "container")] container: Option<String>, @@ -184,6 +184,9 @@ enum Command { /// <package>.<flag_name> qualified_name: String, }, + + /// Display which flag storage backs aconfig flags. + WhichBacking, } struct PaddingInfo { @@ -253,6 +256,14 @@ fn list(source_type: FlagSourceType, container: Option<String>) -> Result<String FlagSourceType::DeviceConfig => DeviceConfigSource::list_flags()?, FlagSourceType::AconfigStorage => AconfigStorageSource::list_flags()?, }; + + if let Some(ref c) = container { + ensure!( + load_protos::list_containers()?.contains(c), + format!("container '{}' not found", &c) + ); + } + let flags = (Filter { container }).apply(&flags_unfiltered); let padding_info = PaddingInfo { longest_flag_col: flags.iter().map(|f| f.qualified_name().len()).max().unwrap_or(0), @@ -282,21 +293,61 @@ fn list(source_type: FlagSourceType, container: Option<String>) -> Result<String Ok(result) } +fn display_which_backing() -> String { + if aconfig_flags::auto_generated::enable_only_new_storage() { + "aconfig_storage".to_string() + } else { + "device_config".to_string() + } +} + +fn invoke_updatable_aflags() { + let updatable_command = "/apex/com.android.configinfrastructure/bin/aflags_updatable"; + + let args: Vec<String> = env::args().collect(); + let command_args = if args.len() >= 2 { &args[1..] } else { &["--help".to_string()] }; + + let mut child = OsCommand::new(updatable_command); + for arg in command_args { + child.arg(arg); + } + + let output = child + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn() + .expect("failed to execute child") + .wait_with_output() + .expect("failed to execute command"); + + let output_str = String::from_utf8_lossy(&output.stdout).trim().to_string(); + if !output_str.is_empty() { + println!("{}", output_str); + } +} + fn main() -> Result<()> { + if aconfig_flags::auto_generated::invoke_updatable_aflags() { + invoke_updatable_aflags(); + return Ok(()); + } + ensure!(nix::unistd::Uid::current().is_root(), "must be root"); let cli = Cli::parse(); let output = match cli.command { - Command::List { use_new_storage: true, container } => { - list(FlagSourceType::AconfigStorage, container) - .map_err(|err| anyhow!("storage may not be enabled: {err}")) - .map(Some) - } - Command::List { use_new_storage: false, container } => { - list(FlagSourceType::DeviceConfig, container).map(Some) + Command::List { container } => { + if aconfig_flags::auto_generated::enable_only_new_storage() { + list(FlagSourceType::AconfigStorage, container) + .map_err(|err| anyhow!("could not list flags: {err}")) + .map(Some) + } else { + list(FlagSourceType::DeviceConfig, container).map(Some) + } } Command::Enable { qualified_name } => set_flag(&qualified_name, "true").map(|_| None), Command::Disable { qualified_name } => set_flag(&qualified_name, "false").map(|_| None), + Command::WhichBacking => Ok(Some(display_which_backing())), }; match output { Ok(Some(text)) => println!("{text}"), diff --git a/tools/aconfig/fake_device_config/Android.bp b/tools/aconfig/fake_device_config/Android.bp index d6a1f22649..bf98058895 100644 --- a/tools/aconfig/fake_device_config/Android.bp +++ b/tools/aconfig/fake_device_config/Android.bp @@ -15,20 +15,31 @@ java_library { name: "fake_device_config", srcs: [ - "src/android/util/Log.java", - "src/android/provider/DeviceConfig.java", - "src/android/os/StrictMode.java", + "src/**/*.java", ], sdk_version: "none", system_modules: "core-all-system-modules", host_supported: true, + is_stubs_module: true, } java_library { - 
name: "strict_mode_stub", + name: "aconfig_storage_stub", srcs: [ - "src/android/os/StrictMode.java", + "src/android/os/flagging/**/*.java", ], sdk_version: "core_current", host_supported: true, + is_stubs_module: true, +} + +java_library { + name: "aconfig_storage_stub_none", + srcs: [ + "src/android/os/flagging/**/*.java", + ], + sdk_version: "none", + system_modules: "core-all-system-modules", + host_supported: true, + is_stubs_module: true, } diff --git a/tools/aconfig/fake_device_config/src/android/os/Build.java b/tools/aconfig/fake_device_config/src/android/os/Build.java new file mode 100644 index 0000000000..8ec72fb2dc --- /dev/null +++ b/tools/aconfig/fake_device_config/src/android/os/Build.java @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.os; + +public class Build { + public static class VERSION { + public static final int SDK_INT = 0; + } +} diff --git a/tools/aconfig/fake_device_config/src/android/os/StrictMode.java b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackage.java index 641625206c..3cac5168d1 100644 --- a/tools/aconfig/fake_device_config/src/android/os/StrictMode.java +++ b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackage.java @@ -14,16 +14,17 @@ * limitations under the License. */ -package android.os; +package android.os.flagging; -public class StrictMode { - public static ThreadPolicy allowThreadDiskReads() { +/* + * This class allows generated aconfig code to compile independently of the framework. + */ +public class AconfigPackage { + public static AconfigPackage load(String packageName) { throw new UnsupportedOperationException("Stub!"); } - public static void setThreadPolicy(final ThreadPolicy policy) { + public boolean getBooleanFlagValue(String flagName, boolean defaultValue) { throw new UnsupportedOperationException("Stub!"); } - - public static final class ThreadPolicy {} } diff --git a/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java index dbb07ac983..46058b664f 100644 --- a/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java +++ b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2023 The Android Open Source Project + * Copyright (C) 2024 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,26 +14,18 @@ * limitations under the License. */ -package android.provider; +package android.os.flagging; /* * This class allows generated aconfig code to compile independently of the framework. 
*/ -public class DeviceConfig { - private DeviceConfig() { - } +public class AconfigPackageInternal { - public static boolean getBoolean(String ns, String name, boolean def) { - return false; - } + public static AconfigPackageInternal load(String packageName, long packageFingerprint) { + throw new UnsupportedOperationException("Stub!"); + } - public static Properties getProperties(String namespace, String... names) { - return new Properties(); - } - - public static class Properties { - public boolean getBoolean(String name, boolean def) { - return false; - } - } + public boolean getBooleanFlagValue(int index) { + throw new UnsupportedOperationException("Stub!"); + } } diff --git a/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackage.java b/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackage.java new file mode 100644 index 0000000000..ec79f7daa1 --- /dev/null +++ b/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackage.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.os.flagging; + +import java.util.Set; + +/* + * This class allows generated aconfig code to compile independently of the framework. + */ +public class PlatformAconfigPackage { + + public static final Set<String> PLATFORM_PACKAGE_MAP_FILES = + Set.of("system.package.map", "vendor.package.map", "product.package.map"); + + public static PlatformAconfigPackage load(String packageName) { + throw new UnsupportedOperationException("Stub!"); + } + + public boolean getBooleanFlagValue(String flagName, boolean defaultValue) { + throw new UnsupportedOperationException("Stub!"); + } +} diff --git a/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java b/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java new file mode 100644 index 0000000000..378c963ba4 --- /dev/null +++ b/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.os.flagging; + +/* + * This class allows generated aconfig code to compile independently of the framework. 
+ */ +public class PlatformAconfigPackageInternal { + + public static PlatformAconfigPackageInternal load(String packageName, long packageFingerprint) { + throw new UnsupportedOperationException("Stub!"); + } + + public boolean getBooleanFlagValue(int index) { + throw new UnsupportedOperationException("Stub!"); + } +} diff --git a/tools/aconfig/fake_device_config/src/android/util/Log.java b/tools/aconfig/fake_device_config/src/android/util/Log.java index 3e7fd0f386..e40790a432 100644 --- a/tools/aconfig/fake_device_config/src/android/util/Log.java +++ b/tools/aconfig/fake_device_config/src/android/util/Log.java @@ -2,14 +2,18 @@ package android.util; public final class Log { public static int i(String tag, String msg) { - return 0; + throw new UnsupportedOperationException("Stub!"); + } + + public static int w(String tag, String msg) { + throw new UnsupportedOperationException("Stub!"); } public static int e(String tag, String msg) { - return 0; + throw new UnsupportedOperationException("Stub!"); } public static int e(String tag, String msg, Throwable tr) { - return 0; + throw new UnsupportedOperationException("Stub!"); } } diff --git a/tools/aconfig/printflags/Android.bp b/tools/aconfig/printflags/Android.bp index d50a77d072..6f7bca3529 100644 --- a/tools/aconfig/printflags/Android.bp +++ b/tools/aconfig/printflags/Android.bp @@ -19,6 +19,9 @@ rust_defaults { rust_binary { name: "printflags", defaults: ["printflags.defaults"], + apex_available: [ + "//apex_available:platform", + ], } rust_test_host { diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py index 8ee599a1ec..d54c4121e4 100755 --- a/tools/auto_gen_test_config.py +++ b/tools/auto_gen_test_config.py @@ -34,6 +34,7 @@ PLACEHOLDER_MODULE = '{MODULE}' PLACEHOLDER_PACKAGE = '{PACKAGE}' PLACEHOLDER_RUNNER = '{RUNNER}' PLACEHOLDER_TEST_TYPE = '{TEST_TYPE}' +PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS = '{EXTRA_TEST_RUNNER_CONFIGS}' def main(argv): @@ -59,6 +60,7 @@ def main(argv): "instrumentation_test_config_template", help="Path to the instrumentation test config template.") parser.add_argument("--extra-configs", default="") + parser.add_argument("--extra-test-runner-configs", default="") args = parser.parse_args(argv) target_config = args.target_config @@ -66,6 +68,7 @@ def main(argv): empty_config = args.empty_config instrumentation_test_config_template = args.instrumentation_test_config_template extra_configs = '\n'.join(args.extra_configs.split('\\n')) + extra_test_runner_configs = '\n'.join(args.extra_test_runner_configs.split('\\n')) module = os.path.splitext(os.path.basename(target_config))[0] @@ -131,6 +134,7 @@ def main(argv): config = config.replace(PLACEHOLDER_PACKAGE, package) config = config.replace(PLACEHOLDER_TEST_TYPE, test_type) config = config.replace(PLACEHOLDER_EXTRA_CONFIGS, extra_configs) + config = config.replace(PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS, extra_test_runner_configs) config = config.replace(PLACEHOLDER_RUNNER, runner) with open(target_config, 'w') as config_file: config_file.write(config) diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py index 1fd7950bfe..064004179e 100755 --- a/tools/check_elf_file.py +++ b/tools/check_elf_file.py @@ -42,8 +42,9 @@ _EM_ARM = 40 _EM_X86_64 = 62 _EM_AARCH64 = 183 -_KNOWN_MACHINES = {_EM_386, _EM_ARM, _EM_X86_64, _EM_AARCH64} - +_32_BIT_MACHINES = {_EM_386, _EM_ARM} +_64_BIT_MACHINES = {_EM_X86_64, _EM_AARCH64} +_KNOWN_MACHINES = _32_BIT_MACHINES | _64_BIT_MACHINES # ELF header struct _ELF_HEADER_STRUCT = ( @@ -483,6 +484,11 @@ class 
Checker(object): sys.exit(2) def check_max_page_size(self, max_page_size): + if self._file_under_test.header.e_machine in _32_BIT_MACHINES: + # Skip test on 32-bit machines. 16 KB pages is an arm64 feature + # and no 32-bit systems in Android use it. + return + for alignment in self._file_under_test.alignments: if alignment % max_page_size != 0: self._error(f'Load segment has alignment {alignment} but ' diff --git a/tools/compliance/go.work b/tools/compliance/go.work index a24d2ea541..506e619436 100644 --- a/tools/compliance/go.work +++ b/tools/compliance/go.work @@ -1,4 +1,4 @@ -go 1.22 +go 1.23 use ( . diff --git a/tools/edit_monitor/Android.bp b/tools/edit_monitor/Android.bp new file mode 100644 index 0000000000..b8ac5bff53 --- /dev/null +++ b/tools/edit_monitor/Android.bp @@ -0,0 +1,118 @@ +// Copyright 2024 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Set of error prone rules to ensure code quality +// PackageLocation check requires the androidCompatible=false otherwise it does not do anything. + +package { + default_applicable_licenses: ["Android-Apache-2.0"], + default_team: "trendy_team_adte", +} + +python_library_host { + name: "edit_event_proto", + srcs: [ + "proto/edit_event.proto", + ], + proto: { + canonical_path_from_root: false, + }, +} + +python_library_host { + name: "edit_monitor_lib", + pkg_path: "edit_monitor", + srcs: [ + "daemon_manager.py", + "edit_monitor.py", + "utils.py", + ], + libs: [ + "asuite_cc_client", + "edit_event_proto", + "watchdog", + ], +} + +python_test_host { + name: "daemon_manager_test", + main: "daemon_manager_test.py", + pkg_path: "edit_monitor", + srcs: [ + "daemon_manager_test.py", + ], + libs: [ + "edit_monitor_lib", + ], + test_options: { + unit_test: true, + }, +} + +python_test_host { + name: "edit_monitor_test", + main: "edit_monitor_test.py", + pkg_path: "edit_monitor", + srcs: [ + "edit_monitor_test.py", + ], + libs: [ + "edit_monitor_lib", + ], + test_options: { + unit_test: true, + }, +} + +python_test_host { + name: "edit_monitor_utils_test", + main: "utils_test.py", + pkg_path: "edit_monitor", + srcs: [ + "utils_test.py", + ], + libs: [ + "edit_monitor_lib", + ], + test_options: { + unit_test: true, + }, +} + +python_test_host { + name: "edit_monitor_integration_test", + main: "edit_monitor_integration_test.py", + pkg_path: "testdata", + srcs: [ + "edit_monitor_integration_test.py", + ], + test_options: { + unit_test: true, + }, + data: [ + ":edit_monitor", + ], +} + +python_binary_host { + name: "edit_monitor", + pkg_path: "edit_monitor", + srcs: [ + "main.py", + ], + libs: [ + "edit_monitor_lib", + ], + main: "main.py", +} diff --git a/tools/edit_monitor/OWNERS b/tools/edit_monitor/OWNERS new file mode 100644 index 0000000000..8f0f3646dd --- /dev/null +++ b/tools/edit_monitor/OWNERS @@ -0,0 +1 @@ +include platform/tools/asuite:/OWNERS_ADTE_TEAM
\ No newline at end of file diff --git a/tools/edit_monitor/daemon_manager.py b/tools/edit_monitor/daemon_manager.py new file mode 100644 index 0000000000..7d666fed55 --- /dev/null +++ b/tools/edit_monitor/daemon_manager.py @@ -0,0 +1,463 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import errno +import fcntl +import getpass +import hashlib +import logging +import multiprocessing +import os +import pathlib +import platform +import signal +import subprocess +import sys +import tempfile +import time + +from atest.metrics import clearcut_client +from atest.proto import clientanalytics_pb2 +from edit_monitor import utils +from proto import edit_event_pb2 + +DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS = 5 +DEFAULT_MONITOR_INTERVAL_SECONDS = 5 +DEFAULT_MEMORY_USAGE_THRESHOLD = 0.02 # 2% of total memory +DEFAULT_CPU_USAGE_THRESHOLD = 200 +DEFAULT_REBOOT_TIMEOUT_SECONDS = 60 * 60 * 24 +BLOCK_SIGN_FILE = "edit_monitor_block_sign" +# Enum of the Clearcut log source defined under +# /google3/wireless/android/play/playlog/proto/log_source_enum.proto +LOG_SOURCE = 2524 + + +def default_daemon_target(): + """Place holder for the default daemon target.""" + print("default daemon target") + + +class DaemonManager: + """Class to manage and monitor the daemon run as a subprocess.""" + + def __init__( + self, + binary_path: str, + daemon_target: callable = default_daemon_target, + daemon_args: tuple = (), + cclient: clearcut_client.Clearcut | None = None, + ): + self.binary_path = binary_path + self.daemon_target = daemon_target + self.daemon_args = daemon_args + self.cclient = cclient or clearcut_client.Clearcut(LOG_SOURCE) + + self.user_name = getpass.getuser() + self.host_name = platform.node() + self.source_root = os.environ.get("ANDROID_BUILD_TOP", "") + self.pid = os.getpid() + self.daemon_process = None + + self.max_memory_usage = 0 + self.max_cpu_usage = 0 + self.total_memory_size = os.sysconf("SC_PAGE_SIZE") * os.sysconf( + "SC_PHYS_PAGES" + ) + + pid_file_dir = pathlib.Path(tempfile.gettempdir()).joinpath("edit_monitor") + pid_file_dir.mkdir(parents=True, exist_ok=True) + self.pid_file_path = self._get_pid_file_path(pid_file_dir) + self.block_sign = pathlib.Path(tempfile.gettempdir()).joinpath( + BLOCK_SIGN_FILE + ) + + def start(self): + """Writes the pidfile and starts the daemon proces.""" + if not utils.is_feature_enabled( + "edit_monitor", + self.user_name, + "ENABLE_ANDROID_EDIT_MONITOR", + 100, + ): + logging.warning("Edit monitor is disabled, exiting...") + return + + if self.block_sign.exists(): + logging.warning("Block sign found, exiting...") + return + + if self.binary_path.startswith("/google/cog/"): + logging.warning("Edit monitor for cog is not supported, exiting...") + return + + setup_lock_file = pathlib.Path(tempfile.gettempdir()).joinpath( + self.pid_file_path.name + ".setup" + ) + logging.info("setup lock file: %s", setup_lock_file) + with open(setup_lock_file, "w") as f: + try: + # Acquire an exclusive lock + 
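# Note: LOCK_EX | LOCK_NB makes flock() raise OSError/IOError with
# errno == EAGAIN immediately if another process already holds the lock,
# instead of blocking; the except branch below relies on this to detect a
# concurrent startup and bail out.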
        fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
+        self._stop_any_existing_instance()
+        self._write_pid_to_pidfile()
+        self._start_daemon_process()
+      except Exception as e:
+        if (
+            isinstance(e, IOError) and e.errno == errno.EAGAIN
+        ):  # Failed to acquire the file lock.
+          logging.warning("Another edit monitor is starting, exiting...")
+          return
+        else:
+          logging.exception("Failed to start daemon manager with error %s", e)
+          self._send_error_event_to_clearcut(
+              edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+          )
+          raise e
+      finally:
+        # Release the lock
+        fcntl.flock(f, fcntl.LOCK_UN)
+
+  def monitor_daemon(
+      self,
+      interval: int = DEFAULT_MONITOR_INTERVAL_SECONDS,
+      memory_threshold: float = DEFAULT_MEMORY_USAGE_THRESHOLD,
+      cpu_threshold: float = DEFAULT_CPU_USAGE_THRESHOLD,
+      reboot_timeout: int = DEFAULT_REBOOT_TIMEOUT_SECONDS,
+  ):
+    """Monitors the daemon process status.
+
+    Periodically checks the CPU/memory usage of the daemon process as long as
+    the process is still running, and kills the process if the resource usage
+    is above the given thresholds.
+    """
+    if not self.daemon_process:
+      return
+
+    logging.info("start monitoring daemon process %d.", self.daemon_process.pid)
+    reboot_time = time.time() + reboot_timeout
+    while self.daemon_process.is_alive():
+      if time.time() > reboot_time:
+        self.reboot()
+      try:
+        memory_usage = self._get_process_memory_percent(self.daemon_process.pid)
+        self.max_memory_usage = max(self.max_memory_usage, memory_usage)
+
+        cpu_usage = self._get_process_cpu_percent(self.daemon_process.pid)
+        self.max_cpu_usage = max(self.max_cpu_usage, cpu_usage)
+
+        time.sleep(interval)
+      except Exception as e:
+        # Log the error and continue.
+        logging.warning("Failed to monitor daemon process with error: %s", e)
+
+    if self.max_memory_usage >= memory_threshold:
+      self._send_error_event_to_clearcut(
+          edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_MEMORY_USAGE
+      )
+      logging.error(
+          "Daemon process is consuming too much memory, rebooting..."
+      )
+      self.reboot()
+
+    if self.max_cpu_usage >= cpu_threshold:
+      self._send_error_event_to_clearcut(
+          edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_CPU_USAGE
+      )
+      logging.error("Daemon process is consuming too much cpu, killing...")
+      self._terminate_process(self.daemon_process.pid)
+
+    logging.info(
+        "Daemon process %d terminated. Max memory usage: %f, Max cpu"
+        " usage: %f.",
+        self.daemon_process.pid,
+        self.max_memory_usage,
+        self.max_cpu_usage,
+    )
+
+  def stop(self):
+    """Stops the daemon process and removes the pidfile."""
+
+    logging.info("in daemon manager cleanup.")
+    try:
+      if self.daemon_process:
+        # The daemon process might already be terminating;
+        # wait some time before killing it explicitly.
+        self._wait_for_process_terminate(self.daemon_process.pid, 1)
+        if self.daemon_process.is_alive():
+          self._terminate_process(self.daemon_process.pid)
+      self._remove_pidfile(self.pid)
+      logging.info("Successfully stopped daemon manager.")
+    except Exception as e:
+      logging.exception("Failed to stop daemon manager with error %s", e)
+      self._send_error_event_to_clearcut(
+          edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+      )
+      sys.exit(1)
+    finally:
+      self.cclient.flush_events()
+
+  def reboot(self):
+    """Reboots the current process.
+
+    Stops the current daemon manager and reboots the entire process based on
+    the binary file. Exits directly if the binary file no longer exists.
+    """
+    logging.info("Rebooting process based on binary %s.", self.binary_path)
+
+    # Stop the current daemon manager first.
+    self.stop()
+
+    # If the binary no longer exists, exit directly.
+    if not os.path.exists(self.binary_path):
+      logging.info("binary %s no longer exists, exiting.", self.binary_path)
+      sys.exit(0)
+
+    try:
+      os.execv(self.binary_path, sys.argv)
+    except OSError as e:
+      logging.exception("Failed to reboot process with error: %s.", e)
+      self._send_error_event_to_clearcut(
+          edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR
+      )
+      sys.exit(1)  # Indicate an error occurred
+
+  def cleanup(self):
+    """Wipes out all edit monitor instances in the system.
+
+    Stops all existing edit monitor instances and places a block sign to
+    prevent any edit monitor process from starting. This method is only used
+    in emergencies, when something goes wrong with the edit monitor and
+    immediate cleanup is required to prevent damage to the system.
+    """
+    logging.debug("Start cleaning up all existing instances.")
+    self._send_error_event_to_clearcut(edit_event_pb2.EditEvent.FORCE_CLEANUP)
+
+    try:
+      # First places a block sign to prevent any edit monitor process from starting.
+      self.block_sign.touch()
+    except (FileNotFoundError, PermissionError, OSError):
+      logging.exception("Failed to place the block sign")
+
+    # Finds and kills all the existing instances of edit monitor.
+    existing_instances_pids = self._find_all_instances_pids()
+    for pid in existing_instances_pids:
+      logging.info(
+          "Found existing edit monitor instance with pid %d, killing...", pid
+      )
+      try:
+        self._terminate_process(pid)
+      except Exception:
+        logging.exception("Failed to terminate process %d", pid)
+
+  def _stop_any_existing_instance(self):
+    if not self.pid_file_path.exists():
+      logging.debug("No existing instances.")
+      return
+
+    ex_pid = self._read_pid_from_pidfile()
+
+    if ex_pid:
+      logging.info("Found another instance with pid %d.", ex_pid)
+      self._terminate_process(ex_pid)
+      self._remove_pidfile(ex_pid)
+
+  def _read_pid_from_pidfile(self) -> int | None:
+    try:
+      with open(self.pid_file_path, "r") as f:
+        return int(f.read().strip())
+    except FileNotFoundError as e:
+      logging.warning("pidfile %s does not exist.", self.pid_file_path)
+      return None
+
+  def _write_pid_to_pidfile(self):
+    """Creates a pidfile and writes the current pid to the file.
+
+    Raises FileExistsError if the pidfile already exists.
+    """
+    try:
+      # Use the 'x' mode to open the file for exclusive creation
+      with open(self.pid_file_path, "x") as f:
+        f.write(f"{self.pid}")
+    except FileExistsError as e:
+      # This could be caused by a race condition where a user is trying to
+      # start two edit monitors at the same time, or because an existing edit
+      # monitor is already running and we cannot kill it for some reason.
+      logging.exception("pidfile %s already exists.", self.pid_file_path)
+      raise e
+
+  def _start_daemon_process(self):
+    """Starts a subprocess to run the daemon."""
+    p = multiprocessing.Process(
+        target=self.daemon_target, args=self.daemon_args
+    )
+    p.daemon = True
+    p.start()
+
+    logging.info("Start subprocess with PID %d", p.pid)
+    self.daemon_process = p
+
+  def _terminate_process(
+      self, pid: int, timeout: int = DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS
+  ):
+    """Terminates a process with the given pid.
+
+    It first sends SIGTERM to the process to allow it to terminate properly
+    within a timeout. If the process does not terminate within the timeout,
+    it is killed forcefully.
+ """ + try: + os.kill(pid, signal.SIGTERM) + if not self._wait_for_process_terminate(pid, timeout): + logging.warning( + "Process %d not terminated within timeout, try force kill", pid + ) + os.kill(pid, signal.SIGKILL) + except ProcessLookupError: + logging.info("Process with PID %d not found (already terminated)", pid) + + def _wait_for_process_terminate(self, pid: int, timeout: int) -> bool: + start_time = time.time() + + while time.time() < start_time + timeout: + if not self._is_process_alive(pid): + return True + time.sleep(1) + + logging.error("Process %d not terminated within %d seconds.", pid, timeout) + return False + + def _is_process_alive(self, pid: int) -> bool: + try: + output = subprocess.check_output( + ["ps", "-p", str(pid), "-o", "state="], text=True + ).strip() + state = output.split()[0] + return state != "Z" # Check if the state is not 'Z' (zombie) + except subprocess.CalledProcessError: + # Process not found (already dead). + return False + except (FileNotFoundError, OSError, ValueError) as e: + logging.warning( + "Unable to check the status for process %d with error: %s.", pid, e + ) + return True + + def _remove_pidfile(self, expected_pid: int): + recorded_pid = self._read_pid_from_pidfile() + + if recorded_pid is None: + logging.info("pid file %s already removed.", self.pid_file_path) + return + + if recorded_pid != expected_pid: + logging.warning( + "pid file contains pid from a different process, expected pid: %d," + " actual pid: %d.", + expected_pid, + recorded_pid, + ) + return + + logging.debug("removing pidfile written by process %s", expected_pid) + try: + os.remove(self.pid_file_path) + except FileNotFoundError: + logging.info("pid file %s already removed.", self.pid_file_path) + + def _get_pid_file_path(self, pid_file_dir: pathlib.Path) -> pathlib.Path: + """Generates the path to store the pidfile. + + The file path should have the format of "/tmp/edit_monitor/xxxx.lock" + where xxxx is a hashed value based on the binary path that starts the + process. + """ + hash_object = hashlib.sha256() + hash_object.update(self.binary_path.encode("utf-8")) + pid_file_path = pid_file_dir.joinpath(hash_object.hexdigest() + ".lock") + logging.info("pid_file_path: %s", pid_file_path) + + return pid_file_path + + def _get_process_memory_percent(self, pid: int) -> float: + with open(f"/proc/{pid}/stat", "r") as f: + stat_data = f.readline().split() + # RSS is the 24th field in /proc/[pid]/stat + rss_pages = int(stat_data[23]) + process_memory = rss_pages * 4 * 1024 # Convert to bytes + + return ( + process_memory / self.total_memory_size + if self.total_memory_size + else 0.0 + ) + + def _get_process_cpu_percent(self, pid: int, interval: int = 1) -> float: + total_start_time = self._get_total_cpu_time(pid) + with open("/proc/uptime", "r") as f: + uptime_start = float(f.readline().split()[0]) + + time.sleep(interval) + + total_end_time = self._get_total_cpu_time(pid) + with open("/proc/uptime", "r") as f: + uptime_end = float(f.readline().split()[0]) + + return ( + (total_end_time - total_start_time) / (uptime_end - uptime_start) * 100 + ) + + def _get_total_cpu_time(self, pid: int) -> float: + with open(f"/proc/{str(pid)}/stat", "r") as f: + stats = f.readline().split() + # utime is the 14th field in /proc/[pid]/stat measured in clock ticks. + utime = int(stats[13]) + # stime is the 15th field in /proc/[pid]/stat measured in clock ticks. 
+ stime = int(stats[14]) + return (utime + stime) / os.sysconf(os.sysconf_names["SC_CLK_TCK"]) + + def _find_all_instances_pids(self) -> list[int]: + pids = [] + + try: + output = subprocess.check_output(["ps", "-ef", "--no-headers"], text=True) + for line in output.splitlines(): + parts = line.split() + process_path = parts[7] + if pathlib.Path(process_path).name == "edit_monitor": + pid = int(parts[1]) + if pid != self.pid: # exclude the current process + pids.append(pid) + except Exception: + logging.exception( + "Failed to get pids of existing edit monitors from ps command." + ) + + return pids + + def _send_error_event_to_clearcut(self, error_type): + edit_monitor_error_event_proto = edit_event_pb2.EditEvent( + user_name=self.user_name, + host_name=self.host_name, + source_root=self.source_root, + ) + edit_monitor_error_event_proto.edit_monitor_error_event.CopyFrom( + edit_event_pb2.EditEvent.EditMonitorErrorEvent(error_type=error_type) + ) + log_event = clientanalytics_pb2.LogEvent( + event_time_ms=int(time.time() * 1000), + source_extension=edit_monitor_error_event_proto.SerializeToString(), + ) + self.cclient.log(log_event) diff --git a/tools/edit_monitor/daemon_manager_test.py b/tools/edit_monitor/daemon_manager_test.py new file mode 100644 index 0000000000..be28965c9e --- /dev/null +++ b/tools/edit_monitor/daemon_manager_test.py @@ -0,0 +1,524 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unittests for DaemonManager.""" + +import fcntl +import logging +import multiprocessing +import os +import pathlib +import signal +import subprocess +import sys +import tempfile +import time +import unittest +from unittest import mock +from edit_monitor import daemon_manager +from proto import edit_event_pb2 + + +TEST_BINARY_FILE = '/path/to/test_binary' +TEST_PID_FILE_PATH = ( + '587239c2d1050afdf54512e2d799f3b929f86b43575eb3c7b4bab105dd9bd25e.lock' +) + + +def simple_daemon(output_file): + with open(output_file, 'w') as f: + f.write('running daemon target') + + +def long_running_daemon(): + while True: + time.sleep(1) + + +def memory_consume_daemon_target(size_mb): + try: + size_bytes = size_mb * 1024 * 1024 + dummy_data = bytearray(size_bytes) + time.sleep(10) + except MemoryError: + print(f'Process failed to allocate {size_mb} MB of memory.') + + +def cpu_consume_daemon_target(target_usage_percent): + while True: + start_time = time.time() + while time.time() - start_time < target_usage_percent / 100: + pass # Busy loop to consume CPU + + # Sleep to reduce CPU usage + time.sleep(1 - target_usage_percent / 100) + + +class DaemonManagerTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + super().setUpClass() + # Configure to print logging to stdout. 
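# (basicConfig with filename=None installs a stderr stream handler; the extra
#  StreamHandler(sys.stdout) added below is what actually mirrors log records
#  to stdout.)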
+ logging.basicConfig(filename=None, level=logging.DEBUG) + console = logging.StreamHandler(sys.stdout) + logging.getLogger('').addHandler(console) + + def setUp(self): + super().setUp() + self.original_tempdir = tempfile.tempdir + self.working_dir = tempfile.TemporaryDirectory() + # Sets the tempdir under the working dir so any temp files created during + # tests will be cleaned. + tempfile.tempdir = self.working_dir.name + self.patch = mock.patch.dict( + os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'true'} + ) + self.patch.start() + + def tearDown(self): + # Cleans up any child processes left by the tests. + self._cleanup_child_processes() + self.working_dir.cleanup() + # Restores tempdir. + tempfile.tempdir = self.original_tempdir + self.patch.stop() + super().tearDown() + + def test_start_success_with_no_existing_instance(self): + self.assert_run_simple_daemon_success() + + def test_start_success_with_existing_instance_running(self): + # Create a running daemon subprocess + p = self._create_fake_deamon_process() + + self.assert_run_simple_daemon_success() + self.assert_no_subprocess_running() + + def test_start_success_with_existing_instance_already_dead(self): + # Create a pidfile with pid that does not exist. + pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath( + 'edit_monitor' + ) + pid_file_path_dir.mkdir(parents=True, exist_ok=True) + with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f: + f.write('123456') + + self.assert_run_simple_daemon_success() + + def test_start_success_with_existing_instance_from_different_binary(self): + # First start an instance based on "some_binary_path" + existing_dm = daemon_manager.DaemonManager( + 'some_binary_path', + daemon_target=long_running_daemon, + ) + existing_dm.start() + + self.assert_run_simple_daemon_success() + existing_dm.stop() + + def test_start_return_directly_if_block_sign_exists(self): + # Creates the block sign. + pathlib.Path(self.working_dir.name).joinpath( + daemon_manager.BLOCK_SIGN_FILE + ).touch() + + dm = daemon_manager.DaemonManager(TEST_BINARY_FILE) + dm.start() + + # Verify no daemon process is started. + self.assertIsNone(dm.daemon_process) + + @mock.patch.dict( + os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'false'}, clear=True + ) + def test_start_return_directly_if_disabled(self): + dm = daemon_manager.DaemonManager(TEST_BINARY_FILE) + dm.start() + + # Verify no daemon process is started. + self.assertIsNone(dm.daemon_process) + + def test_start_return_directly_if_in_cog_env(self): + dm = daemon_manager.DaemonManager( + '/google/cog/cloud/user/workspace/edit_monitor' + ) + dm.start() + + # Verify no daemon process is started. + self.assertIsNone(dm.daemon_process) + + def test_start_failed_other_instance_is_starting(self): + f = open( + pathlib.Path(self.working_dir.name).joinpath( + TEST_PID_FILE_PATH + '.setup' + ), + 'w', + ) + # Acquire an exclusive lock + fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) + + dm = daemon_manager.DaemonManager(TEST_BINARY_FILE) + dm.start() + + # Release the lock + fcntl.flock(f, fcntl.LOCK_UN) + f.close() + # Verify no daemon process is started. 
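# (start() should have hit the EAGAIN branch because the ".setup" lock file
#  was already held, so no daemon subprocess was ever spawned.)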
+ self.assertIsNone(dm.daemon_process) + + @mock.patch('os.kill') + def test_start_failed_to_kill_existing_instance(self, mock_kill): + mock_kill.side_effect = OSError('Unknown OSError') + pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath( + 'edit_monitor' + ) + pid_file_path_dir.mkdir(parents=True, exist_ok=True) + with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f: + f.write('123456') + + fake_cclient = FakeClearcutClient() + with self.assertRaises(OSError): + dm = daemon_manager.DaemonManager(TEST_BINARY_FILE, cclient=fake_cclient) + dm.start() + self._assert_error_event_logged( + fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR + ) + + def test_start_failed_to_write_pidfile(self): + pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath( + 'edit_monitor' + ) + pid_file_path_dir.mkdir(parents=True, exist_ok=True) + + # Makes the directory read-only so write pidfile will fail. + os.chmod(pid_file_path_dir, 0o555) + + fake_cclient = FakeClearcutClient() + with self.assertRaises(PermissionError): + dm = daemon_manager.DaemonManager(TEST_BINARY_FILE, cclient=fake_cclient) + dm.start() + self._assert_error_event_logged( + fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR + ) + + def test_start_failed_to_start_daemon_process(self): + fake_cclient = FakeClearcutClient() + with self.assertRaises(TypeError): + dm = daemon_manager.DaemonManager( + TEST_BINARY_FILE, + daemon_target='wrong_target', + daemon_args=(1), + cclient=fake_cclient, + ) + dm.start() + self._assert_error_event_logged( + fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR + ) + + @mock.patch('os.execv') + def test_monitor_reboot_with_high_memory_usage(self, mock_execv): + fake_cclient = FakeClearcutClient() + binary_file = tempfile.NamedTemporaryFile( + dir=self.working_dir.name, delete=False + ) + + dm = daemon_manager.DaemonManager( + binary_file.name, + daemon_target=memory_consume_daemon_target, + daemon_args=(2,), + cclient=fake_cclient, + ) + # set the fake total_memory_size + dm.total_memory_size = 100 * 1024 * 1024 + dm.start() + dm.monitor_daemon(interval=1) + + self.assertTrue(dm.max_memory_usage >= 0.02) + self.assert_no_subprocess_running() + self._assert_error_event_logged( + fake_cclient, + edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_MEMORY_USAGE, + ) + mock_execv.assert_called_once() + + def test_monitor_daemon_subprocess_killed_high_cpu_usage(self): + fake_cclient = FakeClearcutClient() + dm = daemon_manager.DaemonManager( + TEST_BINARY_FILE, + daemon_target=cpu_consume_daemon_target, + daemon_args=(20,), + cclient=fake_cclient, + ) + dm.start() + dm.monitor_daemon(interval=1, cpu_threshold=20) + + self.assertTrue(dm.max_cpu_usage >= 20) + self.assert_no_subprocess_running() + self._assert_error_event_logged( + fake_cclient, + edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_CPU_USAGE, + ) + + @mock.patch('subprocess.check_output') + def test_monitor_daemon_failed_does_not_matter(self, mock_output): + mock_output.side_effect = OSError('Unknown OSError') + self.assert_run_simple_daemon_success() + + @mock.patch('os.execv') + def test_monitor_daemon_reboot_triggered(self, mock_execv): + binary_file = tempfile.NamedTemporaryFile( + dir=self.working_dir.name, delete=False + ) + + dm = daemon_manager.DaemonManager( + binary_file.name, + daemon_target=long_running_daemon, + ) + dm.start() + dm.monitor_daemon(reboot_timeout=0.5) + mock_execv.assert_called_once() + + def test_stop_success(self): + dm = 
daemon_manager.DaemonManager( + TEST_BINARY_FILE, daemon_target=long_running_daemon + ) + dm.start() + dm.stop() + + self.assert_no_subprocess_running() + self.assertFalse(dm.pid_file_path.exists()) + + @mock.patch('os.kill') + def test_stop_failed_to_kill_daemon_process(self, mock_kill): + mock_kill.side_effect = OSError('Unknown OSError') + fake_cclient = FakeClearcutClient() + dm = daemon_manager.DaemonManager( + TEST_BINARY_FILE, + daemon_target=long_running_daemon, + cclient=fake_cclient, + ) + + with self.assertRaises(SystemExit): + dm.start() + dm.stop() + self.assertTrue(dm.daemon_process.is_alive()) + self.assertTrue(dm.pid_file_path.exists()) + self._assert_error_event_logged( + fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR + ) + + @mock.patch('os.remove') + def test_stop_failed_to_remove_pidfile(self, mock_remove): + mock_remove.side_effect = OSError('Unknown OSError') + + fake_cclient = FakeClearcutClient() + dm = daemon_manager.DaemonManager( + TEST_BINARY_FILE, + daemon_target=long_running_daemon, + cclient=fake_cclient, + ) + + with self.assertRaises(SystemExit): + dm.start() + dm.stop() + self.assert_no_subprocess_running() + self.assertTrue(dm.pid_file_path.exists()) + + self._assert_error_event_logged( + fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR + ) + + @mock.patch('os.execv') + def test_reboot_success(self, mock_execv): + binary_file = tempfile.NamedTemporaryFile( + dir=self.working_dir.name, delete=False + ) + + dm = daemon_manager.DaemonManager( + binary_file.name, daemon_target=long_running_daemon + ) + dm.start() + dm.reboot() + + # Verifies the old process is stopped + self.assert_no_subprocess_running() + self.assertFalse(dm.pid_file_path.exists()) + + mock_execv.assert_called_once() + + @mock.patch('os.execv') + def test_reboot_binary_no_longer_exists(self, mock_execv): + dm = daemon_manager.DaemonManager( + TEST_BINARY_FILE, daemon_target=long_running_daemon + ) + dm.start() + + with self.assertRaises(SystemExit): + dm.reboot() + mock_execv.assert_not_called() + self.assertEqual(cm.exception.code, 0) + + @mock.patch('os.execv') + def test_reboot_failed(self, mock_execv): + mock_execv.side_effect = OSError('Unknown OSError') + fake_cclient = FakeClearcutClient() + binary_file = tempfile.NamedTemporaryFile( + dir=self.working_dir.name, delete=False + ) + + dm = daemon_manager.DaemonManager( + binary_file.name, + daemon_target=long_running_daemon, + cclient=fake_cclient, + ) + dm.start() + + with self.assertRaises(SystemExit): + dm.reboot() + self.assertEqual(cm.exception.code, 1) + self._assert_error_event_logged( + fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR + ) + + @mock.patch('subprocess.check_output') + def test_cleanup_success(self, mock_check_output): + p = self._create_fake_deamon_process() + fake_cclient = FakeClearcutClient() + mock_check_output.return_value = f'user {p.pid} 1 1 1 1 1 edit_monitor arg' + + dm = daemon_manager.DaemonManager( + TEST_BINARY_FILE, + daemon_target=long_running_daemon, + cclient=fake_cclient, + ) + dm.cleanup() + + self.assertFalse(p.is_alive()) + self.assertTrue( + pathlib.Path(self.working_dir.name) + .joinpath(daemon_manager.BLOCK_SIGN_FILE) + .exists() + ) + + def assert_run_simple_daemon_success(self): + damone_output_file = tempfile.NamedTemporaryFile( + dir=self.working_dir.name, delete=False + ) + dm = daemon_manager.DaemonManager( + TEST_BINARY_FILE, + daemon_target=simple_daemon, + daemon_args=(damone_output_file.name,), + ) + dm.start() + 
dm.monitor_daemon(interval=1) + + # Verifies the expected pid file is created. + expected_pid_file_path = pathlib.Path(self.working_dir.name).joinpath( + 'edit_monitor', TEST_PID_FILE_PATH + ) + self.assertTrue(expected_pid_file_path.exists()) + + # Verify the daemon process is executed successfully. + with open(damone_output_file.name, 'r') as f: + contents = f.read() + self.assertEqual(contents, 'running daemon target') + + def assert_no_subprocess_running(self): + child_pids = self._get_child_processes(os.getpid()) + for child_pid in child_pids: + self.assertFalse( + self._is_process_alive(child_pid), f'process {child_pid} still alive' + ) + + def _get_child_processes(self, parent_pid: int) -> list[int]: + try: + output = subprocess.check_output( + ['ps', '-o', 'pid,ppid', '--no-headers'], text=True + ) + + child_processes = [] + for line in output.splitlines(): + pid, ppid = line.split() + if int(ppid) == parent_pid: + child_processes.append(int(pid)) + return child_processes + except subprocess.CalledProcessError as e: + self.fail(f'failed to get child process, error: {e}') + + def _is_process_alive(self, pid: int) -> bool: + try: + output = subprocess.check_output( + ['ps', '-p', str(pid), '-o', 'state='], text=True + ).strip() + state = output.split()[0] + return state != 'Z' # Check if the state is not 'Z' (zombie) + except subprocess.CalledProcessError: + return False + + def _cleanup_child_processes(self): + child_pids = self._get_child_processes(os.getpid()) + for child_pid in child_pids: + try: + os.kill(child_pid, signal.SIGKILL) + except ProcessLookupError: + # process already terminated + pass + + def _create_fake_deamon_process( + self, name: str = TEST_PID_FILE_PATH + ) -> multiprocessing.Process: + # Create a long running subprocess + p = multiprocessing.Process(target=long_running_daemon) + p.start() + + # Create the pidfile with the subprocess pid + pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath( + 'edit_monitor' + ) + pid_file_path_dir.mkdir(parents=True, exist_ok=True) + with open(pid_file_path_dir.joinpath(name), 'w') as f: + f.write(str(p.pid)) + return p + + def _assert_error_event_logged(self, fake_cclient, error_type): + error_events = fake_cclient.get_sent_events() + self.assertEquals(len(error_events), 1) + self.assertEquals( + edit_event_pb2.EditEvent.FromString( + error_events[0].source_extension + ).edit_monitor_error_event.error_type, + error_type, + ) + + +class FakeClearcutClient: + + def __init__(self): + self.pending_log_events = [] + self.sent_log_event = [] + + def log(self, log_event): + self.pending_log_events.append(log_event) + + def flush_events(self): + self.sent_log_event.extend(self.pending_log_events) + self.pending_log_events.clear() + + def get_sent_events(self): + return self.sent_log_event + self.pending_log_events + + +if __name__ == '__main__': + unittest.main() diff --git a/tools/edit_monitor/edit_monitor.py b/tools/edit_monitor/edit_monitor.py new file mode 100644 index 0000000000..ab528e870f --- /dev/null +++ b/tools/edit_monitor/edit_monitor.py @@ -0,0 +1,220 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import getpass +import logging +import multiprocessing.connection +import os +import pathlib +import platform +import threading +import time + +from atest.metrics import clearcut_client +from atest.proto import clientanalytics_pb2 +from proto import edit_event_pb2 +from watchdog.events import FileSystemEvent +from watchdog.events import PatternMatchingEventHandler +from watchdog.observers import Observer + +# Enum of the Clearcut log source defined under +# /google3/wireless/android/play/playlog/proto/log_source_enum.proto +LOG_SOURCE = 2524 +DEFAULT_FLUSH_INTERVAL_SECONDS = 5 +DEFAULT_SINGLE_EVENTS_SIZE_THRESHOLD = 100 + + +class ClearcutEventHandler(PatternMatchingEventHandler): + + def __init__( + self, + path: str, + flush_interval_sec: int, + single_events_size_threshold: int, + is_dry_run: bool = False, + cclient: clearcut_client.Clearcut | None = None, + ): + + super().__init__(patterns=["*"], ignore_directories=True) + self.root_monitoring_path = path + self.flush_interval_sec = flush_interval_sec + self.single_events_size_threshold = single_events_size_threshold + self.is_dry_run = is_dry_run + self.cclient = cclient or clearcut_client.Clearcut(LOG_SOURCE) + + self.user_name = getpass.getuser() + self.host_name = platform.node() + self.source_root = os.environ.get("ANDROID_BUILD_TOP", "") + + self.pending_events = [] + self._scheduled_log_thread = None + self._pending_events_lock = threading.Lock() + + def on_moved(self, event: FileSystemEvent): + self._log_edit_event(event, edit_event_pb2.EditEvent.MOVE) + + def on_created(self, event: FileSystemEvent): + self._log_edit_event(event, edit_event_pb2.EditEvent.CREATE) + + def on_deleted(self, event: FileSystemEvent): + self._log_edit_event(event, edit_event_pb2.EditEvent.DELETE) + + def on_modified(self, event: FileSystemEvent): + self._log_edit_event(event, edit_event_pb2.EditEvent.MODIFY) + + def flushall(self): + logging.info("flushing all pending events.") + if self._scheduled_log_thread: + logging.info("canceling log thread") + self._scheduled_log_thread.cancel() + self._scheduled_log_thread = None + + self._log_clearcut_events() + self.cclient.flush_events() + + def _log_edit_event( + self, event: FileSystemEvent, edit_type: edit_event_pb2.EditEvent.EditType + ): + try: + event_time = time.time() + + if self._is_hidden_file(pathlib.Path(event.src_path)): + logging.debug("ignore hidden file: %s.", event.src_path) + return + + if not self._is_under_git_project(pathlib.Path(event.src_path)): + logging.debug( + "ignore file %s which does not belong to a git project", + event.src_path, + ) + return + + logging.info("%s: %s", event.event_type, event.src_path) + + event_proto = edit_event_pb2.EditEvent( + user_name=self.user_name, + host_name=self.host_name, + source_root=self.source_root, + ) + event_proto.single_edit_event.CopyFrom( + edit_event_pb2.EditEvent.SingleEditEvent( + file_path=event.src_path, edit_type=edit_type + ) + ) + with self._pending_events_lock: + self.pending_events.append((event_proto, event_time)) + if not self._scheduled_log_thread: + logging.debug( + "Scheduling thread to run in %d seconds", 
self.flush_interval_sec + ) + self._scheduled_log_thread = threading.Timer( + self.flush_interval_sec, self._log_clearcut_events + ) + self._scheduled_log_thread.start() + + except Exception: + logging.exception("Failed to log edit event.") + + def _is_hidden_file(self, file_path: pathlib.Path) -> bool: + return any( + part.startswith(".") + for part in file_path.relative_to(self.root_monitoring_path).parts + ) + + def _is_under_git_project(self, file_path: pathlib.Path) -> bool: + root_path = pathlib.Path(self.root_monitoring_path).resolve() + return any( + root_path.joinpath(dir).joinpath('.git').exists() + for dir in file_path.relative_to(root_path).parents + ) + + def _log_clearcut_events(self): + with self._pending_events_lock: + self._scheduled_log_thread = None + edit_events = self.pending_events + self.pending_events = [] + + pending_events_size = len(edit_events) + if pending_events_size > self.single_events_size_threshold: + logging.info( + "got %d events in %d seconds, sending aggregated events instead", + pending_events_size, + self.flush_interval_sec, + ) + aggregated_event_time = edit_events[0][1] + aggregated_event_proto = edit_event_pb2.EditEvent( + user_name=self.user_name, + host_name=self.host_name, + source_root=self.source_root, + ) + aggregated_event_proto.aggregated_edit_event.CopyFrom( + edit_event_pb2.EditEvent.AggregatedEditEvent( + num_edits=pending_events_size + ) + ) + edit_events = [(aggregated_event_proto, aggregated_event_time)] + + if self.is_dry_run: + logging.info("Sent %d edit events in dry run.", len(edit_events)) + return + + for event_proto, event_time in edit_events: + log_event = clientanalytics_pb2.LogEvent( + event_time_ms=int(event_time * 1000), + source_extension=event_proto.SerializeToString(), + ) + self.cclient.log(log_event) + + logging.info("sent %d edit events", len(edit_events)) + + +def start( + path: str, + is_dry_run: bool = False, + flush_interval_sec: int = DEFAULT_FLUSH_INTERVAL_SECONDS, + single_events_size_threshold: int = DEFAULT_SINGLE_EVENTS_SIZE_THRESHOLD, + cclient: clearcut_client.Clearcut | None = None, + pipe_sender: multiprocessing.connection.Connection | None = None, +): + """Method to start the edit monitor. + + This is the entry point to start the edit monitor as a subprocess of + the daemon manager. + + params: + path: The root path to monitor + cclient: The clearcut client to send the edit logs. + conn: the sender of the pipe to communicate with the deamon manager. + """ + event_handler = ClearcutEventHandler( + path, flush_interval_sec, single_events_size_threshold, is_dry_run, cclient) + observer = Observer() + + logging.info("Starting observer on path %s.", path) + observer.schedule(event_handler, path, recursive=True) + observer.start() + logging.info("Observer started.") + if pipe_sender: + pipe_sender.send("Observer started.") + + try: + while True: + time.sleep(1) + finally: + event_handler.flushall() + observer.stop() + observer.join() + if pipe_sender: + pipe_sender.close() diff --git a/tools/edit_monitor/edit_monitor_integration_test.py b/tools/edit_monitor/edit_monitor_integration_test.py new file mode 100644 index 0000000000..f39b93667d --- /dev/null +++ b/tools/edit_monitor/edit_monitor_integration_test.py @@ -0,0 +1,169 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Integration tests for Edit Monitor.""" + +import glob +from importlib import resources +import logging +import os +import pathlib +import shutil +import signal +import subprocess +import sys +import tempfile +import time +import unittest +from unittest import mock + + +class EditMonitorIntegrationTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + super().setUpClass() + # Configure to print logging to stdout. + logging.basicConfig(filename=None, level=logging.DEBUG) + console = logging.StreamHandler(sys.stdout) + logging.getLogger("").addHandler(console) + + def setUp(self): + super().setUp() + self.working_dir = tempfile.TemporaryDirectory() + self.root_monitoring_path = pathlib.Path(self.working_dir.name).joinpath( + "files" + ) + self.root_monitoring_path.mkdir() + self.edit_monitor_binary_path = self._import_executable("edit_monitor") + self.patch = mock.patch.dict( + os.environ, {"ENABLE_ANDROID_EDIT_MONITOR": "true"} + ) + self.patch.start() + + def tearDown(self): + self.patch.stop() + self.working_dir.cleanup() + super().tearDown() + + def test_log_single_edit_event_success(self): + p = self._start_edit_monitor_process() + + # Create the .git file under the monitoring dir. + self.root_monitoring_path.joinpath(".git").touch() + + # Create and modify a file. + test_file = self.root_monitoring_path.joinpath("test.txt") + with open(test_file, "w") as f: + f.write("something") + + # Move the file. + test_file_moved = self.root_monitoring_path.joinpath("new_test.txt") + test_file.rename(test_file_moved) + + # Delete the file. + test_file_moved.unlink() + + # Give some time for the edit monitor to receive the edit event. + time.sleep(1) + # Stop the edit monitor and flush all events. + os.kill(p.pid, signal.SIGINT) + p.communicate() + + self.assertEqual(self._get_logged_events_num(), 4) + + def test_start_multiple_edit_monitor_only_one_started(self): + p1 = self._start_edit_monitor_process(wait_for_observer_start=False) + p2 = self._start_edit_monitor_process(wait_for_observer_start=False) + p3 = self._start_edit_monitor_process(wait_for_observer_start=False) + + live_processes = self._get_live_processes([p1, p2, p3]) + + # Cleanup all live processes. 
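# (SIGINT rather than SIGKILL, so the surviving monitor can run its normal
#  shutdown path and flush pending events before exiting.)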
+ for p in live_processes: + os.kill(p.pid, signal.SIGINT) + p.communicate() + + self.assertEqual(len(live_processes), 1) + + def _start_edit_monitor_process(self, wait_for_observer_start=True): + command = f""" + export TMPDIR="{self.working_dir.name}" + {self.edit_monitor_binary_path} --path={self.root_monitoring_path} --dry_run""" + p = subprocess.Popen( + command, + shell=True, + text=True, + start_new_session=True, + executable="/bin/bash", + ) + if wait_for_observer_start: + self._wait_for_observer_start(time_out=5) + + return p + + def _wait_for_observer_start(self, time_out): + start_time = time.time() + + while time.time() < start_time + time_out: + log_files = glob.glob(self.working_dir.name + "/edit_monitor_*/*.log") + if log_files: + with open(log_files[0], "r") as f: + for line in f: + logging.debug("initial log: %s", line) + if line.rstrip("\n").endswith("Observer started."): + return + else: + time.sleep(1) + + self.fail(f"Observer not started in {time_out} seconds.") + + def _get_logged_events_num(self): + log_files = glob.glob(self.working_dir.name + "/edit_monitor_*/*.log") + self.assertEqual(len(log_files), 1) + + with open(log_files[0], "r") as f: + for line in f: + logging.debug("complete log: %s", line) + if line.rstrip("\n").endswith("in dry run."): + return int(line.split(":")[-1].split(" ")[2]) + + return 0 + + def _get_live_processes(self, processes): + live_processes = [] + for p in processes: + try: + p.wait(timeout=5) + except subprocess.TimeoutExpired as e: + live_processes.append(p) + logging.info("process: %d still alive.", p.pid) + else: + logging.info("process: %d stopped.", p.pid) + return live_processes + + def _import_executable(self, executable_name: str) -> pathlib.Path: + binary_dir = pathlib.Path(self.working_dir.name).joinpath("binary") + binary_dir.mkdir() + executable_path = binary_dir.joinpath(executable_name) + with resources.as_file( + resources.files("testdata").joinpath(executable_name) + ) as binary: + shutil.copy(binary, executable_path) + executable_path.chmod(0o755) + return executable_path + + +if __name__ == "__main__": + unittest.main() diff --git a/tools/edit_monitor/edit_monitor_test.py b/tools/edit_monitor/edit_monitor_test.py new file mode 100644 index 0000000000..64a3871b22 --- /dev/null +++ b/tools/edit_monitor/edit_monitor_test.py @@ -0,0 +1,301 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unittests for Edit Monitor.""" + +import logging +import multiprocessing +import os +import pathlib +import signal +import sys +import tempfile +import time +import unittest + +from atest.proto import clientanalytics_pb2 +from edit_monitor import edit_monitor +from proto import edit_event_pb2 + + +class EditMonitorTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + super().setUpClass() + # Configure to print logging to stdout. 
+ logging.basicConfig(filename=None, level=logging.DEBUG) + console = logging.StreamHandler(sys.stdout) + logging.getLogger('').addHandler(console) + + def setUp(self): + super().setUp() + self.working_dir = tempfile.TemporaryDirectory() + self.root_monitoring_path = pathlib.Path(self.working_dir.name).joinpath( + 'files' + ) + self.root_monitoring_path.mkdir() + self.log_event_dir = pathlib.Path(self.working_dir.name).joinpath('logs') + self.log_event_dir.mkdir() + + def tearDown(self): + self.working_dir.cleanup() + super().tearDown() + + def test_log_single_edit_event_success(self): + # Create the .git file under the monitoring dir. + self.root_monitoring_path.joinpath('.git').touch() + fake_cclient = FakeClearcutClient( + log_output_file=self.log_event_dir.joinpath('logs.output') + ) + p = self._start_test_edit_monitor_process(fake_cclient) + + # Create and modify a file. + test_file = self.root_monitoring_path.joinpath('test.txt') + with open(test_file, 'w') as f: + f.write('something') + # Move the file. + test_file_moved = self.root_monitoring_path.joinpath('new_test.txt') + test_file.rename(test_file_moved) + # Delete the file. + test_file_moved.unlink() + # Give some time for the edit monitor to receive the edit event. + time.sleep(1) + # Stop the edit monitor and flush all events. + os.kill(p.pid, signal.SIGINT) + p.join() + + logged_events = self._get_logged_events() + self.assertEqual(len(logged_events), 4) + expected_create_event = edit_event_pb2.EditEvent.SingleEditEvent( + file_path=str( + self.root_monitoring_path.joinpath('test.txt').resolve() + ), + edit_type=edit_event_pb2.EditEvent.CREATE, + ) + expected_modify_event = edit_event_pb2.EditEvent.SingleEditEvent( + file_path=str( + self.root_monitoring_path.joinpath('test.txt').resolve() + ), + edit_type=edit_event_pb2.EditEvent.MODIFY, + ) + expected_move_event = edit_event_pb2.EditEvent.SingleEditEvent( + file_path=str( + self.root_monitoring_path.joinpath('test.txt').resolve() + ), + edit_type=edit_event_pb2.EditEvent.MOVE, + ) + expected_delete_event = edit_event_pb2.EditEvent.SingleEditEvent( + file_path=str( + self.root_monitoring_path.joinpath('new_test.txt').resolve() + ), + edit_type=edit_event_pb2.EditEvent.DELETE, + ) + self.assertEqual( + expected_create_event, + edit_event_pb2.EditEvent.FromString( + logged_events[0].source_extension + ).single_edit_event, + ) + self.assertEqual( + expected_modify_event, + edit_event_pb2.EditEvent.FromString( + logged_events[1].source_extension + ).single_edit_event, + ) + self.assertEqual( + expected_move_event, + edit_event_pb2.EditEvent.FromString( + logged_events[2].source_extension + ).single_edit_event, + ) + self.assertEqual( + expected_delete_event, + edit_event_pb2.EditEvent.FromString( + logged_events[3].source_extension + ).single_edit_event, + ) + + + def test_log_aggregated_edit_event_success(self): + # Create the .git file under the monitoring dir. + self.root_monitoring_path.joinpath('.git').touch() + fake_cclient = FakeClearcutClient( + log_output_file=self.log_event_dir.joinpath('logs.output') + ) + p = self._start_test_edit_monitor_process(fake_cclient) + + # Create 6 test files + for i in range(6): + test_file = self.root_monitoring_path.joinpath('test_' + str(i)) + test_file.touch() + + # Give some time for the edit monitor to receive the edit event. + time.sleep(1) + # Stop the edit monitor and flush all events. 
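# (edit_monitor.start flushes pending events in its finally block when the
#  subprocess is interrupted, so the SIGINT below is what causes the fake
#  client to write out the events that the assertions read back.)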
+ os.kill(p.pid, signal.SIGINT) + p.join() + + logged_events = self._get_logged_events() + self.assertEqual(len(logged_events), 1) + + expected_aggregated_edit_event = ( + edit_event_pb2.EditEvent.AggregatedEditEvent( + num_edits=6, + ) + ) + + self.assertEqual( + expected_aggregated_edit_event, + edit_event_pb2.EditEvent.FromString( + logged_events[0].source_extension + ).aggregated_edit_event, + ) + + def test_do_not_log_edit_event_for_directory_change(self): + # Create the .git file under the monitoring dir. + self.root_monitoring_path.joinpath('.git').touch() + fake_cclient = FakeClearcutClient( + log_output_file=self.log_event_dir.joinpath('logs.output') + ) + p = self._start_test_edit_monitor_process(fake_cclient) + + # Create a sub directory + self.root_monitoring_path.joinpath('test_dir').mkdir() + # Give some time for the edit monitor to receive the edit event. + time.sleep(1) + # Stop the edit monitor and flush all events. + os.kill(p.pid, signal.SIGINT) + p.join() + + logged_events = self._get_logged_events() + self.assertEqual(len(logged_events), 0) + + def test_do_not_log_edit_event_for_hidden_file(self): + # Create the .git file under the monitoring dir. + self.root_monitoring_path.joinpath('.git').touch() + fake_cclient = FakeClearcutClient( + log_output_file=self.log_event_dir.joinpath('logs.output') + ) + p = self._start_test_edit_monitor_process(fake_cclient) + + # Create a hidden file. + self.root_monitoring_path.joinpath('.test.txt').touch() + # Create a hidden dir. + hidden_dir = self.root_monitoring_path.joinpath('.test') + hidden_dir.mkdir() + hidden_dir.joinpath('test.txt').touch() + # Give some time for the edit monitor to receive the edit event. + time.sleep(1) + # Stop the edit monitor and flush all events. + os.kill(p.pid, signal.SIGINT) + p.join() + + logged_events = self._get_logged_events() + self.assertEqual(len(logged_events), 0) + + def test_do_not_log_edit_event_for_non_git_project_file(self): + fake_cclient = FakeClearcutClient( + log_output_file=self.log_event_dir.joinpath('logs.output') + ) + p = self._start_test_edit_monitor_process(fake_cclient) + + # Create a file. + self.root_monitoring_path.joinpath('test.txt').touch() + # Create a file under a sub dir. + sub_dir = self.root_monitoring_path.joinpath('.test') + sub_dir.mkdir() + sub_dir.joinpath('test.txt').touch() + # Give some time for the edit monitor to receive the edit event. + time.sleep(1) + # Stop the edit monitor and flush all events. + os.kill(p.pid, signal.SIGINT) + p.join() + + logged_events = self._get_logged_events() + self.assertEqual(len(logged_events), 0) + + def test_log_edit_event_fail(self): + # Create the .git file under the monitoring dir. + self.root_monitoring_path.joinpath('.git').touch() + fake_cclient = FakeClearcutClient( + log_output_file=self.log_event_dir.joinpath('logs.output'), + raise_log_exception=True, + ) + p = self._start_test_edit_monitor_process(fake_cclient) + + # Create a file. + self.root_monitoring_path.joinpath('test.txt').touch() + # Give some time for the edit monitor to receive the edit event. + time.sleep(1) + # Stop the edit monitor and flush all events. + os.kill(p.pid, signal.SIGINT) + p.join() + + logged_events = self._get_logged_events() + self.assertEqual(len(logged_events), 0) + + def _start_test_edit_monitor_process( + self, cclient + ) -> multiprocessing.Process: + receiver, sender = multiprocessing.Pipe() + # Start edit monitor in a subprocess. 
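# (the positional args below map onto edit_monitor.start(path, is_dry_run,
#  flush_interval_sec, single_events_size_threshold, cclient, pipe_sender):
#  dry run disabled, a 0.5 s flush interval, and an aggregation threshold
#  of 5 events.)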
+    p = multiprocessing.Process(
+        target=edit_monitor.start,
+        args=(str(self.root_monitoring_path.resolve()), False, 0.5, 5, cclient, sender),
+    )
+    p.daemon = True
+    p.start()
+
+    # Wait until observer started.
+    received_data = receiver.recv()
+    self.assertEqual(received_data, 'Observer started.')
+
+    receiver.close()
+    return p
+
+  def _get_logged_events(self):
+    with open(self.log_event_dir.joinpath('logs.output'), 'rb') as f:
+      data = f.read()
+
+    return [
+        clientanalytics_pb2.LogEvent.FromString(record)
+        for record in data.split(b'\x00')
+        if record
+    ]
+
+
+class FakeClearcutClient:
+
+  def __init__(self, log_output_file, raise_log_exception=False):
+    self.pending_log_events = []
+    self.raise_log_exception = raise_log_exception
+    self.log_output_file = log_output_file
+
+  def log(self, log_event):
+    if self.raise_log_exception:
+      raise Exception('unknown exception')
+    self.pending_log_events.append(log_event)
+
+  def flush_events(self):
+    delimiter = b'\x00'  # Use a null byte as the delimiter
+    with open(self.log_output_file, 'wb') as f:
+      for log_event in self.pending_log_events:
+        f.write(log_event.SerializeToString() + delimiter)
+
+    self.pending_log_events.clear()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/edit_monitor/main.py b/tools/edit_monitor/main.py
new file mode 100644
index 0000000000..3c2d183aed
--- /dev/null
+++ b/tools/edit_monitor/main.py
@@ -0,0 +1,119 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import os
+import signal
+import sys
+import tempfile
+
+from edit_monitor import daemon_manager
+from edit_monitor import edit_monitor
+
+
+def create_arg_parser():
+  """Creates an instance of the default arg parser."""
+
+  parser = argparse.ArgumentParser(
+      description=(
+          'Monitors edits in Android source code and uploads the edit logs.'
+      ),
+      add_help=True,
+      formatter_class=argparse.RawDescriptionHelpFormatter,
+  )
+
+  parser.add_argument(
+      '--path',
+      type=str,
+      required=True,
+      help='Root path to monitor the edit events.',
+  )
+
+  parser.add_argument(
+      '--dry_run',
+      action='store_true',
+      help='Dry run the edit monitor. This starts the edit monitor process without actually sending the edit logs to clearcut.',
+  )
+
+  parser.add_argument(
+      '--force_cleanup',
+      action='store_true',
+      help=(
+          'Instead of starting a new edit monitor, force stop all existing edit'
+          ' monitors in the system. This option is only used in emergency cases'
+          ' when we want to prevent user damage by the edit monitor.'
+      ),
+  )
+
+  parser.add_argument(
+      '--verbose',
+      action='store_true',
+      help=(
+          'Log verbose info in the log file for debugging purposes.'
+      ),
+  )
+
+  return parser
+
+
+def configure_logging(verbose=False):
+  root_logging_dir = tempfile.mkdtemp(prefix='edit_monitor_')
+  _, log_path = tempfile.mkstemp(dir=root_logging_dir, suffix='.log')
+
+
+  log_fmt = '%(asctime)s.%(msecs)03d %(filename)s:%(lineno)s:%(levelname)s: %(message)s'
+  date_fmt = '%Y-%m-%d %H:%M:%S'
+  log_level = logging.DEBUG if verbose else logging.INFO
+
+  logging.basicConfig(
+      filename=log_path, level=log_level, format=log_fmt, datefmt=date_fmt
+  )
+  # Filter out logs from inotify_buffer to prevent log pollution.
+  logging.getLogger('watchdog.observers.inotify_buffer').addFilter(
+      lambda record: record.filename != 'inotify_buffer.py')
+  print(f'logging to file {log_path}')
+
+
+def term_signal_handler(_signal_number, _frame):
+  logging.info('Process %d received SIGTERM, Terminating...', os.getpid())
+  sys.exit(0)
+
+
+def main(argv: list[str]):
+  args = create_arg_parser().parse_args(argv[1:])
+  configure_logging(args.verbose)
+  if args.dry_run:
+    logging.info('This is a dry run.')
+  dm = daemon_manager.DaemonManager(
+      binary_path=argv[0],
+      daemon_target=edit_monitor.start,
+      daemon_args=(args.path, args.dry_run),
+  )
+
+  try:
+    if args.force_cleanup:
+      dm.cleanup()
+    else:
+      dm.start()
+      dm.monitor_daemon()
+  except Exception:
+    logging.exception('Unexpected exception raised when running the daemon.')
+  finally:
+    dm.stop()
+
+
+if __name__ == '__main__':
+  signal.signal(signal.SIGTERM, term_signal_handler)
+  main(sys.argv)
diff --git a/tools/edit_monitor/proto/edit_event.proto b/tools/edit_monitor/proto/edit_event.proto
new file mode 100644
index 0000000000..9acc2e754b
--- /dev/null
+++ b/tools/edit_monitor/proto/edit_event.proto
@@ -0,0 +1,57 @@
+syntax = "proto3";
+
+package tools.asuite.edit_monitor;
+
+message EditEvent {
+  enum EditType {
+    UNSUPPORTED_TYPE = 0;
+    CREATE = 1;
+    MODIFY = 2;
+    DELETE = 3;
+    MOVE = 4;
+  }
+
+  enum ErrorType {
+    UNKNOWN_ERROR = 0;
+    FAILED_TO_START_EDIT_MONITOR = 1;
+    FAILED_TO_STOP_EDIT_MONITOR = 2;
+    FAILED_TO_REBOOT_EDIT_MONITOR = 3;
+    KILLED_DUE_TO_EXCEEDED_MEMORY_USAGE = 4;
+    FORCE_CLEANUP = 5;
+    KILLED_DUE_TO_EXCEEDED_CPU_USAGE = 6;
+  }
+
+  // Event that logs a single edit
+  message SingleEditEvent {
+    // Full path of the file that was edited.
+    string file_path = 1;
+    // Type of the edit.
+    EditType edit_type = 2;
+  }
+
+  // Event that logs aggregated info for a set of edits.
+  message AggregatedEditEvent {
+    int32 num_edits = 1;
+  }
+
+  // Event that logs errors that happened in the edit monitor.
+  message EditMonitorErrorEvent {
+    ErrorType error_type = 1;
+  }
+
+  // ------------------------
+  // FIELDS FOR EditEvent
+  // ------------------------
+  // Internal user name.
+  string user_name = 1;
+  // The root of Android source.
+  string source_root = 2;
+  // Name of the host workstation.
+  string host_name = 3;
+
+  oneof event {
+    SingleEditEvent single_edit_event = 4;
+    AggregatedEditEvent aggregated_edit_event = 5;
+    EditMonitorErrorEvent edit_monitor_error_event = 6;
+  }
+}
diff --git a/tools/edit_monitor/utils.py b/tools/edit_monitor/utils.py
new file mode 100644
index 0000000000..b88949d300
--- /dev/null
+++ b/tools/edit_monitor/utils.py
@@ -0,0 +1,53 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hashlib
+import logging
+import os
+
+
+def is_feature_enabled(
+    feature_name: str,
+    user_name: str,
+    enable_flag: str = None,
+    rollout_percent: int = 100,
+) -> bool:
+  """Determine whether the given feature is enabled.
+
+  Whether a given feature is enabled or not depends on two inputs: 1) the
+  enable_flag that explicitly enables/disables the feature and 2) the
+  rollout_percent that controls the rollout percentage.
+
+  Args:
+    feature_name: name of the feature.
+    user_name: system user name.
+    enable_flag: name of the env var that enables/disables the feature
+      explicitly.
+    rollout_percent: the rollout percentage; an int between 0 and 100. The
+      feature is enabled for roughly this percentage of users.
+  """
+  if enable_flag:
+    if os.environ.get(enable_flag, "") == "false":
+      logging.info("feature: %s is disabled", feature_name)
+      return False
+
+    if os.environ.get(enable_flag, "") == "true":
+      logging.info("feature: %s is enabled", feature_name)
+      return True
+
+  hash_object = hashlib.sha256()
+  hash_object.update((user_name + feature_name).encode("utf-8"))
+  hash_number = int(hash_object.hexdigest(), 16) % 100
+
+  return hash_number < rollout_percent
diff --git a/tools/edit_monitor/utils_test.py b/tools/edit_monitor/utils_test.py
new file mode 100644
index 0000000000..1c30aa1acc
--- /dev/null
+++ b/tools/edit_monitor/utils_test.py
@@ -0,0 +1,71 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+ +"""Unittests for edit monitor utils.""" +import os +import unittest +from unittest import mock + +from edit_monitor import utils + +TEST_USER = 'test_user' +TEST_FEATURE = 'test_feature' +ENABLE_TEST_FEATURE_FLAG = 'ENABLE_TEST_FEATURE' +ROLLOUT_TEST_FEATURE_FLAG = 'ROLLOUT_TEST_FEATURE' + + +class EnableFeatureTest(unittest.TestCase): + + def test_feature_enabled_without_flag(self): + self.assertTrue(utils.is_feature_enabled(TEST_FEATURE, TEST_USER)) + + @mock.patch.dict(os.environ, {ENABLE_TEST_FEATURE_FLAG: 'false'}, clear=True) + def test_feature_disabled_with_flag(self): + self.assertFalse( + utils.is_feature_enabled( + TEST_FEATURE, TEST_USER, ENABLE_TEST_FEATURE_FLAG + ) + ) + + @mock.patch.dict(os.environ, {ENABLE_TEST_FEATURE_FLAG: 'true'}, clear=True) + def test_feature_enabled_with_flag(self): + self.assertTrue( + utils.is_feature_enabled( + TEST_FEATURE, TEST_USER, ENABLE_TEST_FEATURE_FLAG + ) + ) + + def test_feature_enabled_with_rollout_percentage(self): + self.assertTrue( + utils.is_feature_enabled( + TEST_FEATURE, + TEST_USER, + ENABLE_TEST_FEATURE_FLAG, + 90, + ) + ) + + def test_feature_disabled_with_rollout_percentage(self): + self.assertFalse( + utils.is_feature_enabled( + TEST_FEATURE, + TEST_USER, + ENABLE_TEST_FEATURE_FLAG, + 10, + ) + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/tools/event_log_tags.py b/tools/event_log_tags.py index a6ae9f193e..e859b6b3b1 100644 --- a/tools/event_log_tags.py +++ b/tools/event_log_tags.py @@ -14,21 +14,21 @@ """A module for reading and parsing event-log-tags files.""" +import dataclasses import re import sys +from typing import Optional -class Tag(object): - __slots__ = ["tagnum", "tagname", "description", "filename", "linenum"] - - def __init__(self, tagnum, tagname, description, filename, linenum): - self.tagnum = tagnum - self.tagname = tagname - self.description = description - self.filename = filename - self.linenum = linenum +@dataclasses.dataclass +class Tag: + tagnum: int + tagname: str + description: Optional[str] + filename: str + linenum: int -class TagFile(object): +class TagFile: """Read an input event-log-tags file.""" def AddError(self, msg, linenum=None): if linenum is None: @@ -76,14 +76,11 @@ class TagFile(object): self.options[parts[1]] = parts[2:] continue - if parts[0] == "?": - tag = None - else: - try: - tag = int(parts[0]) - except ValueError: - self.AddError("\"%s\" isn't an integer tag or '?'" % (parts[0],)) - continue + try: + tag = int(parts[0]) + except ValueError: + self.AddError("\"%s\" isn't an integer tag" % (parts[0],)) + continue tagname = parts[1] if len(parts) == 3: @@ -128,8 +125,8 @@ def WriteOutput(output_file, data): out = sys.stdout output_file = "<stdout>" else: - out = open(output_file, "wb") - out.write(str.encode(data)) + out = open(output_file, "w") + out.write(data) out.close() except (IOError, OSError) as e: print("failed to write %s: %s" % (output_file, e), file=sys.stderr) diff --git a/tools/filelistdiff/Android.bp b/tools/filelistdiff/Android.bp index 632ada348e..3826e50ff3 100644 --- a/tools/filelistdiff/Android.bp +++ b/tools/filelistdiff/Android.bp @@ -25,3 +25,8 @@ prebuilt_etc_host { name: "system_image_diff_allowlist", src: "allowlist", } + +prebuilt_etc_host { + name: "system_image_diff_allowlist_next", + src: "allowlist_next", +} diff --git a/tools/filelistdiff/OWNERS b/tools/filelistdiff/OWNERS new file mode 100644 index 0000000000..690fb178fc --- /dev/null +++ b/tools/filelistdiff/OWNERS @@ -0,0 +1 @@ +per-file allowlist = justinyun@google.com, 
jeongik@google.com, kiyoungkim@google.com, inseob@google.com diff --git a/tools/filelistdiff/allowlist b/tools/filelistdiff/allowlist index 943f9559f3..d8979d6983 100644 --- a/tools/filelistdiff/allowlist +++ b/tools/filelistdiff/allowlist @@ -1,87 +1,3 @@ -# Known diffs only in the KATI system image -etc/NOTICE.xml.gz -etc/compatconfig/TeleService-platform-compat-config.xml -etc/compatconfig/calendar-provider-compat-config.xml -etc/compatconfig/contacts-provider-platform-compat-config.xml -etc/compatconfig/documents-ui-compat-config.xml -etc/compatconfig/framework-location-compat-config.xml -etc/compatconfig/framework-platform-compat-config.xml -etc/compatconfig/icu4j-platform-compat-config.xml -etc/compatconfig/services-platform-compat-config.xml -etc/permissions/android.software.credentials.xml -etc/permissions/android.software.preview_sdk.xml -etc/permissions/android.software.webview.xml -etc/permissions/android.software.window_magnification.xml -etc/permissions/com.android.adservices.sdksandbox.xml -etc/security/otacerts.zip -etc/vintf/compatibility_matrix.202404.xml -etc/vintf/compatibility_matrix.202504.xml -etc/vintf/compatibility_matrix.5.xml -etc/vintf/compatibility_matrix.6.xml -etc/vintf/compatibility_matrix.7.xml -etc/vintf/compatibility_matrix.8.xml -etc/vintf/compatibility_matrix.device.xml -etc/vintf/manifest.xml -framework/boot-apache-xml.vdex -framework/boot-apache-xml.vdex.fsv_meta -framework/boot-bouncycastle.vdex -framework/boot-bouncycastle.vdex.fsv_meta -framework/boot-core-icu4j.vdex -framework/boot-core-icu4j.vdex.fsv_meta -framework/boot-core-libart.vdex -framework/boot-core-libart.vdex.fsv_meta -framework/boot-ext.vdex -framework/boot-ext.vdex.fsv_meta -framework/boot-framework-adservices.vdex -framework/boot-framework-adservices.vdex.fsv_meta -framework/boot-framework-graphics.vdex -framework/boot-framework-graphics.vdex.fsv_meta -framework/boot-framework-location.vdex -framework/boot-framework-location.vdex.fsv_meta -framework/boot-framework.vdex -framework/boot-framework.vdex.fsv_meta -framework/boot-ims-common.vdex -framework/boot-ims-common.vdex.fsv_meta -framework/boot-okhttp.vdex -framework/boot-okhttp.vdex.fsv_meta -framework/boot-telephony-common.vdex -framework/boot-telephony-common.vdex.fsv_meta -framework/boot-voip-common.vdex -framework/boot-voip-common.vdex.fsv_meta -framework/boot.vdex -framework/boot.vdex.fsv_meta -framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex -framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex.fsv_meta -framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex -framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex.fsv_meta -lib/aaudio-aidl-cpp.so -lib/android.hardware.biometrics.fingerprint@2.1.so -lib/android.hardware.radio.config@1.0.so -lib/android.hardware.radio.deprecated@1.0.so -lib/android.hardware.radio@1.0.so -lib/android.hardware.radio@1.1.so -lib/android.hardware.radio@1.2.so -lib/android.hardware.radio@1.3.so -lib/android.hardware.radio@1.4.so -lib/android.hardware.secure_element@1.0.so -lib/com.android.media.aaudio-aconfig-cc.so -lib/heapprofd_client.so -lib/heapprofd_client_api.so -lib/libaaudio.so -lib/libaaudio_internal.so -lib/libalarm_jni.so -lib/libamidi.so -lib/libcups.so -lib/libjni_deviceAsWebcam.so -lib/libprintspooler_jni.so -lib/libvendorsupport.so -lib/libwfds.so -lib/libyuv.so - -# b/351258461 -adb_keys +# Known diffs that are installed in either system image with the 
configuration +# b/353429422 init.environ.rc - -# Known diffs only in the Soong system image -lib/libhidcommand_jni.so -lib/libuinputcommand_jni.so
\ No newline at end of file diff --git a/tools/filelistdiff/allowlist_next b/tools/filelistdiff/allowlist_next new file mode 100644 index 0000000000..9cc7f34aec --- /dev/null +++ b/tools/filelistdiff/allowlist_next @@ -0,0 +1,3 @@ +# Allowlist only for the next release configuration. +# TODO(b/369678122): The list will be cleared when the trunk configurations are +# available to the next. diff --git a/tools/filelistdiff/file_list_diff.py b/tools/filelistdiff/file_list_diff.py index cdc5b2ee41..a6408e87cc 100644 --- a/tools/filelistdiff/file_list_diff.py +++ b/tools/filelistdiff/file_list_diff.py @@ -19,38 +19,54 @@ COLOR_WARNING = '\033[93m' COLOR_ERROR = '\033[91m' COLOR_NORMAL = '\033[0m' -def find_unique_items(kati_installed_files, soong_installed_files, allowlist, system_module_name): +def find_unique_items(kati_installed_files, soong_installed_files, system_module_name, allowlists): with open(kati_installed_files, 'r') as kati_list_file, \ - open(soong_installed_files, 'r') as soong_list_file, \ - open(allowlist, 'r') as allowlist_file: + open(soong_installed_files, 'r') as soong_list_file: kati_files = set(kati_list_file.read().split()) soong_files = set(soong_list_file.read().split()) - allowed_files = set(filter(lambda x: len(x), map(lambda x: x.lstrip().split('#',1)[0].rstrip() , allowlist_file.read().split('\n')))) + + allowed_files = set() + for allowlist in allowlists: + with open(allowlist, 'r') as allowlist_file: + allowed_files.update(set(filter(lambda x: len(x), map(lambda x: x.lstrip().split('#',1)[0].rstrip() , allowlist_file.read().split('\n'))))) def is_unknown_diff(filepath): - return not filepath in allowed_files + return filepath not in allowed_files + + def is_unnecessary_allowlist(filepath): + return filepath not in kati_files.symmetric_difference(soong_files) unique_in_kati = set(filter(is_unknown_diff, kati_files - soong_files)) unique_in_soong = set(filter(is_unknown_diff, soong_files - kati_files)) + unnecessary_allowlists = set(filter(is_unnecessary_allowlist, allowed_files)) if unique_in_kati: - print(f'{COLOR_ERROR}Please add following modules into system image module {system_module_name}.{COLOR_NORMAL}') - print(f'{COLOR_WARNING}KATI only module(s):{COLOR_NORMAL}') + print('') + print(f'{COLOR_ERROR}Missing required modules in {system_module_name} module.{COLOR_NORMAL}') + print(f'To resolve this issue, please add the modules to the Android.bp file for the {system_module_name} to install the following KATI only installed files.') + print(f'You can find the correct Android.bp file using the command "gomod {system_module_name}".') + print(f'{COLOR_WARNING}KATI only installed file(s):{COLOR_NORMAL}') for item in sorted(unique_in_kati): - print(item) + print(' '+item) if unique_in_soong: - if unique_in_kati: - print('') - - print(f'{COLOR_ERROR}Please add following modules into build/make/target/product/base_system.mk.{COLOR_NORMAL}') - print(f'{COLOR_WARNING}Soong only module(s):{COLOR_NORMAL}') + print('') + print(f'{COLOR_ERROR}Missing packages in base_system.mk.{COLOR_NORMAL}') + print('Please add packages into build/make/target/product/base_system.mk or build/make/tools/filelistdiff/allowlist to install or skip the following Soong only installed files.') + print(f'{COLOR_WARNING}Soong only installed file(s):{COLOR_NORMAL}') for item in sorted(unique_in_soong): - print(item) + print(' '+item) + + if unnecessary_allowlists: + print('') + print(f'{COLOR_ERROR}Unnecessary files in allowlist.{COLOR_NORMAL}') + print('Please remove these entries from 
build/make/tools/filelistdiff/allowlist') + for item in sorted(unnecessary_allowlists): + print(' '+item) + - if unique_in_kati or unique_in_soong: + if unique_in_kati or unique_in_soong or unnecessary_allowlists: print('') - print(f'{COLOR_ERROR}FAILED: System image from KATI and SOONG differs from installed file list.{COLOR_NORMAL}') sys.exit(1) @@ -59,8 +75,8 @@ if __name__ == '__main__': parser.add_argument('kati_installed_file_list') parser.add_argument('soong_installed_file_list') - parser.add_argument('allowlist') parser.add_argument('system_module_name') + parser.add_argument('--allowlists', nargs='*', default=[]) args = parser.parse_args() - find_unique_items(args.kati_installed_file_list, args.soong_installed_file_list, args.allowlist, args.system_module_name)
\ No newline at end of file + find_unique_items(args.kati_installed_file_list, args.soong_installed_file_list, args.system_module_name, args.allowlists)
\ No newline at end of file diff --git a/tools/finalization/README.md b/tools/finalization/README.md index d0aed69ae2..5e2aecd525 100644 --- a/tools/finalization/README.md +++ b/tools/finalization/README.md @@ -19,3 +19,8 @@ Performed in build targets in Finalization branches. ## Utility: [Full cleanup](./cleanup.sh). Remove all local changes and switch each project into head-less state. This is the best state to sync/rebase/finalize the branch. + +## Dry run: +[Full cleanup](./dryrun-cleanup.sh). Remove all local changes and switch each project into head-less state. Also removes "DryRun" branches. +[SDK](./dryrun-step-1.sh). Perform SDK finalization and upload the CLs to Gerrit. +[SDK and REL](./dryrun-step-1-and-2.sh). Perform SDK finalization, plus all necessary changes to switch configuration to REL, and upload the CLs to Gerrit.
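As a rough usage sketch of the dry-run scripts described above: they take no arguments and locate the source tree relative to their own path, so, assuming the working directory is the top of an Android checkout (an assumption, not something the scripts require), a dry run might be driven like this:

# Start from a clean tree; per dryrun-cleanup.sh this removes local changes,
# clobbers out/, and deletes the *-SDK-Finalization-DryRun* branches.
./build/make/tools/finalization/dryrun-cleanup.sh

# Dry-run SDK finalization only and upload the resulting CLs to Gerrit.
./build/make/tools/finalization/dryrun-step-1.sh

# Alternatively, dry-run SDK finalization plus the switch to REL in one pass.
# ./build/make/tools/finalization/dryrun-step-1-and-2.sh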
\ No newline at end of file diff --git a/tools/finalization/build-step-0-and-m.sh b/tools/finalization/build-step-0-and-m.sh new file mode 100755 index 0000000000..484380045e --- /dev/null +++ b/tools/finalization/build-step-0-and-m.sh @@ -0,0 +1,20 @@ + +#!/bin/bash +# Copyright 2024 Google Inc. All rights reserved. +set -ex +function help() { + echo "Finalize VINTF and build a target for test." + echo "usage: $(basename "$0") target [goals...]" +} +function finalize_main_step0_and_m() { + if [ $# == 0 ] ; then + help + exit 1 + fi; + local top="$(dirname "$0")"/../../../.. + source $top/build/make/tools/finalization/build-step-0.sh + local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=$1 TARGET_RELEASE=fina_0 TARGET_BUILD_VARIANT=userdebug" + # This command tests the release state for AIDL. + AIDL_FROZEN_REL=true $m ${@:2} +} +finalize_main_step0_and_m $@ diff --git a/tools/finalization/build-step-0.sh b/tools/finalization/build-step-0.sh index f81b720b2d..8826b35c0f 100755 --- a/tools/finalization/build-step-0.sh +++ b/tools/finalization/build-step-0.sh @@ -7,11 +7,26 @@ function finalize_main_step0() { local top="$(dirname "$0")"/../../../.. source $top/build/make/tools/finalization/environment.sh + local need_vintf_finalize=false if [ "$FINAL_STATE" = "unfinalized" ] ; then - # VINTF finalization + need_vintf_finalize=true + else + # build-step-0.sh tests the vintf finalization step (step-0) when the + # FINAL_BOARD_API_LEVEL is the same as the RELEASE_BOARD_API_LEVEL; and + # RELEASE_BOARD_API_LEVEL_FROZEN is not true from the fina_0 configuration. + # The FINAL_BOARD_API_LEVEL must be the next vendor API level to be finalized. + local board_api_level_vars=$(TARGET_RELEASE=fina_0 $top/build/soong/soong_ui.bash --dumpvars-mode -vars "RELEASE_BOARD_API_LEVEL_FROZEN RELEASE_BOARD_API_LEVEL") + local target_board_api_level_vars="RELEASE_BOARD_API_LEVEL_FROZEN='' +RELEASE_BOARD_API_LEVEL='$FINAL_BOARD_API_LEVEL'" + if [ "$board_api_level_vars" = "$target_board_api_level_vars" ] ; then + echo The target is a finalization candidate. + need_vintf_finalize=true + fi; + fi; + + if [ "$need_vintf_finalize" = true ] ; then # VINTF finalization source $top/build/make/tools/finalization/finalize-vintf-resources.sh fi; } finalize_main_step0 - diff --git a/tools/finalization/build-step-1-and-m.sh b/tools/finalization/build-step-1-and-m.sh index 0e7129f342..88bb3474d2 100755 --- a/tools/finalization/build-step-1-and-m.sh +++ b/tools/finalization/build-step-1-and-m.sh @@ -9,10 +9,9 @@ function finalize_main_step1_and_m() { local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" # This command tests: - # The release state for AIDL. # ABI difference between user and userdebug builds. # Resource/SDK finalization. - AIDL_FROZEN_REL=true $m + $m } finalize_main_step1_and_m diff --git a/tools/finalization/cleanup.sh b/tools/finalization/cleanup.sh index cd87b1d129..e2a059204e 100755 --- a/tools/finalization/cleanup.sh +++ b/tools/finalization/cleanup.sh @@ -14,8 +14,8 @@ function finalize_revert_local_changes_main() { repo forall -c '\ git checkout . 
; git revert --abort ; git clean -fdx ;\ - git checkout @ ; git branch fina-step1 -D ; git reset --hard; \ - repo start fina-step1 ; git checkout @ ; git b fina-step1 -D ;' + git checkout @ --detach ; git branch fina-step1 -D ; git reset --hard; \ + repo start fina-step1 ; git checkout @ --detach ; git b fina-step1 -D ;' } finalize_revert_local_changes_main diff --git a/tools/finalization/command-line-options.sh b/tools/finalization/command-line-options.sh new file mode 100644 index 0000000000..3a1e0491f3 --- /dev/null +++ b/tools/finalization/command-line-options.sh @@ -0,0 +1,9 @@ +ARGV=$(getopt --options '' --long dry-run -- "$@") +eval set -- "$ARGV" +while true; do + case "$1" in + --dry-run) repo_upload_dry_run_arg="--dry-run"; repo_branch="finalization-dry-run"; shift ;; + --) shift; break;; + *) break + esac +done diff --git a/tools/finalization/dryrun-cleanup.sh b/tools/finalization/dryrun-cleanup.sh new file mode 100755 index 0000000000..ddaffae44c --- /dev/null +++ b/tools/finalization/dryrun-cleanup.sh @@ -0,0 +1,25 @@ +#!/bin/bash +# Brings local repository to a remote head state. Also removes all dryrun branches. + +# set -ex + +function finalize_revert_local_changes_main() { + local top="$(dirname "$0")"/../../../.. + source $top/build/make/tools/finalization/environment.sh + + local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + + # remove the out folder + $m clobber + + repo selfupdate + + repo forall -c '\ + git checkout . ; git revert --abort ; git clean -fdx ;\ + git checkout @ --detach ; git branch fina-step1 -D ; git reset --hard; \ + repo start fina-step1 ; git checkout @ --detach ; git b fina-step1 -D ; \ + git b $FINAL_PLATFORM_CODENAME-SDK-Finalization-DryRun -D; \ + git b $FINAL_PLATFORM_CODENAME-SDK-Finalization-DryRun-Rel -D; ' +} + +finalize_revert_local_changes_main diff --git a/tools/finalization/dryrun-step-1-and-2.sh b/tools/finalization/dryrun-step-1-and-2.sh new file mode 100755 index 0000000000..f883bca673 --- /dev/null +++ b/tools/finalization/dryrun-step-1-and-2.sh @@ -0,0 +1,39 @@ +#!/bin/bash +# Script to perform 1st and 2nd step of Android Finalization, create CLs and upload to Gerrit. + +function commit_step_2_changes() { + repo forall -c '\ + if [[ $(git status --short) ]]; then + repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-DryRun-Rel" ; + git add -A . ; + git commit -m "$FINAL_PLATFORM_CODENAME/$FINAL_PLATFORM_SDK_VERSION is now REL" \ + -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization +Bug: $FINAL_BUG_ID +Test: build"; + + repo upload --cbr --no-verify -o nokeycheck -t -y . ; + fi' +} + +function finalize_step_2_main() { + local top="$(dirname "$0")"/../../../.. 
+ source $top/build/make/tools/finalization/environment.sh + + source $top/build/make/tools/finalization/finalize-sdk-resources.sh + + source $top/build/make/tools/finalization/localonly-steps.sh + + source $top/build/make/tools/finalization/finalize-sdk-rel.sh + + # move all changes to finalization branch/topic and upload to gerrit + commit_step_2_changes + + # build to confirm everything is OK + local m_next="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + $m_next + + local m_fina="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=fina_2 TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + $m_fina +} + +finalize_step_2_main diff --git a/tools/finalization/dryrun-step-1.sh b/tools/finalization/dryrun-step-1.sh new file mode 100755 index 0000000000..0f2bc635a0 --- /dev/null +++ b/tools/finalization/dryrun-step-1.sh @@ -0,0 +1,35 @@ +#!/bin/bash +# Script to perform a dry run of step 1 of Android Finalization, create CLs and upload to Gerrit. + +function commit_step_1_changes() { + repo forall -c '\ + if [[ $(git status --short) ]]; then + repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-DryRun" ; + git add -A . ; + git commit -m "$FINAL_PLATFORM_CODENAME is now $FINAL_PLATFORM_SDK_VERSION" \ + -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization +Bug: $FINAL_BUG_ID +Test: build"; + + repo upload --cbr --no-verify -o nokeycheck -t -y . ; + fi' +} + +function finalize_step_1_main() { + local top="$(dirname "$0")"/../../../.. + source $top/build/make/tools/finalization/environment.sh + + source $top/build/make/tools/finalization/finalize-sdk-resources.sh + + # move all changes to finalization branch/topic and upload to gerrit + commit_step_1_changes + + # build to confirm everything is OK + local m_next="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + $m_next + + local m_fina="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=fina_1 TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + $m_fina +} + +finalize_step_1_main diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh index 7961e8bc3e..0d3a9e130a 100755 --- a/tools/finalization/environment.sh +++ b/tools/finalization/environment.sh @@ -15,18 +15,22 @@ export FINAL_PLATFORM_VERSION='15' # We might or might not fix this in future, but for now let's keep it +1. export FINAL_PLATFORM_SDK_VERSION='35' # Feel free to randomize once in a while to detect buggy version detection code. -export FINAL_MAINLINE_EXTENSION='58' +export FINAL_MAINLINE_EXTENSION='13' # Options: # 'unfinalized' - branch is in development state, # 'vintf' - VINTF is finalized # 'sdk' - VINTF and SDK/API are finalized # 'rel' - branch is finalized, switched to REL -export FINAL_STATE='vintf' +export FINAL_STATE='rel' export BUILD_FROM_SOURCE_STUB=true # FINAL versions for VINTF # TODO(b/323985297): The version must match with that from the release configuration. # Instead of hardcoding the version here, read it from a release configuration. 
-export FINAL_BOARD_API_LEVEL='202404' +export FINAL_BOARD_API_LEVEL='202504' +export FINAL_CORRESPONDING_VERSION_LETTER='B' +export FINAL_CORRESPONDING_PLATFORM_VERSION='16' +export FINAL_NEXT_BOARD_API_LEVEL='202604' +export FINAL_NEXT_CORRESPONDING_VERSION_LETTER='C' diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh index 59fe28cb46..c49f974bcd 100755 --- a/tools/finalization/finalize-sdk-rel.sh +++ b/tools/finalization/finalize-sdk-rel.sh @@ -8,12 +8,6 @@ function revert_droidstubs_hack() { fi } -function revert_resources_sdk_int_fix() { - if grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then - patch --strip=1 --no-backup-if-mismatch --directory="$top/frameworks/base" --input=../../build/make/tools/finalization/frameworks_base.revert_resource_sdk_int.diff - fi -} - function apply_prerelease_sdk_hack() { if ! grep -q 'STOPSHIP: hack for the pre-release SDK' "$top/frameworks/base/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java" ; then patch --strip=1 --no-backup-if-mismatch --directory="$top/frameworks/base" --input=../../build/make/tools/finalization/frameworks_base.apply_hack.diff @@ -30,25 +24,18 @@ function finalize_sdk_rel() { # let the apps built with pre-release SDK parse apply_prerelease_sdk_hack - # in REL mode, resources would correctly set the resources_sdk_int, no fix required - revert_resources_sdk_int_fix - # cts - echo "$FINAL_PLATFORM_VERSION" > "$top/cts/tests/tests/os/assets/platform_versions.txt" + if ! grep -q "${FINAL_PLATFORM_VERSION}" "$top/cts/tests/tests/os/assets/platform_versions.txt" ; then + echo ${FINAL_PLATFORM_VERSION} >> "$top/cts/tests/tests/os/assets/platform_versions.txt" + fi if [ "$FINAL_PLATFORM_CODENAME" != "$CURRENT_PLATFORM_CODENAME" ]; then echo "$CURRENT_PLATFORM_CODENAME" >> "./cts/tests/tests/os/assets/platform_versions.txt" fi git -C "$top/cts" mv hostsidetests/theme/assets/${FINAL_PLATFORM_CODENAME} hostsidetests/theme/assets/${FINAL_PLATFORM_SDK_VERSION} # prebuilts/abi-dumps/platform - mkdir -p "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION" - cp -r "$top/prebuilts/abi-dumps/platform/current/64/" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION/" - - # TODO(b/309880485) - # uncomment and update - # prebuilts/abi-dumps/ndk - #mkdir -p "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION" - #cp -r "$top/prebuilts/abi-dumps/ndk/current/64/" "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION/" + "$top/build/soong/soong_ui.bash" --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug create_reference_dumps + ANDROID_BUILD_TOP="$top" "$top/out/host/linux-x86/bin/create_reference_dumps" -release next --build-variant userdebug --lib-variant APEX } finalize_sdk_rel diff --git a/tools/finalization/finalize-sdk-resources.sh b/tools/finalization/finalize-sdk-resources.sh index 596f803e56..10266ed1da 100755 --- a/tools/finalization/finalize-sdk-resources.sh +++ b/tools/finalization/finalize-sdk-resources.sh @@ -9,13 +9,6 @@ function apply_droidstubs_hack() { fi } -function apply_resources_sdk_int_fix() { - if ! 
grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then - local base_git_root="$(readlink -f $top/frameworks/base)" - patch --strip=1 --no-backup-if-mismatch --directory="$base_git_root" --input=../../build/make/tools/finalization/frameworks_base.apply_resource_sdk_int.diff - fi -} - function finalize_bionic_ndk() { # Adding __ANDROID_API_<>__. # If this hasn't done then it's not used and not really needed. Still, let's check and add this. @@ -41,7 +34,8 @@ function finalize_modules_utils() { echo " /** Checks if the device is running on a release version of Android $FINAL_PLATFORM_CODENAME or newer */ @ChecksSdkIntAtLeast(api = $FINAL_PLATFORM_SDK_VERSION /* BUILD_VERSION_CODES.$FINAL_PLATFORM_CODENAME */) public static boolean isAtLeast${FINAL_PLATFORM_CODENAME:0:1}() { - return SDK_INT >= $FINAL_PLATFORM_SDK_VERSION; + return SDK_INT >= $FINAL_PLATFORM_SDK_VERSION || + (SDK_INT == $(($FINAL_PLATFORM_SDK_VERSION - 1)) && isAtLeastPreReleaseCodename(\"$FINAL_PLATFORM_CODENAME\")); }" > "$tmpfile" local javaFuncRegex='\/\*\*[^{]*isAtLeast'"${shortCodename}"'() {[^{}]*}' @@ -55,7 +49,11 @@ function finalize_modules_utils() { d}' $javaSdkLevel echo "// Checks if the device is running on release version of Android ${FINAL_PLATFORM_CODENAME:0:1} or newer. -inline bool IsAtLeast${FINAL_PLATFORM_CODENAME:0:1}() { return android_get_device_api_level() >= $FINAL_PLATFORM_SDK_VERSION; }" > "$tmpfile" +inline bool IsAtLeast${FINAL_PLATFORM_CODENAME:0:1}() { + return android_get_device_api_level() >= $FINAL_PLATFORM_SDK_VERSION || + (android_get_device_api_level() == $(($FINAL_PLATFORM_SDK_VERSION - 1)) && + detail::IsAtLeastPreReleaseCodename(\"$FINAL_PLATFORM_CODENAME\")); +}" > "$tmpfile" local cppFuncRegex='\/\/[^{]*IsAtLeast'"${shortCodename}"'() {[^{}]*}' local cppFuncReplace="N;N;N;N;N;N; s/$cppFuncRegex/$methodPlaceholder/; /$cppFuncRegex/!{P;D};" @@ -123,14 +121,18 @@ function finalize_sdk_resources() { sed -i -e 's/Pkg\.Revision.*/Pkg\.Revision=${PLATFORM_SDK_VERSION}.0.0/g' $build_tools_source # build/soong - local codename_version="\"${FINAL_PLATFORM_CODENAME}\": ${FINAL_PLATFORM_SDK_VERSION}" + local codename_version="\"${FINAL_PLATFORM_CODENAME}\": ${FINAL_PLATFORM_SDK_VERSION}" if ! grep -q "$codename_version" "$top/build/soong/android/api_levels.go" ; then sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\\t\t$codename_version," "$top/build/soong/android/api_levels.go" fi # cts - echo ${FINAL_PLATFORM_VERSION} > "$top/cts/tests/tests/os/assets/platform_releases.txt" - sed -i -e "s/EXPECTED_SDK = $((${FINAL_PLATFORM_SDK_VERSION}-1))/EXPECTED_SDK = ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/cts/tests/tests/os/src/android/os/cts/BuildVersionTest.java" + if ! grep -q "${FINAL_PLATFORM_VERSION}" "$top/cts/tests/tests/os/assets/platform_releases.txt" ; then + echo ${FINAL_PLATFORM_VERSION} >> "$top/cts/tests/tests/os/assets/platform_releases.txt" + fi + if ! 
grep -q "$((${FINAL_PLATFORM_SDK_VERSION}-1)), ${FINAL_PLATFORM_VERSION}" "$top/cts/tests/tests/os/src/android/os/cts/BuildVersionTest.java" ; then + sed -i -e "s/.*EXPECTED_SDKS = List.of(.*$((${FINAL_PLATFORM_SDK_VERSION}-1))/&, $FINAL_PLATFORM_SDK_VERSION/" "$top/cts/tests/tests/os/src/android/os/cts/BuildVersionTest.java" + fi # libcore sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/libcore/dalvik/src/main/java/dalvik/annotation/compat/VersionCodes.java" @@ -153,7 +155,6 @@ function finalize_sdk_resources() { # frameworks/base sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/frameworks/base/core/java/android/os/Build.java" - apply_resources_sdk_int_fix sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\ SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt/SdkConstants.h" sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\ SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt2/SdkConstants.h" diff --git a/tools/finalization/finalize-vintf-resources.sh b/tools/finalization/finalize-vintf-resources.sh index a55d8e1a1a..6f1a6f646e 100755 --- a/tools/finalization/finalize-vintf-resources.sh +++ b/tools/finalization/finalize-vintf-resources.sh @@ -16,8 +16,6 @@ function finalize_vintf_resources() { export TARGET_RELEASE=fina_0 export TARGET_PRODUCT=aosp_arm64 - # TODO(b/314010764): finalize LL_NDK - # system/sepolicy "$top/system/sepolicy/tools/finalize-vintf-resources.sh" "$top" "$FINAL_BOARD_API_LEVEL" @@ -25,24 +23,28 @@ function finalize_vintf_resources() { # pre-finalization build target (trunk) local aidl_m="$top/build/soong/soong_ui.bash --make-mode" - AIDL_TRANSITIVE_FREEZE=true $aidl_m aidl-freeze-api + AIDL_TRANSITIVE_FREEZE=true $aidl_m aidl-freeze-api create_reference_dumps + + # Generate LLNDK ABI dumps + # This command depends on ANDROID_BUILD_TOP + "$ANDROID_HOST_OUT/bin/create_reference_dumps" -release "$TARGET_RELEASE" --build-variant "$TARGET_BUILD_VARIANT" --lib-variant LLNDK } function create_new_compat_matrix_and_kernel_configs() { # The compatibility matrix versions are bumped during vFRC # These will change every time we have a new vFRC - local CURRENT_COMPATIBILITY_MATRIX_LEVEL='202404' - local NEXT_COMPATIBILITY_MATRIX_LEVEL='202504' + local CURRENT_COMPATIBILITY_MATRIX_LEVEL="$FINAL_BOARD_API_LEVEL" + local NEXT_COMPATIBILITY_MATRIX_LEVEL="$FINAL_NEXT_BOARD_API_LEVEL" # The kernel configs need the letter of the Android release - local CURRENT_RELEASE_LETTER='v' - local NEXT_RELEASE_LETTER='w' + local CURRENT_RELEASE_LETTER="$FINAL_CORRESPONDING_VERSION_LETTER" + local NEXT_RELEASE_LETTER="$FINAL_NEXT_CORRESPONDING_VERSION_LETTER" # build the targets required before touching the Android.bp/Android.mk files local build_cmd="$top/build/soong/soong_ui.bash --make-mode" $build_cmd bpmodify - "$top/prebuilts/build-tools/path/linux-x86/python3" "$top/hardware/interfaces/compatibility_matrices/bump.py" "$CURRENT_COMPATIBILITY_MATRIX_LEVEL" "$NEXT_COMPATIBILITY_MATRIX_LEVEL" "$CURRENT_RELEASE_LETTER" "$NEXT_RELEASE_LETTER" + "$top/prebuilts/build-tools/path/linux-x86/python3" "$top/hardware/interfaces/compatibility_matrices/bump.py" "$CURRENT_COMPATIBILITY_MATRIX_LEVEL" "$NEXT_COMPATIBILITY_MATRIX_LEVEL" "$CURRENT_RELEASE_LETTER" "$NEXT_RELEASE_LETTER" "$FINAL_CORRESPONDING_PLATFORM_VERSION" # Freeze the current framework manifest file. 
This relies on the # aosp_cf_x86_64-trunk_staging build target to get the right manifest diff --git a/tools/finalization/frameworks_base.apply_resource_sdk_int.diff b/tools/finalization/frameworks_base.apply_resource_sdk_int.diff deleted file mode 100644 index f0576d0851..0000000000 --- a/tools/finalization/frameworks_base.apply_resource_sdk_int.diff +++ /dev/null @@ -1,24 +0,0 @@ -From cdb47fc90b8d6860ec1dc5efada1f9ccd471618b Mon Sep 17 00:00:00 2001 -From: Alex Buynytskyy <alexbuy@google.com> -Date: Tue, 11 Apr 2023 22:12:44 +0000 -Subject: [PATCH] Don't force +1 for resource resolution. - -Bug: 277674088 -Fixes: 277674088 -Test: boots, no crashes -Change-Id: I17e743a0f1cf6f98fddd40c358dea5a8b9cc7723 ---- - -diff --git a/core/java/android/os/Build.java b/core/java/android/os/Build.java -index eb47170..4d3e92b 100755 ---- a/core/java/android/os/Build.java -+++ b/core/java/android/os/Build.java -@@ -493,7 +493,7 @@ - * @hide - */ - @TestApi -- public static final int RESOURCES_SDK_INT = SDK_INT + ACTIVE_CODENAMES.length; -+ public static final int RESOURCES_SDK_INT = SDK_INT; - - /** - * The current lowest supported value of app target SDK. Applications targeting diff --git a/tools/finalization/frameworks_base.revert_resource_sdk_int.diff b/tools/finalization/frameworks_base.revert_resource_sdk_int.diff deleted file mode 100644 index 2ade499e20..0000000000 --- a/tools/finalization/frameworks_base.revert_resource_sdk_int.diff +++ /dev/null @@ -1,27 +0,0 @@ -From c7e460bb19071d867cd7ca04282ce42694f4f358 Mon Sep 17 00:00:00 2001 -From: Alex Buynytskyy <alexbuy@google.com> -Date: Wed, 12 Apr 2023 01:06:26 +0000 -Subject: [PATCH] Revert "Don't force +1 for resource resolution." - -It's not required for master. - -This reverts commit f1cb683988f81579a76ddbf9993848a4a06dd28c. - -Bug: 277674088 -Test: boots, no crashes -Change-Id: Ia1692548f26496fdc6f1e4f0557213c7996d6823 ---- - -diff --git a/core/java/android/os/Build.java b/core/java/android/os/Build.java -index 4d3e92b..eb47170 100755 ---- a/core/java/android/os/Build.java -+++ b/core/java/android/os/Build.java -@@ -493,7 +493,7 @@ - * @hide - */ - @TestApi -- public static final int RESOURCES_SDK_INT = SDK_INT; -+ public static final int RESOURCES_SDK_INT = SDK_INT + ACTIVE_CODENAMES.length; - - /** - * The current lowest supported value of app target SDK. Applications targeting diff --git a/tools/finalization/localonly-steps.sh b/tools/finalization/localonly-steps.sh index bebd563bea..94ee36836d 100755 --- a/tools/finalization/localonly-steps.sh +++ b/tools/finalization/localonly-steps.sh @@ -10,14 +10,15 @@ function finalize_locally() { local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_RELEASE=fina_1 TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist" # adb keys - $m adb - LOGNAME=android-eng HOSTNAME=google.com "$top/out/host/linux-x86/bin/adb" keygen "$top/vendor/google/security/adb/${FINAL_PLATFORM_VERSION}.adb_key" + # The keys are already generated for Android 15. Keeping the command (commented out) for future reference. + # $m adb + # LOGNAME=android-eng HOSTNAME=google.com "$top/out/host/linux-x86/bin/adb" keygen "$top/vendor/google/security/adb/${FINAL_PLATFORM_VERSION}.adb_key" # Build Platform SDKs. $top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=sdk TARGET_RELEASE=fina_1 TARGET_BUILD_VARIANT=userdebug sdk dist sdk_repo DIST_DIR=out/dist # Build Modules SDKs. 
- TARGET_RELEASE=fina_1 TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh" --build-release=latest + TARGET_RELEASE=fina_1 TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh" --build-release=next # Update prebuilts. "$top/prebuilts/build-tools/path/linux-x86/python3" -W ignore::DeprecationWarning "$top/prebuilts/sdk/update_prebuilts.py" --local_mode -f ${FINAL_PLATFORM_SDK_VERSION} -e ${FINAL_MAINLINE_EXTENSION} --bug 1 1 diff --git a/tools/finalization/step-0.sh b/tools/finalization/step-0.sh index e61c644de0..2087f6e670 100755 --- a/tools/finalization/step-0.sh +++ b/tools/finalization/step-0.sh @@ -9,19 +9,21 @@ function commit_step_0_changes() { set +e repo forall -c '\ if [[ $(git status --short) ]]; then - repo start "VINTF-$FINAL_BOARD_API_LEVEL-Finalization" ; + repo start "'$repo_branch'" ; git add -A . ; git commit -m "Vendor API level $FINAL_BOARD_API_LEVEL is now frozen" \ -m "Ignore-AOSP-First: VINTF $FINAL_BOARD_API_LEVEL Finalization Bug: $FINAL_BUG_ID Test: build"; - repo upload --cbr --no-verify -o nokeycheck -t -y . ; + repo upload '"$repo_upload_dry_run_arg"' --cbr --no-verify -o nokeycheck -t -y . ; fi' } function finalize_step_0_main() { local top="$(dirname "$0")"/../../../.. source $top/build/make/tools/finalization/environment.sh + local repo_branch="VINTF-$FINAL_BOARD_API_LEVEL-Finalization" + source $top/build/make/tools/finalization/command-line-options.sh local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" @@ -34,4 +36,4 @@ function finalize_step_0_main() { AIDL_FROZEN_REL=true $m } -finalize_step_0_main +finalize_step_0_main $@ diff --git a/tools/finalization/step-1.sh b/tools/finalization/step-1.sh index 0e483d5510..736d64110e 100755 --- a/tools/finalization/step-1.sh +++ b/tools/finalization/step-1.sh @@ -7,21 +7,21 @@ function commit_step_1_changes() { set +e repo forall -c '\ if [[ $(git status --short) ]]; then - repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ; + repo start "'$repo_branch'" ; git add -A . ; git commit -m "$FINAL_PLATFORM_CODENAME is now $FINAL_PLATFORM_SDK_VERSION and extension version $FINAL_MAINLINE_EXTENSION" \ -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization Bug: $FINAL_BUG_ID Test: build"; - repo upload --cbr --no-verify -o nokeycheck -t -y . ; + repo upload '"$repo_upload_dry_run_arg"' --cbr --no-verify -o nokeycheck -t -y . ; fi' } function finalize_step_1_main() { local top="$(dirname "$0")"/../../../.. 
source $top/build/make/tools/finalization/environment.sh - - local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + local repo_branch="$FINAL_PLATFORM_CODENAME-SDK-Finalization" + source $top/build/make/tools/finalization/command-line-options.sh source $top/build/make/tools/finalization/finalize-sdk-resources.sh @@ -29,7 +29,11 @@ function finalize_step_1_main() { commit_step_1_changes # build to confirm everything is OK - AIDL_FROZEN_REL=true $m + local m_next="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + $m_next + + local m_fina="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=fina_1 TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + $m_fina } -finalize_step_1_main +finalize_step_1_main $@ diff --git a/tools/finalization/step-2.sh b/tools/finalization/step-2.sh index 356cad023d..52e388722e 100755 --- a/tools/finalization/step-2.sh +++ b/tools/finalization/step-2.sh @@ -4,22 +4,22 @@ function commit_step_2_changes() { repo forall -c '\ if [[ $(git status --short) ]]; then - repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" ; + repo start "'$repo_branch'" ; git add -A . ; git commit -m "$FINAL_PLATFORM_CODENAME/$FINAL_PLATFORM_SDK_VERSION is now REL" \ -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization Bug: $FINAL_BUG_ID Test: build"; - repo upload --cbr --no-verify -o nokeycheck -t -y . ; + repo upload '"$repo_upload_dry_run_arg"' --cbr --no-verify -o nokeycheck -t -y . ; fi' } function finalize_step_2_main() { local top="$(dirname "$0")"/../../../.. source $top/build/make/tools/finalization/environment.sh - - local m="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + local repo_branch="$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" + source $top/build/make/tools/finalization/command-line-options.sh # prebuilts etc source $top/build/make/tools/finalization/finalize-sdk-rel.sh @@ -28,7 +28,11 @@ function finalize_step_2_main() { commit_step_2_changes # build to confirm everything is OK - AIDL_FROZEN_REL=true $m + local m_next="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=next TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + $m_next + + local m_fina="$top/build/soong/soong_ui.bash --make-mode TARGET_RELEASE=fina_2 TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug" + $m_fina } -finalize_step_2_main +finalize_step_2_main $@ diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp index 6aa528963d..a5b6fd0a4c 100644 --- a/tools/fs_config/Android.bp +++ b/tools/fs_config/Android.bp @@ -277,6 +277,7 @@ genrule_defaults { out: ["out"], } +// system genrule { name: "fs_config_dirs_system_gen", defaults: ["fs_config_defaults"], @@ -307,6 +308,7 @@ prebuilt_etc { src: ":fs_config_files_system_gen", } +// system_ext genrule { name: "fs_config_dirs_system_ext_gen", defaults: ["fs_config_defaults"], @@ -337,6 +339,7 @@ prebuilt_etc { system_ext_specific: true, } +// product genrule { name: "fs_config_dirs_product_gen", defaults: ["fs_config_defaults"], @@ -367,6 +370,7 @@ prebuilt_etc { product_specific: true, } +// vendor genrule { name: "fs_config_dirs_vendor_gen", defaults: ["fs_config_defaults"], @@ -397,6 +401,7 @@ prebuilt_etc { vendor: true, } +// odm genrule { name: "fs_config_dirs_odm_gen", defaults: ["fs_config_defaults"], @@ -427,4 +432,214 @@ prebuilt_etc { device_specific: true, } -// 
TODO(jiyong): add fs_config for oem, system_dlkm, vendor_dlkm, odm_dlkm partitions +// system_dlkm +genrule { + name: "fs_config_dirs_system_dlkm_gen", + defaults: ["fs_config_defaults"], + cmd: fs_config_cmd_dirs + + "--partition system_dlkm " + + "$(locations :target_fs_config_gen)", +} + +prebuilt_etc { + name: "fs_config_dirs_system_dlkm", + filename: "fs_config_dirs", + src: ":fs_config_dirs_system_dlkm_gen", + system_dlkm_specific: true, +} + +genrule { + name: "fs_config_files_system_dlkm_gen", + defaults: ["fs_config_defaults"], + cmd: fs_config_cmd_files + + "--partition system_dlkm " + + "$(locations :target_fs_config_gen)", +} + +prebuilt_etc { + name: "fs_config_files_system_dlkm", + filename: "fs_config_files", + src: ":fs_config_files_system_dlkm_gen", + system_dlkm_specific: true, +} + +// vendor_dlkm +genrule { + name: "fs_config_dirs_vendor_dlkm_gen", + defaults: ["fs_config_defaults"], + cmd: fs_config_cmd_dirs + + "--partition vendor_dlkm " + + "$(locations :target_fs_config_gen)", +} + +prebuilt_etc { + name: "fs_config_dirs_vendor_dlkm", + filename: "fs_config_dirs", + src: ":fs_config_dirs_vendor_dlkm_gen", + vendor_dlkm_specific: true, +} + +genrule { + name: "fs_config_files_vendor_dlkm_gen", + defaults: ["fs_config_defaults"], + cmd: fs_config_cmd_files + + "--partition vendor_dlkm " + + "$(locations :target_fs_config_gen)", +} + +prebuilt_etc { + name: "fs_config_files_vendor_dlkm", + filename: "fs_config_files", + src: ":fs_config_files_vendor_dlkm_gen", + vendor_dlkm_specific: true, +} + +// odm_dlkm +genrule { + name: "fs_config_dirs_odm_dlkm_gen", + defaults: ["fs_config_defaults"], + cmd: fs_config_cmd_dirs + + "--partition odm_dlkm " + + "$(locations :target_fs_config_gen)", +} + +prebuilt_etc { + name: "fs_config_dirs_odm_dlkm", + filename: "fs_config_dirs", + src: ":fs_config_dirs_odm_dlkm_gen", + odm_dlkm_specific: true, +} + +genrule { + name: "fs_config_files_odm_dlkm_gen", + defaults: ["fs_config_defaults"], + cmd: fs_config_cmd_files + + "--partition odm_dlkm " + + "$(locations :target_fs_config_gen)", +} + +prebuilt_etc { + name: "fs_config_files_odm_dlkm", + filename: "fs_config_files", + src: ":fs_config_files_odm_dlkm_gen", + odm_dlkm_specific: true, +} + +// oem +genrule { + name: "fs_config_dirs_oem_gen", + defaults: ["fs_config_defaults"], + cmd: fs_config_cmd_dirs + + "--partition oem " + + "$(locations :target_fs_config_gen)", +} + +prebuilt_etc { + name: "fs_config_dirs_oem", + filename: "fs_config_dirs", + src: ":fs_config_dirs_oem_gen", + oem_specific: true, +} + +genrule { + name: "fs_config_files_oem_gen", + defaults: ["fs_config_defaults"], + cmd: fs_config_cmd_files + + "--partition oem " + + "$(locations :target_fs_config_gen)", +} + +prebuilt_etc { + name: "fs_config_files_oem", + filename: "fs_config_files", + src: ":fs_config_files_oem_gen", + oem_specific: true, +} + +// Generate the <p>/etc/fs_config_dirs binary files for each partition. +// Add fs_config_dirs to PRODUCT_PACKAGES in the device make file to enable. +phony { + name: "fs_config_dirs", + required: [ + "fs_config_dirs_system", + "fs_config_dirs_system_ext", + "fs_config_dirs_product", + "fs_config_dirs_nonsystem", + ], +} + +// Generate the <p>/etc/fs_config_files binary files for each partition. +// Add fs_config_files to PRODUCT_PACKAGES in the device make file to enable. 
+phony { + name: "fs_config_files", + required: [ + "fs_config_files_system", + "fs_config_files_system_ext", + "fs_config_files_product", + "fs_config_files_nonsystem", + ], +} + +// Generate the <p>/etc/fs_config_dirs binary files for all enabled partitions +// excluding /system, /system_ext and /product. Add fs_config_dirs_nonsystem to +// PRODUCT_PACKAGES in the device make file to enable. +phony { + name: "fs_config_dirs_nonsystem", + required: [] + + select(soong_config_variable("fs_config", "vendor"), { + true: ["fs_config_dirs_vendor"], + default: [], + }) + + select(soong_config_variable("fs_config", "oem"), { + true: ["fs_config_dirs_oem"], + default: [], + }) + + select(soong_config_variable("fs_config", "odm"), { + true: ["fs_config_dirs_odm"], + default: [], + }) + + select(soong_config_variable("fs_config", "vendor_dlkm"), { + true: ["fs_config_dirs_vendor_dlkm"], + default: [], + }) + + select(soong_config_variable("fs_config", "odm_dlkm"), { + true: ["fs_config_dirs_odm_dlkm"], + default: [], + }) + + select(soong_config_variable("fs_config", "system_dlkm"), { + true: ["fs_config_dirs_system_dlkm"], + default: [], + }), +} + +// Generate the <p>/etc/fs_config_files binary files for all enabled partitions +// excluding /system, /system_ext and /product. Add fs_config_files_nonsystem to +// PRODUCT_PACKAGES in the device make file to enable. +phony { + name: "fs_config_files_nonsystem", + required: [] + + select(soong_config_variable("fs_config", "vendor"), { + true: ["fs_config_files_vendor"], + default: [], + }) + + select(soong_config_variable("fs_config", "oem"), { + true: ["fs_config_files_oem"], + default: [], + }) + + select(soong_config_variable("fs_config", "odm"), { + true: ["fs_config_files_odm"], + default: [], + }) + + select(soong_config_variable("fs_config", "vendor_dlkm"), { + true: ["fs_config_files_vendor_dlkm"], + default: [], + }) + + select(soong_config_variable("fs_config", "odm_dlkm"), { + true: ["fs_config_files_odm_dlkm"], + default: [], + }) + + select(soong_config_variable("fs_config", "system_dlkm"), { + true: ["fs_config_files_system_dlkm"], + default: [], + }), +} diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk deleted file mode 100644 index e4c362630f..0000000000 --- a/tools/fs_config/Android.mk +++ /dev/null @@ -1,328 +0,0 @@ -# Copyright (C) 2008 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -LOCAL_PATH := $(call my-dir) - -# One can override the default android_filesystem_config.h file by using TARGET_FS_CONFIG_GEN. -# Set TARGET_FS_CONFIG_GEN to contain a list of intermediate format files -# for generating the android_filesystem_config.h file. 
-# -# More information can be found in the README - -ifneq ($(wildcard $(TARGET_DEVICE_DIR)/android_filesystem_config.h),) -$(error Using $(TARGET_DEVICE_DIR)/android_filesystem_config.h is deprecated, please use TARGET_FS_CONFIG_GEN instead) -endif - -android_filesystem_config := system/core/libcutils/include/private/android_filesystem_config.h -capability_header := bionic/libc/kernel/uapi/linux/capability.h - -# List of supported vendor, oem, odm, vendor_dlkm, odm_dlkm, and system_dlkm Partitions -fs_config_generate_extra_partition_list := $(strip \ - $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \ - $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \ - $(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm) \ - $(if $(BOARD_USES_VENDOR_DLKMIMAGE)$(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE),vendor_dlkm) \ - $(if $(BOARD_USES_ODM_DLKMIMAGE)$(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE),odm_dlkm) \ - $(if $(BOARD_USES_SYSTEM_DLKMIMAGE)$(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE),system_dlkm) \ -) - -################################## -# Generate the <p>/etc/fs_config_dirs binary files for each partition. -# Add fs_config_dirs to PRODUCT_PACKAGES in the device make file to enable. -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_dirs -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_REQUIRED_MODULES := \ - fs_config_dirs_system \ - fs_config_dirs_system_ext \ - fs_config_dirs_product \ - fs_config_dirs_nonsystem -include $(BUILD_PHONY_PACKAGE) - -################################## -# Generate the <p>/etc/fs_config_files binary files for each partition. -# Add fs_config_files to PRODUCT_PACKAGES in the device make file to enable. -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_files -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_REQUIRED_MODULES := \ - fs_config_files_system \ - fs_config_files_system_ext \ - fs_config_files_product \ - fs_config_files_nonsystem -include $(BUILD_PHONY_PACKAGE) - -################################## -# Generate the <p>/etc/fs_config_dirs binary files for all enabled partitions -# excluding /system, /system_ext and /product. Add fs_config_dirs_nonsystem to -# PRODUCT_PACKAGES in the device make file to enable. -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_dirs_nonsystem -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_dirs_$(t)) -include $(BUILD_PHONY_PACKAGE) - -################################## -# Generate the <p>/etc/fs_config_files binary files for all enabled partitions -# excluding /system, /system_ext and /product. Add fs_config_files_nonsystem to -# PRODUCT_PACKAGES in the device make file to enable. 
-include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_files_nonsystem -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_files_$(t)) -include $(BUILD_PHONY_PACKAGE) - -ifneq ($(filter oem,$(fs_config_generate_extra_partition_list)),) -################################## -# Generate the oem/etc/fs_config_dirs binary file for the target -# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES -# in the device make file to enable -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_dirs_oem -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_MODULE_CLASS := ETC -LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs -LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc -include $(BUILD_SYSTEM)/base_rules.mk -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header) -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header) - @mkdir -p $(dir $@) - $< fsconfig \ - --aid-header $(PRIVATE_ANDROID_FS_HDR) \ - --capability-header $(PRIVATE_ANDROID_CAP_HDR) \ - --partition oem \ - --dirs \ - --out_file $@ \ - $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) - -################################## -# Generate the oem/etc/fs_config_files binary file for the target -# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES -# in the device make file to enable -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_files_oem -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_MODULE_CLASS := ETC -LOCAL_INSTALLED_MODULE_STEM := fs_config_files -LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc -include $(BUILD_SYSTEM)/base_rules.mk -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header) -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header) - @mkdir -p $(dir $@) - $< fsconfig \ - --aid-header $(PRIVATE_ANDROID_FS_HDR) \ - --capability-header $(PRIVATE_ANDROID_CAP_HDR) \ - --partition oem \ - --files \ - --out_file $@ \ - $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) - -endif - -ifneq ($(filter vendor_dlkm,$(fs_config_generate_extra_partition_list)),) -################################## -# Generate the vendor_dlkm/etc/fs_config_dirs binary file for the target -# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES in -# the device make file to enable -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_dirs_vendor_dlkm -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_MODULE_CLASS := ETC -LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs -LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc -include $(BUILD_SYSTEM)/base_rules.mk -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config) -$(LOCAL_BUILT_MODULE): 
PRIVATE_ANDROID_CAP_HDR := $(capability_header) -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header) - @mkdir -p $(dir $@) - $< fsconfig \ - --aid-header $(PRIVATE_ANDROID_FS_HDR) \ - --capability-header $(PRIVATE_ANDROID_CAP_HDR) \ - --partition vendor_dlkm \ - --dirs \ - --out_file $@ \ - $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) - -################################## -# Generate the vendor_dlkm/etc/fs_config_files binary file for the target -# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES in -# the device make file to enable -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_files_vendor_dlkm -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_MODULE_CLASS := ETC -LOCAL_INSTALLED_MODULE_STEM := fs_config_files -LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc -include $(BUILD_SYSTEM)/base_rules.mk -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header) -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header) - @mkdir -p $(dir $@) - $< fsconfig \ - --aid-header $(PRIVATE_ANDROID_FS_HDR) \ - --capability-header $(PRIVATE_ANDROID_CAP_HDR) \ - --partition vendor_dlkm \ - --files \ - --out_file $@ \ - $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) - -endif - -ifneq ($(filter odm_dlkm,$(fs_config_generate_extra_partition_list)),) -################################## -# Generate the odm_dlkm/etc/fs_config_dirs binary file for the target -# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES -# in the device make file to enable -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_dirs_odm_dlkm -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_MODULE_CLASS := ETC -LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs -LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc -include $(BUILD_SYSTEM)/base_rules.mk -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header) -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header) - @mkdir -p $(dir $@) - $< fsconfig \ - --aid-header $(PRIVATE_ANDROID_FS_HDR) \ - --capability-header $(PRIVATE_ANDROID_CAP_HDR) \ - --partition odm_dlkm \ - --dirs \ - --out_file $@ \ - $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) - -################################## -# Generate the odm_dlkm/etc/fs_config_files binary file for the target -# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES -# in the device make file to enable -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_files_odm_dlkm -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_MODULE_CLASS := ETC -LOCAL_INSTALLED_MODULE_STEM := fs_config_files -LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc -include 
$(BUILD_SYSTEM)/base_rules.mk -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header) -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header) - @mkdir -p $(dir $@) - $< fsconfig \ - --aid-header $(PRIVATE_ANDROID_FS_HDR) \ - --capability-header $(PRIVATE_ANDROID_CAP_HDR) \ - --partition odm_dlkm \ - --files \ - --out_file $@ \ - $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) - -endif - -ifneq ($(filter system_dlkm,$(fs_config_generate_extra_partition_list)),) -################################## -# Generate the system_dlkm/etc/fs_config_dirs binary file for the target -# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES -# in the device make file to enable -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_dirs_system_dlkm -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_MODULE_CLASS := ETC -LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs -LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_DLKM)/etc -include $(BUILD_SYSTEM)/base_rules.mk -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header) -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header) - @mkdir -p $(dir $@) - $< fsconfig \ - --aid-header $(PRIVATE_ANDROID_FS_HDR) \ - --capability-header $(PRIVATE_ANDROID_CAP_HDR) \ - --partition system_dlkm \ - --dirs \ - --out_file $@ \ - $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) - -################################## -# Generate the system_dlkm/etc/fs_config_files binary file for the target -# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES -# in the device make file to enable -include $(CLEAR_VARS) - -LOCAL_MODULE := fs_config_files_system_dlkm -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_MODULE_CLASS := ETC -LOCAL_INSTALLED_MODULE_STEM := fs_config_files -LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_DLKM)/etc -include $(BUILD_SYSTEM)/base_rules.mk -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header) -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header) - @mkdir -p $(dir $@) - $< fsconfig \ - --aid-header $(PRIVATE_ANDROID_FS_HDR) \ - --capability-header $(PRIVATE_ANDROID_CAP_HDR) \ - --partition system_dlkm \ - --files \ - --out_file $@ \ - $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) - -endif - -android_filesystem_config := -capability_header := -fs_config_generate_extra_partition_list := diff --git a/tools/ide_query/cc_analyzer/README.md b/tools/ide_query/cc_analyzer/README.md new file mode 100644 index 0000000000..7b822d205f --- /dev/null +++ b/tools/ide_query/cc_analyzer/README.md @@ -0,0 +1,3 @@ +See instructions in +[Android Clang/LLVM-based Tools Readme 
Doc](https://android.googlesource.com/platform/prebuilts/clang-tools/+/main/README.md) +for cutting a new release. diff --git a/tools/ide_query/cc_analyzer/include_scanner.cc b/tools/ide_query/cc_analyzer/include_scanner.cc index 8916a3edd6..1d3f26e737 100644 --- a/tools/ide_query/cc_analyzer/include_scanner.cc +++ b/tools/ide_query/cc_analyzer/include_scanner.cc @@ -94,6 +94,11 @@ class IncludeScanningAction final : public clang::PreprocessOnlyAction { std::unordered_map<std::string, std::string> &abs_paths) : abs_paths_(abs_paths) {} bool BeginSourceFileAction(clang::CompilerInstance &ci) override { + // Be more resilient against all warnings/errors, as we want + // include-scanning to work even on incomplete sources. + ci.getDiagnostics().setEnableAllWarnings(false); + ci.getDiagnostics().setSeverityForAll(clang::diag::Flavor::WarningOrError, + clang::diag::Severity::Ignored); std::string cwd; auto cwd_or_err = ci.getVirtualFileSystem().getCurrentWorkingDirectory(); if (!cwd_or_err || cwd_or_err.get().empty()) return false; @@ -154,6 +159,8 @@ llvm::Expected<std::vector<std::pair<std::string, std::string>>> ScanIncludes( main_file.get()->getBuffer().str()); std::vector<std::string> argv = cmd.CommandLine; + // Disable all warnings to be more robust in analysis. + argv.insert(llvm::find(argv, "--"), {"-Wno-error", "-w"}); fs = OverlayBuiltinHeaders(argv, std::move(fs)); llvm::IntrusiveRefCntPtr<clang::FileManager> files( diff --git a/tools/ide_query/ide_query.go b/tools/ide_query/ide_query.go index 23c7abd2a0..6caa29c1f3 100644 --- a/tools/ide_query/ide_query.go +++ b/tools/ide_query/ide_query.go @@ -116,8 +116,8 @@ func main() { var targets []string javaTargetsByFile := findJavaModules(javaFiles, javaModules) - for _, t := range javaTargetsByFile { - targets = append(targets, t) + for _, target := range javaTargetsByFile { + targets = append(targets, javaModules[target].Jars...) } ccTargets, err := getCCTargets(ctx, env, ccFiles) @@ -293,11 +293,23 @@ func getCCInputs(ctx context.Context, env Env, filePaths []string) ([]*pb.Analys // If a file is covered by multiple modules, the first module is returned. func findJavaModules(paths []string, modules map[string]*javaModule) map[string]string { ret := make(map[string]string) - for name, module := range modules { + // A file may be part of multiple modules. To make the result deterministic, + // check the modules in sorted order. + keys := make([]string, 0, len(modules)) + for name := range modules { + keys = append(keys, name) + } + slices.Sort(keys) + for _, name := range keys { if strings.HasSuffix(name, ".impl") { continue } + module := modules[name] + if len(module.Jars) == 0 { + continue + } + for i, p := range paths { if slices.Contains(module.Srcs, p) { ret[p] = name @@ -309,6 +321,7 @@ func findJavaModules(paths []string, modules map[string]*javaModule) map[string] break } } + return ret } @@ -341,6 +354,8 @@ func getJavaInputs(env Env, modulesByPath map[string]string, modules map[string] Id: moduleName, Language: pb.Language_LANGUAGE_JAVA, SourceFilePaths: m.Srcs, + GeneratedFiles: genFiles(env, m), + DependencyIds: m.Deps, } unitsById[u.Id] = u @@ -355,14 +370,11 @@ func getJavaInputs(env Env, modulesByPath map[string]string, modules map[string] continue } - var paths []string - paths = append(paths, mod.Srcs...) - paths = append(paths, mod.SrcJars...) - paths = append(paths, mod.Jars...) 
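// Editor's sketch (not part of the change): the findJavaModules hunk above now
// collects the module names, sorts them, and only then iterates, because Go map
// iteration order is randomized and the per-file module choice should be stable.
// A minimal, self-contained illustration of that pattern; the module names and
// source paths below are hypothetical, not taken from the build.
package main

import (
	"fmt"
	"slices"
)

func main() {
	// Hypothetical module -> sources map; iterating it directly would give a
	// different order on every run.
	modules := map[string][]string{
		"libB": {"b/B.java"},
		"libA": {"a/A.java"},
	}
	keys := make([]string, 0, len(modules))
	for name := range modules {
		keys = append(keys, name)
	}
	slices.Sort(keys) // deterministic visiting order, as the hunk above does
	for _, name := range keys {
		fmt.Println(name, modules[name])
	}
}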
unitsById[name] = &pb.BuildableUnit{ Id: name, SourceFilePaths: mod.Srcs, - GeneratedFiles: genFiles(env, paths), + GeneratedFiles: genFiles(env, mod), + DependencyIds: mod.Deps, } for _, d := range mod.Deps { @@ -379,8 +391,13 @@ func getJavaInputs(env Env, modulesByPath map[string]string, modules map[string] } // genFiles returns the generated files (paths that start with outDir/) for the -// given paths. Generated files that do not exist are ignored. -func genFiles(env Env, paths []string) []*pb.GeneratedFile { +// given module. Generated files that do not exist are ignored. +func genFiles(env Env, mod *javaModule) []*pb.GeneratedFile { + var paths []string + paths = append(paths, mod.Srcs...) + paths = append(paths, mod.SrcJars...) + paths = append(paths, mod.Jars...) + prefix := env.OutDir + "/" var ret []*pb.GeneratedFile for _, p := range paths { diff --git a/tools/ide_query/ide_query.sh b/tools/ide_query/ide_query.sh index 6f9b0c4b8b..8dfffc1cfa 100755 --- a/tools/ide_query/ide_query.sh +++ b/tools/ide_query/ide_query.sh @@ -19,7 +19,7 @@ source $(pwd)/../../shell_utils.sh require_top # Ensure cogsetup (out/ will be symlink outside the repo) -. ${TOP}/build/make/cogsetup.sh +setup_cog_env_if_needed case $(uname -s) in Linux) diff --git a/tools/ide_query/prober_scripts/cpp/general.cc b/tools/ide_query/prober_scripts/cpp/general.cc index 0f0639be5e..ac882829c0 100644 --- a/tools/ide_query/prober_scripts/cpp/general.cc +++ b/tools/ide_query/prober_scripts/cpp/general.cc @@ -56,7 +56,7 @@ void TestCompletion() { void TestNavigation() { std::vector<int> ints; - // | | ints + // ^ ^ ints // ^ // step diff --git a/tools/ide_query/prober_scripts/ide_query.out b/tools/ide_query/prober_scripts/ide_query.out index cd7ce6d258..be48da1424 100644 --- a/tools/ide_query/prober_scripts/ide_query.out +++ b/tools/ide_query/prober_scripts/ide_query.out @@ -1,7 +1,9 @@ -out–a -8build/make/tools/ide_query/prober_scripts/cpp/general.cc8prebuilts/clang/host/linux-x86/clang-r522817/bin/clang++-mthumb-Os-fomit-frame-pointer-mllvm-enable-shrink-wrap=false-O2-Wall-Wextra-Winit-self-Wpointer-arith-Wunguarded-availability-Werror=date-time-Werror=int-conversion-Werror=pragma-pack&-Werror=pragma-pack-suspicious-include-Werror=sizeof-array-div-Werror=string-plus-int'-Werror=unreachable-code-loop-increment"-Wno-error=deprecated-declarations-Wno-c99-designator-Wno-gnu-folding-constant"-Wno-inconsistent-missing-override-Wno-error=reorder-init-list-Wno-reorder-init-list-Wno-sign-compare-Wno-unused -DANDROID-DNDEBUG-UDEBUG(-D__compiler_offsetof=__builtin_offsetof*-D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__ -faddrsig-fdebug-default-version=5-fcolor-diagnostics-ffp-contract=off-fno-exceptions-fno-strict-aliasing-fmessage-length=0#-fno-relaxed-template-template-args-gsimple-template-names-gz=zstd-no-canonical-prefixes-Wno-error=format"-fdebug-prefix-map=/proc/self/cwd=-ftrivial-auto-var-init=zero-g-ffunction-sections-fdata-sections-fno-short-enums-funwind-tables-fstack-protector-strong-Wa,--noexecstack-D_FORTIFY_SOURCE=2-Wstrict-aliasing=2-Werror=return-type-Werror=non-virtual-dtor-Werror=address-Werror=sequence-point-Werror=format-security-nostdlibinc-fdebug-info-for-profiling-msoft-float-march=armv7-a-mfloat-abi=softfp 
--mfpu=neon/-Ibuild/make/tools/ide_query/prober_scripts/cpp³-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto/build/make/tools/ide_query/prober_scripts/cpp…-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto-D__LIBC_API__=10000-D__LIBM_API__=10000-D__LIBDL_API__=10000-Iexternal/protobuf/srcY-Iprebuilts/clang/host/linux-x86/clang-r522817/android_libc++/platform/arm/include/c++/v1=-Iprebuilts/clang/host/linux-x86/clang-r522817/include/c++/v1 -Ibionic/libc/async_safe/include-Isystem/logging/liblog/include'-Ibionic/libc/system_properties/include<-Isystem/core/property_service/libpropertyinfoparser/include-isystembionic/libc/include-isystembionic/libc/kernel/uapi/asm-arm-isystembionic/libc/kernel/uapi-isystembionic/libc/kernel/android/scsi-isystembionic/libc/kernel/android/uapi-targetarmv7a-linux-androideabi10000-DANDROID_STRICT-fPIE-Werror-Wno-unused-parameter-DGOOGLE_PROTOBUF_NO_RTTI-Wimplicit-fallthrough*-D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS-Wno-gnu-include-next-fvisibility-inlines-hidden-mllvm-enable-shrink-wrap=false-std=gnu++20 -fno-rtti-Isystem/core/include-Isystem/logging/liblog/include-Isystem/media/audio/include-Ihardware/libhardware/include%-Ihardware/libhardware_legacy/include-Ihardware/ril/include-Iframeworks/native/include"-Iframeworks/native/opengl/include-Iframeworks/av/include-Werror=bool-operation -Werror=format-insufficient-args%-Werror=implicit-int-float-conversion-Werror=int-in-bool-context-Werror=int-to-pointer-cast-Werror=pointer-to-int-cast-Werror=xor-used-as-pow-Wno-void-pointer-to-enum-cast-Wno-void-pointer-to-int-cast-Wno-pointer-to-int-cast-Werror=fortify-source-Wno-unused-variable-Wno-missing-field-initializers-Wno-packed-non-pod-Werror=address-of-temporary+-Werror=incompatible-function-pointer-types-Werror=null-dereference-Werror=return-type"-Wno-tautological-constant-compare$-Wno-tautological-type-limit-compare"-Wno-implicit-int-float-conversion!-Wno-tautological-overlap-compare-Wno-deprecated-copy-Wno-range-loop-construct"-Wno-zero-as-null-pointer-constant)-Wno-deprecated-anon-enum-enum-conversion$-Wno-deprecated-enum-enum-conversion-Wno-pessimizing-move-Wno-non-c-typedef-for-linkage-Wno-align-mismatch"-Wno-error=unused-but-set-variable#-Wno-error=unused-but-set-parameter-Wno-error=deprecated-builtins-Wno-error=deprecated2-Wno-error=single-bit-bitfield-constant-conversion$-Wno-error=enum-constexpr-conversion-Wno-error=invalid-offsetof&-Wno-deprecated-dynamic-exception-spec8build/make/tools/ide_query/prober_scripts/cpp/general.cc"Õ? 
+out2x +8build/make/tools/ide_query/prober_scripts/cpp/general.cc8build/make/tools/ide_query/prober_scripts/cpp/general.cc:—" +8build/make/tools/ide_query/prober_scripts/cpp/general.cc8build/make/tools/ide_query/prober_scripts/cpp/general.cc"8prebuilts/clang/host/linux-x86/clang-r530567/bin/clang++"-nostdlibinc"-mthumb"-Os"-fomit-frame-pointer"-mllvm"-enable-shrink-wrap=false"-O2"-Wall"-Wextra"-Winit-self"-Wpointer-arith"-Wunguarded-availability"-Werror=date-time"-Werror=int-conversion"-Werror=pragma-pack"&-Werror=pragma-pack-suspicious-include"-Werror=sizeof-array-div"-Werror=string-plus-int"'-Werror=unreachable-code-loop-increment""-Wno-error=deprecated-declarations"-Wno-c23-extensions"-Wno-c99-designator"-Wno-gnu-folding-constant""-Wno-inconsistent-missing-override"-Wno-error=reorder-init-list"-Wno-reorder-init-list"-Wno-sign-compare"-Wno-unused" -DANDROID"-DNDEBUG"-UDEBUG"(-D__compiler_offsetof=__builtin_offsetof"*-D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__" -faddrsig"-fdebug-default-version=5"-fcolor-diagnostics"-ffp-contract=off"-fno-exceptions"-fno-strict-aliasing"-fmessage-length=0"-gsimple-template-names"-gz=zstd"-no-canonical-prefixes""-fdebug-prefix-map=/proc/self/cwd="-ftrivial-auto-var-init=zero"-g"-ffunction-sections"-fdata-sections"-fno-short-enums"-funwind-tables"-fstack-protector-strong"-Wa,--noexecstack"-D_FORTIFY_SOURCE=2"-Wstrict-aliasing=2"-Werror=return-type"-Werror=non-virtual-dtor"-Werror=address"-Werror=sequence-point"-Werror=format-security"-msoft-float"-march=armv7-a"-mfloat-abi=softfp" +-mfpu=neon"/-Ibuild/make/tools/ide_query/prober_scripts/cpp"³-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto/build/make/tools/ide_query/prober_scripts/cpp"…-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto"-D__LIBC_API__=10000"-D__LIBM_API__=10000"-D__LIBDL_API__=10000"-Iexternal/protobuf/src"Y-Iprebuilts/clang/host/linux-x86/clang-r530567/android_libc++/platform/arm/include/c++/v1"=-Iprebuilts/clang/host/linux-x86/clang-r530567/include/c++/v1" -Ibionic/libc/async_safe/include"-Isystem/logging/liblog/include"'-Ibionic/libc/system_properties/include"<-Isystem/core/property_service/libpropertyinfoparser/include"-isystem"bionic/libc/include"-isystem"bionic/libc/kernel/uapi/asm-arm"-isystem"bionic/libc/kernel/uapi"-isystem"bionic/libc/kernel/android/scsi"-isystem"bionic/libc/kernel/android/uapi"-target"armv7a-linux-androideabi10000"-DANDROID_STRICT"-fPIE"-Werror"-Wno-unused-parameter"-DGOOGLE_PROTOBUF_NO_RTTI"-Wimplicit-fallthrough"*-D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS"-Wno-gnu-include-next"-fvisibility-inlines-hidden"-mllvm"-enable-shrink-wrap=false"-std=gnu++20" -fno-rtti"-Isystem/core/include"-Isystem/logging/liblog/include"-Isystem/media/audio/include"-Ihardware/libhardware/include"%-Ihardware/libhardware_legacy/include"-Ihardware/ril/include"-Iframeworks/native/include""-Iframeworks/native/opengl/include"-Iframeworks/av/include"-Werror=bool-operation" 
-Werror=format-insufficient-args"%-Werror=implicit-int-float-conversion"-Werror=int-in-bool-context"-Werror=int-to-pointer-cast"-Werror=pointer-to-int-cast"-Werror=xor-used-as-pow"-Wno-void-pointer-to-enum-cast"-Wno-void-pointer-to-int-cast"-Wno-pointer-to-int-cast"-Werror=fortify-source"-Wno-unused-variable"-Wno-missing-field-initializers"-Wno-packed-non-pod"-Werror=address-of-temporary"+-Werror=incompatible-function-pointer-types"-Werror=null-dereference"-Werror=return-type""-Wno-tautological-constant-compare"$-Wno-tautological-type-limit-compare""-Wno-implicit-int-float-conversion"!-Wno-tautological-overlap-compare"-Wno-deprecated-copy"-Wno-range-loop-construct""-Wno-zero-as-null-pointer-constant")-Wno-deprecated-anon-enum-enum-conversion"$-Wno-deprecated-enum-enum-conversion"-Wno-error=pessimizing-move"-Wno-non-c-typedef-for-linkage"-Wno-align-mismatch""-Wno-error=unused-but-set-variable"#-Wno-error=unused-but-set-parameter"-Wno-error=deprecated-builtins"-Wno-error=deprecated"&-Wno-deprecated-dynamic-exception-spec"$-Wno-error=enum-constexpr-conversion"-Wno-error=invalid-offsetof")-Wno-error=thread-safety-reference-return"-Wno-vla-cxx-extension"8build/make/tools/ide_query/prober_scripts/cpp/general.cc2Egenfiles_for_build/make/tools/ide_query/prober_scripts/cpp/general.cc:Ÿ@ +Egenfiles_for_build/make/tools/ide_query/prober_scripts/cpp/general.cc*Õ? ¶soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto/build/make/tools/ide_query/prober_scripts/cpp/foo.pb.h™>// Generated by the protocol buffer compiler. DO NOT EDIT! // source: build/make/tools/ide_query/prober_scripts/cpp/foo.proto diff --git a/tools/ide_query/prober_scripts/jvm/Android.bp b/tools/ide_query/prober_scripts/jvm/Android.bp new file mode 100644 index 0000000000..84d00b52fd --- /dev/null +++ b/tools/ide_query/prober_scripts/jvm/Android.bp @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package { + default_applicable_licenses: ["Android-Apache-2.0"], +} + +java_library { + name: "ide_query_proberscript_jvm", + srcs: [ + "Foo.java", + "Bar.java", + "other/Other.java", + ], +} diff --git a/tools/ide_query/prober_scripts/jvm/Bar.java b/tools/ide_query/prober_scripts/jvm/Bar.java new file mode 100644 index 0000000000..8d51576901 --- /dev/null +++ b/tools/ide_query/prober_scripts/jvm/Bar.java @@ -0,0 +1,32 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package jvm; + +/** Bar class. The class for testing code assist within the same build module. */ +class Bar<K extends Number, V extends Number> { + Bar() { + foo(new Foo()); + } + + void foo(Foo f) {} + + void foo(Object o) {} + + void bar(Foo f) {} + + void baz(Object o) {} +}
\ No newline at end of file diff --git a/tools/ide_query/prober_scripts/jvm/Foo.java b/tools/ide_query/prober_scripts/jvm/Foo.java index a043f72e32..2c8ceb62db 100644 --- a/tools/ide_query/prober_scripts/jvm/Foo.java +++ b/tools/ide_query/prober_scripts/jvm/Foo.java @@ -16,22 +16,109 @@ package jvm; -import java.util.ArrayList; -import java.util.HashSet; +import jvm.other.Other; /** Foo class. */ public final class Foo { +// ^ ^ foo_def + + void testParameterInfo() { + // Test signature help for type parameters. + + Bar<Integer, Double> b = new Bar<>(); + // ^ ctor + // ^ decl_1 + // ^ decl_2 + System.out.println(b); + + // step at ctor + // workspace.waitForReady() + // paraminfo.trigger() + // assert paraminfo.items.filter( + // label="K extends Number, V extends Number", + // selection="K extends Number", + // ) + + // step at decl_1 + // workspace.waitForReady() + // paraminfo.trigger() + // assert paraminfo.items.filter( + // label="K extends Number, V extends Number", + // selection="K extends Number", + // ) + + // step at decl_2 + // workspace.waitForReady() + // paraminfo.trigger() + // assert paraminfo.items.filter( + // label="K extends Number, V extends Number", + // selection="V extends Number", + // ) + + // Test signature help for constructor parameters. + + Other other = new Other(123, "foo"); + // ^ param_1 + // ^ param_2 + System.out.println(other); + + // step at param_1 + // workspace.waitForReady() + // paraminfo.trigger() + // assert paraminfo.items.filter( + // label="\\(int first, String second\\)", + // selection="int first", + // ) + + // step at param_2 + // workspace.waitForReady() + // paraminfo.trigger() + // assert paraminfo.items.empty() + } void testCompletion() { - ArrayList<Integer> list = new ArrayList<>(); - System.out.println(list); + Bar<Integer, Double> b = new Bar<>(); + System.out.println(b); // ^ // step - // ; Test completion on the standard types. - // type("list.") + // ; Test completion on types from the same package. + // workspace.waitForReady() + // type("b.") // completion.trigger() - // assert completion.items.filter(label="add.*") + // assert completion.items.filter(label="foo.*") + // delline() + + Other other = new Other(1, "foo"); + System.out.println(other); + + // ^ + + // step + // ; Test completion on types from a different package. + // workspace.waitForReady() + // type("other.") + // completion.trigger() + // apply(completion.items.filter(label="other.*").first()) + // type(".") + // completion.trigger() + // apply(completion.items.filter(label="other.*").first()) + // delline() + } + + void testDiagnostics() { + + // ^ + + // step + // ; Test diagnostics about wrong type argument bounds. + // workspace.waitForReady() + // type("Bar<String, Double> b;") + // assert diagnostics.items.filter( + // message="type argument .* is not within bounds .*", + // code="compiler.err.not.within.bounds", + // ) + // delline() } } diff --git a/tools/ide_query/prober_scripts/jvm/ide_query.out b/tools/ide_query/prober_scripts/jvm/ide_query.out new file mode 100644 index 0000000000..af9fb86e83 --- /dev/null +++ b/tools/ide_query/prober_scripts/jvm/ide_query.out @@ -0,0 +1,4 @@ + +out2X +6build/make/tools/ide_query/prober_scripts/jvm/Foo.javaide_query_proberscript_jvm:Î +ide_query_proberscript_jvm6build/make/tools/ide_query/prober_scripts/jvm/Foo.java6build/make/tools/ide_query/prober_scripts/jvm/Bar.java>build/make/tools/ide_query/prober_scripts/jvm/other/Other.java
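The jvm/ide_query.out golden file added above is a binary proto, so it is hard to read directly in the diff. As a rough, hedged illustration only: the struct below is a hand-written stand-in (not the real generated pb.BuildableUnit API), populated with the unit id and source paths that are visible in the blob and in the jvm Android.bp.

package main

import "fmt"

// BuildableUnit is a local stand-in used purely for illustration; the real tool
// emits the generated proto message instead.
type BuildableUnit struct {
	ID              string
	Language        string
	SourceFilePaths []string
}

func main() {
	// Values taken from the ide_query.out content and Android.bp srcs shown above.
	unit := BuildableUnit{
		ID:       "ide_query_proberscript_jvm",
		Language: "LANGUAGE_JAVA",
		SourceFilePaths: []string{
			"build/make/tools/ide_query/prober_scripts/jvm/Foo.java",
			"build/make/tools/ide_query/prober_scripts/jvm/Bar.java",
			"build/make/tools/ide_query/prober_scripts/jvm/other/Other.java",
		},
	}
	fmt.Printf("%+v\n", unit)
}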
\ No newline at end of file diff --git a/tools/ide_query/prober_scripts/jvm/other/Other.java b/tools/ide_query/prober_scripts/jvm/other/Other.java new file mode 100644 index 0000000000..822662a66e --- /dev/null +++ b/tools/ide_query/prober_scripts/jvm/other/Other.java @@ -0,0 +1,26 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package jvm.other; + +/** Other class */ +public class Other { + public Other(int first, String second) {} + + public Other other() { + return new Other(0, ""); + } +} diff --git a/tools/ide_query/prober_scripts/regen.sh b/tools/ide_query/prober_scripts/regen.sh index 2edfe53ec3..04a02640d7 100755 --- a/tools/ide_query/prober_scripts/regen.sh +++ b/tools/ide_query/prober_scripts/regen.sh @@ -21,13 +21,8 @@ # ide_query.sh. The prober doesn't check-out the full source code, so it # can't run ide_query.sh itself. -cd $(dirname $BASH_SOURCE) -source $(pwd)/../../../shell_utils.sh -require_top - files_to_build=( build/make/tools/ide_query/prober_scripts/cpp/general.cc ) -cd ${TOP} build/make/tools/ide_query/ide_query.sh --lunch_target=aosp_arm-trunk_staging-eng ${files_to_build[@]} > build/make/tools/ide_query/prober_scripts/ide_query.out diff --git a/tools/java-event-log-tags.py b/tools/java-event-log-tags.py index bbd65fa4bf..e3dc07e4ab 100755 --- a/tools/java-event-log-tags.py +++ b/tools/java-event-log-tags.py @@ -15,16 +15,12 @@ # limitations under the License. """ -Usage: java-event-log-tags.py [-o output_file] <input_file> <merged_tags_file> - Generate a java class containing constants for each of the event log tags in the given input file. - --h to display this usage message and exit. """ from io import StringIO -import getopt +import argparse import os import os.path import re @@ -32,57 +28,14 @@ import sys import event_log_tags -output_file = None - -try: - opts, args = getopt.getopt(sys.argv[1:], "ho:") -except getopt.GetoptError as err: - print(str(err)) - print(__doc__) - sys.exit(2) - -for o, a in opts: - if o == "-h": - print(__doc__) - sys.exit(2) - elif o == "-o": - output_file = a - else: - print("unhandled option %s" % (o,), file=sys.stderr) - sys.exit(1) - -if len(args) != 1 and len(args) != 2: - print("need one or two input files, not %d" % (len(args),)) - print(__doc__) - sys.exit(1) +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument('-o', dest='output_file') +parser.add_argument('file') +args = parser.parse_args() -fn = args[0] +fn = args.file tagfile = event_log_tags.TagFile(fn) -if len(args) > 1: - # Load the merged tag file (which should have numbers assigned for all - # tags. Use the numbers from the merged file to fill in any missing - # numbers from the input file. 
- merged_fn = args[1] - merged_tagfile = event_log_tags.TagFile(merged_fn) - merged_by_name = dict([(t.tagname, t) for t in merged_tagfile.tags]) - for t in tagfile.tags: - if t.tagnum is None: - if t.tagname in merged_by_name: - t.tagnum = merged_by_name[t.tagname].tagnum - else: - # We're building something that's not being included in the - # product, so its tags don't appear in the merged file. Assign - # them all an arbitrary number so we can emit the java and - # compile the (unused) package. - t.tagnum = 999999 -else: - # Not using the merged tag file, so all tags must have manually assigned - # numbers - for t in tagfile.tags: - if t.tagnum is None: - tagfilef.AddError("tag \"%s\" has no number" % (tagname,), tag.linenum) - if "java_package" not in tagfile.options: tagfile.AddError("java_package option not specified", linenum=0) @@ -141,11 +94,11 @@ javaTypes = ["ERROR", "int", "long", "String", "Object[]", "float"] for t in tagfile.tags: methodName = javaName("write_" + t.tagname) if t.description: - args = [arg.strip("() ").split("|") for arg in t.description.split(",")] + fn_args = [arg.strip("() ").split("|") for arg in t.description.split(",")] else: - args = [] - argTypesNames = ", ".join([javaTypes[int(arg[1])] + " " + javaName(arg[0]) for arg in args]) - argNames = "".join([", " + javaName(arg[0]) for arg in args]) + fn_args = [] + argTypesNames = ", ".join([javaTypes[int(arg[1])] + " " + javaName(arg[0]) for arg in fn_args]) + argNames = "".join([", " + javaName(arg[0]) for arg in fn_args]) buffer.write("\n public static void %s(%s) {" % (methodName, argTypesNames)) buffer.write("\n android.util.EventLog.writeEvent(%s%s);" % (t.tagname.upper(), argNames)) buffer.write("\n }\n") @@ -153,8 +106,8 @@ for t in tagfile.tags: buffer.write("}\n"); -output_dir = os.path.dirname(output_file) +output_dir = os.path.dirname(args.output_file) if not os.path.exists(output_dir): os.makedirs(output_dir) -event_log_tags.WriteOutput(output_file, buffer) +event_log_tags.WriteOutput(args.output_file, buffer) diff --git a/tools/merge-event-log-tags.py b/tools/merge-event-log-tags.py index 292604c469..5730c11c43 100755 --- a/tools/merge-event-log-tags.py +++ b/tools/merge-event-log-tags.py @@ -15,22 +15,13 @@ # limitations under the License. """ -Usage: merge-event-log-tags.py [-o output_file] [input_files...] - Merge together zero or more event-logs-tags files to produce a single output file, stripped of comments. Checks that no tag numbers conflict and fails if they do. - --h to display this usage message and exit. """ from io import StringIO -import getopt -try: - import hashlib -except ImportError: - import md5 as hashlib -import struct +import argparse import sys import event_log_tags @@ -38,32 +29,10 @@ import event_log_tags errors = [] warnings = [] -output_file = None -pre_merged_file = None - -# Tags with a tag number of ? are assigned a tag in the range -# [ASSIGN_START, ASSIGN_LIMIT). 
-ASSIGN_START = 900000 -ASSIGN_LIMIT = 1000000 - -try: - opts, args = getopt.getopt(sys.argv[1:], "ho:m:") -except getopt.GetoptError as err: - print(str(err)) - print(__doc__) - sys.exit(2) - -for o, a in opts: - if o == "-h": - print(__doc__) - sys.exit(2) - elif o == "-o": - output_file = a - elif o == "-m": - pre_merged_file = a - else: - print("unhandled option %s" % (o,), file=sys.stderr) - sys.exit(1) +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument('-o', dest='output_file') +parser.add_argument('files', nargs='*') +args = parser.parse_args() # Restrictions on tags: # @@ -77,12 +46,7 @@ for o, a in opts: by_tagname = {} by_tagnum = {} -pre_merged_tags = {} -if pre_merged_file: - for t in event_log_tags.TagFile(pre_merged_file).tags: - pre_merged_tags[t.tagname] = t - -for fn in args: +for fn in args.files: tagfile = event_log_tags.TagFile(fn) for t in tagfile.tags: @@ -93,12 +57,6 @@ for fn in args: if t.tagname in by_tagname: orig = by_tagname[t.tagname] - # Allow an explicit tag number to define an implicit tag number - if orig.tagnum is None: - orig.tagnum = t.tagnum - elif t.tagnum is None: - t.tagnum = orig.tagnum - if (t.tagnum == orig.tagnum and t.description == orig.description): # if the name and description are identical, issue a warning @@ -114,7 +72,7 @@ for fn in args: linenum=t.linenum) continue - if t.tagnum is not None and t.tagnum in by_tagnum: + if t.tagnum in by_tagnum: orig = by_tagnum[t.tagnum] if t.tagname != orig.tagname: @@ -125,8 +83,7 @@ for fn in args: continue by_tagname[t.tagname] = t - if t.tagnum is not None: - by_tagnum[t.tagnum] = t + by_tagnum[t.tagnum] = t errors.extend(tagfile.errors) warnings.extend(tagfile.warnings) @@ -140,38 +97,6 @@ if warnings: for fn, ln, msg in warnings: print("%s:%d: warning: %s" % (fn, ln, msg), file=sys.stderr) -# Python's hash function (a) isn't great and (b) varies between -# versions of python. Using md5 is overkill here but is the same from -# platform to platform and speed shouldn't matter in practice. -def hashname(str): - d = hashlib.md5(str).digest()[:4] - return struct.unpack("!I", d)[0] - -# Assign a tag number to all the entries that say they want one -# assigned. We do this based on a hash of the tag name so that the -# numbers should stay relatively stable as tags are added. - -# If we were provided pre-merged tags (w/ the -m option), then don't -# ever try to allocate one, just fail if we don't have a number - -for name, t in sorted(by_tagname.items()): - if t.tagnum is None: - if pre_merged_tags: - try: - t.tagnum = pre_merged_tags[t.tagname] - except KeyError: - print("Error: Tag number not defined for tag `%s'. Have you done a full build?" % t.tagname, - file=sys.stderr) - sys.exit(1) - else: - while True: - x = (hashname(name) % (ASSIGN_LIMIT - ASSIGN_START - 1)) + ASSIGN_START - if x not in by_tagnum: - t.tagnum = x - by_tagnum[x] = t - break - name = "_" + name - # by_tagnum should be complete now; we've assigned numbers to all tags. 
buffer = StringIO() @@ -181,4 +106,4 @@ for n, t in sorted(by_tagnum.items()): else: buffer.write("%d %s\n" % (t.tagnum, t.tagname)) -event_log_tags.WriteOutput(output_file, buffer) +event_log_tags.WriteOutput(args.output_file, buffer) diff --git a/tools/metadata/Android.bp b/tools/metadata/Android.bp deleted file mode 100644 index 77d106d705..0000000000 --- a/tools/metadata/Android.bp +++ /dev/null @@ -1,16 +0,0 @@ -package { - default_applicable_licenses: ["Android-Apache-2.0"], -} - -blueprint_go_binary { - name: "metadata", - deps: [ - "soong-testing-test_spec_proto", - "soong-testing-code_metadata_proto", - "soong-testing-code_metadata_internal_proto", - "golang-protobuf-proto", - ], - srcs: [ - "generator.go", - ] -}
\ No newline at end of file diff --git a/tools/metadata/OWNERS b/tools/metadata/OWNERS deleted file mode 100644 index 03bcdf1c40..0000000000 --- a/tools/metadata/OWNERS +++ /dev/null @@ -1,4 +0,0 @@ -dariofreni@google.com -joeo@google.com -ronish@google.com -caditya@google.com diff --git a/tools/metadata/generator.go b/tools/metadata/generator.go deleted file mode 100644 index b7668be44f..0000000000 --- a/tools/metadata/generator.go +++ /dev/null @@ -1,328 +0,0 @@ -package main - -import ( - "flag" - "fmt" - "io" - "log" - "os" - "sort" - "strings" - "sync" - - "android/soong/testing/code_metadata_internal_proto" - "android/soong/testing/code_metadata_proto" - "android/soong/testing/test_spec_proto" - "google.golang.org/protobuf/proto" -) - -type keyToLocksMap struct { - locks sync.Map -} - -func (kl *keyToLocksMap) GetLockForKey(key string) *sync.Mutex { - mutex, _ := kl.locks.LoadOrStore(key, &sync.Mutex{}) - return mutex.(*sync.Mutex) -} - -// Define a struct to hold the combination of team ID and multi-ownership flag for validation -type sourceFileAttributes struct { - TeamID string - MultiOwnership bool - Path string -} - -func getSortedKeys(syncMap *sync.Map) []string { - var allKeys []string - syncMap.Range( - func(key, _ interface{}) bool { - allKeys = append(allKeys, key.(string)) - return true - }, - ) - - sort.Strings(allKeys) - return allKeys -} - -// writeProtoToFile marshals a protobuf message and writes it to a file -func writeProtoToFile(outputFile string, message proto.Message) { - data, err := proto.Marshal(message) - if err != nil { - log.Fatal(err) - } - file, err := os.Create(outputFile) - if err != nil { - log.Fatal(err) - } - defer file.Close() - - _, err = file.Write(data) - if err != nil { - log.Fatal(err) - } -} - -func readFileToString(filePath string) string { - file, err := os.Open(filePath) - if err != nil { - log.Fatal(err) - } - defer file.Close() - - data, err := io.ReadAll(file) - if err != nil { - log.Fatal(err) - } - return string(data) -} - -func writeEmptyOutputProto(outputFile string, metadataRule string) { - file, err := os.Create(outputFile) - if err != nil { - log.Fatal(err) - } - var message proto.Message - if metadataRule == "test_spec" { - message = &test_spec_proto.TestSpec{} - } else if metadataRule == "code_metadata" { - message = &code_metadata_proto.CodeMetadata{} - } - data, err := proto.Marshal(message) - if err != nil { - log.Fatal(err) - } - defer file.Close() - - _, err = file.Write([]byte(data)) - if err != nil { - log.Fatal(err) - } -} - -func processTestSpecProtobuf( - filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap, - errCh chan error, wg *sync.WaitGroup, -) { - defer wg.Done() - - fileContent := strings.TrimRight(readFileToString(filePath), "\n") - testData := test_spec_proto.TestSpec{} - err := proto.Unmarshal([]byte(fileContent), &testData) - if err != nil { - errCh <- err - return - } - - ownershipMetadata := testData.GetOwnershipMetadataList() - for _, metadata := range ownershipMetadata { - key := metadata.GetTargetName() - lock := keyLocks.GetLockForKey(key) - lock.Lock() - - value, loaded := ownershipMetadataMap.LoadOrStore( - key, []*test_spec_proto.TestSpec_OwnershipMetadata{metadata}, - ) - if loaded { - existingMetadata := value.([]*test_spec_proto.TestSpec_OwnershipMetadata) - isDuplicate := false - for _, existing := range existingMetadata { - if metadata.GetTrendyTeamId() != existing.GetTrendyTeamId() { - errCh <- fmt.Errorf( - "Conflicting trendy team IDs found for %s at:\n%s with teamId"+ - 
": %s,\n%s with teamId: %s", - key, - metadata.GetPath(), metadata.GetTrendyTeamId(), existing.GetPath(), - existing.GetTrendyTeamId(), - ) - - lock.Unlock() - return - } - if metadata.GetTrendyTeamId() == existing.GetTrendyTeamId() && metadata.GetPath() == existing.GetPath() { - isDuplicate = true - break - } - } - if !isDuplicate { - existingMetadata = append(existingMetadata, metadata) - ownershipMetadataMap.Store(key, existingMetadata) - } - } - - lock.Unlock() - } -} - -// processCodeMetadataProtobuf processes CodeMetadata protobuf files -func processCodeMetadataProtobuf( - filePath string, ownershipMetadataMap *sync.Map, sourceFileMetadataMap *sync.Map, keyLocks *keyToLocksMap, - errCh chan error, wg *sync.WaitGroup, -) { - defer wg.Done() - - fileContent := strings.TrimRight(readFileToString(filePath), "\n") - internalCodeData := code_metadata_internal_proto.CodeMetadataInternal{} - err := proto.Unmarshal([]byte(fileContent), &internalCodeData) - if err != nil { - errCh <- err - return - } - - // Process each TargetOwnership entry - for _, internalMetadata := range internalCodeData.GetTargetOwnershipList() { - key := internalMetadata.GetTargetName() - lock := keyLocks.GetLockForKey(key) - lock.Lock() - - for _, srcFile := range internalMetadata.GetSourceFiles() { - srcFileKey := srcFile - srcFileLock := keyLocks.GetLockForKey(srcFileKey) - srcFileLock.Lock() - attributes := sourceFileAttributes{ - TeamID: internalMetadata.GetTrendyTeamId(), - MultiOwnership: internalMetadata.GetMultiOwnership(), - Path: internalMetadata.GetPath(), - } - - existingAttributes, exists := sourceFileMetadataMap.Load(srcFileKey) - if exists { - existing := existingAttributes.(sourceFileAttributes) - if attributes.TeamID != existing.TeamID && (!attributes.MultiOwnership || !existing.MultiOwnership) { - errCh <- fmt.Errorf( - "Conflict found for source file %s covered at %s with team ID: %s. Existing team ID: %s and path: %s."+ - " If multi-ownership is required, multiOwnership should be set to true in all test_spec modules using this target. 
"+ - "Multiple-ownership in general is discouraged though as it make infrastructure around android relying on this information pick up a random value when it needs only one.", - srcFile, internalMetadata.GetPath(), attributes.TeamID, existing.TeamID, existing.Path, - ) - srcFileLock.Unlock() - lock.Unlock() - return - } - } else { - // Store the metadata if no conflict - sourceFileMetadataMap.Store(srcFileKey, attributes) - } - srcFileLock.Unlock() - } - - value, loaded := ownershipMetadataMap.LoadOrStore( - key, []*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership{internalMetadata}, - ) - if loaded { - existingMetadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership) - isDuplicate := false - for _, existing := range existingMetadata { - if internalMetadata.GetTrendyTeamId() == existing.GetTrendyTeamId() && internalMetadata.GetPath() == existing.GetPath() { - isDuplicate = true - break - } - } - if !isDuplicate { - existingMetadata = append(existingMetadata, internalMetadata) - ownershipMetadataMap.Store(key, existingMetadata) - } - } - - lock.Unlock() - } -} - -func main() { - inputFile := flag.String("inputFile", "", "Input file path") - outputFile := flag.String("outputFile", "", "Output file path") - rule := flag.String( - "rule", "", "Metadata rule (Hint: test_spec or code_metadata)", - ) - flag.Parse() - - if *inputFile == "" || *outputFile == "" || *rule == "" { - fmt.Println("Usage: metadata -rule <rule> -inputFile <input file path> -outputFile <output file path>") - os.Exit(1) - } - - inputFileData := strings.TrimRight(readFileToString(*inputFile), "\n") - filePaths := strings.Split(inputFileData, " ") - if len(filePaths) == 1 && filePaths[0] == "" { - writeEmptyOutputProto(*outputFile, *rule) - return - } - ownershipMetadataMap := &sync.Map{} - keyLocks := &keyToLocksMap{} - errCh := make(chan error, len(filePaths)) - var wg sync.WaitGroup - - switch *rule { - case "test_spec": - for _, filePath := range filePaths { - wg.Add(1) - go processTestSpecProtobuf( - filePath, ownershipMetadataMap, keyLocks, errCh, &wg, - ) - } - - wg.Wait() - close(errCh) - - for err := range errCh { - log.Fatal(err) - } - - allKeys := getSortedKeys(ownershipMetadataMap) - var allMetadata []*test_spec_proto.TestSpec_OwnershipMetadata - - for _, key := range allKeys { - value, _ := ownershipMetadataMap.Load(key) - metadataList := value.([]*test_spec_proto.TestSpec_OwnershipMetadata) - allMetadata = append(allMetadata, metadataList...) 
- } - - testSpec := &test_spec_proto.TestSpec{ - OwnershipMetadataList: allMetadata, - } - writeProtoToFile(*outputFile, testSpec) - break - case "code_metadata": - sourceFileMetadataMap := &sync.Map{} - for _, filePath := range filePaths { - wg.Add(1) - go processCodeMetadataProtobuf( - filePath, ownershipMetadataMap, sourceFileMetadataMap, keyLocks, errCh, &wg, - ) - } - - wg.Wait() - close(errCh) - - for err := range errCh { - log.Fatal(err) - } - - sortedKeys := getSortedKeys(ownershipMetadataMap) - allMetadata := make([]*code_metadata_proto.CodeMetadata_TargetOwnership, 0) - for _, key := range sortedKeys { - value, _ := ownershipMetadataMap.Load(key) - metadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership) - for _, m := range metadata { - targetName := m.GetTargetName() - path := m.GetPath() - trendyTeamId := m.GetTrendyTeamId() - - allMetadata = append(allMetadata, &code_metadata_proto.CodeMetadata_TargetOwnership{ - TargetName: &targetName, - Path: &path, - TrendyTeamId: &trendyTeamId, - SourceFiles: m.GetSourceFiles(), - }) - } - } - - finalMetadata := &code_metadata_proto.CodeMetadata{ - TargetOwnershipList: allMetadata, - } - writeProtoToFile(*outputFile, finalMetadata) - break - default: - log.Fatalf("No specific processing implemented for rule '%s'.\n", *rule) - } -} diff --git a/tools/metadata/go.mod b/tools/metadata/go.mod deleted file mode 100644 index e9d04b16f6..0000000000 --- a/tools/metadata/go.mod +++ /dev/null @@ -1,7 +0,0 @@ -module android/soong/tools/metadata - -require google.golang.org/protobuf v0.0.0 - -replace google.golang.org/protobuf v0.0.0 => ../../../external/golang-protobuf - -go 1.18
\ No newline at end of file diff --git a/tools/metadata/go.work b/tools/metadata/go.work deleted file mode 100644 index f2cdf8ec98..0000000000 --- a/tools/metadata/go.work +++ /dev/null @@ -1,11 +0,0 @@ -go 1.18 - -use ( - . - ../../../../external/golang-protobuf - ../../../soong/testing/test_spec_proto - ../../../soong/testing/code_metadata_proto - ../../../soong/testing/code_metadata_proto_internal -) - -replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf diff --git a/tools/metadata/testdata/emptyInputFile.txt b/tools/metadata/testdata/emptyInputFile.txt deleted file mode 100644 index 8b13789179..0000000000 --- a/tools/metadata/testdata/emptyInputFile.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/tools/metadata/testdata/expectedCodeMetadataOutput.txt b/tools/metadata/testdata/expectedCodeMetadataOutput.txt deleted file mode 100644 index 755cf40a30..0000000000 --- a/tools/metadata/testdata/expectedCodeMetadataOutput.txt +++ /dev/null @@ -1,7 +0,0 @@ - - -bar -Android.bp12346"b.java - -foo -Android.bp12345"a.java
\ No newline at end of file diff --git a/tools/metadata/testdata/expectedOutputFile.txt b/tools/metadata/testdata/expectedOutputFile.txt deleted file mode 100644 index b0d382f279..0000000000 --- a/tools/metadata/testdata/expectedOutputFile.txt +++ /dev/null @@ -1,22 +0,0 @@ - -. -java-test-module-name-one -Android.bp12345 -. -java-test-module-name-six -Android.bp12346 -. -java-test-module-name-six -Aqwerty.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-two -Android.bp12345 -. -java-test-module-name-two -Asdfghj.bp12345 -. -java-test-module-name-two -Azxcvbn.bp12345
\ No newline at end of file diff --git a/tools/metadata/testdata/file1.txt b/tools/metadata/testdata/file1.txt deleted file mode 100644 index 81beed00ab..0000000000 --- a/tools/metadata/testdata/file1.txt +++ /dev/null @@ -1,13 +0,0 @@ - -. -java-test-module-name-one -Android.bp12345 -. -java-test-module-name-two -Android.bp12345 -. -java-test-module-name-two -Asdfghj.bp12345 -. -java-test-module-name-two -Azxcvbn.bp12345 diff --git a/tools/metadata/testdata/file2.txt b/tools/metadata/testdata/file2.txt deleted file mode 100644 index 32a753fef5..0000000000 --- a/tools/metadata/testdata/file2.txt +++ /dev/null @@ -1,25 +0,0 @@ - -. -java-test-module-name-one -Android.bp12345 -. -java-test-module-name-six -Android.bp12346 -. -java-test-module-name-one -Android.bp12345 -. -java-test-module-name-six -Aqwerty.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 diff --git a/tools/metadata/testdata/file3.txt b/tools/metadata/testdata/file3.txt deleted file mode 100644 index 81beed00ab..0000000000 --- a/tools/metadata/testdata/file3.txt +++ /dev/null @@ -1,13 +0,0 @@ - -. -java-test-module-name-one -Android.bp12345 -. -java-test-module-name-two -Android.bp12345 -. -java-test-module-name-two -Asdfghj.bp12345 -. -java-test-module-name-two -Azxcvbn.bp12345 diff --git a/tools/metadata/testdata/file4.txt b/tools/metadata/testdata/file4.txt deleted file mode 100644 index 6a7590021d..0000000000 --- a/tools/metadata/testdata/file4.txt +++ /dev/null @@ -1,25 +0,0 @@ - -. -java-test-module-name-one -Android.bp12345 -. -java-test-module-name-six -Android.bp12346 -. -java-test-module-name-one -Android.bp12346 -. -java-test-module-name-six -Aqwerty.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 diff --git a/tools/metadata/testdata/file5.txt b/tools/metadata/testdata/file5.txt deleted file mode 100644 index d8de06457d..0000000000 --- a/tools/metadata/testdata/file5.txt +++ /dev/null @@ -1,4 +0,0 @@ - - -foo -Android.bp12345"a.java diff --git a/tools/metadata/testdata/file6.txt b/tools/metadata/testdata/file6.txt deleted file mode 100644 index 9c7cdcd505..0000000000 --- a/tools/metadata/testdata/file6.txt +++ /dev/null @@ -1,4 +0,0 @@ - - -bar -Android.bp12346"b.java diff --git a/tools/metadata/testdata/file7.txt b/tools/metadata/testdata/file7.txt deleted file mode 100644 index d8de06457d..0000000000 --- a/tools/metadata/testdata/file7.txt +++ /dev/null @@ -1,4 +0,0 @@ - - -foo -Android.bp12345"a.java diff --git a/tools/metadata/testdata/file8.txt b/tools/metadata/testdata/file8.txt deleted file mode 100644 index a931690022..0000000000 --- a/tools/metadata/testdata/file8.txt +++ /dev/null @@ -1,4 +0,0 @@ - - -foo -Android.gp12346"a.java diff --git a/tools/metadata/testdata/generatedCodeMetadataOutput.txt b/tools/metadata/testdata/generatedCodeMetadataOutput.txt deleted file mode 100644 index 755cf40a30..0000000000 --- a/tools/metadata/testdata/generatedCodeMetadataOutput.txt +++ /dev/null @@ -1,7 +0,0 @@ - - -bar -Android.bp12346"b.java - -foo -Android.bp12345"a.java
\ No newline at end of file diff --git a/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt b/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt deleted file mode 100644 index 755cf40a30..0000000000 --- a/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt +++ /dev/null @@ -1,7 +0,0 @@ - - -bar -Android.bp12346"b.java - -foo -Android.bp12345"a.java
\ No newline at end of file diff --git a/tools/metadata/testdata/generatedEmptyOutputFile.txt b/tools/metadata/testdata/generatedEmptyOutputFile.txt deleted file mode 100644 index e69de29bb2..0000000000 --- a/tools/metadata/testdata/generatedEmptyOutputFile.txt +++ /dev/null diff --git a/tools/metadata/testdata/generatedOutputFile.txt b/tools/metadata/testdata/generatedOutputFile.txt deleted file mode 100644 index b0d382f279..0000000000 --- a/tools/metadata/testdata/generatedOutputFile.txt +++ /dev/null @@ -1,22 +0,0 @@ - -. -java-test-module-name-one -Android.bp12345 -. -java-test-module-name-six -Android.bp12346 -. -java-test-module-name-six -Aqwerty.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-two -Android.bp12345 -. -java-test-module-name-two -Asdfghj.bp12345 -. -java-test-module-name-two -Azxcvbn.bp12345
\ No newline at end of file diff --git a/tools/metadata/testdata/inputCodeMetadata.txt b/tools/metadata/testdata/inputCodeMetadata.txt deleted file mode 100644 index 7a81b7d523..0000000000 --- a/tools/metadata/testdata/inputCodeMetadata.txt +++ /dev/null @@ -1 +0,0 @@ -file5.txt file6.txt
\ No newline at end of file diff --git a/tools/metadata/testdata/inputCodeMetadataNegative.txt b/tools/metadata/testdata/inputCodeMetadataNegative.txt deleted file mode 100644 index 26668e44a9..0000000000 --- a/tools/metadata/testdata/inputCodeMetadataNegative.txt +++ /dev/null @@ -1 +0,0 @@ -file7.txt file8.txt
\ No newline at end of file diff --git a/tools/metadata/testdata/inputFiles.txt b/tools/metadata/testdata/inputFiles.txt deleted file mode 100644 index e44bc94d32..0000000000 --- a/tools/metadata/testdata/inputFiles.txt +++ /dev/null @@ -1 +0,0 @@ -file1.txt file2.txt
\ No newline at end of file diff --git a/tools/metadata/testdata/inputFilesNegativeCase.txt b/tools/metadata/testdata/inputFilesNegativeCase.txt deleted file mode 100644 index a37aa3fd5d..0000000000 --- a/tools/metadata/testdata/inputFilesNegativeCase.txt +++ /dev/null @@ -1 +0,0 @@ -file3.txt file4.txt
\ No newline at end of file diff --git a/tools/metadata/testdata/metadata_test.go b/tools/metadata/testdata/metadata_test.go deleted file mode 100644 index 314add352f..0000000000 --- a/tools/metadata/testdata/metadata_test.go +++ /dev/null @@ -1,119 +0,0 @@ -package main - -import ( - "fmt" - "io/ioutil" - "os/exec" - "strings" - "testing" -) - -func TestMetadata(t *testing.T) { - cmd := exec.Command( - "metadata", "-rule", "test_spec", "-inputFile", "./inputFiles.txt", "-outputFile", - "./generatedOutputFile.txt", - ) - stderr, err := cmd.CombinedOutput() - if err != nil { - t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err) - } - - // Read the contents of the expected output file - expectedOutput, err := ioutil.ReadFile("./expectedOutputFile.txt") - if err != nil { - t.Fatalf("Error reading expected output file: %s", err) - } - - // Read the contents of the generated output file - generatedOutput, err := ioutil.ReadFile("./generatedOutputFile.txt") - if err != nil { - t.Fatalf("Error reading generated output file: %s", err) - } - - fmt.Println() - - // Compare the contents - if string(expectedOutput) != string(generatedOutput) { - t.Errorf("Generated file contents do not match the expected output") - } -} - -func TestMetadataNegativeCase(t *testing.T) { - cmd := exec.Command( - "metadata", "-rule", "test_spec", "-inputFile", "./inputFilesNegativeCase.txt", "-outputFile", - "./generatedOutputFileNegativeCase.txt", - ) - stderr, err := cmd.CombinedOutput() - if err == nil { - t.Fatalf( - "Expected an error, but the metadata command executed successfully. Output: %s", - stderr, - ) - } - - expectedError := "Conflicting trendy team IDs found for java-test-module" + - "-name-one at:\nAndroid.bp with teamId: 12346," + - "\nAndroid.bp with teamId: 12345" - if !strings.Contains( - strings.TrimSpace(string(stderr)), strings.TrimSpace(expectedError), - ) { - t.Errorf( - "Unexpected error message. Expected to contain: %s, Got: %s", - expectedError, stderr, - ) - } -} - -func TestEmptyInputFile(t *testing.T) { - cmd := exec.Command( - "metadata", "-rule", "test_spec", "-inputFile", "./emptyInputFile.txt", "-outputFile", - "./generatedEmptyOutputFile.txt", - ) - stderr, err := cmd.CombinedOutput() - if err != nil { - t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err) - } - - // Read the contents of the generated output file - generatedOutput, err := ioutil.ReadFile("./generatedEmptyOutputFile.txt") - if err != nil { - t.Fatalf("Error reading generated output file: %s", err) - } - - fmt.Println() - - // Compare the contents - if string(generatedOutput) != "\n" { - t.Errorf("Generated file contents do not match the expected output") - } -} - -func TestCodeMetadata(t *testing.T) { - cmd := exec.Command( - "metadata", "-rule", "code_metadata", "-inputFile", "./inputCodeMetadata.txt", "-outputFile", - "./generatedCodeMetadataOutputFile.txt", - ) - stderr, err := cmd.CombinedOutput() - if err != nil { - t.Fatalf("Error running metadata command: %s. 
Error: %v", stderr, err) - } - - // Read the contents of the expected output file - expectedOutput, err := ioutil.ReadFile("./expectedCodeMetadataOutput.txt") - if err != nil { - t.Fatalf("Error reading expected output file: %s", err) - } - - // Read the contents of the generated output file - generatedOutput, err := ioutil.ReadFile("./generatedCodeMetadataOutputFile.txt") - if err != nil { - t.Fatalf("Error reading generated output file: %s", err) - } - - fmt.Println() - - // Compare the contents - if string(expectedOutput) != string(generatedOutput) { - t.Errorf("Generated file contents do not match the expected output") - } -} diff --git a/tools/metadata/testdata/outputFile.txt b/tools/metadata/testdata/outputFile.txt deleted file mode 100644 index b0d382f279..0000000000 --- a/tools/metadata/testdata/outputFile.txt +++ /dev/null @@ -1,22 +0,0 @@ - -. -java-test-module-name-one -Android.bp12345 -. -java-test-module-name-six -Android.bp12346 -. -java-test-module-name-six -Aqwerty.bp12346 -. -java-test-module-name-six -Apoiuyt.bp12346 -. -java-test-module-name-two -Android.bp12345 -. -java-test-module-name-two -Asdfghj.bp12345 -. -java-test-module-name-two -Azxcvbn.bp12345
\ No newline at end of file diff --git a/tools/missing_soong_module_info.py b/tools/missing_soong_module_info.py new file mode 100755 index 0000000000..6fa7f2bccb --- /dev/null +++ b/tools/missing_soong_module_info.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python3 +# +# Copyright (C) 2016 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import os +import sys + +def main(): + try: + product_out = os.environ["ANDROID_PRODUCT_OUT"] + except KeyError: + sys.stderr.write("Can't get ANDROID_PRODUCT_OUT. Run lunch first.\n") + sys.exit(1) + + filename = os.path.join(product_out, "module-info.json") + try: + with open(filename) as f: + modules = json.load(f) + except FileNotFoundError: + sys.stderr.write(f"File not found: {filename}\n") + sys.exit(1) + except json.JSONDecodeError: + sys.stderr.write(f"Invalid json: {filename}\n") + return None + + classes = {} + + for name, info in modules.items(): + make = info.get("make") + make_gen = info.get("make_generated_module_info") + if not make and make_gen: + classes.setdefault(frozenset(info.get("class")), []).append(name) + + for cl, names in classes.items(): + print(" ".join(cl)) + for name in names: + print(" ", name) + +if __name__ == "__main__": + main() diff --git a/tools/perf/benchmarks b/tools/perf/benchmarks index 6998ecd5c2..8c24e127c8 100755 --- a/tools/perf/benchmarks +++ b/tools/perf/benchmarks @@ -786,6 +786,32 @@ benchmarks: preroll=1, postroll=2, ), + Benchmark(id="add_systemui_field_with_tests", + title="Add SystemUI field with tests", + change=AddJavaField("frameworks/base/packages/SystemUI/src/com/android/systemui/wmshell/WMShell.java", + "public"), + modules=["SystemUiRavenTests"], + preroll=1, + postroll=2, + ), + Benchmark(id="systemui_flicker_add_log_call", + title="Add a Log call to flicker", + change=Modify("platform_testing/libraries/flicker/src/android/tools/flicker/FlickerServiceResultsCollector.kt", + lambda: f'Log.v(LOG_TAG, "BENCHMARK = {random.randint(0, 1000000)}");\n', + before="Log.v(LOG_TAG,"), + modules=["WMShellFlickerTestsPip"], + preroll=1, + postroll=2, + ), + Benchmark(id="systemui_core_add_log_call", + title="Add a Log call SystemUIApplication", + change=Modify("frameworks/base/packages/SystemUI/src/com/android/systemui/SystemUIApplication.java", + lambda: f'Log.v(TAG, "BENCHMARK = {random.randint(0, 1000000)}");\n', + before="Log.wtf(TAG,"), + modules=["SystemUI-core"], + preroll=1, + postroll=2, + ), ] def _error(self, message): diff --git a/tools/record-finalized-flags/.gitignore b/tools/record-finalized-flags/.gitignore new file mode 100644 index 0000000000..1e7caa9ea8 --- /dev/null +++ b/tools/record-finalized-flags/.gitignore @@ -0,0 +1,2 @@ +Cargo.lock +target/ diff --git a/tools/record-finalized-flags/Android.bp b/tools/record-finalized-flags/Android.bp new file mode 100644 index 0000000000..55a3a389e0 --- /dev/null +++ b/tools/record-finalized-flags/Android.bp @@ -0,0 +1,28 @@ +package { + default_applicable_licenses: ["Android-Apache-2.0"], +} + +rust_defaults { 
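For orientation, the missing_soong_module_info.py script added above walks module-info.json and reports modules that only have make-generated metadata, grouped by their class set. A minimal Python sketch of the input shape it expects and the grouping it performs (module names and values here are made up for illustration):

    # Hypothetical module-info.json entries; only the keys the script reads are shown.
    modules = {
        "libfoo": {"class": ["SHARED_LIBRARIES"], "make": {}, "make_generated_module_info": {"path": ["x"]}},
        "libbar": {"class": ["SHARED_LIBRARIES"], "make": {}, "make_generated_module_info": {"path": ["y"]}},
    }
    classes = {}
    for name, info in modules.items():
        # Same condition as the script: no "make" data, but make-generated info present.
        if not info.get("make") and info.get("make_generated_module_info"):
            classes.setdefault(frozenset(info.get("class")), []).append(name)
    # classes == {frozenset({"SHARED_LIBRARIES"}): ["libfoo", "libbar"]}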
+ name: "record-finalized-flags-defaults", + edition: "2021", + clippy_lints: "android", + lints: "android", + srcs: ["src/main.rs"], + rustlibs: [ + "libaconfig_protos", + "libanyhow", + "libclap", + "libregex", + ], +} + +rust_binary_host { + name: "record-finalized-flags", + defaults: ["record-finalized-flags-defaults"], +} + +rust_test_host { + name: "record-finalized-flags-test", + defaults: ["record-finalized-flags-defaults"], + test_suites: ["general-tests"], +} diff --git a/tools/record-finalized-flags/Cargo.toml b/tools/record-finalized-flags/Cargo.toml new file mode 100644 index 0000000000..0fc795363f --- /dev/null +++ b/tools/record-finalized-flags/Cargo.toml @@ -0,0 +1,15 @@ +# Cargo.toml file to allow rapid development of record-finalized-flags using +# cargo. Soong is the official Android build system, and the only system +# guaranteed to support record-finalized-flags. If there is ever any issue with +# the cargo setup, support for cargo will be dropped and this file removed. + +[package] +name = "record-finalized-flags" +version = "0.1.0" +edition = "2021" + +[dependencies] +aconfig_protos = { path = "../aconfig/aconfig_protos" } +anyhow = { path = "../../../../external/rust/android-crates-io/crates/anyhow" } +clap = { path = "../../../../external/rust/android-crates-io/crates/clap", features = ["derive"] } +regex = { path = "../../../../external/rust/android-crates-io/crates/regex" } diff --git a/tools/record-finalized-flags/OWNERS b/tools/record-finalized-flags/OWNERS new file mode 100644 index 0000000000..2864a2c23c --- /dev/null +++ b/tools/record-finalized-flags/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/base:/SDK_OWNERS diff --git a/tools/record-finalized-flags/src/api_signature_files.rs b/tools/record-finalized-flags/src/api_signature_files.rs new file mode 100644 index 0000000000..af8f4d1957 --- /dev/null +++ b/tools/record-finalized-flags/src/api_signature_files.rs @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use anyhow::Result; +use regex::Regex; +use std::{collections::HashSet, io::Read}; + +use crate::FlagId; + +/// Grep for all flags used with @FlaggedApi annotations in an API signature file (*current.txt +/// file). 
+pub(crate) fn extract_flagged_api_flags<R: Read>(mut reader: R) -> Result<HashSet<FlagId>> { + let mut haystack = String::new(); + reader.read_to_string(&mut haystack)?; + let regex = Regex::new(r#"(?ms)@FlaggedApi\("(.*?)"\)"#).unwrap(); + let iter = regex.captures_iter(&haystack).map(|cap| cap[1].to_owned()); + Ok(HashSet::from_iter(iter)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test() { + let api_signature_file = include_bytes!("../tests/api-signature-file.txt"); + let flags = extract_flagged_api_flags(&api_signature_file[..]).unwrap(); + assert_eq!( + flags, + HashSet::from_iter(vec![ + "record_finalized_flags.test.foo".to_string(), + "this.flag.is.not.used".to_string(), + ]) + ); + } +} diff --git a/tools/record-finalized-flags/src/finalized_flags.rs b/tools/record-finalized-flags/src/finalized_flags.rs new file mode 100644 index 0000000000..1ae4c4d789 --- /dev/null +++ b/tools/record-finalized-flags/src/finalized_flags.rs @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use anyhow::Result; +use std::{collections::HashSet, io::Read}; + +use crate::FlagId; + +/// Read a list of flag names. The input is expected to be plain text, with each line containing +/// the name of a single flag. +pub(crate) fn read_finalized_flags<R: Read>(mut reader: R) -> Result<HashSet<FlagId>> { + let mut contents = String::new(); + reader.read_to_string(&mut contents)?; + let iter = contents.lines().map(|s| s.to_owned()); + Ok(HashSet::from_iter(iter)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test() { + let input = include_bytes!("../tests/finalized-flags.txt"); + let flags = read_finalized_flags(&input[..]).unwrap(); + assert_eq!( + flags, + HashSet::from_iter(vec![ + "record_finalized_flags.test.bar".to_string(), + "record_finalized_flags.test.baz".to_string(), + ]) + ); + } +} diff --git a/tools/record-finalized-flags/src/flag_values.rs b/tools/record-finalized-flags/src/flag_values.rs new file mode 100644 index 0000000000..cc16d12f3c --- /dev/null +++ b/tools/record-finalized-flags/src/flag_values.rs @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +use aconfig_protos::{ParsedFlagExt, ProtoFlagPermission, ProtoFlagState}; +use anyhow::{anyhow, Result}; +use std::{collections::HashSet, io::Read}; + +use crate::FlagId; + +/// Parse a ProtoParsedFlags binary protobuf blob and return the fully qualified names of flags +/// that are slated for API finalization (i.e. are both ENABLED and READ_ONLY). +pub(crate) fn get_relevant_flags_from_binary_proto<R: Read>( + mut reader: R, +) -> Result<HashSet<FlagId>> { + let mut buffer = Vec::new(); + reader.read_to_end(&mut buffer)?; + let parsed_flags = aconfig_protos::parsed_flags::try_from_binary_proto(&buffer) + .map_err(|_| anyhow!("failed to parse binary proto"))?; + let iter = parsed_flags + .parsed_flag + .into_iter() + .filter(|flag| { + flag.state() == ProtoFlagState::ENABLED + && flag.permission() == ProtoFlagPermission::READ_ONLY + }) + .map(|flag| flag.fully_qualified_name()); + Ok(HashSet::from_iter(iter)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_disabled_or_read_write_flags_are_ignored() { + let bytes = include_bytes!("../tests/flags.protobuf"); + let flags = get_relevant_flags_from_binary_proto(&bytes[..]).unwrap(); + assert_eq!(flags, HashSet::from_iter(vec!["record_finalized_flags.test.foo".to_string()])); + } +} diff --git a/tools/record-finalized-flags/src/main.rs b/tools/record-finalized-flags/src/main.rs new file mode 100644 index 0000000000..efdbc9be8e --- /dev/null +++ b/tools/record-finalized-flags/src/main.rs @@ -0,0 +1,134 @@ +/* + * Copyright (C) 2025 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//! `record-finalized-flags` is a tool to create a snapshot (intended to be stored in +//! prebuilts/sdk) of the flags used with @FlaggedApi APIs +use anyhow::Result; +use clap::Parser; +use std::{collections::HashSet, fs::File, path::PathBuf}; + +mod api_signature_files; +mod finalized_flags; +mod flag_values; + +pub(crate) type FlagId = String; + +const ABOUT: &str = "Create a new prebuilts/sdk/<version>/finalized-flags.txt file + +The prebuilts/sdk/<version>/finalized-flags.txt files list all aconfig flags that have been used +with @FlaggedApi annotations on APIs that have been finalized. These files are used to prevent +flags from being re-used for new, unfinalized, APIs, and by the aconfig code generation. 
+ +This tool works as follows: + + - Read API signature files from source tree (*current.txt files) [--api-signature-file] + - Read the current aconfig flag values from source tree [--parsed-flags-file] + - Read the previous finalized-flags.txt files from prebuilts/sdk [--finalized-flags-file] + - Extract the flags slated for API finalization by scanning through the API signature files for + flags that are ENABLED and READ_ONLY + - Merge the found flags with the recorded flags from previous API finalizations + - Print the set of flags to stdout +"; + +#[derive(Parser, Debug)] +#[clap(about=ABOUT)] +struct Cli { + #[arg(long)] + parsed_flags_file: PathBuf, + + #[arg(long)] + api_signature_file: Vec<PathBuf>, + + #[arg(long)] + finalized_flags_file: PathBuf, +} + +/// Filter out the ENABLED and READ_ONLY flags used with @FlaggedApi annotations in the source +/// tree, and add those flags to the set of previously finalized flags. +fn calculate_new_finalized_flags( + flags_used_with_flaggedapi_annotation: &HashSet<FlagId>, + all_flags_to_be_finalized: &HashSet<FlagId>, + already_finalized_flags: &HashSet<FlagId>, +) -> HashSet<FlagId> { + let new_flags: HashSet<_> = flags_used_with_flaggedapi_annotation + .intersection(all_flags_to_be_finalized) + .map(|s| s.to_owned()) + .collect(); + already_finalized_flags.union(&new_flags).map(|s| s.to_owned()).collect() +} + +fn main() -> Result<()> { + let args = Cli::parse(); + + let mut flags_used_with_flaggedapi_annotation = HashSet::new(); + for path in args.api_signature_file { + let file = File::open(path)?; + for flag in api_signature_files::extract_flagged_api_flags(file)?.drain() { + flags_used_with_flaggedapi_annotation.insert(flag); + } + } + + let file = File::open(args.parsed_flags_file)?; + let all_flags_to_be_finalized = flag_values::get_relevant_flags_from_binary_proto(file)?; + + let file = File::open(args.finalized_flags_file)?; + let already_finalized_flags = finalized_flags::read_finalized_flags(file)?; + + let mut new_finalized_flags = Vec::from_iter(calculate_new_finalized_flags( + &flags_used_with_flaggedapi_annotation, + &all_flags_to_be_finalized, + &already_finalized_flags, + )); + new_finalized_flags.sort(); + + println!("{}", new_finalized_flags.join("\n")); + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test() { + let input = include_bytes!("../tests/api-signature-file.txt"); + let flags_used_with_flaggedapi_annotation = + api_signature_files::extract_flagged_api_flags(&input[..]).unwrap(); + + let input = include_bytes!("../tests/flags.protobuf"); + let all_flags_to_be_finalized = + flag_values::get_relevant_flags_from_binary_proto(&input[..]).unwrap(); + + let input = include_bytes!("../tests/finalized-flags.txt"); + let already_finalized_flags = finalized_flags::read_finalized_flags(&input[..]).unwrap(); + + let new_finalized_flags = calculate_new_finalized_flags( + &flags_used_with_flaggedapi_annotation, + &all_flags_to_be_finalized, + &already_finalized_flags, + ); + + assert_eq!( + new_finalized_flags, + HashSet::from_iter(vec![ + "record_finalized_flags.test.foo".to_string(), + "record_finalized_flags.test.bar".to_string(), + "record_finalized_flags.test.baz".to_string(), + ]) + ); + } +} diff --git a/tools/record-finalized-flags/tests/api-signature-file.txt b/tools/record-finalized-flags/tests/api-signature-file.txt new file mode 100644 index 0000000000..2ad559f0ad --- /dev/null +++ b/tools/record-finalized-flags/tests/api-signature-file.txt @@ -0,0 +1,15 @@ +// Signature format: 2.0 
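The calculate_new_finalized_flags function above is plain set algebra: the flags referenced by @FlaggedApi annotations are intersected with the flags that are currently ENABLED and READ_ONLY, and the result is unioned with the flags recorded by previous finalizations. A minimal Python sketch of the same computation, using the values from the tool's own test data:

    def calculate_new_finalized_flags(flagged_api, to_be_finalized, already_finalized):
        # (flags used with @FlaggedApi, intersected with ENABLED+READ_ONLY flags),
        # then unioned with the previously finalized flags.
        return already_finalized | (flagged_api & to_be_finalized)

    assert calculate_new_finalized_flags(
        {"record_finalized_flags.test.foo", "this.flag.is.not.used"},
        {"record_finalized_flags.test.foo"},
        {"record_finalized_flags.test.bar", "record_finalized_flags.test.baz"},
    ) == {
        "record_finalized_flags.test.foo",
        "record_finalized_flags.test.bar",
        "record_finalized_flags.test.baz",
    }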
+package android { + + public final class C { + ctor public C(); + } + + public static final class C.inner { + ctor public C.inner(); + field @FlaggedApi("record_finalized_flags.test.foo") public static final String FOO = "foo"; + field @FlaggedApi("this.flag.is.not.used") public static final String BAR = "bar"; + } + +} + diff --git a/tools/record-finalized-flags/tests/finalized-flags.txt b/tools/record-finalized-flags/tests/finalized-flags.txt new file mode 100644 index 0000000000..7fbcb3dc65 --- /dev/null +++ b/tools/record-finalized-flags/tests/finalized-flags.txt @@ -0,0 +1,2 @@ +record_finalized_flags.test.bar +record_finalized_flags.test.baz diff --git a/tools/record-finalized-flags/tests/flags.declarations b/tools/record-finalized-flags/tests/flags.declarations new file mode 100644 index 0000000000..b45ef62523 --- /dev/null +++ b/tools/record-finalized-flags/tests/flags.declarations @@ -0,0 +1,16 @@ +package: "record_finalized_flags.test" +container: "system" + +flag { + name: "foo" + namespace: "test" + description: "FIXME" + bug: "" +} + +flag { + name: "not_enabled" + namespace: "test" + description: "FIXME" + bug: "" +} diff --git a/tools/record-finalized-flags/tests/flags.protobuf b/tools/record-finalized-flags/tests/flags.protobuf Binary files differnew file mode 100644 index 0000000000..7c6e63eca8 --- /dev/null +++ b/tools/record-finalized-flags/tests/flags.protobuf diff --git a/tools/record-finalized-flags/tests/flags.values b/tools/record-finalized-flags/tests/flags.values new file mode 100644 index 0000000000..ff6225d822 --- /dev/null +++ b/tools/record-finalized-flags/tests/flags.values @@ -0,0 +1,13 @@ +flag_value { + package: "record_finalized_flags.test" + name: "foo" + state: ENABLED + permission: READ_ONLY +} + +flag_value { + package: "record_finalized_flags.test" + name: "not_enabled" + state: DISABLED + permission: READ_ONLY +} diff --git a/tools/record-finalized-flags/tests/generate-flags-protobuf.sh b/tools/record-finalized-flags/tests/generate-flags-protobuf.sh new file mode 100755 index 0000000000..701189cd5c --- /dev/null +++ b/tools/record-finalized-flags/tests/generate-flags-protobuf.sh @@ -0,0 +1,7 @@ +#!/bin/bash +aconfig create-cache \ + --package record_finalized_flags.test \ + --container system \ + --declarations flags.declarations \ + --values flags.values \ + --cache flags.protobuf diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp index 9b134f22d4..e371b2354c 100644 --- a/tools/releasetools/Android.bp +++ b/tools/releasetools/Android.bp @@ -96,6 +96,7 @@ python_defaults { ], libs: [ "apex_manifest", + "releasetools_apex_utils", "releasetools_common", ], required: [ @@ -107,7 +108,7 @@ python_defaults { python_library_host { name: "ota_metadata_proto", srcs: [ - "ota_metadata.proto", + "ota_metadata.proto", ], proto: { canonical_path_from_root: false, @@ -117,7 +118,7 @@ python_library_host { cc_library_static { name: "ota_metadata_proto_cc", srcs: [ - "ota_metadata.proto", + "ota_metadata.proto", ], host_supported: true, recovery_available: true, @@ -144,7 +145,7 @@ java_library_static { static_libs: ["libprotobuf-java-nano"], }, }, - visibility: ["//frameworks/base:__subpackages__"] + visibility: ["//frameworks/base:__subpackages__"], } python_defaults { @@ -367,6 +368,9 @@ python_binary_host { libs: [ "ota_utils_lib", ], + required: [ + "signapk", + ], } python_binary_host { @@ -436,7 +440,7 @@ python_binary_host { name: "check_target_files_vintf", defaults: [ "releasetools_binary_defaults", - 
"releasetools_check_target_files_vintf_defaults" + "releasetools_check_target_files_vintf_defaults", ], } @@ -546,13 +550,15 @@ python_binary_host { defaults: ["releasetools_binary_defaults"], srcs: [ "sign_target_files_apks.py", - "payload_signer.py", - "ota_signing_utils.py", + "ota_from_raw_img.py", ], libs: [ "releasetools_add_img_to_target_files", "releasetools_apex_utils", "releasetools_common", + "ota_metadata_proto", + "ota_utils_lib", + "update_payload", ], } @@ -631,8 +637,10 @@ python_defaults { ], data: [ "testdata/**/*", + ], + device_common_data: [ ":com.android.apex.compressed.v1", - ":com.android.apex.vendor.foo.with_vintf" + ":com.android.apex.vendor.foo.with_vintf", ], target: { darwin: { diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py index b39a82cf45..30a6accf32 100644 --- a/tools/releasetools/add_img_to_target_files.py +++ b/tools/releasetools/add_img_to_target_files.py @@ -464,6 +464,7 @@ def AddDtbo(output_zip): dtbo_prebuilt_path = os.path.join( OPTIONS.input_tmp, "PREBUILT_IMAGES", "dtbo.img") assert os.path.exists(dtbo_prebuilt_path) + os.makedirs(os.path.dirname(img.name), exist_ok=True) shutil.copy(dtbo_prebuilt_path, img.name) # AVB-sign the image as needed. @@ -1099,7 +1100,7 @@ def AddImagesToTargetFiles(filename): vbmeta_partitions = common.AVB_PARTITIONS[:] + tuple(avb_custom_partitions) vbmeta_system = OPTIONS.info_dict.get("avb_vbmeta_system", "").strip() - if vbmeta_system: + if vbmeta_system and set(vbmeta_system.split()).intersection(partitions): banner("vbmeta_system") partitions["vbmeta_system"] = AddVBMeta( output_zip, partitions, "vbmeta_system", vbmeta_system.split()) @@ -1109,7 +1110,7 @@ def AddImagesToTargetFiles(filename): vbmeta_partitions.append("vbmeta_system") vbmeta_vendor = OPTIONS.info_dict.get("avb_vbmeta_vendor", "").strip() - if vbmeta_vendor: + if vbmeta_vendor and set(vbmeta_vendor.split()).intersection(partitions): banner("vbmeta_vendor") partitions["vbmeta_vendor"] = AddVBMeta( output_zip, partitions, "vbmeta_vendor", vbmeta_vendor.split()) @@ -1136,7 +1137,7 @@ def AddImagesToTargetFiles(filename): if item not in included_partitions] vbmeta_partitions.append(partition_name) - if OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true": + if OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true" and set(vbmeta_partitions).intersection(partitions): banner("vbmeta") AddVBMeta(output_zip, partitions, "vbmeta", vbmeta_partitions) diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py index 3abef3bece..08f2b83388 100644 --- a/tools/releasetools/apex_utils.py +++ b/tools/releasetools/apex_utils.py @@ -36,6 +36,8 @@ APEX_PAYLOAD_IMAGE = 'apex_payload.img' APEX_PUBKEY = 'apex_pubkey' +# Partitions supporting APEXes +PARTITIONS = ['system', 'system_ext', 'product', 'vendor', 'odm'] class ApexInfoError(Exception): """An Exception raised during Apex Information command.""" @@ -77,15 +79,10 @@ class ApexApkSigner(object): Returns: The repacked apex file containing the signed apk files. """ - if not os.path.exists(self.debugfs_path): - raise ApexSigningError( - "Couldn't find location of debugfs_static: " + - "Path {} does not exist. 
".format(self.debugfs_path) + - "Make sure bin/debugfs_static can be found in -p <path>") - list_cmd = ['deapexer', '--debugfs_path', self.debugfs_path, - 'list', self.apex_path] - entries_names = common.RunAndCheckOutput(list_cmd).split() - apk_entries = [name for name in entries_names if name.endswith('.apk')] + payload_dir = self.ExtractApexPayload(self.apex_path) + apk_entries = [] + for base_dir, _, files in os.walk(payload_dir): + apk_entries.extend(os.path.join(base_dir, file) for file in files if file.endswith('.apk')) # No need to sign and repack, return the original apex path. if not apk_entries and self.sign_tool is None: @@ -103,16 +100,16 @@ class ApexApkSigner(object): logger.warning('Apk path does not contain the intended directory name:' ' %s', entry) - payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents( - apk_entries, apk_keys, payload_key, signing_args) + has_signed_content = self.SignContentsInPayload( + payload_dir, apk_entries, apk_keys, payload_key, signing_args) if not has_signed_content: logger.info('No contents has been signed in %s', self.apex_path) return self.apex_path return self.RepackApexPayload(payload_dir, payload_key, signing_args) - def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key, signing_args): - """Extracts the payload image and signs the containing apk files.""" + def ExtractApexPayload(self, apex_path): + """Extracts the contents of an APEX and returns the directory of the contents""" if not os.path.exists(self.debugfs_path): raise ApexSigningError( "Couldn't find location of debugfs_static: " + @@ -127,9 +124,12 @@ class ApexApkSigner(object): extract_cmd = ['deapexer', '--debugfs_path', self.debugfs_path, '--fsckerofs_path', self.fsckerofs_path, 'extract', - self.apex_path, payload_dir] + apex_path, payload_dir] common.RunAndCheckOutput(extract_cmd) + return payload_dir + def SignContentsInPayload(self, payload_dir, apk_entries, apk_keys, payload_key, signing_args): + """Signs the contents in payload.""" has_signed_content = False for entry in apk_entries: apk_path = os.path.join(payload_dir, entry) @@ -161,7 +161,7 @@ class ApexApkSigner(object): common.RunAndCheckOutput(cmd) has_signed_content = True - return payload_dir, has_signed_content + return has_signed_content def RepackApexPayload(self, payload_dir, payload_key, signing_args=None): """Rebuilds the apex file with the updated payload directory.""" @@ -550,7 +550,7 @@ def GetApexInfoFromTargetFiles(input_file): if not isinstance(input_file, str): raise RuntimeError("must pass filepath to target-files zip or directory") apex_infos = [] - for partition in ['system', 'system_ext', 'product', 'vendor']: + for partition in PARTITIONS: apex_infos.extend(GetApexInfoForPartition(input_file, partition)) return apex_infos diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py index 464ad9b4cc..b6c96c4bf3 100755 --- a/tools/releasetools/build_image.py +++ b/tools/releasetools/build_image.py @@ -677,24 +677,31 @@ def TryParseFingerprint(glob_dict: dict): glob_dict["fingerprint"] = fingerprint return - -def ImagePropFromGlobalDict(glob_dict, mount_point): - """Build an image property dictionary from the global dictionary. +def TryParseFingerprintAndTimestamp(glob_dict): + """Helper function that parses fingerprint and timestamp from the global dictionary. Args: glob_dict: the global dictionary from the build system. - mount_point: such as "system", "data" etc. 
""" - d = {} TryParseFingerprint(glob_dict) # Set fixed timestamp for building the OTA package. if "use_fixed_timestamp" in glob_dict: - d["timestamp"] = FIXED_FILE_TIMESTAMP + glob_dict["timestamp"] = FIXED_FILE_TIMESTAMP if "build.prop" in glob_dict: timestamp = glob_dict["build.prop"].GetProp("ro.build.date.utc") if timestamp: - d["timestamp"] = timestamp + glob_dict["timestamp"] = timestamp + +def ImagePropFromGlobalDict(glob_dict, mount_point): + """Build an image property dictionary from the global dictionary. + + Args: + glob_dict: the global dictionary from the build system. + mount_point: such as "system", "data" etc. + """ + d = {} + TryParseFingerprintAndTimestamp(glob_dict) def copy_prop(src_p, dest_p): """Copy a property from the global dictionary. @@ -730,6 +737,7 @@ def ImagePropFromGlobalDict(glob_dict, mount_point): "avb_avbtool", "use_dynamic_partition_size", "fingerprint", + "timestamp", ) for p in common_props: copy_prop(p, p) @@ -992,6 +1000,7 @@ def main(argv): # The caller knows the mount point and provides a dictionary needed by # BuildImage(). image_properties = glob_dict + TryParseFingerprintAndTimestamp(image_properties) else: image_filename = os.path.basename(args.out_file) mount_point = "" diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py index b8dcd8465c..dc123efb46 100755 --- a/tools/releasetools/check_target_files_vintf.py +++ b/tools/releasetools/check_target_files_vintf.py @@ -30,6 +30,7 @@ import subprocess import sys import zipfile +import apex_utils import common from apex_manifest import ParseApexManifest @@ -229,7 +230,7 @@ def PrepareApexDirectory(inp, dirmap): apex_host = os.path.join(OPTIONS.search_path, 'bin', 'apexd_host') cmd = [apex_host, '--tool_path', OPTIONS.search_path] cmd += ['--apex_path', dirmap['/apex']] - for p in ['system', 'system_ext', 'product', 'vendor']: + for p in apex_utils.PARTITIONS: if '/' + p in dirmap: cmd += ['--' + p + '_path', dirmap['/' + p]] common.RunAndCheckOutput(cmd) diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py index f6f694444b..b6cbb15222 100644 --- a/tools/releasetools/common.py +++ b/tools/releasetools/common.py @@ -23,7 +23,7 @@ import fnmatch import getopt import getpass import gzip -import imp +import importlib.util import json import logging import logging.config @@ -898,7 +898,7 @@ def LoadInfoDict(input_file, repacking=False): if key.endswith("selinux_fc"): fc_basename = os.path.basename(d[key]) fc_config = os.path.join(input_file, "META", fc_basename) - assert os.path.exists(fc_config) + assert os.path.exists(fc_config), "{} does not exist".format(fc_config) d[key] = fc_config @@ -907,9 +907,10 @@ def LoadInfoDict(input_file, repacking=False): d["root_fs_config"] = os.path.join( input_file, "META", "root_filesystem_config.txt") + partitions = ["system", "vendor", "system_ext", "product", "odm", + "vendor_dlkm", "odm_dlkm", "system_dlkm"] # Redirect {partition}_base_fs_file for each of the named partitions. 
- for part_name in ["system", "vendor", "system_ext", "product", "odm", - "vendor_dlkm", "odm_dlkm", "system_dlkm"]: + for part_name in partitions: key_name = part_name + "_base_fs_file" if key_name not in d: continue @@ -922,6 +923,25 @@ def LoadInfoDict(input_file, repacking=False): "Failed to find %s base fs file: %s", part_name, base_fs_file) del d[key_name] + # Redirecting helper for optional properties like erofs_compress_hints + def redirect_file(prop, filename): + if prop not in d: + return + config_file = os.path.join(input_file, "META/" + filename) + if os.path.exists(config_file): + d[prop] = config_file + else: + logger.warning( + "Failed to find %s fro %s", filename, prop) + del d[prop] + + # Redirect erofs_[default_]compress_hints files + redirect_file("erofs_default_compress_hints", + "erofs_default_compress_hints.txt") + for part in partitions: + redirect_file(part + "_erofs_compress_hints", + part + "_erofs_compress_hints.txt") + def makeint(key): if key in d: d[key] = int(d[key], 0) @@ -2988,7 +3008,7 @@ def ZipWrite(zip_file, filename, arcname=None, perms=0o644, zipfile.ZIP64_LIMIT = saved_zip64_limit -def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None, +def ZipWriteStr(zip_file: zipfile.ZipFile, zinfo_or_arcname, data, perms=None, compress_type=None): """Wrap zipfile.writestr() function to work around the zip64 limit. @@ -3112,16 +3132,19 @@ class DeviceSpecificParams(object): return try: if os.path.isdir(path): - info = imp.find_module("releasetools", [path]) - else: - d, f = os.path.split(path) - b, x = os.path.splitext(f) - if x == ".py": - f = b - info = imp.find_module(f, [d]) + path = os.path.join(path, "releasetools") + if os.path.isdir(path): + path = os.path.join(path, "__init__.py") + if not os.path.exists(path) and os.path.exists(path + ".py"): + path = path + ".py" + spec = importlib.util.spec_from_file_location("device_specific", path) + if not spec: + raise FileNotFoundError(path) logger.info("loaded device-specific extensions from %s", path) - self.module = imp.load_module("device_specific", *info) - except ImportError: + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + self.module = module + except (ImportError, FileNotFoundError): logger.info("unable to load device-specific module; assuming none") def _DoCall(self, function_name, *args, **kwargs): @@ -3218,7 +3241,9 @@ class File(object): return t def WriteToDir(self, d): - with open(os.path.join(d, self.name), "wb") as fp: + output_path = os.path.join(d, self.name) + os.makedirs(os.path.dirname(output_path), exist_ok=True) + with open(output_path, "wb") as fp: fp.write(self.data) def AddToZip(self, z, compression=None): diff --git a/tools/releasetools/merge_ota.py b/tools/releasetools/merge_ota.py index fb5957a857..e8732a2c52 100644 --- a/tools/releasetools/merge_ota.py +++ b/tools/releasetools/merge_ota.py @@ -226,9 +226,21 @@ def main(argv): logger.setLevel(logging.INFO) logger.info(args) + if args.java_path: + common.OPTIONS.java_path = args.java_path + if args.search_path: common.OPTIONS.search_path = args.search_path + if args.signapk_path: + common.OPTIONS.signapk_path = args.signapk_path + + if args.extra_signapk_args: + common.OPTIONS.extra_signapk_args = args.extra_signapk_args + + if args.signapk_shared_library_path: + common.OPTIONS.signapk_shared_library_path = args.signapk_shared_library_path + metadata_ota = args.packages[-1] if args.metadata_ota is not None: metadata_ota = args.metadata_ota diff --git a/tools/releasetools/ota_from_raw_img.py 
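The common.py changes above replace the removed imp module with importlib.util when DeviceSpecificParams loads device-specific releasetools extensions. For reference, the spec-based loading pattern the reworked code relies on, as a standalone Python sketch (module name "device_specific" as in the diff):

    import importlib.util

    def load_from_path(path):
        # Build an import spec directly from a file path, then execute the module.
        spec = importlib.util.spec_from_file_location("device_specific", path)
        if spec is None:
            raise FileNotFoundError(path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module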
b/tools/releasetools/ota_from_raw_img.py index 03b44f15d6..3b9374ab13 100644 --- a/tools/releasetools/ota_from_raw_img.py +++ b/tools/releasetools/ota_from_raw_img.py @@ -105,9 +105,6 @@ def main(argv): if args.package_key: logger.info("Signing payload...") - # TODO: remove OPTIONS when no longer used as fallback in payload_signer - common.OPTIONS.payload_signer_args = None - common.OPTIONS.payload_signer_maximum_signature_size = None signer = PayloadSigner(args.package_key, args.private_key_suffix, key_passwords[args.package_key], payload_signer=args.payload_signer, diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py index 985cd56cb0..76d168cb8e 100755 --- a/tools/releasetools/ota_from_target_files.py +++ b/tools/releasetools/ota_from_target_files.py @@ -264,6 +264,10 @@ A/B OTA specific options --compression_factor Specify the maximum block size to be compressed at once during OTA. supported options: 4k, 8k, 16k, 32k, 64k, 128k, 256k + + --full_ota_partitions + Specify list of partitions should be updated in full OTA fashion, even if + an incremental OTA is about to be generated """ from __future__ import print_function @@ -283,7 +287,7 @@ import common import ota_utils import payload_signer from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata, - PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir) + PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir, TARGET_FILES_IMAGES_SUBDIR) from common import DoesInputFileContain, IsSparseImage import target_files_diff from non_ab_ota import GenerateNonAbOtaPackage @@ -337,6 +341,7 @@ OPTIONS.security_patch_level = None OPTIONS.max_threads = None OPTIONS.vabc_cow_version = None OPTIONS.compression_factor = None +OPTIONS.full_ota_partitions = None POSTINSTALL_CONFIG = 'META/postinstall_config.txt' @@ -892,6 +897,14 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): if source_file is not None: source_file = ExtractTargetFiles(source_file) + if OPTIONS.full_ota_partitions: + for partition in OPTIONS.full_ota_partitions: + for subdir in TARGET_FILES_IMAGES_SUBDIR: + image_path = os.path.join(source_file, subdir, partition + ".img") + if os.path.exists(image_path): + logger.info( + "Ignoring source image %s for partition %s because it is configured to use full OTA", image_path, partition) + os.remove(image_path) assert "ab_partitions" in OPTIONS.source_info_dict, \ "META/ab_partitions.txt is required for ab_update." assert "ab_partitions" in OPTIONS.target_info_dict, \ @@ -1026,6 +1039,9 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): # Prepare custom images. 
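The new --full_ota_partitions option documented above takes a comma-separated partition list; matching images are removed from the extracted source target-files so those partitions are packaged as full updates even inside an incremental OTA. A small sketch of the value parsing done by the option handler in this diff (surrounding quotes are stripped before splitting):

    value = '"boot,vendor_dlkm"'
    full_ota_partitions = set(value.strip().strip('"').strip("'").split(","))
    assert full_ota_partitions == {"boot", "vendor_dlkm"}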
if OPTIONS.custom_images: + if source_file is not None: + source_file = GetTargetFilesZipForCustomImagesUpdates( + source_file, OPTIONS.custom_images) target_file = GetTargetFilesZipForCustomImagesUpdates( target_file, OPTIONS.custom_images) @@ -1108,17 +1124,18 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): additional_args += ["--enable_lz4diff=" + str(OPTIONS.enable_lz4diff).lower()] + env_override = {} if source_file and OPTIONS.enable_lz4diff: - input_tmp = common.UnzipTemp(source_file, ["META/liblz4.so"]) - liblz4_path = os.path.join(input_tmp, "META", "liblz4.so") + liblz4_path = os.path.join(source_file, "META", "liblz4.so") assert os.path.exists( liblz4_path), "liblz4.so not found in META/ dir of target file {}".format(liblz4_path) logger.info("Enabling lz4diff %s", liblz4_path) - additional_args += ["--liblz4_path", liblz4_path] erofs_compression_param = OPTIONS.target_info_dict.get( "erofs_default_compressor") assert erofs_compression_param is not None, "'erofs_default_compressor' not found in META/misc_info.txt of target build. This is required to enable lz4diff." additional_args += ["--erofs_compression_param", erofs_compression_param] + env_override["LD_PRELOAD"] = liblz4_path + \ + ":" + os.environ.get("LD_PRELOAD", "") if OPTIONS.disable_vabc: additional_args += ["--disable_vabc=true"] @@ -1128,10 +1145,15 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): additional_args += ["--compressor_types", OPTIONS.compressor_types] additional_args += ["--max_timestamp", max_timestamp] + env = dict(os.environ) + if env_override: + logger.info("Using environment variables %s", env_override) + env.update(env_override) payload.Generate( target_file, source_file, - additional_args + partition_timestamps_flags + additional_args + partition_timestamps_flags, + env=env ) # Sign the payload. @@ -1193,7 +1215,7 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): def main(argv): - def option_handler(o, a): + def option_handler(o, a: str): if o in ("-i", "--incremental_from"): OPTIONS.incremental_source = a elif o == "--full_radio": @@ -1320,6 +1342,9 @@ def main(argv): else: raise ValueError("Cannot parse value %r for option %r - only " "integers are allowed." 
% (a, o)) + elif o == "--full_ota_partitions": + OPTIONS.full_ota_partitions = set( + a.strip().strip("\"").strip("'").split(",")) else: return False return True @@ -1370,6 +1395,7 @@ def main(argv): "max_threads=", "vabc_cow_version=", "compression_factor=", + "full_ota_partitions=", ], extra_option_handler=[option_handler, payload_signer.signer_options]) common.InitLogging() diff --git a/tools/releasetools/ota_signing_utils.py b/tools/releasetools/ota_signing_utils.py index 60c8c94f91..9d04c3bbb5 100644 --- a/tools/releasetools/ota_signing_utils.py +++ b/tools/releasetools/ota_signing_utils.py @@ -23,10 +23,18 @@ def ParseSignerArgs(args): def AddSigningArgumentParse(parser: argparse.ArgumentParser): + parser.add_argument('--java_path', type=str, + help='Path to JVM if other than default') parser.add_argument('--package_key', type=str, help='Paths to private key for signing payload') parser.add_argument('--search_path', '--path', type=str, help='Search path for framework/signapk.jar') + parser.add_argument('--signapk_path', type=str, + help='Path to signapk.jar, relative to search_path') + parser.add_argument('--extra_signapk_args', type=ParseSignerArgs, + help='Extra arguments for signapk.jar') + parser.add_argument('--signapk_shared_library_path', type=str, + help='Path to lib64 libraries used by signapk.jar') parser.add_argument('--payload_signer', type=str, help='Path to custom payload signer') parser.add_argument('--payload_signer_args', type=ParseSignerArgs, diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py index 81b53dce36..852d62bb0f 100644 --- a/tools/releasetools/ota_utils.py +++ b/tools/releasetools/ota_utils.py @@ -845,16 +845,16 @@ class PayloadGenerator(object): self.is_partial_update = is_partial_update self.spl_downgrade = spl_downgrade - def _Run(self, cmd): # pylint: disable=no-self-use + def _Run(self, cmd, **kwargs): # pylint: disable=no-self-use # Don't pipe (buffer) the output if verbose is set. Let # brillo_update_payload write to stdout/stderr directly, so its progress can # be monitored. if OPTIONS.verbose: - common.RunAndCheckOutput(cmd, stdout=None, stderr=None) + common.RunAndCheckOutput(cmd, stdout=None, stderr=None, **kwargs) else: - common.RunAndCheckOutput(cmd) + common.RunAndCheckOutput(cmd, **kwargs) - def Generate(self, target_file, source_file=None, additional_args=None): + def Generate(self, target_file, source_file=None, additional_args=None, **kwargs): """Generates a payload from the given target-files zip(s). Args: @@ -863,6 +863,7 @@ class PayloadGenerator(object): generating a full OTA. additional_args: A list of additional args that should be passed to delta_generator binary; or None. 
+ kwargs: Any additional args to pass to subprocess.Popen """ if additional_args is None: additional_args = [] @@ -918,7 +919,7 @@ class PayloadGenerator(object): if self.is_partial_update: cmd.extend(["--is_partial_update=true"]) cmd.extend(additional_args) - self._Run(cmd) + self._Run(cmd, **kwargs) self.payload_file = payload_file self.payload_properties = None diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py index b8f848fb2b..f1855a543d 100755 --- a/tools/releasetools/sign_target_files_apks.py +++ b/tools/releasetools/sign_target_files_apks.py @@ -184,14 +184,17 @@ import re import shutil import stat import sys +import shlex import tempfile import zipfile from xml.etree import ElementTree import add_img_to_target_files +import ota_from_raw_img import apex_utils import common import payload_signer +import update_payload from payload_signer import SignOtaPackage, PAYLOAD_BIN @@ -221,6 +224,7 @@ OPTIONS.vendor_otatools = None OPTIONS.allow_gsi_debug_sepolicy = False OPTIONS.override_apk_keys = None OPTIONS.override_apex_keys = None +OPTIONS.input_tmp = None AVB_FOOTER_ARGS_BY_PARTITION = { @@ -374,6 +378,37 @@ def GetApexKeys(keys_info, key_map): return keys_info +def GetMicrodroidVbmetaKey(virt_apex_path, avbtool_path): + """Extracts the AVB public key from microdroid_vbmeta.img within a virt apex. + + Args: + virt_apex_path: The path to the com.android.virt.apex file. + avbtool_path: The path to the avbtool executable. + + Returns: + The AVB public key (bytes). + """ + # Creates an ApexApkSigner to extract microdroid_vbmeta.img. + # No need to set key_passwords/codename_to_api_level_map since + # we won't do signing here. + apex_signer = apex_utils.ApexApkSigner( + virt_apex_path, + None, # key_passwords + None) # codename_to_api_level_map + payload_dir = apex_signer.ExtractApexPayload(virt_apex_path) + microdroid_vbmeta_image = os.path.join( + payload_dir, 'etc', 'fs', 'microdroid_vbmeta.img') + + # Extracts the avb public key from microdroid_vbmeta.img. + with tempfile.NamedTemporaryFile() as microdroid_pubkey: + common.RunAndCheckOutput([ + avbtool_path, 'info_image', + '--image', microdroid_vbmeta_image, + '--output_pubkey', microdroid_pubkey.name]) + with open(microdroid_pubkey.name, 'rb') as f: + return f.read() + + def GetApkFileInfo(filename, compressed_extension, skipped_prefixes): """Returns the APK info based on the given filename. 
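GetMicrodroidVbmetaKey above reduces to a single avbtool invocation: extract the virt APEX payload, then dump the public key embedded in etc/fs/microdroid_vbmeta.img. A standalone sketch of that avbtool step (using subprocess directly instead of the common.RunAndCheckOutput wrapper used in the diff):

    import subprocess
    import tempfile

    def read_avb_pubkey(avbtool, vbmeta_image):
        # Same avbtool call as above: write the embedded public key to a
        # temporary file and return its bytes.
        with tempfile.NamedTemporaryFile() as pubkey:
            subprocess.run(
                [avbtool, "info_image", "--image", vbmeta_image,
                 "--output_pubkey", pubkey.name],
                check=True)
            with open(pubkey.name, "rb") as f:
                return f.read()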
@@ -579,7 +614,104 @@ def IsBuildPropFile(filename): filename.endswith("/prop.default") -def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, +def GetOtaSigningArgs(): + args = [] + if OPTIONS.package_key: + args.extend(["--package_key", OPTIONS.package_key]) + if OPTIONS.payload_signer: + args.extend(["--payload_signer=" + OPTIONS.payload_signer]) + if OPTIONS.payload_signer_args: + args.extend(["--payload_signer_args=" + shlex.join(OPTIONS.payload_signer_args)]) + if OPTIONS.search_path: + args.extend(["--search_path", OPTIONS.search_path]) + if OPTIONS.payload_signer_maximum_signature_size: + args.extend(["--payload_signer_maximum_signature_size", + OPTIONS.payload_signer_maximum_signature_size]) + if OPTIONS.private_key_suffix: + args.extend(["--private_key_suffix", OPTIONS.private_key_suffix]) + return args + + +def RegenerateKernelPartitions(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info): + """Re-generate boot and dtbo partitions using new signing configuration""" + files_to_unzip = [ + "PREBUILT_IMAGES/*", "BOOTABLE_IMAGES/*.img", "*/boot_16k.img", "*/dtbo_16k.img"] + if OPTIONS.input_tmp is None: + OPTIONS.input_tmp = common.UnzipTemp(input_tf_zip.filename, files_to_unzip) + else: + common.UnzipToDir(input_tf_zip.filename, OPTIONS.input_tmp, files_to_unzip) + unzip_dir = OPTIONS.input_tmp + os.makedirs(os.path.join(unzip_dir, "IMAGES"), exist_ok=True) + + boot_image = common.GetBootableImage( + "IMAGES/boot.img", "boot.img", unzip_dir, "BOOT", misc_info) + if boot_image: + boot_image.WriteToDir(unzip_dir) + boot_image = os.path.join(unzip_dir, boot_image.name) + common.ZipWrite(output_tf_zip, boot_image, "IMAGES/boot.img", + compress_type=zipfile.ZIP_STORED) + if misc_info.get("has_dtbo") == "true": + add_img_to_target_files.AddDtbo(output_tf_zip) + return unzip_dir + + +def RegenerateBootOTA(input_tf_zip: zipfile.ZipFile, filename, input_ota): + with input_tf_zip.open(filename, "r") as in_fp: + payload = update_payload.Payload(in_fp) + is_incremental = any([part.HasField('old_partition_info') + for part in payload.manifest.partitions]) + is_boot_ota = filename.startswith( + "VENDOR/boot_otas/") or filename.startswith("SYSTEM/boot_otas/") + if not is_boot_ota: + return + is_4k_boot_ota = filename in [ + "VENDOR/boot_otas/boot_ota_4k.zip", "SYSTEM/boot_otas/boot_ota_4k.zip"] + # Only 4K boot image is re-generated, so if 16K boot ota isn't incremental, + # we do not need to re-generate + if not is_4k_boot_ota and not is_incremental: + return + + timestamp = str(payload.manifest.max_timestamp) + partitions = [part.partition_name for part in payload.manifest.partitions] + unzip_dir = OPTIONS.input_tmp + signed_boot_image = os.path.join(unzip_dir, "IMAGES", "boot.img") + if not os.path.exists(signed_boot_image): + logger.warn("Need to re-generate boot OTA {} but failed to get signed boot image. 
16K dev option will be impacted, after rolling back to 4K user would need to sideload/flash their device to continue receiving OTAs.") + return + signed_dtbo_image = os.path.join(unzip_dir, "IMAGES", "dtbo.img") + if "dtbo" in partitions and not os.path.exists(signed_dtbo_image): + raise ValueError( + "Boot OTA {} has dtbo partition, but no dtbo image found in target files.".format(filename)) + if is_incremental: + signed_16k_boot_image = os.path.join( + unzip_dir, "IMAGES", "boot_16k.img") + signed_16k_dtbo_image = os.path.join( + unzip_dir, "IMAGES", "dtbo_16k.img") + if is_4k_boot_ota: + if os.path.exists(signed_16k_boot_image): + signed_boot_image = signed_16k_boot_image + ":" + signed_boot_image + if os.path.exists(signed_16k_dtbo_image): + signed_dtbo_image = signed_16k_dtbo_image + ":" + signed_dtbo_image + else: + if os.path.exists(signed_16k_boot_image): + signed_boot_image += ":" + signed_16k_boot_image + if os.path.exists(signed_16k_dtbo_image): + signed_dtbo_image += ":" + signed_16k_dtbo_image + + args = ["ota_from_raw_img", + "--max_timestamp", timestamp, "--output", input_ota.name] + args.extend(GetOtaSigningArgs()) + if "dtbo" in partitions: + args.extend(["--partition_name", "boot,dtbo", + signed_boot_image, signed_dtbo_image]) + else: + args.extend(["--partition_name", "boot", signed_boot_image]) + logger.info( + "Re-generating boot OTA {} using cmd {}".format(filename, args)) + ota_from_raw_img.main(args) + + +def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info, apk_keys, apex_keys, key_passwords, platform_api_level, codename_to_api_level_map, compressed_extension): @@ -593,6 +725,16 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, # Sets this to zero for targets without APK files. maxsize = 0 + # Replace the AVB signing keys, if any. + ReplaceAvbSigningKeys(misc_info) + OPTIONS.info_dict = misc_info + + # Rewrite the props in AVB signing args. + if misc_info.get('avb_enable') == 'true': + RewriteAvbProps(misc_info) + + RegenerateKernelPartitions(input_tf_zip, output_tf_zip, misc_info) + for info in input_tf_zip.infolist(): filename = info.filename if filename.startswith("IMAGES/"): @@ -603,10 +745,10 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, if filename.startswith("OTA/") and filename.endswith(".img"): continue - data = input_tf_zip.read(filename) - out_info = copy.copy(info) (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo( filename, compressed_extension, OPTIONS.skip_apks_with_path_prefix) + data = input_tf_zip.read(filename) + out_info = copy.copy(info) if is_apk and should_be_skipped: # Copy skipped APKs verbatim. @@ -670,9 +812,8 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, elif filename.endswith(".zip") and IsEntryOtaPackage(input_tf_zip, filename): logger.info("Re-signing OTA package {}".format(filename)) with tempfile.NamedTemporaryFile() as input_ota, tempfile.NamedTemporaryFile() as output_ota: - with input_tf_zip.open(filename, "r") as in_fp: - shutil.copyfileobj(in_fp, input_ota) - input_ota.flush() + RegenerateBootOTA(input_tf_zip, filename, input_ota) + SignOtaPackage(input_ota.name, output_ota.name) common.ZipWrite(output_tf_zip, output_ota.name, filename, compress_type=zipfile.ZIP_STORED) @@ -752,21 +893,31 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, # Updates pvmfw embedded public key with the virt APEX payload key. 
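For an incremental 4K boot OTA that also covers dtbo, the RegenerateBootOTA helper above ends up invoking ota_from_raw_img.main() with an argument list of roughly this shape (paths shortened for illustration; the real images live under the unpacked target-files directory and the output is a temporary file):

    args = [
        "ota_from_raw_img",
        "--max_timestamp", "1700000000",              # payload.manifest.max_timestamp
        "--output", "/tmp/regenerated_boot_ota.zip",  # placeholder for input_ota.name
        # ...signing arguments from GetOtaSigningArgs() go here...
        "--partition_name", "boot,dtbo",
        "IMAGES/boot_16k.img:IMAGES/boot.img",        # 16K image chained with the 4K image
        "IMAGES/dtbo_16k.img:IMAGES/dtbo.img",
    ]
    # ota_from_raw_img.main(args)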
elif filename == "PREBUILT_IMAGES/pvmfw.img": - # Find the name of the virt APEX in the target files. + # Find the path of the virt APEX in the target files. namelist = input_tf_zip.namelist() - apex_gen = (GetApexFilename(f) for f in namelist if IsApexFile(f)) - virt_apex_re = re.compile("^com\.([^\.]+\.)?android\.virt\.apex$") - virt_apex = next((a for a in apex_gen if virt_apex_re.match(a)), None) - if not virt_apex: + apex_gen = (f for f in namelist if IsApexFile(f)) + virt_apex_re = re.compile("^.*com\.([^\.]+\.)?android\.virt\.apex$") + virt_apex_path = next( + (a for a in apex_gen if virt_apex_re.match(a)), None) + if not virt_apex_path: print("Removing %s from ramdisk: virt APEX not found" % filename) else: - print("Replacing %s embedded key with %s key" % (filename, virt_apex)) + print("Replacing %s embedded key with %s key" % (filename, + virt_apex_path)) # Get the current and new embedded keys. + virt_apex = GetApexFilename(virt_apex_path) payload_key, container_key, sign_tool = apex_keys[virt_apex] - new_pubkey_path = common.ExtractAvbPublicKey( - misc_info['avb_avbtool'], payload_key) - with open(new_pubkey_path, 'rb') as f: - new_pubkey = f.read() + + # b/384813199: handles the pre-signed com.android.virt.apex in GSI. + if payload_key == 'PRESIGNED': + new_pubkey = GetMicrodroidVbmetaKey(virt_apex_path, + misc_info['avb_avbtool']) + else: + new_pubkey_path = common.ExtractAvbPublicKey( + misc_info['avb_avbtool'], payload_key) + with open(new_pubkey_path, 'rb') as f: + new_pubkey = f.read() + pubkey_info = copy.copy( input_tf_zip.getinfo("PREBUILT_IMAGES/pvmfw_embedded.avbpubkey")) old_pubkey = input_tf_zip.read(pubkey_info.filename) @@ -811,17 +962,18 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, common.ZipWrite(output_tf_zip, image.name, filename) # A non-APK file; copy it verbatim. else: - common.ZipWriteStr(output_tf_zip, out_info, data) + try: + entry = output_tf_zip.getinfo(filename) + if output_tf_zip.read(entry) != data: + logger.warn( + "Output zip contains duplicate entries for %s with different contents", filename) + continue + except KeyError: + common.ZipWriteStr(output_tf_zip, out_info, data) if OPTIONS.replace_ota_keys: ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info) - # Replace the AVB signing keys, if any. - ReplaceAvbSigningKeys(misc_info) - - # Rewrite the props in AVB signing args. - if misc_info.get('avb_enable') == 'true': - RewriteAvbProps(misc_info) # Write back misc_info with the latest values. 
ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info) @@ -1066,9 +1218,9 @@ def WriteOtacerts(output_zip, filename, keys): common.ZipWriteStr(output_zip, filename, temp_file.getvalue()) -def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info): +def ReplaceOtaKeys(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info): try: - keylist = input_tf_zip.read("META/otakeys.txt").split() + keylist = input_tf_zip.read("META/otakeys.txt").decode().split() except KeyError: raise common.ExternalError("can't read META/otakeys.txt from input") diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py index 89933a00fc..62f425ae6e 100644 --- a/tools/releasetools/test_common.py +++ b/tools/releasetools/test_common.py @@ -2157,3 +2157,11 @@ class PartitionBuildPropsTest(test_utils.ReleaseToolsTestCase): 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys', 'ro.product.odm.device': 'coral', }, copied_props.build_props) + + +class DeviceSpecificParamsTest(test_utils.ReleaseToolsTestCase): + + def test_missingSource(self): + common.OPTIONS.device_specific = '/does_not_exist' + ds = DeviceSpecificParams() + self.assertIsNone(ds.module) diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp index 6901b06720..7e2840fae1 100644 --- a/tools/sbom/Android.bp +++ b/tools/sbom/Android.bp @@ -33,6 +33,13 @@ python_binary_host { ], } +python_library_host { + name: "compliance_metadata", + srcs: [ + "compliance_metadata.py", + ], +} + python_binary_host { name: "gen_sbom", srcs: [ @@ -44,6 +51,7 @@ python_binary_host { }, }, libs: [ + "compliance_metadata", "metadata_file_proto_py", "libprotobuf-python", "sbom_lib", @@ -109,3 +117,19 @@ python_binary_host { "sbom_lib", ], } + +python_binary_host { + name: "gen_notice_xml", + srcs: [ + "gen_notice_xml.py", + ], + version: { + py3: { + embedded_launcher: true, + }, + }, + libs: [ + "compliance_metadata", + "metadata_file_proto_py", + ], +} diff --git a/tools/sbom/compliance_metadata.py b/tools/sbom/compliance_metadata.py new file mode 100644 index 0000000000..502c110154 --- /dev/null +++ b/tools/sbom/compliance_metadata.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python3 +# +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
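The compliance_metadata.py module below provides MetadataDb, which copies a Soong-generated sqlite database into memory and normalizes its space-separated multi-value columns into proper rows. For orientation, a hedged sketch of how gen_sbom or gen_notice_xml might consume it (the database path is an assumption for illustration; the real path is passed in by the build):

    from compliance_metadata import MetadataDb

    db = MetadataDb("compliance-metadata.db")  # assumed path to the Soong-generated db
    for entry in db.get_installed_files():
        # Each entry is a dict keyed by the make_metadata columns
        # (installed_file, module_path, license_text, ...).
        print(entry["installed_file"], entry["module_path"])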
+ +import sqlite3 + +class MetadataDb: + def __init__(self, db): + self.conn = sqlite3.connect(':memory:') + self.conn.row_factory = sqlite3.Row + with sqlite3.connect(db) as c: + c.backup(self.conn) + self.reorg() + + def reorg(self): + # package_license table + self.conn.execute("create table package_license as " + "select name as package, pkg_default_applicable_licenses as license " + "from modules " + "where module_type = 'package' ") + cursor = self.conn.execute("select package,license from package_license where license like '% %'") + multi_licenses_packages = cursor.fetchall() + cursor.close() + rows = [] + for p in multi_licenses_packages: + licenses = p['license'].strip().split(' ') + for lic in licenses: + rows.append((p['package'], lic)) + self.conn.executemany('insert into package_license values (?, ?)', rows) + self.conn.commit() + + self.conn.execute("delete from package_license where license like '% %'") + self.conn.commit() + + # module_license table + self.conn.execute("create table module_license as " + "select distinct name as module, package, licenses as license " + "from modules " + "where licenses != '' ") + cursor = self.conn.execute("select module,package,license from module_license where license like '% %'") + multi_licenses_modules = cursor.fetchall() + cursor.close() + rows = [] + for m in multi_licenses_modules: + licenses = m['license'].strip().split(' ') + for lic in licenses: + rows.append((m['module'], m['package'],lic)) + self.conn.executemany('insert into module_license values (?, ?, ?)', rows) + self.conn.commit() + + self.conn.execute("delete from module_license where license like '% %'") + self.conn.commit() + + # module_installed_file table + self.conn.execute("create table module_installed_file as " + "select id as module_id, name as module_name, package, installed_files as installed_file " + "from modules " + "where installed_files != '' ") + cursor = self.conn.execute("select module_id, module_name, package, installed_file " + "from module_installed_file where installed_file like '% %'") + multi_installed_file_modules = cursor.fetchall() + cursor.close() + rows = [] + for m in multi_installed_file_modules: + installed_files = m['installed_file'].strip().split(' ') + for f in installed_files: + rows.append((m['module_id'], m['module_name'], m['package'], f)) + self.conn.executemany('insert into module_installed_file values (?, ?, ?, ?)', rows) + self.conn.commit() + + self.conn.execute("delete from module_installed_file where installed_file like '% %'") + self.conn.commit() + + # module_built_file table + self.conn.execute("create table module_built_file as " + "select id as module_id, name as module_name, package, built_files as built_file " + "from modules " + "where built_files != '' ") + cursor = self.conn.execute("select module_id, module_name, package, built_file " + "from module_built_file where built_file like '% %'") + multi_built_file_modules = cursor.fetchall() + cursor.close() + rows = [] + for m in multi_built_file_modules: + built_files = m['built_file'].strip().split(' ') + for f in built_files: + rows.append((m['module_id'], m['module_name'], m['package'], f)) + self.conn.executemany('insert into module_built_file values (?, ?, ?, ?)', rows) + self.conn.commit() + + self.conn.execute("delete from module_built_file where built_file like '% %'") + self.conn.commit() + + + # Indexes + self.conn.execute('create index idx_modules_id on modules (id)') + self.conn.execute('create index idx_modules_name on modules (name)') + 
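MetadataDb.__init__ snapshots the on-disk metadata DB into a private in-memory connection via sqlite3's backup API, and reorg() then expands every space-separated multi-value column into one row per value before deleting the packed rows. A minimal sketch of that split-then-prune pattern on a toy table (table and column names below are illustrative, not the real schema):

import sqlite3

# Stand-in for the on-disk compliance metadata DB.
src = sqlite3.connect(":memory:")
src.execute("create table modules (name text, licenses text)")
src.execute("insert into modules values ('libfoo', 'licA licB'), ('libbar', 'licC')")
src.commit()

# Copy into a private in-memory connection, as MetadataDb.__init__ does.
conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
src.backup(conn)

# Split: one row per license for every packed (space-separated) value.
conn.execute("create table module_license as select name as module, licenses as license from modules")
multi = conn.execute("select module, license from module_license where license like '% %'").fetchall()
rows = [(m["module"], lic) for m in multi for lic in m["license"].split()]
conn.executemany("insert into module_license values (?, ?)", rows)

# Prune: drop the original multi-value rows.
conn.execute("delete from module_license where license like '% %'")
conn.commit()

print([tuple(r) for r in conn.execute("select * from module_license order by module, license")])
# [('libbar', 'licC'), ('libfoo', 'licA'), ('libfoo', 'licB')]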
self.conn.execute('create index idx_package_licnese_package on package_license (package)') + self.conn.execute('create index idx_package_licnese_license on package_license (license)') + self.conn.execute('create index idx_module_licnese_module on module_license (module)') + self.conn.execute('create index idx_module_licnese_license on module_license (license)') + self.conn.execute('create index idx_module_installed_file_module_id on module_installed_file (module_id)') + self.conn.execute('create index idx_module_installed_file_installed_file on module_installed_file (installed_file)') + self.conn.execute('create index idx_module_built_file_module_id on module_built_file (module_id)') + self.conn.execute('create index idx_module_built_file_built_file on module_built_file (built_file)') + self.conn.commit() + + def dump_debug_db(self, debug_db): + with sqlite3.connect(debug_db) as c: + self.conn.backup(c) + + def get_installed_files(self): + # Get all records from table make_metadata, which contains all installed files and corresponding make modules' metadata + cursor = self.conn.execute('select installed_file, module_path, is_prebuilt_make_module, product_copy_files, kernel_module_copy_files, is_platform_generated, license_text from make_metadata') + rows = cursor.fetchall() + cursor.close() + installed_files_metadata = [] + for row in rows: + metadata = dict(zip(row.keys(), row)) + installed_files_metadata.append(metadata) + return installed_files_metadata + + def get_installed_file_in_dir(self, dir): + dir = dir.removesuffix('/') + cursor = self.conn.execute( + 'select installed_file, module_path, is_prebuilt_make_module, product_copy_files, ' + ' kernel_module_copy_files, is_platform_generated, license_text ' + 'from make_metadata ' + 'where installed_file like ?', (dir + '/%',)) + rows = cursor.fetchall() + cursor.close() + installed_files_metadata = [] + for row in rows: + metadata = dict(zip(row.keys(), row)) + installed_files_metadata.append(metadata) + return installed_files_metadata + + def get_soong_modules(self): + # Get all records from table modules, which contains metadata of all soong modules + cursor = self.conn.execute('select name, package, package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files from modules') + rows = cursor.fetchall() + cursor.close() + soong_modules = [] + for row in rows: + soong_module = dict(zip(row.keys(), row)) + soong_modules.append(soong_module) + return soong_modules + + def get_package_licenses(self, package): + cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' + 'from package_license pl join modules m on pl.license = m.name ' + 'where pl.package = ?', + ('//' + package,)) + rows = cursor.fetchall() + licenses = {} + for r in rows: + licenses[r['name']] = r['license_text'] + return licenses + + def get_module_licenses(self, module_name, package): + licenses = {} + # If property "licenses" is defined on module + cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' + 'from module_license ml join modules m on ml.license = m.name ' + 'where ml.module = ? 
and ml.package = ?', + (module_name, package)) + rows = cursor.fetchall() + for r in rows: + licenses[r['name']] = r['license_text'] + if len(licenses) > 0: + return licenses + + # Use default package license + cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' + 'from package_license pl join modules m on pl.license = m.name ' + 'where pl.package = ?', + ('//' + package,)) + rows = cursor.fetchall() + for r in rows: + licenses[r['name']] = r['license_text'] + return licenses + + def get_soong_module_of_installed_file(self, installed_file): + cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files ' + 'from modules m join module_installed_file mif on m.id = mif.module_id ' + 'where mif.installed_file = ?', + (installed_file,)) + rows = cursor.fetchall() + cursor.close() + if rows: + soong_module = dict(zip(rows[0].keys(), rows[0])) + return soong_module + + return None + + def get_soong_module_of_built_file(self, built_file): + cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files ' + 'from modules m join module_built_file mbf on m.id = mbf.module_id ' + 'where mbf.built_file = ?', + (built_file,)) + rows = cursor.fetchall() + cursor.close() + if rows: + soong_module = dict(zip(rows[0].keys(), rows[0])) + return soong_module + + return None
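Taken together, the helpers above form a small query API over the in-memory DB. The following usage sketch mirrors the flow gen_notice_xml.py (added below) builds on; the DB and PRODUCT_OUT paths are the example values from its docstring, and the snippet is guarded so it is a no-op without a built metadata DB:

import os
import compliance_metadata

DB = "out/soong/compliance-metadata/aosp_cf_x86_64_phone/compliance-metadata.db"  # example path
PRODUCT_OUT = "out/target/product/vsoc_x86_64"                                    # example path

if os.path.exists(DB):
    db = compliance_metadata.MetadataDb(DB)
    # Installed files under a partition are matched with a LIKE prefix query,
    # which is why get_installed_file_in_dir() strips a trailing '/'.
    for meta in db.get_installed_file_in_dir(PRODUCT_OUT + "/system"):
        module = db.get_soong_module_of_installed_file(meta["installed_file"])
        if module:
            meta.update(module)
        # Module licenses first, falling back to the package default licenses.
        licenses = db.get_module_licenses(meta.get("name", ""), meta["module_path"])
        print(meta["installed_file"], sorted(licenses))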
\ No newline at end of file diff --git a/tools/sbom/gen_notice_xml.py b/tools/sbom/gen_notice_xml.py new file mode 100644 index 0000000000..8478b1fdd4 --- /dev/null +++ b/tools/sbom/gen_notice_xml.py @@ -0,0 +1,224 @@ +# !/usr/bin/env python3 +# +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Generate NOTICE.xml.gz of a partition. +Usage example: + gen_notice_xml.py --output_file out/soong/.intermediate/.../NOTICE.xml.gz \ + --metadata out/soong/compliance-metadata/aosp_cf_x86_64_phone/compliance-metadata.db \ + --partition system \ + --product_out out/target/vsoc_x86_64 \ + --soong_out out/soong +""" + +import argparse +import compliance_metadata +import google.protobuf.text_format as text_format +import gzip +import hashlib +import metadata_file_pb2 +import os +import queue +import xml.sax.saxutils + + +FILE_HEADER = '''\ +<?xml version="1.0" encoding="utf-8"?> +<licenses> +''' +FILE_FOOTER = '''\ +</licenses> +''' + + +def get_args(): + parser = argparse.ArgumentParser() + parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.') + parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode') + parser.add_argument('--output_file', required=True, help='The path of the generated NOTICE.xml.gz file.') + parser.add_argument('--partition', required=True, help='The name of partition for which the NOTICE.xml.gz is generated.') + parser.add_argument('--metadata', required=True, help='The path of compliance metadata DB file.') + parser.add_argument('--product_out', required=True, help='The path of PRODUCT_OUT, e.g. out/target/product/vsoc_x86_64.') + parser.add_argument('--soong_out', required=True, help='The path of Soong output directory, e.g. out/soong') + + return parser.parse_args() + + +def log(*info): + if args.verbose: + for i in info: + print(i) + + +def new_file_name_tag(file_metadata, package_name, content_id): + file_path = file_metadata['installed_file'].removeprefix(args.product_out) + lib = 'Android' + if package_name: + lib = package_name + return f'<file-name contentId="{content_id}" lib="{lib}">{file_path}</file-name>\n' + + +def new_file_content_tag(content_id, license_text): + escaped_license_text = xml.sax.saxutils.escape(license_text, {'\t': '	', '\n': '
&#10;', '\r': '&#13;
'}) + return f'<file-content contentId="{content_id}"><![CDATA[{escaped_license_text}]]></file-content>\n\n' + +def get_metadata_file_path(file_metadata): + """Search for METADATA file of a package and return its path.""" + metadata_path = '' + if file_metadata['module_path']: + metadata_path = file_metadata['module_path'] + elif file_metadata['kernel_module_copy_files']: + metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0]) + + while metadata_path and not os.path.exists(metadata_path + '/METADATA'): + metadata_path = os.path.dirname(metadata_path) + + return metadata_path + +def md5_file_content(filepath): + h = hashlib.md5() + with open(filepath, 'rb') as f: + h.update(f.read()) + return h.hexdigest() + +def get_transitive_static_dep_modules(installed_file_metadata, db): + # Find all transitive static dep files of the installed files + q = queue.Queue() + if installed_file_metadata['static_dep_files']: + for f in installed_file_metadata['static_dep_files'].split(' '): + q.put(f) + if installed_file_metadata['whole_static_dep_files']: + for f in installed_file_metadata['whole_static_dep_files'].split(' '): + q.put(f) + + static_dep_files = {} + while not q.empty(): + dep_file = q.get() + if dep_file in static_dep_files: + # It has been processed + continue + + soong_module = db.get_soong_module_of_built_file(dep_file) + if not soong_module: + continue + + static_dep_files[dep_file] = soong_module + + if soong_module['static_dep_files']: + for f in soong_module['static_dep_files'].split(' '): + if f not in static_dep_files: + q.put(f) + if soong_module['whole_static_dep_files']: + for f in soong_module['whole_static_dep_files'].split(' '): + if f not in static_dep_files: + q.put(f) + + return static_dep_files.values() + +def main(): + global args + args = get_args() + log('Args:', vars(args)) + + global db + db = compliance_metadata.MetadataDb(args.metadata) + if args.debug: + db.dump_debug_db(os.path.dirname(args.output_file) + '/compliance-metadata-debug.db') + + # NOTICE.xml + notice_xml_file_path = os.path.dirname(args.output_file) + '/NOTICE.xml' + with open(notice_xml_file_path, 'w', encoding="utf-8") as notice_xml_file: + notice_xml_file.write(FILE_HEADER) + + all_license_files = {} + for metadata in db.get_installed_file_in_dir(args.product_out + '/' + args.partition): + soong_module = db.get_soong_module_of_installed_file(metadata['installed_file']) + if soong_module: + metadata.update(soong_module) + else: + # For make modules soong_module_type should be empty + metadata['soong_module_type'] = '' + metadata['static_dep_files'] = '' + metadata['whole_static_dep_files'] = '' + + installed_file_metadata_list = [metadata] + if args.partition in ('vendor', 'product', 'system_ext'): + # For transitive static dependencies of an installed file, make it as if an installed file are + # also created from static dependency modules whose licenses are also collected + static_dep_modules = get_transitive_static_dep_modules(metadata, db) + for dep in static_dep_modules: + dep['installed_file'] = metadata['installed_file'] + installed_file_metadata_list.append(dep) + + for installed_file_metadata in installed_file_metadata_list: + package_name = 'Android' + licenses = {} + if installed_file_metadata['module_path']: + metadata_file_path = get_metadata_file_path(installed_file_metadata) + if metadata_file_path: + proto = metadata_file_pb2.Metadata() + with open(metadata_file_path + '/METADATA', 'rt') as f: + text_format.Parse(f.read(), proto) + if proto.name: + 
package_name = proto.name + if proto.third_party and proto.third_party.version: + if proto.third_party.version.startswith('v'): + package_name = package_name + '_' + proto.third_party.version + else: + package_name = package_name + '_v_' + proto.third_party.version + else: + package_name = metadata_file_path + if metadata_file_path.startswith('external/'): + package_name = metadata_file_path.removeprefix('external/') + + # Every license file is in a <file-content> element + licenses = db.get_module_licenses(installed_file_metadata.get('name', ''), installed_file_metadata['module_path']) + + # Installed file is from PRODUCT_COPY_FILES + elif metadata['product_copy_files']: + licenses['unused_name'] = metadata['license_text'] + + # Installed file is generated by the platform in builds + elif metadata['is_platform_generated']: + licenses['unused_name'] = metadata['license_text'] + + if licenses: + # Each value is a space separated filepath list + for license_files in licenses.values(): + if not license_files: + continue + for filepath in license_files.split(' '): + if filepath not in all_license_files: + all_license_files[filepath] = md5_file_content(filepath) + md5 = all_license_files[filepath] + notice_xml_file.write(new_file_name_tag(installed_file_metadata, package_name, md5)) + + # Licenses + processed_md5 = [] + for filepath, md5 in all_license_files.items(): + if md5 not in processed_md5: + processed_md5.append(md5) + with open(filepath, 'rt', errors='backslashreplace') as f: + notice_xml_file.write(new_file_content_tag(md5, f.read())) + + notice_xml_file.write(FILE_FOOTER) + + # NOTICE.xml.gz + with open(notice_xml_file_path, 'rb') as notice_xml_file, gzip.open(args.output_file, 'wb') as gz_file: + gz_file.writelines(notice_xml_file) + +if __name__ == '__main__': + main() diff --git a/tools/sbom/gen_sbom.py b/tools/sbom/gen_sbom.py index a203258b96..77bccbb73a 100644 --- a/tools/sbom/gen_sbom.py +++ b/tools/sbom/gen_sbom.py @@ -26,6 +26,7 @@ Usage example: """ import argparse +import compliance_metadata import datetime import google.protobuf.text_format as text_format import hashlib @@ -35,7 +36,6 @@ import queue import metadata_file_pb2 import sbom_data import sbom_writers -import sqlite3 # Package type PKG_SOURCE = 'SOURCE' @@ -92,6 +92,7 @@ THIRD_PARTY_IDENTIFIER_TYPES = [ 'SVN', 'Hg', 'Darcs', + 'Piper', 'VCS', 'Archive', 'PrebuiltByAlphabet', @@ -568,202 +569,16 @@ def get_all_transitive_static_dep_files_of_installed_files(installed_files_metad return sorted(all_static_dep_files.keys()) -class MetadataDb: - def __init__(self, db): - self.conn = sqlite3.connect(':memory') - self.conn.row_factory = sqlite3.Row - with sqlite3.connect(db) as c: - c.backup(self.conn) - self.reorg() - - def reorg(self): - # package_license table - self.conn.execute("create table package_license as " - "select name as package, pkg_default_applicable_licenses as license " - "from modules " - "where module_type = 'package' ") - cursor = self.conn.execute("select package,license from package_license where license like '% %'") - multi_licenses_packages = cursor.fetchall() - cursor.close() - rows = [] - for p in multi_licenses_packages: - licenses = p['license'].strip().split(' ') - for lic in licenses: - rows.append((p['package'], lic)) - self.conn.executemany('insert into package_license values (?, ?)', rows) - self.conn.commit() - - self.conn.execute("delete from package_license where license like '% %'") - self.conn.commit() - - # module_license table - self.conn.execute("create table module_license as " 
- "select distinct name as module, package, licenses as license " - "from modules " - "where licenses != '' ") - cursor = self.conn.execute("select module,package,license from module_license where license like '% %'") - multi_licenses_modules = cursor.fetchall() - cursor.close() - rows = [] - for m in multi_licenses_modules: - licenses = m['license'].strip().split(' ') - for lic in licenses: - rows.append((m['module'], m['package'],lic)) - self.conn.executemany('insert into module_license values (?, ?, ?)', rows) - self.conn.commit() - - self.conn.execute("delete from module_license where license like '% %'") - self.conn.commit() - - # module_installed_file table - self.conn.execute("create table module_installed_file as " - "select id as module_id, name as module_name, package, installed_files as installed_file " - "from modules " - "where installed_files != '' ") - cursor = self.conn.execute("select module_id, module_name, package, installed_file " - "from module_installed_file where installed_file like '% %'") - multi_installed_file_modules = cursor.fetchall() - cursor.close() - rows = [] - for m in multi_installed_file_modules: - installed_files = m['installed_file'].strip().split(' ') - for f in installed_files: - rows.append((m['module_id'], m['module_name'], m['package'], f)) - self.conn.executemany('insert into module_installed_file values (?, ?, ?, ?)', rows) - self.conn.commit() - - self.conn.execute("delete from module_installed_file where installed_file like '% %'") - self.conn.commit() - - # module_built_file table - self.conn.execute("create table module_built_file as " - "select id as module_id, name as module_name, package, built_files as built_file " - "from modules " - "where built_files != '' ") - cursor = self.conn.execute("select module_id, module_name, package, built_file " - "from module_built_file where built_file like '% %'") - multi_built_file_modules = cursor.fetchall() - cursor.close() - rows = [] - for m in multi_built_file_modules: - built_files = m['installed_file'].strip().split(' ') - for f in built_files: - rows.append((m['module_id'], m['module_name'], m['package'], f)) - self.conn.executemany('insert into module_built_file values (?, ?, ?, ?)', rows) - self.conn.commit() - - self.conn.execute("delete from module_built_file where built_file like '% %'") - self.conn.commit() - - - # Indexes - self.conn.execute('create index idx_modules_id on modules (id)') - self.conn.execute('create index idx_modules_name on modules (name)') - self.conn.execute('create index idx_package_licnese_package on package_license (package)') - self.conn.execute('create index idx_package_licnese_license on package_license (license)') - self.conn.execute('create index idx_module_licnese_module on module_license (module)') - self.conn.execute('create index idx_module_licnese_license on module_license (license)') - self.conn.execute('create index idx_module_installed_file_module_id on module_installed_file (module_id)') - self.conn.execute('create index idx_module_installed_file_installed_file on module_installed_file (installed_file)') - self.conn.execute('create index idx_module_built_file_module_id on module_built_file (module_id)') - self.conn.execute('create index idx_module_built_file_built_file on module_built_file (built_file)') - self.conn.commit() - - if args.debug: - with sqlite3.connect(os.path.dirname(args.metadata) + '/compliance-metadata-debug.db') as c: - self.conn.backup(c) - - - def get_installed_files(self): - # Get all records from table make_metadata, which 
contains all installed files and corresponding make modules' metadata - cursor = self.conn.execute('select installed_file, module_path, is_prebuilt_make_module, product_copy_files, kernel_module_copy_files, is_platform_generated, license_text from make_metadata') - rows = cursor.fetchall() - cursor.close() - installed_files_metadata = [] - for row in rows: - metadata = dict(zip(row.keys(), row)) - installed_files_metadata.append(metadata) - return installed_files_metadata - - def get_soong_modules(self): - # Get all records from table modules, which contains metadata of all soong modules - cursor = self.conn.execute('select name, package, package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files from modules') - rows = cursor.fetchall() - cursor.close() - soong_modules = [] - for row in rows: - soong_module = dict(zip(row.keys(), row)) - soong_modules.append(soong_module) - return soong_modules - - def get_package_licenses(self, package): - cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' - 'from package_license pl join modules m on pl.license = m.name ' - 'where pl.package = ?', - ('//' + package,)) - rows = cursor.fetchall() - licenses = {} - for r in rows: - licenses[r['name']] = r['license_text'] - return licenses - - def get_module_licenses(self, module_name, package): - licenses = {} - # If property "licenses" is defined on module - cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' - 'from module_license ml join modules m on ml.license = m.name ' - 'where ml.module = ? and ml.package = ?', - (module_name, package)) - rows = cursor.fetchall() - for r in rows: - licenses[r['name']] = r['license_text'] - if len(licenses) > 0: - return licenses - - # Use default package license - cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' - 'from package_license pl join modules m on pl.license = m.name ' - 'where pl.package = ?', - ('//' + package,)) - rows = cursor.fetchall() - for r in rows: - licenses[r['name']] = r['license_text'] - return licenses - - def get_soong_module_of_installed_file(self, installed_file): - cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files ' - 'from modules m join module_installed_file mif on m.id = mif.module_id ' - 'where mif.installed_file = ?', - (installed_file,)) - rows = cursor.fetchall() - cursor.close() - if rows: - soong_module = dict(zip(rows[0].keys(), rows[0])) - return soong_module - - return None - - def get_soong_module_of_built_file(self, built_file): - cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files ' - 'from modules m join module_built_file mbf on m.id = mbf.module_id ' - 'where mbf.built_file = ?', - (built_file,)) - rows = cursor.fetchall() - cursor.close() - if rows: - soong_module = dict(zip(rows[0].keys(), rows[0])) - return soong_module - - return None - - def main(): global args args = get_args() log('Args:', vars(args)) global db - db = MetadataDb(args.metadata) + db = compliance_metadata.MetadataDb(args.metadata) + if args.debug: + db.dump_debug_db(os.path.dirname(args.output_file) + '/compliance-metadata-debug.db') + global metadata_file_protos metadata_file_protos = {} global 
licenses_text @@ -894,8 +709,17 @@ def main(): 'installed_file': dep_file, 'is_prebuilt_make_module': False } - file_metadata.update(db.get_soong_module_of_built_file(dep_file)) - add_package_of_file(file_id, file_metadata, doc, report) + soong_module = db.get_soong_module_of_built_file(dep_file) + if not soong_module: + continue + file_metadata.update(soong_module) + if is_source_package(file_metadata) or is_prebuilt_package(file_metadata): + add_package_of_file(file_id, file_metadata, doc, report) + else: + # Other static lib files are generated from the platform + doc.add_relationship(sbom_data.Relationship(id1=file_id, + relationship=sbom_data.RelationshipType.GENERATED_FROM, + id2=sbom_data.SPDXID_PLATFORM)) # Add relationships for static deps of static libraries add_static_deps_of_file(file_id, file_metadata, doc) diff --git a/tools/sbom/generate-sbom-framework_res.py b/tools/sbom/generate-sbom-framework_res.py index d0d232d635..27f3d2ebc1 100644 --- a/tools/sbom/generate-sbom-framework_res.py +++ b/tools/sbom/generate-sbom-framework_res.py @@ -80,7 +80,8 @@ def main(): resource_file_spdxids = [] for file in layoutlib_sbom[sbom_writers.PropNames.FILES]: - if file[sbom_writers.PropNames.FILE_NAME].startswith('data/res/'): + file_path = file[sbom_writers.PropNames.FILE_NAME] + if file_path.startswith('data/res/') or file_path.startswith('data/overlays/'): resource_file_spdxids.append(file[sbom_writers.PropNames.SPDXID]) doc.relationships = [ diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java index 6b2341bc80..654e19675d 100644 --- a/tools/signapk/src/com/android/signapk/SignApk.java +++ b/tools/signapk/src/com/android/signapk/SignApk.java @@ -302,7 +302,6 @@ class SignApk { final KeyStore keyStore, final String keyName) throws CertificateException, KeyStoreException, NoSuchAlgorithmException, UnrecoverableKeyException, UnrecoverableEntryException { - final Key key = keyStore.getKey(keyName, readPassword(keyName)); final PrivateKeyEntry privateKeyEntry = (PrivateKeyEntry) keyStore.getEntry(keyName, null); if (privateKeyEntry == null) { throw new Error( |