Reduce multithreading of run-test builds.
The build system builds many shards in parallel, so in that case try
to use just one core per shard to keep the overall system load down.
However, when we are just iterating on a single test, and thus building
only one shard at a time, try to use all the cores.
I verified that the build time of both use cases is the same as before,
while the number of processes running in parallel is reduced.
Test: time m art-run-test-{jvm,host,target}-data
Test: time m art-run-test-{jvm,host,target}-data-shard00
Change-Id: I9e656d9980feeb5a0acbc91ae37162461a920f5f
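
The patch below coordinates the shards with a non-blocking advisory file
lock: the first run-test-build.py instance to start takes the lock and may
use all cores, and every later instance fails to take it and falls back to
a single core. A minimal standalone sketch of that pattern, assuming an
illustrative lock path and a toy workload (neither is taken from the patch):

  import fcntl, multiprocessing, os

  def try_acquire_build_lock(lock_path):
    # Open (or create) the lock file and try to take an exclusive,
    # non-blocking lock. The caller must keep the returned file object
    # alive: the advisory lock is released when it is closed, which
    # happens at the latest when the process exits.
    lock_file = open(lock_path, "w")
    try:
      fcntl.lockf(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
      return lock_file  # We won the race; use all cores.
    except BlockingIOError:
      return None  # Another instance holds the lock; use one core.

  if __name__ == "__main__":
    lock = try_acquire_build_lock("/tmp/example-build-lock")  # Illustrative path.
    num_workers = os.cpu_count() if lock else 1
    with multiprocessing.Pool(num_workers) as pool:
      print(pool.map(abs, [-3, -2, -1]))  # Toy workload standing in for the builds.

Because the lock is advisory and tied to the open file descriptor, it is
released automatically even if the holder crashes, so no cleanup is needed.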
diff --git a/test/run-test-build.py b/test/run-test-build.py
index 93e9599..fa9073b 100755
--- a/test/run-test-build.py
+++ b/test/run-test-build.py
@@ -19,7 +19,7 @@
It is intended to be used only from soong genrule.
"""
-import argparse, os, shutil, subprocess, glob, re, json, multiprocessing, pathlib
+import argparse, os, shutil, subprocess, glob, re, json, multiprocessing, pathlib, fcntl
import art_build_rules
from importlib.machinery import SourceFileLoader
@@ -95,6 +95,20 @@
os.environ[name] = str(value)
SourceFileLoader("build_" + test_name, join(dstdir, "build.py")).load_module()
+# If we build just an individual shard, we want to split the work among all the cores,
+# but if the build system builds all shards, we don't want to overload the machine.
+# We don't know which situation we are in, so as a simple workaround, we use a lock
+# file to allow only one shard to use multiprocessing at a time.
+def use_multiprocessing(mode):
+ global lock_file # Keep the file open, so the lock is held as long as this process lives.
+ lock_path = os.path.join(os.environ["TMPDIR"], "art-test-run-test-build-py-" + mode)
+ lock_file = open(lock_path, "w")
+ try:
+ fcntl.lockf(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ return True # We are the only instance of this script in the build system.
+ except BlockingIOError:
+ return False # Some other instance is already running.
+
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--out", help="Path of the generated ZIP file with the build data")
@@ -113,7 +127,7 @@
dstdirs = [copy_sources(args, ziproot, args.mode, srcdir) for srcdir in srcdirs]
dstdirs = filter(lambda dstdir: dstdir, dstdirs) # Remove None (skipped tests).
# Use multiprocess (i.e. forking) since tests modify their current working directory.
- with multiprocessing.Pool() as pool:
+ with multiprocessing.Pool(os.cpu_count() if use_multiprocessing(args.mode) else 1) as pool:
jobs = [(d, pool.apply_async(build_test, (args, args.mode, build_top, sbox, d))) for d in dstdirs]
for dstdir, job in jobs:
try:
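
For context, the apply_async loop that the last hunk touches (only the pool
size changes) follows a common submit-then-collect pattern: all jobs are
queued up front so they run concurrently, and each worker's exception is
re-raised in the parent when get() is called. A hedged sketch of that
pattern with a hypothetical build_test stand-in, not the real one from
run-test-build.py:

  import multiprocessing, os

  def build_test(dstdir):
    # Hypothetical stand-in for the real per-test build step.
    if "broken" in dstdir:
      raise RuntimeError("build failed in " + dstdir)
    return dstdir

  if __name__ == "__main__":
    dstdirs = ["test-001", "test-002-broken", "test-003"]
    with multiprocessing.Pool(os.cpu_count()) as pool:
      # Queue all jobs first so they run in parallel...
      jobs = [(d, pool.apply_async(build_test, (d,))) for d in dstdirs]
      # ...then collect; get() re-raises a worker's exception here,
      # so one failing test does not abort the other builds.
      for dstdir, job in jobs:
        try:
          job.get()
          print("OK:", dstdir)
        except Exception as e:
          print("FAILED:", dstdir, e)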