Format python files using yapf
Change-Id: I8b7b97efb6bfdcceef9efc533cdaa0675ab7db40
diff --git a/tools/adb.py b/tools/adb.py
index 2e015a0..988c684 100644
--- a/tools/adb.py
+++ b/tools/adb.py
@@ -7,105 +7,117 @@
import time
import utils
+
def install_apk_on_emulator(apk, emulator_id, quiet=False):
- cmd = ['adb', '-s', emulator_id, 'install', '-r', '-d', apk]
- if quiet:
- subprocess.check_output(cmd)
- else:
- subprocess.check_call(cmd)
+ cmd = ['adb', '-s', emulator_id, 'install', '-r', '-d', apk]
+ if quiet:
+ subprocess.check_output(cmd)
+ else:
+ subprocess.check_call(cmd)
def uninstall_apk_on_emulator(app_id, emulator_id):
- process = subprocess.Popen(
- ['adb', '-s', emulator_id, 'uninstall', app_id],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stdout, stderr = process.communicate()
- stdout = stdout.decode('UTF-8')
- stderr = stderr.decode('UTF-8')
+ process = subprocess.Popen(['adb', '-s', emulator_id, 'uninstall', app_id],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = process.communicate()
+ stdout = stdout.decode('UTF-8')
+ stderr = stderr.decode('UTF-8')
- if stdout.strip() == 'Success':
- # Successfully uninstalled
- return
+ if stdout.strip() == 'Success':
+ # Successfully uninstalled
+ return
- if 'Unknown package: {}'.format(app_id) in stderr:
- # Application not installed
- return
+ if 'Unknown package: {}'.format(app_id) in stderr:
+ # Application not installed
+ return
- # Check if the app is listed in packages
- packages = subprocess.check_output(['adb', 'shell', 'pm', 'list', 'packages'])
- if not 'package:' + app_id in packages:
- return
+ # Check if the app is listed in packages
+    packages = subprocess.check_output(
+        ['adb', 'shell', 'pm', 'list', 'packages']).decode('UTF-8')
+    if 'package:' + app_id not in packages:
+ return
- raise Exception(
- 'Unexpected result from `adb uninstall {}\nStdout: {}\nStderr: {}'.format(
- app_id, stdout, stderr))
+ raise Exception(
+        'Unexpected result from `adb uninstall {}`\nStdout: {}\nStderr: {}'.
+ format(app_id, stdout, stderr))
def wait_for_emulator(emulator_id):
- stdout = subprocess.check_output(['adb', 'devices']).decode('UTF-8')
- if '{}\tdevice'.format(emulator_id) in stdout:
- return True
-
- print('Emulator \'{}\' not connected; waiting for connection'.format(
- emulator_id))
-
- time_waited = 0
- while True:
- time.sleep(10)
- time_waited += 10
stdout = subprocess.check_output(['adb', 'devices']).decode('UTF-8')
- if '{}\tdevice'.format(emulator_id) not in stdout:
- print('... still waiting for connection')
- if time_waited >= 5 * 60:
- return False
- else:
- return True
+ if '{}\tdevice'.format(emulator_id) in stdout:
+ return True
+
+ print('Emulator \'{}\' not connected; waiting for connection'.format(
+ emulator_id))
+
+ time_waited = 0
+ while True:
+ time.sleep(10)
+ time_waited += 10
+ stdout = subprocess.check_output(['adb', 'devices']).decode('UTF-8')
+ if '{}\tdevice'.format(emulator_id) not in stdout:
+ print('... still waiting for connection')
+ if time_waited >= 5 * 60:
+ return False
+ else:
+ return True
def run_monkey(app_id, emulator_id, apk, monkey_events, quiet, enable_logging):
- if not wait_for_emulator(emulator_id):
- return False
+ if not wait_for_emulator(emulator_id):
+ return False
- install_apk_on_emulator(apk, emulator_id, quiet)
+ install_apk_on_emulator(apk, emulator_id, quiet)
- # Intentionally using a constant seed such that the monkey generates the same
- # event sequence for each shrinker.
- random_seed = 42
+ # Intentionally using a constant seed such that the monkey generates the same
+ # event sequence for each shrinker.
+ random_seed = 42
- cmd = ['adb', '-s', emulator_id, 'shell', 'monkey', '-p', app_id,
- '-s', str(random_seed), str(monkey_events)]
+ cmd = [
+ 'adb', '-s', emulator_id, 'shell', 'monkey', '-p', app_id, '-s',
+ str(random_seed),
+ str(monkey_events)
+ ]
- try:
- stdout = utils.RunCmd(cmd, quiet=quiet, logging=enable_logging)
- succeeded = ('Events injected: {}'.format(monkey_events) in stdout)
- except subprocess.CalledProcessError as e:
- succeeded = False
+ try:
+ stdout = utils.RunCmd(cmd, quiet=quiet, logging=enable_logging)
+ succeeded = ('Events injected: {}'.format(monkey_events) in stdout)
+ except subprocess.CalledProcessError as e:
+ succeeded = False
- uninstall_apk_on_emulator(app_id, emulator_id)
+ uninstall_apk_on_emulator(app_id, emulator_id)
- return succeeded
+ return succeeded
-def run_instrumented(app_id, test_id, emulator_id, apk, test_apk, quiet,
+def run_instrumented(app_id,
+ test_id,
+ emulator_id,
+ apk,
+ test_apk,
+ quiet,
enable_logging,
test_runner='androidx.test.runner.AndroidJUnitRunner'):
- if not wait_for_emulator(emulator_id):
- return None
+ if not wait_for_emulator(emulator_id):
+ return None
- install_apk_on_emulator(apk, emulator_id, quiet)
- install_apk_on_emulator(test_apk, emulator_id, quiet)
+ install_apk_on_emulator(apk, emulator_id, quiet)
+ install_apk_on_emulator(test_apk, emulator_id, quiet)
- cmd = ['adb', '-s', emulator_id, 'shell', 'am', 'instrument', '-w',
- '{}/{}'.format(test_id, test_runner)]
+ cmd = [
+ 'adb', '-s', emulator_id, 'shell', 'am', 'instrument', '-w',
+ '{}/{}'.format(test_id, test_runner)
+ ]
- try:
- stdout = utils.RunCmd(cmd, quiet=quiet, logging=enable_logging)
- # The runner will print OK (X tests) if completed succesfully
- succeeded = any("OK (" in s for s in stdout)
- except subprocess.CalledProcessError as e:
- succeeded = False
+ try:
+ stdout = utils.RunCmd(cmd, quiet=quiet, logging=enable_logging)
+        # The runner will print OK (X tests) if completed successfully
+ succeeded = any("OK (" in s for s in stdout)
+ except subprocess.CalledProcessError as e:
+ succeeded = False
- uninstall_apk_on_emulator(test_id, emulator_id)
- uninstall_apk_on_emulator(app_id, emulator_id)
+ uninstall_apk_on_emulator(test_id, emulator_id)
+ uninstall_apk_on_emulator(app_id, emulator_id)
- return succeeded
+ return succeeded
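
For reference, a minimal sketch of how the helpers in tools/adb.py compose
(assumptions: tools/ is on sys.path so `import adb` resolves, and the
emulator id, app id, apk path, and event count below are hypothetical
examples):

    import adb

    # Block until the emulator shows up in `adb devices`; wait_for_emulator
    # polls every 10 seconds and gives up after five minutes.
    if adb.wait_for_emulator('emulator-5554'):
        # Installs the apk, injects 1000 monkey events with the fixed seed
        # 42, uninstalls again; True iff all events were injected.
        ok = adb.run_monkey('com.example.app', 'emulator-5554',
                            'out/app.apk', 1000, quiet=True,
                            enable_logging=False)
        print('monkey run succeeded:', ok)
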
diff --git a/tools/api_sample_coverage.py b/tools/api_sample_coverage.py
index 2d9b0d2..448a0be 100755
--- a/tools/api_sample_coverage.py
+++ b/tools/api_sample_coverage.py
@@ -2,7 +2,6 @@
# Copyright (c) 2018, the R8 project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
-
'''
Compare the R8 API used by the API usage sample to the API kept by @Keep.
'''
@@ -21,57 +20,67 @@
def main(output_dir=None):
- if output_dir is None:
- output_dir = ''
+ if output_dir is None:
+ output_dir = ''
- javaExecutable = jdk.GetJavaExecutable()
- printseeds_path = os.path.join(output_dir, 'keep-seeds.txt')
- printseeds_args = [
- javaExecutable, '-jar', utils.R8_JAR, 'printseeds',
- utils.RT_JAR, utils.R8_JAR, utils.R8LIB_KEEP_RULES,
- ]
- write_sorted_lines(printseeds_args, printseeds_path)
+ javaExecutable = jdk.GetJavaExecutable()
+ printseeds_path = os.path.join(output_dir, 'keep-seeds.txt')
+ printseeds_args = [
+ javaExecutable,
+ '-jar',
+ utils.R8_JAR,
+ 'printseeds',
+ utils.RT_JAR,
+ utils.R8_JAR,
+ utils.R8LIB_KEEP_RULES,
+ ]
+ write_sorted_lines(printseeds_args, printseeds_path)
- printuses_path = os.path.join(output_dir, 'sample-uses.txt')
- printuses_args = [
- javaExecutable, '-jar', utils.R8_JAR, 'printuses',
- utils.RT_JAR, utils.R8_JAR, API_SAMPLE_JAR,
- ]
- write_sorted_lines(printuses_args, printuses_path)
+ printuses_path = os.path.join(output_dir, 'sample-uses.txt')
+ printuses_args = [
+ javaExecutable,
+ '-jar',
+ utils.R8_JAR,
+ 'printuses',
+ utils.RT_JAR,
+ utils.R8_JAR,
+ API_SAMPLE_JAR,
+ ]
+ write_sorted_lines(printuses_args, printuses_path)
- print_diff(printseeds_path, printuses_path)
+ print_diff(printseeds_path, printuses_path)
def write_sorted_lines(cmd_args, output_path):
- utils.PrintCmd(cmd_args)
- output_lines = subprocess.check_output(cmd_args).splitlines(True)
- print("Write output to %s" % output_path)
- output_lines.sort()
- with open(output_path, 'w') as fp:
- for line in output_lines:
- fp.write(line)
+ utils.PrintCmd(cmd_args)
+ output_lines = subprocess.check_output(cmd_args).splitlines(True)
+ print("Write output to %s" % output_path)
+ output_lines.sort()
+    with open(output_path, 'wb') as fp:
+ for line in output_lines:
+ fp.write(line)
def print_diff(printseeds_path, printuses_path):
- with open(printseeds_path) as fp:
- seeds = set(fp.read().splitlines())
- with open(printuses_path) as fp:
- uses = set(fp.read().splitlines())
- only_in_seeds = seeds - uses
- only_in_uses = uses - seeds
- if only_in_seeds:
- print("%s lines with '-' are marked @Keep " % len(only_in_seeds) +
- "but not used by sample.")
- if only_in_uses:
- print("%s lines with '+' are used by sample " % len(only_in_uses) +
- "but are missing @Keep annotations.")
- for line in sorted(only_in_seeds):
- print('-' + line)
- for line in sorted(only_in_uses):
- print('+' + line)
- if not only_in_seeds and not only_in_uses:
- print('Sample uses the entire set of members marked @Keep. Well done!')
+ with open(printseeds_path) as fp:
+ seeds = set(fp.read().splitlines())
+ with open(printuses_path) as fp:
+ uses = set(fp.read().splitlines())
+ only_in_seeds = seeds - uses
+ only_in_uses = uses - seeds
+ if only_in_seeds:
+ print("%s lines with '-' are marked @Keep " % len(only_in_seeds) +
+ "but not used by sample.")
+ if only_in_uses:
+ print("%s lines with '+' are used by sample " % len(only_in_uses) +
+ "but are missing @Keep annotations.")
+ for line in sorted(only_in_seeds):
+ print('-' + line)
+ for line in sorted(only_in_uses):
+ print('+' + line)
+ if not only_in_seeds and not only_in_uses:
+ print('Sample uses the entire set of members marked @Keep. Well done!')
if __name__ == '__main__':
- main(**vars(parser.parse_args()))
+ main(**vars(parser.parse_args()))
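
print_diff above is plain set difference over the two generated files; the
same logic, self-contained with inline data (the member names are made-up
examples):

    seeds = {'com.android.tools.r8.R8', 'com.android.tools.r8.D8'}
    uses = {'com.android.tools.r8.R8', 'com.android.tools.r8.CompilationMode'}

    only_in_seeds = seeds - uses  # kept by @Keep but unused by the sample
    only_in_uses = uses - seeds  # used by the sample but not kept

    for line in sorted(only_in_seeds):
        print('-' + line)
    for line in sorted(only_in_uses):
        print('+' + line)
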
diff --git a/tools/apk_masseur.py b/tools/apk_masseur.py
index 1b35d23..51e9366 100755
--- a/tools/apk_masseur.py
+++ b/tools/apk_masseur.py
@@ -15,164 +15,193 @@
USAGE = 'usage: %prog [options] <apk>'
+
def parse_options():
- parser = optparse.OptionParser(usage=USAGE)
- parser.add_option('--clear-profile',
- help='To remove baseline.prof and baseline.profm from '
- 'assets/dexopt/',
- default=False,
- action='store_true')
- parser.add_option('--dex',
- help='Directory or archive with dex files to use instead '
- 'of those in the apk',
- default=None)
- parser.add_option('--desugared-library-dex',
- help='Path to desugared library dex file to use or archive '
- 'containing a single classes.dex file',
- default=None)
- parser.add_option('--resources',
- help=('pattern that matches resources to use instead of '
- + 'those in the apk'),
- default=None)
- parser.add_option('--out',
- help='output file (default ./$(basename <apk>))',
- default=None)
- parser.add_option('--keystore',
- help='keystore file (default ~/.android/app.keystore)',
- default=None)
- parser.add_option('--install',
- help='install the generated apk with adb options -t -r -d',
- default=False,
- action='store_true')
- parser.add_option('--adb-options',
- help='additional adb options when running adb',
- default=None)
- parser.add_option('--quiet',
- help='disable verbose logging',
- default=False)
- parser.add_option('--sign-before-align',
- help='Sign the apk before aligning',
- default=False,
- action='store_true')
- (options, args) = parser.parse_args()
- if len(args) != 1:
- parser.error('Expected <apk> argument, got: ' + ' '.join(args))
- apk = args[0]
- return (options, apk)
+ parser = optparse.OptionParser(usage=USAGE)
+ parser.add_option('--clear-profile',
+                      help='Remove baseline.prof and baseline.profm from '
+ 'assets/dexopt/',
+ default=False,
+ action='store_true')
+ parser.add_option('--dex',
+ help='Directory or archive with dex files to use instead '
+ 'of those in the apk',
+ default=None)
+ parser.add_option(
+ '--desugared-library-dex',
+ help='Path to desugared library dex file to use or archive '
+ 'containing a single classes.dex file',
+ default=None)
+ parser.add_option(
+ '--resources',
+ help=('pattern that matches resources to use instead of ' +
+ 'those in the apk'),
+ default=None)
+ parser.add_option('--out',
+ help='output file (default ./$(basename <apk>))',
+ default=None)
+ parser.add_option('--keystore',
+ help='keystore file (default ~/.android/app.keystore)',
+ default=None)
+ parser.add_option(
+ '--install',
+ help='install the generated apk with adb options -t -r -d',
+ default=False,
+ action='store_true')
+ parser.add_option('--adb-options',
+ help='additional adb options when running adb',
+ default=None)
+    parser.add_option('--quiet',
+                      help='disable verbose logging',
+                      default=False,
+                      action='store_true')
+ parser.add_option('--sign-before-align',
+ help='Sign the apk before aligning',
+ default=False,
+ action='store_true')
+ (options, args) = parser.parse_args()
+ if len(args) != 1:
+ parser.error('Expected <apk> argument, got: ' + ' '.join(args))
+ apk = args[0]
+ return (options, apk)
+
def is_archive(file):
- return file.endswith('.zip') or file.endswith('.jar')
+ return file.endswith('.zip') or file.endswith('.jar')
-def repack(
- apk, clear_profile, processed_out, desugared_library_dex, resources, temp,
- quiet, logging):
- processed_apk = os.path.join(temp, 'processed.apk')
- shutil.copyfile(apk, processed_apk)
- if clear_profile:
- zip_utils.remove_files_from_zip(
- ['assets/dexopt/baseline.prof', 'assets/dexopt/baseline.profm'],
- processed_apk)
+def repack(apk, clear_profile, processed_out, desugared_library_dex, resources,
+ temp, quiet, logging):
+ processed_apk = os.path.join(temp, 'processed.apk')
+ shutil.copyfile(apk, processed_apk)
- if not processed_out:
- utils.Print('Using original dex as is', quiet=quiet)
+ if clear_profile:
+ zip_utils.remove_files_from_zip(
+ ['assets/dexopt/baseline.prof', 'assets/dexopt/baseline.profm'],
+ processed_apk)
+
+ if not processed_out:
+ utils.Print('Using original dex as is', quiet=quiet)
+ return processed_apk
+
+ utils.Print('Repacking APK with dex files from {}'.format(processed_out),
+ quiet=quiet)
+
+ # Delete original dex files in APK.
+ with utils.ChangedWorkingDirectory(temp, quiet=quiet):
+ cmd = ['zip', '-d', 'processed.apk', '*.dex']
+ utils.RunCmd(cmd, quiet=quiet, logging=logging)
+
+ # Unzip the jar or zip file into `temp`.
+ if is_archive(processed_out):
+ cmd = ['unzip', processed_out, '-d', temp]
+ if quiet:
+ cmd.insert(1, '-q')
+ utils.RunCmd(cmd, quiet=quiet, logging=logging)
+ processed_out = temp
+ elif desugared_library_dex:
+ for dex_name in glob.glob('*.dex', root_dir=processed_out):
+ src = os.path.join(processed_out, dex_name)
+ dst = os.path.join(temp, dex_name)
+ shutil.copyfile(src, dst)
+ processed_out = temp
+
+ if desugared_library_dex:
+ desugared_library_dex_index = len(glob.glob('*.dex', root_dir=temp)) + 1
+ desugared_library_dex_name = 'classes%s.dex' % desugared_library_dex_index
+ desugared_library_dex_dst = os.path.join(temp,
+ desugared_library_dex_name)
+ if is_archive(desugared_library_dex):
+ zip_utils.extract_member(desugared_library_dex, 'classes.dex',
+ desugared_library_dex_dst)
+ else:
+ shutil.copyfile(desugared_library_dex, desugared_library_dex_dst)
+
+ # Insert the new dex and resource files from `processed_out` into the APK.
+ with utils.ChangedWorkingDirectory(processed_out, quiet=quiet):
+ dex_files = glob.glob('*.dex')
+ dex_files.sort()
+ resource_files = glob.glob(resources) if resources else []
+ cmd = ['zip', '-u', '-0', processed_apk] + dex_files + resource_files
+ utils.RunCmd(cmd, quiet=quiet, logging=logging)
return processed_apk
- utils.Print(
- 'Repacking APK with dex files from {}'.format(processed_out), quiet=quiet)
-
- # Delete original dex files in APK.
- with utils.ChangedWorkingDirectory(temp, quiet=quiet):
- cmd = ['zip', '-d', 'processed.apk', '*.dex']
- utils.RunCmd(cmd, quiet=quiet, logging=logging)
-
- # Unzip the jar or zip file into `temp`.
- if is_archive(processed_out):
- cmd = ['unzip', processed_out, '-d', temp]
- if quiet:
- cmd.insert(1, '-q')
- utils.RunCmd(cmd, quiet=quiet, logging=logging)
- processed_out = temp
- elif desugared_library_dex:
- for dex_name in glob.glob('*.dex', root_dir=processed_out):
- src = os.path.join(processed_out, dex_name)
- dst = os.path.join(temp, dex_name)
- shutil.copyfile(src, dst)
- processed_out = temp
-
- if desugared_library_dex:
- desugared_library_dex_index = len(glob.glob('*.dex', root_dir=temp)) + 1
- desugared_library_dex_name = 'classes%s.dex' % desugared_library_dex_index
- desugared_library_dex_dst = os.path.join(temp, desugared_library_dex_name)
- if is_archive(desugared_library_dex):
- zip_utils.extract_member(
- desugared_library_dex, 'classes.dex', desugared_library_dex_dst)
- else:
- shutil.copyfile(desugared_library_dex, desugared_library_dex_dst)
-
- # Insert the new dex and resource files from `processed_out` into the APK.
- with utils.ChangedWorkingDirectory(processed_out, quiet=quiet):
- dex_files = glob.glob('*.dex')
- dex_files.sort()
- resource_files = glob.glob(resources) if resources else []
- cmd = ['zip', '-u', '-0', processed_apk] + dex_files + resource_files
- utils.RunCmd(cmd, quiet=quiet, logging=logging)
- return processed_apk
def sign(unsigned_apk, keystore, temp, quiet, logging):
- signed_apk = os.path.join(temp, 'unaligned.apk')
- return apk_utils.sign_with_apksigner(
- unsigned_apk, signed_apk, keystore, quiet=quiet, logging=logging)
+ signed_apk = os.path.join(temp, 'unaligned.apk')
+ return apk_utils.sign_with_apksigner(unsigned_apk,
+ signed_apk,
+ keystore,
+ quiet=quiet,
+ logging=logging)
+
def align(signed_apk, temp, quiet, logging):
- utils.Print('Aligning', quiet=quiet)
- aligned_apk = os.path.join(temp, 'aligned.apk')
- return apk_utils.align(signed_apk, aligned_apk)
+ utils.Print('Aligning', quiet=quiet)
+ aligned_apk = os.path.join(temp, 'aligned.apk')
+ return apk_utils.align(signed_apk, aligned_apk)
-def masseur(
- apk, clear_profile=False, dex=None, desugared_library_dex=None,
- resources=None, out=None, adb_options=None, sign_before_align=False,
- keystore=None, install=False, quiet=False, logging=True):
- if not out:
- out = os.path.basename(apk)
- if not keystore:
- keystore = apk_utils.default_keystore()
- with utils.TempDir() as temp:
- processed_apk = None
- if dex or clear_profile:
- processed_apk = repack(
- apk, clear_profile, dex, desugared_library_dex, resources, temp,
- quiet, logging)
- else:
- assert not desugared_library_dex
- utils.Print(
- 'Signing original APK without modifying apk', quiet=quiet)
- processed_apk = os.path.join(temp, 'processed.apk')
- shutil.copyfile(apk, processed_apk)
- if sign_before_align:
- signed_apk = sign(
- processed_apk, keystore, temp, quiet=quiet, logging=logging)
- aligned_apk = align(signed_apk, temp, quiet=quiet, logging=logging)
- utils.Print('Writing result to {}'.format(out), quiet=quiet)
- shutil.copyfile(aligned_apk, out)
- else:
- aligned_apk = align(processed_apk, temp, quiet=quiet, logging=logging)
- signed_apk = sign(
- aligned_apk, keystore, temp, quiet=quiet, logging=logging)
- utils.Print('Writing result to {}'.format(out), quiet=quiet)
- shutil.copyfile(signed_apk, out)
- if install:
- adb_cmd = ['adb']
- if adb_options:
- adb_cmd.extend(
- [option for option in adb_options.split(' ') if option])
- adb_cmd.extend(['install', '-t', '-r', '-d', out]);
- utils.RunCmd(adb_cmd, quiet=quiet, logging=logging)
+
+def masseur(apk,
+ clear_profile=False,
+ dex=None,
+ desugared_library_dex=None,
+ resources=None,
+ out=None,
+ adb_options=None,
+ sign_before_align=False,
+ keystore=None,
+ install=False,
+ quiet=False,
+ logging=True):
+ if not out:
+ out = os.path.basename(apk)
+ if not keystore:
+ keystore = apk_utils.default_keystore()
+ with utils.TempDir() as temp:
+ processed_apk = None
+ if dex or clear_profile:
+ processed_apk = repack(apk, clear_profile, dex,
+ desugared_library_dex, resources, temp,
+ quiet, logging)
+ else:
+ assert not desugared_library_dex
+ utils.Print('Signing original APK without modifying apk',
+ quiet=quiet)
+ processed_apk = os.path.join(temp, 'processed.apk')
+ shutil.copyfile(apk, processed_apk)
+ if sign_before_align:
+ signed_apk = sign(processed_apk,
+ keystore,
+ temp,
+ quiet=quiet,
+ logging=logging)
+ aligned_apk = align(signed_apk, temp, quiet=quiet, logging=logging)
+ utils.Print('Writing result to {}'.format(out), quiet=quiet)
+ shutil.copyfile(aligned_apk, out)
+ else:
+ aligned_apk = align(processed_apk,
+ temp,
+ quiet=quiet,
+ logging=logging)
+ signed_apk = sign(aligned_apk,
+ keystore,
+ temp,
+ quiet=quiet,
+ logging=logging)
+ utils.Print('Writing result to {}'.format(out), quiet=quiet)
+ shutil.copyfile(signed_apk, out)
+ if install:
+ adb_cmd = ['adb']
+ if adb_options:
+ adb_cmd.extend(
+ [option for option in adb_options.split(' ') if option])
+ adb_cmd.extend(['install', '-t', '-r', '-d', out])
+ utils.RunCmd(adb_cmd, quiet=quiet, logging=logging)
+
def main():
- (options, apk) = parse_options()
- masseur(apk, **vars(options))
- return 0
+ (options, apk) = parse_options()
+ masseur(apk, **vars(options))
+ return 0
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
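
masseur() also works as a library call with the same keyword arguments that
parse_options produces; a hedged sketch (the paths are hypothetical
examples):

    import apk_masseur

    # Repack app.apk with the dex files from out/dex, align, sign with the
    # default keystore, and write the result to out/app-repacked.apk.
    apk_masseur.masseur('out/app.apk',
                        dex='out/dex',
                        out='out/app-repacked.apk',
                        quiet=True,
                        logging=False)
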
diff --git a/tools/apk_utils.py b/tools/apk_utils.py
index 1da8d47..8ced491 100755
--- a/tools/apk_utils.py
+++ b/tools/apk_utils.py
@@ -15,116 +15,118 @@
USAGE = 'usage: %prog [options] <apk>'
+
def parse_options():
- parser = optparse.OptionParser(usage=USAGE)
- parser.add_option('--keystore',
- help='keystore file (default ~/.android/app.keystore)',
- default='~/.android/app.keystore')
- parser.add_option('--sign',
- help='Sign the passed in apk.',
- default=False,
- action='store_true')
- parser.add_option('--use_apksigner',
- help='Use apksigner to sign.',
- default=False,
- action='store_true')
- parser.add_option('--output',
- help='Where to put the signed apk.)',
- default=None)
+ parser = optparse.OptionParser(usage=USAGE)
+ parser.add_option('--keystore',
+ help='keystore file (default ~/.android/app.keystore)',
+ default='~/.android/app.keystore')
+ parser.add_option('--sign',
+ help='Sign the passed in apk.',
+ default=False,
+ action='store_true')
+ parser.add_option('--use_apksigner',
+ help='Use apksigner to sign.',
+ default=False,
+ action='store_true')
+ parser.add_option('--output',
+                      help='Where to put the signed apk.',
+ default=None)
- (options, args) = parser.parse_args()
- if len(args) != 1:
- parser.error('Expected <apk> argument, got: ' + ' '.join(args))
- apk = args[0]
- return (options, apk)
+ (options, args) = parser.parse_args()
+ if len(args) != 1:
+ parser.error('Expected <apk> argument, got: ' + ' '.join(args))
+ apk = args[0]
+ return (options, apk)
-def add_baseline_profile_to_apk(
- apk, baseline_profile, baseline_profile_metadata, tmp_dir):
- if baseline_profile is None:
- return apk
- ts = time.time_ns()
- dest_apk = os.path.join(tmp_dir, 'app-%s.apk' % ts)
- dest_apk_aligned = os.path.join(tmp_dir, 'app-aligned-%s.apk' % ts)
- dest_apk_signed = os.path.join(tmp_dir, 'app-signed-%s.apk' % ts)
- shutil.copy2(apk, dest_apk)
- zip_utils.remove_files_from_zip(
- ['assets/dexopt/baseline.prof', 'assets/dexopt/baseline.profm'], dest_apk)
- zip_utils.add_file_to_zip(
- baseline_profile, 'assets/dexopt/baseline.prof', dest_apk)
- if baseline_profile_metadata is not None:
- zip_utils.add_file_to_zip(
- baseline_profile_metadata, 'assets/dexopt/baseline.profm', dest_apk)
- align(dest_apk, dest_apk_aligned)
- sign_with_apksigner(dest_apk_aligned, dest_apk_signed)
- return dest_apk_signed
+
+def add_baseline_profile_to_apk(apk, baseline_profile,
+ baseline_profile_metadata, tmp_dir):
+ if baseline_profile is None:
+ return apk
+ ts = time.time_ns()
+ dest_apk = os.path.join(tmp_dir, 'app-%s.apk' % ts)
+ dest_apk_aligned = os.path.join(tmp_dir, 'app-aligned-%s.apk' % ts)
+ dest_apk_signed = os.path.join(tmp_dir, 'app-signed-%s.apk' % ts)
+ shutil.copy2(apk, dest_apk)
+ zip_utils.remove_files_from_zip(
+ ['assets/dexopt/baseline.prof', 'assets/dexopt/baseline.profm'],
+ dest_apk)
+ zip_utils.add_file_to_zip(baseline_profile, 'assets/dexopt/baseline.prof',
+ dest_apk)
+ if baseline_profile_metadata is not None:
+ zip_utils.add_file_to_zip(baseline_profile_metadata,
+ 'assets/dexopt/baseline.profm', dest_apk)
+ align(dest_apk, dest_apk_aligned)
+ sign_with_apksigner(dest_apk_aligned, dest_apk_signed)
+ return dest_apk_signed
+
def align(apk, aligned_apk):
- zipalign_path = (
- 'zipalign' if 'build_tools' in os.environ.get('PATH')
- else os.path.join(utils.getAndroidBuildTools(), 'zipalign'))
- cmd = [zipalign_path, '-f', '-p', '4', apk, aligned_apk]
- utils.RunCmd(cmd, quiet=True, logging=False)
- return aligned_apk
+    zipalign_path = ('zipalign' if 'build_tools' in os.environ.get('PATH', '')
+                     else os.path.join(utils.getAndroidBuildTools(), 'zipalign'))
+ cmd = [zipalign_path, '-f', '-p', '4', apk, aligned_apk]
+ utils.RunCmd(cmd, quiet=True, logging=False)
+ return aligned_apk
+
def default_keystore():
- return os.path.join(os.getenv('HOME'), '.android', 'app.keystore')
+ return os.path.join(os.getenv('HOME'), '.android', 'app.keystore')
+
def get_min_api(apk):
- aapt = os.path.join(utils.getAndroidBuildTools(), 'aapt')
- cmd = [aapt, 'dump', 'badging', apk]
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- for line in stdout.splitlines():
- if line.startswith('sdkVersion:\''):
- return int(line[len('sdkVersion:\''): -1])
- raise ValueError('Unexpected stdout: %s' % stdout)
+ aapt = os.path.join(utils.getAndroidBuildTools(), 'aapt')
+ cmd = [aapt, 'dump', 'badging', apk]
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ for line in stdout.splitlines():
+ if line.startswith('sdkVersion:\''):
+ return int(line[len('sdkVersion:\''):-1])
+ raise ValueError('Unexpected stdout: %s' % stdout)
+
def sign(unsigned_apk, signed_apk, keystore, quiet=False, logging=True):
- utils.Print('Signing (ignore the warnings)', quiet=quiet)
- cmd = ['zip', '-d', unsigned_apk, 'META-INF/*']
- utils.RunCmd(cmd, quiet=quiet, logging=logging, fail=False)
- cmd = [
- 'jarsigner',
- '-sigalg', 'SHA1withRSA',
- '-digestalg', 'SHA1',
- '-keystore', keystore,
- '-storepass', 'android',
- '-signedjar', signed_apk,
- unsigned_apk,
- 'androiddebugkey'
- ]
- utils.RunCmd(cmd, quiet=quiet)
+ utils.Print('Signing (ignore the warnings)', quiet=quiet)
+ cmd = ['zip', '-d', unsigned_apk, 'META-INF/*']
+ utils.RunCmd(cmd, quiet=quiet, logging=logging, fail=False)
+ cmd = [
+ 'jarsigner', '-sigalg', 'SHA1withRSA', '-digestalg', 'SHA1',
+ '-keystore', keystore, '-storepass', 'android', '-signedjar',
+ signed_apk, unsigned_apk, 'androiddebugkey'
+ ]
+ utils.RunCmd(cmd, quiet=quiet)
-def sign_with_apksigner(
- unsigned_apk, signed_apk, keystore=None, password='android', quiet=False,
- logging=True):
- cmd = [
- os.path.join(utils.getAndroidBuildTools(), 'apksigner'),
- 'sign',
- '-v',
- '--ks', keystore or default_keystore(),
- '--ks-pass', 'pass:' + password,
- '--min-sdk-version', '19',
- '--out', signed_apk,
- '--v2-signing-enabled',
- unsigned_apk
- ]
- utils.RunCmd(cmd, quiet=quiet, logging=logging)
- return signed_apk
+
+def sign_with_apksigner(unsigned_apk,
+ signed_apk,
+ keystore=None,
+ password='android',
+ quiet=False,
+ logging=True):
+ cmd = [
+ os.path.join(utils.getAndroidBuildTools(), 'apksigner'), 'sign', '-v',
+ '--ks', keystore or default_keystore(), '--ks-pass', 'pass:' + password,
+ '--min-sdk-version', '19', '--out', signed_apk, '--v2-signing-enabled',
+ unsigned_apk
+ ]
+ utils.RunCmd(cmd, quiet=quiet, logging=logging)
+ return signed_apk
+
def main():
- (options, apk) = parse_options()
- if options.sign:
- if not options.output:
- print('When signing you must specify an output apk')
- return 1
- if not options.keystore:
- print('When signing you must specify a keystore')
- return 1
- if options.use_apksigner:
- sign_with_apksigner(apk, options.output, options.keystore)
- else:
- sign(apk, options.output, options.keystore)
- return 0
+ (options, apk) = parse_options()
+ if options.sign:
+ if not options.output:
+ print('When signing you must specify an output apk')
+ return 1
+ if not options.keystore:
+ print('When signing you must specify a keystore')
+ return 1
+ if options.use_apksigner:
+ sign_with_apksigner(apk, options.output, options.keystore)
+ else:
+ sign(apk, options.output, options.keystore)
+ return 0
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
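
The slicing in get_min_api strips the sdkVersion:'...' wrapper from aapt's
badging output; a standalone sketch of just that step (the value 21 is a
made-up example):

    line = "sdkVersion:'21'"
    prefix = 'sdkVersion:\''
    assert line.startswith(prefix)
    # Drop the prefix and the trailing quote, leaving the integer.
    min_api = int(line[len(prefix):-1])
    assert min_api == 21
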
diff --git a/tools/archive.py b/tools/archive.py
index b868296..84c0da3 100755
--- a/tools/archive.py
+++ b/tools/archive.py
@@ -11,10 +11,10 @@
import gradle
try:
- import resource
+ import resource
except ImportError:
- # Not a Unix system. Do what Gandalf tells you not to.
- pass
+ # Not a Unix system. Do what Gandalf tells you not to.
+ pass
import shutil
import subprocess
import sys
@@ -23,319 +23,336 @@
ARCHIVE_BUCKET = 'r8-releases'
+
def ParseOptions():
- result = optparse.OptionParser()
- result.add_option('--dry-run', '--dry_run',
- help='Build only, no upload.',
- default=False, action='store_true')
- result.add_option('--dry-run-output', '--dry_run_output',
- help='Output directory for \'build only, no upload\'.',
- type="string", action="store")
- result.add_option('--skip-gradle-build', '--skip_gradle_build',
- help='Skip Gradle build. Can only be used for local testing.',
- default=False, action='store_true')
- return result.parse_args()
+ result = optparse.OptionParser()
+ result.add_option('--dry-run',
+ '--dry_run',
+ help='Build only, no upload.',
+ default=False,
+ action='store_true')
+ result.add_option('--dry-run-output',
+ '--dry_run_output',
+ help='Output directory for \'build only, no upload\'.',
+ type="string",
+ action="store")
+ result.add_option(
+ '--skip-gradle-build',
+ '--skip_gradle_build',
+ help='Skip Gradle build. Can only be used for local testing.',
+ default=False,
+ action='store_true')
+ return result.parse_args()
+
def GetVersion():
- output = subprocess.check_output([
- jdk.GetJavaExecutable(), '-cp', utils.R8_JAR, 'com.android.tools.r8.R8',
- '--version'
- ]).decode('utf-8')
- r8_version = output.splitlines()[0].strip()
- return r8_version.split()[1]
+ output = subprocess.check_output([
+ jdk.GetJavaExecutable(), '-cp', utils.R8_JAR, 'com.android.tools.r8.R8',
+ '--version'
+ ]).decode('utf-8')
+ r8_version = output.splitlines()[0].strip()
+ return r8_version.split()[1]
+
def GetGitBranches():
- return subprocess.check_output(['git', 'show', '-s', '--pretty=%d', 'HEAD'])
+ return subprocess.check_output(['git', 'show', '-s', '--pretty=%d', 'HEAD'])
+
def GetGitHash():
- return subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode('utf-8').strip()
+ return subprocess.check_output(['git', 'rev-parse',
+ 'HEAD']).decode('utf-8').strip()
+
def IsMain(version):
- branches = subprocess.check_output(['git', 'branch', '-r', '--contains',
- 'HEAD']).decode('utf-8')
- # CL runs from gerrit does not have a branch, we always treat them as main
- # commits to archive these to the hash based location
- if len(branches) == 0:
+ branches = subprocess.check_output(
+ ['git', 'branch', '-r', '--contains', 'HEAD']).decode('utf-8')
+    # CL runs from gerrit do not have a branch; we always treat them as main
+    # commits so that they are archived to the hash-based location.
+ if len(branches) == 0:
+ return True
+ if not version == 'main':
+ # Sanity check, we don't want to archive on top of release builds EVER
+ # Note that even though we branch, we never push the bots to build the same
+ # commit as main on a branch since we always change the version to
+ # not be just 'main' (or we crash here :-)).
+ if 'origin/main' in branches:
+ raise Exception('We are seeing origin/main in a commit that '
+                            'does not have \'main\' as version')
+ return False
+ if not 'origin/main' in branches:
+ raise Exception('We are not seeing origin/main '
+                        'in a commit that has \'main\' as version')
return True
- if not version == 'main':
- # Sanity check, we don't want to archive on top of release builds EVER
- # Note that even though we branch, we never push the bots to build the same
- # commit as main on a branch since we always change the version to
- # not be just 'main' (or we crash here :-)).
- if 'origin/main' in branches:
- raise Exception('We are seeing origin/main in a commit that '
- 'don\'t have \'main\' as version')
- return False
- if not 'origin/main' in branches:
- raise Exception('We are not seeing origin/main '
- 'in a commit that have \'main\' as version')
- return True
-def GetStorageDestination(storage_prefix,
- version_or_path,
- file_name,
- is_main):
- # We archive main commits under raw/main instead of directly under raw
- version_dir = GetVersionDestination(storage_prefix,
- version_or_path,
- is_main)
- return '%s/%s' % (version_dir, file_name)
+
+def GetStorageDestination(storage_prefix, version_or_path, file_name, is_main):
+ # We archive main commits under raw/main instead of directly under raw
+ version_dir = GetVersionDestination(storage_prefix, version_or_path,
+ is_main)
+ return '%s/%s' % (version_dir, file_name)
+
def GetVersionDestination(storage_prefix, version_or_path, is_main):
- archive_dir = 'raw/main' if is_main else 'raw'
- return '%s%s/%s/%s' % (storage_prefix, ARCHIVE_BUCKET,
- archive_dir, version_or_path)
+ archive_dir = 'raw/main' if is_main else 'raw'
+ return '%s%s/%s/%s' % (storage_prefix, ARCHIVE_BUCKET, archive_dir,
+ version_or_path)
+
def GetUploadDestination(version_or_path, file_name, is_main):
- return GetStorageDestination('gs://', version_or_path, file_name, is_main)
+ return GetStorageDestination('gs://', version_or_path, file_name, is_main)
+
def GetUrl(version_or_path, file_name, is_main):
- return GetStorageDestination('https://storage.googleapis.com/',
- version_or_path, file_name, is_main)
+ return GetStorageDestination('https://storage.googleapis.com/',
+ version_or_path, file_name, is_main)
+
def GetMavenUrl(is_main):
- return GetVersionDestination('https://storage.googleapis.com/', '', is_main)
+ return GetVersionDestination('https://storage.googleapis.com/', '', is_main)
+
def SetRLimitToMax():
- (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
- resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))
+ (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
+ resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))
+
def PrintResourceInfo():
- (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
- print('INFO: Open files soft limit: %s' % soft)
- print('INFO: Open files hard limit: %s' % hard)
+ (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
+ print('INFO: Open files soft limit: %s' % soft)
+ print('INFO: Open files hard limit: %s' % hard)
def Main():
- (options, args) = ParseOptions()
- Run(options)
+ (options, args) = ParseOptions()
+ Run(options)
+
def Run(options):
- if not utils.is_bot() and not options.dry_run:
- raise Exception('You are not a bot, don\'t archive builds. '
- + 'Use --dry-run to test locally')
- if (options.dry_run_output and
- (not os.path.exists(options.dry_run_output) or
- not os.path.isdir(options.dry_run_output))):
- raise Exception(options.dry_run_output
- + ' does not exist or is not a directory')
- if (options.skip_gradle_build and not options.dry_run):
- raise Exception('Using --skip-gradle-build only supported with --dry-run')
+ if not utils.is_bot() and not options.dry_run:
+ raise Exception('You are not a bot, don\'t archive builds. ' +
+ 'Use --dry-run to test locally')
+ if (options.dry_run_output and
+ (not os.path.exists(options.dry_run_output) or
+ not os.path.isdir(options.dry_run_output))):
+ raise Exception(options.dry_run_output +
+ ' does not exist or is not a directory')
+ if (options.skip_gradle_build and not options.dry_run):
+ raise Exception(
+            'Using --skip-gradle-build is only supported with --dry-run')
- if utils.is_bot() and not utils.IsWindows():
- SetRLimitToMax()
- if not utils.IsWindows():
- PrintResourceInfo()
+ if utils.is_bot() and not utils.IsWindows():
+ SetRLimitToMax()
+ if not utils.IsWindows():
+ PrintResourceInfo()
- with utils.TempDir() as temp:
- version_file = os.path.join(temp, 'r8-version.properties')
- with open(version_file,'w') as version_writer:
- version_writer.write('version.sha=' + GetGitHash() + '\n')
- if not os.environ.get('SWARMING_BOT_ID') and not options.dry_run:
- raise Exception('Environment variable SWARMING_BOT_ID not set')
+ with utils.TempDir() as temp:
+ version_file = os.path.join(temp, 'r8-version.properties')
+ with open(version_file, 'w') as version_writer:
+ version_writer.write('version.sha=' + GetGitHash() + '\n')
+ if not os.environ.get('SWARMING_BOT_ID') and not options.dry_run:
+ raise Exception('Environment variable SWARMING_BOT_ID not set')
- releaser = \
- ("<local developer build>" if options.dry_run
- else 'releaser=go/r8bot ('
- + (os.environ.get('SWARMING_BOT_ID') or 'foo') + ')\n')
- version_writer.write(releaser)
- version_writer.write('version-file.version.code=1\n')
+ releaser = \
+ ("<local developer build>" if options.dry_run
+ else 'releaser=go/r8bot ('
+ + (os.environ.get('SWARMING_BOT_ID') or 'foo') + ')\n')
+ version_writer.write(releaser)
+ version_writer.write('version-file.version.code=1\n')
- create_maven_release.generate_r8_maven_zip(
- utils.MAVEN_ZIP_LIB,
- version_file=version_file,
- skip_gradle_build=options.skip_gradle_build)
+ create_maven_release.generate_r8_maven_zip(
+ utils.MAVEN_ZIP_LIB,
+ version_file=version_file,
+ skip_gradle_build=options.skip_gradle_build)
- # Ensure all archived artifacts has been built before archiving.
- # The target tasks postfixed by 'lib' depend on the actual target task so
- # building it invokes the original task first.
- # The '-Pno_internal' flag is important because we generate the lib based on uses in tests.
- if (not options.skip_gradle_build):
- gradle.RunGradle([
- utils.GRADLE_TASK_CONSOLIDATED_LICENSE,
- utils.GRADLE_TASK_KEEP_ANNO_JAR,
- utils.GRADLE_TASK_R8,
- utils.GRADLE_TASK_R8LIB,
- utils.GRADLE_TASK_R8LIB_NO_DEPS,
- utils.GRADLE_TASK_RETRACE,
- utils.GRADLE_TASK_RETRACE_NO_DEPS,
- utils.GRADLE_TASK_SOURCE_JAR,
- utils.GRADLE_TASK_SWISS_ARMY_KNIFE,
- '-Pno_internal'
- ])
+        # Ensure all archived artifacts have been built before archiving.
+        # The target tasks suffixed with 'lib' depend on the actual target task
+        # so building them invokes the original tasks first.
+        # The '-Pno_internal' flag is important because we generate the lib based on uses in tests.
+ if (not options.skip_gradle_build):
+ gradle.RunGradle([
+ utils.GRADLE_TASK_CONSOLIDATED_LICENSE,
+ utils.GRADLE_TASK_KEEP_ANNO_JAR, utils.GRADLE_TASK_R8,
+ utils.GRADLE_TASK_R8LIB, utils.GRADLE_TASK_R8LIB_NO_DEPS,
+ utils.GRADLE_TASK_RETRACE, utils.GRADLE_TASK_RETRACE_NO_DEPS,
+ utils.GRADLE_TASK_SOURCE_JAR,
+ utils.GRADLE_TASK_SWISS_ARMY_KNIFE, '-Pno_internal'
+ ])
- # Create maven release of the desuage_jdk_libs configuration. This require
- # an r8.jar with dependencies to have been built.
- create_maven_release.generate_desugar_configuration_maven_zip(
- utils.DESUGAR_CONFIGURATION_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION,
- utils.DESUGAR_IMPLEMENTATION,
- utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP)
- create_maven_release.generate_desugar_configuration_maven_zip(
- utils.DESUGAR_CONFIGURATION_JDK11_LEGACY_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION_JDK11_LEGACY,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP)
+        # Create maven release of the desugar_jdk_libs configuration. This
+        # requires an r8.jar with dependencies to have been built.
+ create_maven_release.generate_desugar_configuration_maven_zip(
+ utils.DESUGAR_CONFIGURATION_MAVEN_ZIP, utils.DESUGAR_CONFIGURATION,
+ utils.DESUGAR_IMPLEMENTATION,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP)
+ create_maven_release.generate_desugar_configuration_maven_zip(
+ utils.DESUGAR_CONFIGURATION_JDK11_LEGACY_MAVEN_ZIP,
+ utils.DESUGAR_CONFIGURATION_JDK11_LEGACY,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP)
- create_maven_release.generate_desugar_configuration_maven_zip(
- utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
- create_maven_release.generate_desugar_configuration_maven_zip(
- utils.DESUGAR_CONFIGURATION_JDK11_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION_JDK11,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
- create_maven_release.generate_desugar_configuration_maven_zip(
- utils.DESUGAR_CONFIGURATION_JDK11_NIO_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION_JDK11_NIO,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
+ create_maven_release.generate_desugar_configuration_maven_zip(
+ utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL_MAVEN_ZIP,
+ utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
+ create_maven_release.generate_desugar_configuration_maven_zip(
+ utils.DESUGAR_CONFIGURATION_JDK11_MAVEN_ZIP,
+ utils.DESUGAR_CONFIGURATION_JDK11,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
+ create_maven_release.generate_desugar_configuration_maven_zip(
+ utils.DESUGAR_CONFIGURATION_JDK11_NIO_MAVEN_ZIP,
+ utils.DESUGAR_CONFIGURATION_JDK11_NIO,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
- version = GetVersion()
- is_main = IsMain(version)
- if is_main:
- # On main we use the git hash to archive with
- print('On main, using git hash for archiving')
- version = GetGitHash()
+ version = GetVersion()
+ is_main = IsMain(version)
+ if is_main:
+ # On main we use the git hash to archive with
+ print('On main, using git hash for archiving')
+ version = GetGitHash()
- destination = GetVersionDestination('gs://', version, is_main)
- if utils.cloud_storage_exists(destination) and not options.dry_run:
- raise Exception('Target archive directory %s already exists' % destination)
+ destination = GetVersionDestination('gs://', version, is_main)
+ if utils.cloud_storage_exists(destination) and not options.dry_run:
+ raise Exception('Target archive directory %s already exists' %
+ destination)
- # Create pom file for our maven repository that we build for testing.
- default_pom_file = os.path.join(temp, 'r8.pom')
- create_maven_release.write_default_r8_pom_file(default_pom_file, version)
- for_archiving = [
- utils.R8_JAR,
- utils.R8LIB_JAR,
- utils.R8LIB_JAR + '.map',
- utils.R8LIB_JAR + '_map.zip',
- utils.R8_FULL_EXCLUDE_DEPS_JAR,
- utils.R8LIB_EXCLUDE_DEPS_JAR,
- utils.R8LIB_EXCLUDE_DEPS_JAR + '.map',
- utils.R8LIB_EXCLUDE_DEPS_JAR + '_map.zip',
- utils.MAVEN_ZIP_LIB,
- utils.DESUGAR_CONFIGURATION,
- utils.DESUGAR_CONFIGURATION_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION_JDK11_LEGACY,
- utils.DESUGAR_CONFIGURATION_JDK11_LEGACY_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION_JDK11_MAVEN_ZIP,
- utils.DESUGAR_CONFIGURATION_JDK11_NIO_MAVEN_ZIP,
- utils.R8_SRC_JAR,
- utils.R8RETRACE_JAR,
- utils.R8RETRACE_JAR + '.map',
- utils.R8RETRACE_JAR + '_map.zip',
- utils.R8RETRACE_EXCLUDE_DEPS_JAR,
- utils.R8RETRACE_EXCLUDE_DEPS_JAR + '.map',
- utils.R8RETRACE_EXCLUDE_DEPS_JAR + '_map.zip',
- utils.KEEPANNO_ANNOTATIONS_JAR,
- utils.GENERATED_LICENSE]
- for file in for_archiving:
- file_name = os.path.basename(file)
- tagged_jar = os.path.join(temp, file_name)
- shutil.copyfile(file, tagged_jar)
- if file_name.endswith('.jar') and not file_name.endswith('-src.jar'):
- with zipfile.ZipFile(tagged_jar, 'a') as zip:
- zip.write(version_file, os.path.basename(version_file))
- destination = GetUploadDestination(version, file_name, is_main)
- print('Uploading %s to %s' % (tagged_jar, destination))
- if options.dry_run:
- if options.dry_run_output:
- dry_run_destination = os.path.join(options.dry_run_output, file_name)
- print('Dry run, not actually uploading. Copying to '
- + dry_run_destination)
- shutil.copyfile(tagged_jar, dry_run_destination)
- else:
- print('Dry run, not actually uploading')
- else:
- utils.upload_file_to_cloud_storage(tagged_jar, destination)
- print('File available at: %s' % GetUrl(version, file_name, is_main))
+ # Create pom file for our maven repository that we build for testing.
+ default_pom_file = os.path.join(temp, 'r8.pom')
+ create_maven_release.write_default_r8_pom_file(default_pom_file,
+ version)
+ for_archiving = [
+ utils.R8_JAR, utils.R8LIB_JAR, utils.R8LIB_JAR + '.map',
+ utils.R8LIB_JAR + '_map.zip', utils.R8_FULL_EXCLUDE_DEPS_JAR,
+ utils.R8LIB_EXCLUDE_DEPS_JAR, utils.R8LIB_EXCLUDE_DEPS_JAR + '.map',
+ utils.R8LIB_EXCLUDE_DEPS_JAR + '_map.zip', utils.MAVEN_ZIP_LIB,
+ utils.DESUGAR_CONFIGURATION, utils.DESUGAR_CONFIGURATION_MAVEN_ZIP,
+ utils.DESUGAR_CONFIGURATION_JDK11_LEGACY,
+ utils.DESUGAR_CONFIGURATION_JDK11_LEGACY_MAVEN_ZIP,
+ utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL_MAVEN_ZIP,
+ utils.DESUGAR_CONFIGURATION_JDK11_MAVEN_ZIP,
+ utils.DESUGAR_CONFIGURATION_JDK11_NIO_MAVEN_ZIP, utils.R8_SRC_JAR,
+ utils.R8RETRACE_JAR, utils.R8RETRACE_JAR + '.map',
+ utils.R8RETRACE_JAR + '_map.zip', utils.R8RETRACE_EXCLUDE_DEPS_JAR,
+ utils.R8RETRACE_EXCLUDE_DEPS_JAR + '.map',
+ utils.R8RETRACE_EXCLUDE_DEPS_JAR + '_map.zip',
+ utils.KEEPANNO_ANNOTATIONS_JAR, utils.GENERATED_LICENSE
+ ]
+ for file in for_archiving:
+ file_name = os.path.basename(file)
+ tagged_jar = os.path.join(temp, file_name)
+ shutil.copyfile(file, tagged_jar)
+ if file_name.endswith(
+ '.jar') and not file_name.endswith('-src.jar'):
+ with zipfile.ZipFile(tagged_jar, 'a') as zip:
+ zip.write(version_file, os.path.basename(version_file))
+ destination = GetUploadDestination(version, file_name, is_main)
+ print('Uploading %s to %s' % (tagged_jar, destination))
+ if options.dry_run:
+ if options.dry_run_output:
+ dry_run_destination = os.path.join(options.dry_run_output,
+ file_name)
+ print('Dry run, not actually uploading. Copying to ' +
+ dry_run_destination)
+ shutil.copyfile(tagged_jar, dry_run_destination)
+ else:
+ print('Dry run, not actually uploading')
+ else:
+ utils.upload_file_to_cloud_storage(tagged_jar, destination)
+ print('File available at: %s' %
+ GetUrl(version, file_name, is_main))
- # Upload R8 to a maven compatible location.
- if file == utils.R8_JAR:
- maven_dst = GetUploadDestination(utils.get_maven_path('r8', version),
- 'r8-%s.jar' % version, is_main)
- maven_pom_dst = GetUploadDestination(
- utils.get_maven_path('r8', version),
- 'r8-%s.pom' % version, is_main)
- if options.dry_run:
- print('Dry run, not actually creating maven repo for R8')
- else:
- utils.upload_file_to_cloud_storage(tagged_jar, maven_dst)
- utils.upload_file_to_cloud_storage(default_pom_file, maven_pom_dst)
- print('Maven repo root available at: %s' % GetMavenUrl(is_main))
+ # Upload R8 to a maven compatible location.
+ if file == utils.R8_JAR:
+ maven_dst = GetUploadDestination(
+ utils.get_maven_path('r8', version), 'r8-%s.jar' % version,
+ is_main)
+ maven_pom_dst = GetUploadDestination(
+ utils.get_maven_path('r8', version), 'r8-%s.pom' % version,
+ is_main)
+ if options.dry_run:
+ print('Dry run, not actually creating maven repo for R8')
+ else:
+ utils.upload_file_to_cloud_storage(tagged_jar, maven_dst)
+ utils.upload_file_to_cloud_storage(default_pom_file,
+ maven_pom_dst)
+ print('Maven repo root available at: %s' %
+ GetMavenUrl(is_main))
- # Upload desugar_jdk_libs configuration to a maven compatible location.
- if file == utils.DESUGAR_CONFIGURATION:
- jar_basename = 'desugar_jdk_libs_configuration.jar'
- jar_version_name = 'desugar_jdk_libs_configuration-%s.jar' % version
- maven_dst = GetUploadDestination(
- utils.get_maven_path('desugar_jdk_libs_configuration', version),
- jar_version_name, is_main)
+ # Upload desugar_jdk_libs configuration to a maven compatible location.
+ if file == utils.DESUGAR_CONFIGURATION:
+ jar_basename = 'desugar_jdk_libs_configuration.jar'
+ jar_version_name = 'desugar_jdk_libs_configuration-%s.jar' % version
+ maven_dst = GetUploadDestination(
+ utils.get_maven_path('desugar_jdk_libs_configuration',
+ version), jar_version_name, is_main)
- with utils.TempDir() as tmp_dir:
- desugar_jdk_libs_configuration_jar = os.path.join(tmp_dir,
- jar_version_name)
- create_maven_release.generate_jar_with_desugar_configuration(
- utils.DESUGAR_CONFIGURATION,
- utils.DESUGAR_IMPLEMENTATION,
- utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP,
- desugar_jdk_libs_configuration_jar)
+ with utils.TempDir() as tmp_dir:
+ desugar_jdk_libs_configuration_jar = os.path.join(
+ tmp_dir, jar_version_name)
+ create_maven_release.generate_jar_with_desugar_configuration(
+ utils.DESUGAR_CONFIGURATION,
+ utils.DESUGAR_IMPLEMENTATION,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP,
+ desugar_jdk_libs_configuration_jar)
- if options.dry_run:
- print('Dry run, not actually creating maven repo for '
- + 'desugar configuration.')
- if options.dry_run_output:
- shutil.copyfile(
- desugar_jdk_libs_configuration_jar,
- os.path.join(options.dry_run_output, jar_version_name))
- else:
- utils.upload_file_to_cloud_storage(
- desugar_jdk_libs_configuration_jar, maven_dst)
- print('Maven repo root available at: %s' % GetMavenUrl(is_main))
- # Also archive the jar as non maven destination for Google3
- jar_destination = GetUploadDestination(
- version, jar_basename, is_main)
- utils.upload_file_to_cloud_storage(
- desugar_jdk_libs_configuration_jar, jar_destination)
+ if options.dry_run:
+ print('Dry run, not actually creating maven repo for ' +
+ 'desugar configuration.')
+ if options.dry_run_output:
+ shutil.copyfile(
+ desugar_jdk_libs_configuration_jar,
+ os.path.join(options.dry_run_output,
+ jar_version_name))
+ else:
+ utils.upload_file_to_cloud_storage(
+ desugar_jdk_libs_configuration_jar, maven_dst)
+ print('Maven repo root available at: %s' %
+ GetMavenUrl(is_main))
+                    # Also archive the jar at a non-maven destination for Google3.
+ jar_destination = GetUploadDestination(
+ version, jar_basename, is_main)
+ utils.upload_file_to_cloud_storage(
+ desugar_jdk_libs_configuration_jar, jar_destination)
- # TODO(b/237636871): Refactor this to avoid the duplication of what is above.
- # Upload desugar_jdk_libs JDK-11 legacyconfiguration to a maven compatible location.
- if file == utils.DESUGAR_CONFIGURATION_JDK11_LEGACY:
- jar_basename = 'desugar_jdk_libs_configuration.jar'
- jar_version_name = 'desugar_jdk_libs_configuration-%s-jdk11-legacy.jar' % version
- maven_dst = GetUploadDestination(
- utils.get_maven_path('desugar_jdk_libs_configuration', version),
- jar_version_name, is_main)
+ # TODO(b/237636871): Refactor this to avoid the duplication of what is above.
+            # Upload desugar_jdk_libs JDK-11 legacy configuration to a maven compatible location.
+ if file == utils.DESUGAR_CONFIGURATION_JDK11_LEGACY:
+ jar_basename = 'desugar_jdk_libs_configuration.jar'
+ jar_version_name = 'desugar_jdk_libs_configuration-%s-jdk11-legacy.jar' % version
+ maven_dst = GetUploadDestination(
+ utils.get_maven_path('desugar_jdk_libs_configuration',
+ version), jar_version_name, is_main)
- with utils.TempDir() as tmp_dir:
- desugar_jdk_libs_configuration_jar = os.path.join(tmp_dir,
- jar_version_name)
- create_maven_release.generate_jar_with_desugar_configuration(
- utils.DESUGAR_CONFIGURATION_JDK11_LEGACY,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP,
- desugar_jdk_libs_configuration_jar)
+ with utils.TempDir() as tmp_dir:
+ desugar_jdk_libs_configuration_jar = os.path.join(
+ tmp_dir, jar_version_name)
+ create_maven_release.generate_jar_with_desugar_configuration(
+ utils.DESUGAR_CONFIGURATION_JDK11_LEGACY,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP,
+ desugar_jdk_libs_configuration_jar)
- if options.dry_run:
- print('Dry run, not actually creating maven repo for '
- + 'desugar configuration.')
- if options.dry_run_output:
- shutil.copyfile(
- desugar_jdk_libs_configuration_jar,
- os.path.join(options.dry_run_output, jar_version_name))
- else:
- utils.upload_file_to_cloud_storage(
- desugar_jdk_libs_configuration_jar, maven_dst)
- print('Maven repo root available at: %s' % GetMavenUrl(is_main))
- # Also archive the jar as non maven destination for Google3
- jar_destination = GetUploadDestination(
- version, jar_basename, is_main)
- utils.upload_file_to_cloud_storage(
- desugar_jdk_libs_configuration_jar, jar_destination)
+ if options.dry_run:
+ print('Dry run, not actually creating maven repo for ' +
+ 'desugar configuration.')
+ if options.dry_run_output:
+ shutil.copyfile(
+ desugar_jdk_libs_configuration_jar,
+ os.path.join(options.dry_run_output,
+ jar_version_name))
+ else:
+ utils.upload_file_to_cloud_storage(
+ desugar_jdk_libs_configuration_jar, maven_dst)
+ print('Maven repo root available at: %s' %
+ GetMavenUrl(is_main))
+                    # Also archive the jar at a non-maven destination for Google3.
+ jar_destination = GetUploadDestination(
+ version, jar_basename, is_main)
+ utils.upload_file_to_cloud_storage(
+ desugar_jdk_libs_configuration_jar, jar_destination)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
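
The bucket layout produced by the Get*Destination helpers above is easiest
to see with the path construction inlined (the git hash, version, and file
names are hypothetical examples):

    ARCHIVE_BUCKET = 'r8-releases'

    def storage_destination(prefix, version_or_path, file_name, is_main):
        # Main commits land under raw/main/<git-hash>/, release versions
        # directly under raw/<version>/.
        archive_dir = 'raw/main' if is_main else 'raw'
        return '%s%s/%s/%s/%s' % (prefix, ARCHIVE_BUCKET, archive_dir,
                                  version_or_path, file_name)

    print(storage_destination('gs://', '79ae2cab', 'r8.jar', True))
    # gs://r8-releases/raw/main/79ae2cab/r8.jar
    print(storage_destination('https://storage.googleapis.com/',
                              '8.2.42', 'r8lib.jar', False))
    # https://storage.googleapis.com/r8-releases/raw/8.2.42/r8lib.jar
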
diff --git a/tools/archive_desugar_jdk_libs.py b/tools/archive_desugar_jdk_libs.py
index 30d40a4..ead0bdd 100755
--- a/tools/archive_desugar_jdk_libs.py
+++ b/tools/archive_desugar_jdk_libs.py
@@ -40,11 +40,11 @@
VERSION_FILE_JDK11_NIO = 'VERSION_JDK11_NIO.txt'
VERSION_MAP = {
- 'jdk8': VERSION_FILE_JDK8,
- 'jdk11_legacy': VERSION_FILE_JDK11_LEGACY,
- 'jdk11_minimal': VERSION_FILE_JDK11_MINIMAL,
- 'jdk11': VERSION_FILE_JDK11,
- 'jdk11_nio': VERSION_FILE_JDK11_NIO
+ 'jdk8': VERSION_FILE_JDK8,
+ 'jdk11_legacy': VERSION_FILE_JDK11_LEGACY,
+ 'jdk11_minimal': VERSION_FILE_JDK11_MINIMAL,
+ 'jdk11': VERSION_FILE_JDK11,
+ 'jdk11_nio': VERSION_FILE_JDK11_NIO
}
GITHUB_REPRO = 'desugar_jdk_libs'
@@ -52,327 +52,350 @@
BASE_LIBRARY_NAME = 'desugar_jdk_libs'
LIBRARY_NAME_MAP = {
- 'jdk8': BASE_LIBRARY_NAME,
- 'jdk11_legacy': BASE_LIBRARY_NAME,
- 'jdk11_minimal': BASE_LIBRARY_NAME + '_minimal',
- 'jdk11': BASE_LIBRARY_NAME,
- 'jdk11_nio': BASE_LIBRARY_NAME + '_nio'
+ 'jdk8': BASE_LIBRARY_NAME,
+ 'jdk11_legacy': BASE_LIBRARY_NAME,
+ 'jdk11_minimal': BASE_LIBRARY_NAME + '_minimal',
+ 'jdk11': BASE_LIBRARY_NAME,
+ 'jdk11_nio': BASE_LIBRARY_NAME + '_nio'
}
MAVEN_RELEASE_TARGET_MAP = {
- 'jdk8': 'maven_release',
- 'jdk11_legacy': 'maven_release_jdk11_legacy',
- 'jdk11_minimal': 'maven_release_jdk11_minimal',
- 'jdk11': 'maven_release_jdk11',
- 'jdk11_nio': 'maven_release_jdk11_nio'
+ 'jdk8': 'maven_release',
+ 'jdk11_legacy': 'maven_release_jdk11_legacy',
+ 'jdk11_minimal': 'maven_release_jdk11_minimal',
+ 'jdk11': 'maven_release_jdk11',
+ 'jdk11_nio': 'maven_release_jdk11_nio'
}
MAVEN_RELEASE_ZIP = {
- 'jdk8': BASE_LIBRARY_NAME + '.zip',
- 'jdk11_legacy': BASE_LIBRARY_NAME + '_jdk11_legacy.zip',
- 'jdk11_minimal': BASE_LIBRARY_NAME + '_jdk11_minimal.zip',
- 'jdk11': BASE_LIBRARY_NAME + '_jdk11.zip',
- 'jdk11_nio': BASE_LIBRARY_NAME + '_jdk11_nio.zip'
+ 'jdk8': BASE_LIBRARY_NAME + '.zip',
+ 'jdk11_legacy': BASE_LIBRARY_NAME + '_jdk11_legacy.zip',
+ 'jdk11_minimal': BASE_LIBRARY_NAME + '_jdk11_minimal.zip',
+ 'jdk11': BASE_LIBRARY_NAME + '_jdk11.zip',
+ 'jdk11_nio': BASE_LIBRARY_NAME + '_jdk11_nio.zip'
}
-DESUGAR_JDK_LIBS_HASH_FILE = os.path.join(
- defines.THIRD_PARTY, 'openjdk', 'desugar_jdk_libs_11', 'desugar_jdk_libs_hash')
+DESUGAR_JDK_LIBS_HASH_FILE = os.path.join(defines.THIRD_PARTY, 'openjdk',
+ 'desugar_jdk_libs_11',
+ 'desugar_jdk_libs_hash')
def ParseOptions(argv):
- result = optparse.OptionParser()
- result.add_option('--variant',
- help="Variant(s) to build",
- metavar=('<variants(s)>'),
- choices=['jdk8', 'jdk11_legacy', 'jdk11_minimal', 'jdk11', 'jdk11_nio'],
- default=[],
- action='append')
- result.add_option('--dry-run', '--dry_run',
- help='Running on bot, use third_party dependency.',
- default=False,
- action='store_true')
- result.add_option('--dry-run-output', '--dry_run_output',
- help='Output directory for dry run.',
- type="string", action="store")
- result.add_option('--github-account', '--github_account',
- help='GitHub account to clone from.',
- default="google",
- type="string", action="store")
- result.add_option('--build_only', '--build-only',
- help='Build desugared library without archiving.',
- type="string", action="store")
- (options, args) = result.parse_args(argv)
- return (options, args)
+ result = optparse.OptionParser()
+ result.add_option(
+ '--variant',
+ help="Variant(s) to build",
+        metavar=('<variant(s)>'),
+ choices=['jdk8', 'jdk11_legacy', 'jdk11_minimal', 'jdk11', 'jdk11_nio'],
+ default=[],
+ action='append')
+ result.add_option('--dry-run',
+ '--dry_run',
+ help='Running on bot, use third_party dependency.',
+ default=False,
+ action='store_true')
+ result.add_option('--dry-run-output',
+ '--dry_run_output',
+ help='Output directory for dry run.',
+ type="string",
+ action="store")
+ result.add_option('--github-account',
+ '--github_account',
+ help='GitHub account to clone from.',
+ default="google",
+ type="string",
+ action="store")
+ result.add_option('--build_only',
+ '--build-only',
+ help='Build desugared library without archiving.',
+ type="string",
+ action="store")
+ (options, args) = result.parse_args(argv)
+ return (options, args)
def GetVersion(version_file_name):
- with open(version_file_name, 'r') as version_file:
- lines = [line.strip() for line in version_file.readlines()]
- lines = [line for line in lines if not line.startswith('#')]
- if len(lines) != 1:
- raise Exception('Version file '
- + version_file + ' is expected to have exactly one line')
- version = lines[0].strip()
- utils.check_basic_semver_version(
- version, 'in version file ' + version_file_name, allowPrerelease = True)
- return version
+ with open(version_file_name, 'r') as version_file:
+ lines = [line.strip() for line in version_file.readlines()]
+ lines = [line for line in lines if not line.startswith('#')]
+ if len(lines) != 1:
+            raise Exception('Version file ' + version_file_name +
+                            ' is expected to have exactly one line')
+ version = lines[0].strip()
+ utils.check_basic_semver_version(version,
+ 'in version file ' + version_file_name,
+ allowPrerelease=True)
+ return version
def Upload(options, file_name, storage_path, destination, is_main):
- print('Uploading %s to %s' % (file_name, destination))
- if options.dry_run:
- if options.dry_run_output:
- dry_run_destination = \
- os.path.join(options.dry_run_output, os.path.basename(file_name))
- print('Dry run, not actually uploading. Copying to '
- + dry_run_destination)
- shutil.copyfile(file_name, dry_run_destination)
+ print('Uploading %s to %s' % (file_name, destination))
+ if options.dry_run:
+ if options.dry_run_output:
+ dry_run_destination = \
+ os.path.join(options.dry_run_output, os.path.basename(file_name))
+ print('Dry run, not actually uploading. Copying to ' +
+ dry_run_destination)
+ shutil.copyfile(file_name, dry_run_destination)
+ else:
+ print('Dry run, not actually uploading')
else:
- print('Dry run, not actually uploading')
- else:
- utils.upload_file_to_cloud_storage(file_name, destination)
- print('File available at: %s' %
- destination.replace('gs://', 'https://storage.googleapis.com/', 1))
+ utils.upload_file_to_cloud_storage(file_name, destination)
+ print(
+ 'File available at: %s' %
+ destination.replace('gs://', 'https://storage.googleapis.com/', 1))
+
def CloneDesugaredLibrary(github_account, checkout_dir, desugar_jdk_libs_hash):
- git_utils.GitClone(
- 'https://github.com/'
- + github_account + '/' + GITHUB_REPRO, checkout_dir)
- git_utils.GitCheckout(desugar_jdk_libs_hash, checkout_dir)
+ git_utils.GitClone(
+ 'https://github.com/' + github_account + '/' + GITHUB_REPRO,
+ checkout_dir)
+ git_utils.GitCheckout(desugar_jdk_libs_hash, checkout_dir)
+
def GetJavaEnv(androidHomeTemp):
- java_env = dict(os.environ, JAVA_HOME = jdk.GetJdk11Home())
- java_env['PATH'] = java_env['PATH'] + os.pathsep + os.path.join(jdk.GetJdk11Home(), 'bin')
- java_env['GRADLE_OPTS'] = '-Xmx1g'
- java_env['ANDROID_HOME'] = androidHomeTemp
- return java_env
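+    # The build requires JDK 11; set JAVA_HOME and put its bin on PATH.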
+ java_env = dict(os.environ, JAVA_HOME=jdk.GetJdk11Home())
+ java_env['PATH'] = java_env['PATH'] + os.pathsep + os.path.join(
+ jdk.GetJdk11Home(), 'bin')
+ java_env['GRADLE_OPTS'] = '-Xmx1g'
+ java_env['ANDROID_HOME'] = androidHomeTemp
+ return java_env
+
def setUpFakeAndroidHome(androidHomeTemp):
- # Bazel will check if 30 is present then extract android.jar from 32.
- # We copy android.jar from third_party to mimic repository structure.
- subpath = os.path.join(androidHomeTemp, "build-tools")
- cmd = ["mkdir", subpath]
- subprocess.check_call(cmd)
- subpath = os.path.join(subpath, "32.0.0")
- cmd = ["mkdir", subpath]
- subprocess.check_call(cmd)
- subpath = os.path.join(androidHomeTemp, "platforms")
- cmd = ["mkdir", subpath]
- subprocess.check_call(cmd)
- subpath30 = os.path.join(subpath, "android-30")
- cmd = ["mkdir", subpath30]
- subprocess.check_call(cmd)
- subpath = os.path.join(subpath, "android-32")
- cmd = ["mkdir", subpath]
- subprocess.check_call(cmd)
- dest = os.path.join(subpath, "android.jar")
- sha = os.path.join(utils.THIRD_PARTY, "android_jar", "lib-v32.tar.gz.sha1")
- utils.DownloadFromGoogleCloudStorage(sha)
- src = os.path.join(utils.THIRD_PARTY, "android_jar", "lib-v32", "android.jar")
- cmd = ["cp", src, dest]
- subprocess.check_call(cmd)
+ # Bazel will check if 30 is present then extract android.jar from 32.
+ # We copy android.jar from third_party to mimic repository structure.
+ subpath = os.path.join(androidHomeTemp, "build-tools")
+ cmd = ["mkdir", subpath]
+ subprocess.check_call(cmd)
+ subpath = os.path.join(subpath, "32.0.0")
+ cmd = ["mkdir", subpath]
+ subprocess.check_call(cmd)
+ subpath = os.path.join(androidHomeTemp, "platforms")
+ cmd = ["mkdir", subpath]
+ subprocess.check_call(cmd)
+ subpath30 = os.path.join(subpath, "android-30")
+ cmd = ["mkdir", subpath30]
+ subprocess.check_call(cmd)
+ subpath = os.path.join(subpath, "android-32")
+ cmd = ["mkdir", subpath]
+ subprocess.check_call(cmd)
+ dest = os.path.join(subpath, "android.jar")
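+    # Fetch the API 32 android.jar from cloud storage via its sha1 file.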
+ sha = os.path.join(utils.THIRD_PARTY, "android_jar", "lib-v32.tar.gz.sha1")
+ utils.DownloadFromGoogleCloudStorage(sha)
+ src = os.path.join(utils.THIRD_PARTY, "android_jar", "lib-v32",
+ "android.jar")
+ cmd = ["cp", src, dest]
+ subprocess.check_call(cmd)
-def BuildDesugaredLibrary(checkout_dir, variant, version = None):
- if not variant in MAVEN_RELEASE_TARGET_MAP:
- raise Exception('Variant ' + variant + ' is not supported')
- if variant != 'jdk8' and variant != 'jdk11_legacy' and version is None:
- raise Exception('Variant ' + variant + ' require version for undesugaring')
- if variant != 'jdk8':
- # Hack to workaround b/256723819.
- os.remove(
- os.path.join(
- checkout_dir,
- "jdk11",
- "src",
- "java.base",
- "share",
- "classes",
- "java",
- "time",
- "format",
- "DesugarDateTimeFormatterBuilder.java"))
- with utils.ChangedWorkingDirectory(checkout_dir):
- with utils.TempDir() as androidHomeTemp:
- setUpFakeAndroidHome(androidHomeTemp)
- javaEnv = GetJavaEnv(androidHomeTemp)
- bazel = os.path.join(utils.BAZEL_TOOL, 'lib', 'bazel', 'bin', 'bazel')
- cmd = [
- bazel,
- '--bazelrc=/dev/null',
- 'build',
- '--spawn_strategy=local',
- '--verbose_failures',
- MAVEN_RELEASE_TARGET_MAP[variant]]
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd, env=javaEnv)
- cmd = [bazel, 'shutdown']
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd, env=javaEnv)
- # Locate the library jar and the maven zip with the jar from the
- # bazel build.
- if variant == 'jdk8':
- library_jar = os.path.join(
- checkout_dir, 'bazel-bin', 'src', 'share', 'classes', 'java', 'libjava.jar')
- else:
- # All JDK11 variants use the same library code.
- library_jar = os.path.join(
- checkout_dir, 'bazel-bin', 'jdk11', 'src', 'd8_java_base_selected_with_addon.jar')
- maven_zip = os.path.join(
- checkout_dir,
- 'bazel-bin',
- MAVEN_RELEASE_ZIP[variant])
+def BuildDesugaredLibrary(checkout_dir, variant, version=None):
+ if not variant in MAVEN_RELEASE_TARGET_MAP:
+ raise Exception('Variant ' + variant + ' is not supported')
+ if variant != 'jdk8' and variant != 'jdk11_legacy' and version is None:
+        raise Exception('Variant ' + variant +
+                        ' requires a version for undesugaring')
+ if variant != 'jdk8':
+ # Hack to workaround b/256723819.
+ os.remove(
+ os.path.join(checkout_dir, "jdk11", "src", "java.base", "share",
+ "classes", "java", "time", "format",
+ "DesugarDateTimeFormatterBuilder.java"))
+ with utils.ChangedWorkingDirectory(checkout_dir):
+ with utils.TempDir() as androidHomeTemp:
+ setUpFakeAndroidHome(androidHomeTemp)
+ javaEnv = GetJavaEnv(androidHomeTemp)
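+            # Build the Maven release target for this variant with Bazel.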
+ bazel = os.path.join(utils.BAZEL_TOOL, 'lib', 'bazel', 'bin',
+ 'bazel')
+ cmd = [
+ bazel, '--bazelrc=/dev/null', 'build', '--spawn_strategy=local',
+ '--verbose_failures', MAVEN_RELEASE_TARGET_MAP[variant]
+ ]
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd, env=javaEnv)
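+            # Stop the Bazel server so it does not outlive the checkout.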
+ cmd = [bazel, 'shutdown']
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd, env=javaEnv)
- if variant != 'jdk8' and variant != 'jdk11_legacy':
- # The undesugaring is temporary...
- undesugared_maven_zip = os.path.join(checkout_dir, 'undesugared_maven')
- Undesugar(variant, maven_zip, version, undesugared_maven_zip)
- undesugared_maven_zip = os.path.join(checkout_dir, 'undesugared_maven.zip')
- return (library_jar, undesugared_maven_zip)
- else:
- return (library_jar, maven_zip)
+ # Locate the library jar and the maven zip with the jar from the
+ # bazel build.
+ if variant == 'jdk8':
+ library_jar = os.path.join(checkout_dir, 'bazel-bin', 'src',
+ 'share', 'classes', 'java',
+ 'libjava.jar')
+ else:
+ # All JDK11 variants use the same library code.
+ library_jar = os.path.join(checkout_dir, 'bazel-bin', 'jdk11',
+ 'src',
+ 'd8_java_base_selected_with_addon.jar')
+ maven_zip = os.path.join(checkout_dir, 'bazel-bin',
+ MAVEN_RELEASE_ZIP[variant])
+
+ if variant != 'jdk8' and variant != 'jdk11_legacy':
+ # The undesugaring is temporary...
+ undesugared_maven_zip = os.path.join(checkout_dir,
+ 'undesugared_maven')
+ Undesugar(variant, maven_zip, version, undesugared_maven_zip)
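+            # Undesugar calls shutil.make_archive, which appends '.zip' to
+            # the base name, hence the reassignment.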
+ undesugared_maven_zip = os.path.join(checkout_dir,
+ 'undesugared_maven.zip')
+ return (library_jar, undesugared_maven_zip)
+ else:
+ return (library_jar, maven_zip)
+
def hash_for(file, hash):
- with open(file, 'rb') as f:
- while True:
- # Read chunks of 1MB
- chunk = f.read(2 ** 20)
- if not chunk:
- break
- hash.update(chunk)
- return hash.hexdigest()
+ with open(file, 'rb') as f:
+ while True:
+ # Read chunks of 1MB
+ chunk = f.read(2**20)
+ if not chunk:
+ break
+ hash.update(chunk)
+ return hash.hexdigest()
+
def write_md5_for(file):
- hexdigest = hash_for(file, hashlib.md5())
- with (open(file + '.md5', 'w')) as file:
- file.write(hexdigest)
+ hexdigest = hash_for(file, hashlib.md5())
+    with open(file + '.md5', 'w') as file:
+ file.write(hexdigest)
+
def write_sha1_for(file):
- hexdigest = hash_for(file, hashlib.sha1())
- with (open(file + '.sha1', 'w')) as file:
- file.write(hexdigest)
+ hexdigest = hash_for(file, hashlib.sha1())
+    with open(file + '.sha1', 'w') as file:
+ file.write(hexdigest)
+
def Undesugar(variant, maven_zip, version, undesugared_maven_zip):
- gradle.RunGradle([utils.GRADLE_TASK_R8,
- utils.GRADLE_TASK_TEST_JAR,
- utils.GRADLE_TASK_TEST_DEPS_JAR,
- '-Pno_internal'])
- with utils.TempDir() as tmp:
- with zipfile.ZipFile(maven_zip, 'r') as zip_ref:
- zip_ref.extractall(tmp)
- desugar_jdk_libs_jar = os.path.join(
- tmp,
- 'com',
- 'android',
- 'tools',
- LIBRARY_NAME_MAP[variant],
- version,
- '%s-%s.jar' % (LIBRARY_NAME_MAP[variant], version))
- print(desugar_jdk_libs_jar)
- undesugared_jar = os.path.join(tmp, 'undesugared.jar')
- buildLibs = os.path.join(defines.REPO_ROOT, 'build', 'libs')
- cmd = [jdk.GetJavaExecutable(),
- '-cp',
- '%s:%s:%s' % (utils.R8_JAR, utils.R8_TESTS_JAR, utils.R8_TESTS_DEPS_JAR),
- 'com.android.tools.r8.desugar.desugaredlibrary.jdk11.DesugaredLibraryJDK11Undesugarer',
- desugar_jdk_libs_jar,
- undesugared_jar]
- print(cmd)
- try:
- output = subprocess.check_output(cmd, stderr = subprocess.STDOUT).decode('utf-8')
- except subprocess.CalledProcessError as e:
- print(e)
- print(e.output)
- raise e
- print(output)
- # Copy the undesugared jar into place and update the checksums.
- shutil.copyfile(undesugared_jar, desugar_jdk_libs_jar)
- write_md5_for(desugar_jdk_libs_jar)
- write_sha1_for(desugar_jdk_libs_jar)
- shutil.make_archive(undesugared_maven_zip, 'zip', tmp)
- print(undesugared_maven_zip)
- output = subprocess.check_output(['ls', '-l', os.path.dirname(undesugared_maven_zip)], stderr = subprocess.STDOUT).decode('utf-8')
- print(output)
+ gradle.RunGradle([
+ utils.GRADLE_TASK_R8, utils.GRADLE_TASK_TEST_JAR,
+ utils.GRADLE_TASK_TEST_DEPS_JAR, '-Pno_internal'
+ ])
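+    # These tasks build the R8 jar and test jars used below as the
+    # undesugarer classpath.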
+ with utils.TempDir() as tmp:
+ with zipfile.ZipFile(maven_zip, 'r') as zip_ref:
+ zip_ref.extractall(tmp)
+ desugar_jdk_libs_jar = os.path.join(
+ tmp, 'com', 'android', 'tools', LIBRARY_NAME_MAP[variant], version,
+ '%s-%s.jar' % (LIBRARY_NAME_MAP[variant], version))
+ print(desugar_jdk_libs_jar)
+ undesugared_jar = os.path.join(tmp, 'undesugared.jar')
+ buildLibs = os.path.join(defines.REPO_ROOT, 'build', 'libs')
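+        # Run the JDK11 undesugarer over the library jar from the maven zip.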
+ cmd = [
+ jdk.GetJavaExecutable(), '-cp',
+ '%s:%s:%s' %
+ (utils.R8_JAR, utils.R8_TESTS_JAR, utils.R8_TESTS_DEPS_JAR),
+ 'com.android.tools.r8.desugar.desugaredlibrary.jdk11.DesugaredLibraryJDK11Undesugarer',
+ desugar_jdk_libs_jar, undesugared_jar
+ ]
+ print(cmd)
+ try:
+ output = subprocess.check_output(
+ cmd, stderr=subprocess.STDOUT).decode('utf-8')
+ except subprocess.CalledProcessError as e:
+ print(e)
+ print(e.output)
+ raise e
+ print(output)
+ # Copy the undesugared jar into place and update the checksums.
+ shutil.copyfile(undesugared_jar, desugar_jdk_libs_jar)
+ write_md5_for(desugar_jdk_libs_jar)
+ write_sha1_for(desugar_jdk_libs_jar)
+ shutil.make_archive(undesugared_maven_zip, 'zip', tmp)
+ print(undesugared_maven_zip)
+ output = subprocess.check_output(
+ ['ls', '-l', os.path.dirname(undesugared_maven_zip)],
+ stderr=subprocess.STDOUT).decode('utf-8')
+ print(output)
+
def MustBeExistingDirectory(path):
- if (not os.path.exists(path) or not os.path.isdir(path)):
- raise Exception(path + ' does not exist or is not a directory')
+ if (not os.path.exists(path) or not os.path.isdir(path)):
+ raise Exception(path + ' does not exist or is not a directory')
+
def BuildAndUpload(options, variant):
- desugar_jdk_libs_hash = ''
- with open(DESUGAR_JDK_LIBS_HASH_FILE, 'r') as input_hash:
- desugar_jdk_libs_hash = input_hash.readline()
- if options.build_only:
+ desugar_jdk_libs_hash = ''
+ with open(DESUGAR_JDK_LIBS_HASH_FILE, 'r') as input_hash:
+ desugar_jdk_libs_hash = input_hash.readline()
+ if options.build_only:
+ with utils.TempDir() as checkout_dir:
+ CloneDesugaredLibrary(options.github_account, checkout_dir,
+ desugar_jdk_libs_hash)
+ (library_jar,
+ maven_zip) = BuildDesugaredLibrary(checkout_dir, variant,
+ desugar_jdk_libs_hash)
+ shutil.copyfile(
+ library_jar,
+ os.path.join(options.build_only, os.path.basename(library_jar)))
+ shutil.copyfile(
+ maven_zip,
+ os.path.join(options.build_only, os.path.basename(maven_zip)))
+ return
+
+ # Only handling versioned desugar_jdk_libs.
+ is_main = False
+
with utils.TempDir() as checkout_dir:
- CloneDesugaredLibrary(options.github_account, checkout_dir, desugar_jdk_libs_hash)
- (library_jar, maven_zip) = BuildDesugaredLibrary(checkout_dir, variant, desugar_jdk_libs_hash)
- shutil.copyfile(
- library_jar,
- os.path.join(options.build_only, os.path.basename(library_jar)))
- shutil.copyfile(
- maven_zip,
- os.path.join(options.build_only, os.path.basename(maven_zip)))
- return
+ CloneDesugaredLibrary(options.github_account, checkout_dir,
+ desugar_jdk_libs_hash)
+ version = GetVersion(os.path.join(checkout_dir, VERSION_MAP[variant]))
- # Only handling versioned desugar_jdk_libs.
- is_main = False
+ destination = archive.GetVersionDestination(
+ 'gs://', LIBRARY_NAME_MAP[variant] + '/' + version, is_main)
+ if utils.cloud_storage_exists(destination) and not options.dry_run:
+ raise Exception('Target archive directory %s already exists' %
+ destination)
- with utils.TempDir() as checkout_dir:
- CloneDesugaredLibrary(options.github_account, checkout_dir, desugar_jdk_libs_hash)
- version = GetVersion(os.path.join(checkout_dir, VERSION_MAP[variant]))
+ (library_jar,
+ maven_zip) = BuildDesugaredLibrary(checkout_dir, variant, version)
- destination = archive.GetVersionDestination(
- 'gs://', LIBRARY_NAME_MAP[variant] + '/' + version, is_main)
- if utils.cloud_storage_exists(destination) and not options.dry_run:
- raise Exception(
- 'Target archive directory %s already exists' % destination)
+ storage_path = LIBRARY_NAME_MAP[variant] + '/' + version
+ # Upload the jar file with the library.
+ destination = archive.GetUploadDestination(
+ storage_path, LIBRARY_NAME_MAP[variant] + '.jar', is_main)
+ Upload(options, library_jar, storage_path, destination, is_main)
- (library_jar, maven_zip) = BuildDesugaredLibrary(checkout_dir, variant, version)
+ # Upload the maven zip file with the library.
+ destination = archive.GetUploadDestination(storage_path,
+ MAVEN_RELEASE_ZIP[variant],
+ is_main)
+ Upload(options, maven_zip, storage_path, destination, is_main)
- storage_path = LIBRARY_NAME_MAP[variant] + '/' + version
- # Upload the jar file with the library.
- destination = archive.GetUploadDestination(
- storage_path, LIBRARY_NAME_MAP[variant] + '.jar', is_main)
- Upload(options, library_jar, storage_path, destination, is_main)
+    # Upload the jar file for accessing GCS as a maven repo.
+ maven_destination = archive.GetUploadDestination(
+ utils.get_maven_path(LIBRARY_NAME_MAP[variant], version),
+ '%s-%s.jar' % (LIBRARY_NAME_MAP[variant], version), is_main)
+ if options.dry_run:
+ print('Dry run, not actually creating maven repo')
+ else:
+ utils.upload_file_to_cloud_storage(library_jar, maven_destination)
+ print('Maven repo root available at: %s' %
+ archive.GetMavenUrl(is_main))
- # Upload the maven zip file with the library.
- destination = archive.GetUploadDestination(
- storage_path, MAVEN_RELEASE_ZIP[variant], is_main)
- Upload(options, maven_zip, storage_path, destination, is_main)
-
- # Upload the jar file for accessing GCS as a maven repro.
- maven_destination = archive.GetUploadDestination(
- utils.get_maven_path(LIBRARY_NAME_MAP[variant], version),
- '%s-%s.jar' % (LIBRARY_NAME_MAP[variant], version),
- is_main)
- if options.dry_run:
- print('Dry run, not actually creating maven repo')
- else:
- utils.upload_file_to_cloud_storage(library_jar, maven_destination)
- print('Maven repo root available at: %s' % archive.GetMavenUrl(is_main))
def Main(argv):
- (options, args) = ParseOptions(argv)
- if (len(args) > 0):
- raise Exception('Unsupported arguments')
- if not utils.is_bot() and not (options.dry_run or options.build_only):
- raise Exception('You are not a bot, don\'t archive builds. '
- + 'Use --dry-run or --build-only to test locally')
- if options.dry_run_output:
- MustBeExistingDirectory(options.dry_run_output)
- if options.build_only:
- MustBeExistingDirectory(options.build_only)
- if utils.is_bot():
- archive.SetRLimitToMax()
+ (options, args) = ParseOptions(argv)
+ if (len(args) > 0):
+ raise Exception('Unsupported arguments')
+ if not utils.is_bot() and not (options.dry_run or options.build_only):
+ raise Exception('You are not a bot, don\'t archive builds. ' +
+ 'Use --dry-run or --build-only to test locally')
+ if options.dry_run_output:
+ MustBeExistingDirectory(options.dry_run_output)
+ if options.build_only:
+ MustBeExistingDirectory(options.build_only)
+ if utils.is_bot():
+ archive.SetRLimitToMax()
- # Make sure bazel is extracted in third_party.
- utils.DownloadFromGoogleCloudStorage(utils.BAZEL_SHA_FILE)
- utils.DownloadFromGoogleCloudStorage(utils.JAVA8_SHA_FILE)
- utils.DownloadFromGoogleCloudStorage(utils.JAVA11_SHA_FILE)
- utils.DownloadFromGoogleCloudStorage(utils.DESUGAR_JDK_LIBS_11_SHA_FILE)
+ # Make sure bazel is extracted in third_party.
+ utils.DownloadFromGoogleCloudStorage(utils.BAZEL_SHA_FILE)
+ utils.DownloadFromGoogleCloudStorage(utils.JAVA8_SHA_FILE)
+ utils.DownloadFromGoogleCloudStorage(utils.JAVA11_SHA_FILE)
+ utils.DownloadFromGoogleCloudStorage(utils.DESUGAR_JDK_LIBS_11_SHA_FILE)
- for v in options.variant:
- BuildAndUpload(options, v)
+ for v in options.variant:
+ BuildAndUpload(options, v)
+
if __name__ == '__main__':
- sys.exit(Main(sys.argv[1:]))
+ sys.exit(Main(sys.argv[1:]))
diff --git a/tools/archive_smali.py b/tools/archive_smali.py
index db7cb89..fac42c5 100755
--- a/tools/archive_smali.py
+++ b/tools/archive_smali.py
@@ -7,10 +7,10 @@
import os
import re
try:
- import resource
+ import resource
except ImportError:
- # Not a Unix system. Do what Gandalf tells you not to.
- pass
+ # Not a Unix system. Do what Gandalf tells you not to.
+ pass
import shutil
import subprocess
import sys
@@ -21,129 +21,155 @@
REPO = 'https://github.com/google/smali'
NO_DRYRUN_OUTPUT = object()
+
def checkout(temp):
- subprocess.check_call(['git', 'clone', REPO, temp])
- return temp
+ subprocess.check_call(['git', 'clone', REPO, temp])
+ return temp
+
def parse_options():
- result = argparse.ArgumentParser(description='Release Smali')
- result.add_argument('--version',
- metavar=('<version>'),
- help='The version of smali to archive.')
- result.add_argument('--dry-run', '--dry_run',
- nargs='?',
- help='Build only, no upload.',
- metavar='<output directory>',
- default=None,
- const=NO_DRYRUN_OUTPUT)
- result.add_argument('--checkout',
- help='Use existing checkout.')
- return result.parse_args()
+ result = argparse.ArgumentParser(description='Release Smali')
+ result.add_argument('--version',
+ metavar=('<version>'),
+ help='The version of smali to archive.')
+ result.add_argument('--dry-run',
+ '--dry_run',
+ nargs='?',
+ help='Build only, no upload.',
+ metavar='<output directory>',
+ default=None,
+ const=NO_DRYRUN_OUTPUT)
+ result.add_argument('--checkout', help='Use existing checkout.')
+ return result.parse_args()
def set_rlimit_to_max():
- (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
- resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))
+ (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE)
+ resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))
def Main():
- options = parse_options()
- if not utils.is_bot() and not options.dry_run:
- raise Exception('You are not a bot, don\'t archive builds. '
- + 'Use --dry-run to test locally')
- if options.checkout and not options.dry_run:
- raise Exception('Using local checkout is only allowed with --dry-run')
- if not options.checkout and not options.version:
- raise Exception('Option --version is required (when not using local checkout)')
+ options = parse_options()
+ if not utils.is_bot() and not options.dry_run:
+ raise Exception('You are not a bot, don\'t archive builds. ' +
+ 'Use --dry-run to test locally')
+ if options.checkout and not options.dry_run:
+ raise Exception('Using local checkout is only allowed with --dry-run')
+ if not options.checkout and not options.version:
+ raise Exception(
+ 'Option --version is required (when not using local checkout)')
- if utils.is_bot() and not utils.IsWindows():
- set_rlimit_to_max()
+ if utils.is_bot() and not utils.IsWindows():
+ set_rlimit_to_max()
- with utils.TempDir() as temp:
- # Resolve dry run location to support relative directories.
- dry_run_output = None
- if options.dry_run and options.dry_run != NO_DRYRUN_OUTPUT:
- if not os.path.isdir(options.dry_run):
- os.mkdir(options.dry_run)
- dry_run_output = os.path.abspath(options.dry_run)
+ with utils.TempDir() as temp:
+ # Resolve dry run location to support relative directories.
+ dry_run_output = None
+ if options.dry_run and options.dry_run != NO_DRYRUN_OUTPUT:
+ if not os.path.isdir(options.dry_run):
+ os.mkdir(options.dry_run)
+ dry_run_output = os.path.abspath(options.dry_run)
- checkout_dir = options.checkout if options.checkout else checkout(temp)
- with utils.ChangedWorkingDirectory(checkout_dir):
- if options.version:
- output = subprocess.check_output(['git', 'tag', '-l', options.version])
- if len(output) == 0:
- raise Exception(
- 'Repository does not have a release tag for version %s' % options.version)
- subprocess.check_call(['git', 'checkout', options.version])
+ checkout_dir = options.checkout if options.checkout else checkout(temp)
+ with utils.ChangedWorkingDirectory(checkout_dir):
+ if options.version:
+ output = subprocess.check_output(
+ ['git', 'tag', '-l', options.version])
+ if len(output) == 0:
+ raise Exception(
+ 'Repository does not have a release tag for version %s'
+ % options.version)
+ subprocess.check_call(['git', 'checkout', options.version])
- # Find version from `build.gradle`.
- for line in open(os.path.join('build.gradle'), 'r'):
- result = re.match(
- r'^version = \'(\d+)\.(\d+)\.(\d+)\'', line)
- if result:
- break
- version = '%s.%s.%s' % (result.group(1), result.group(2), result.group(3))
- if options.version and version != options.version:
- message = 'version %s, expected version %s' % (version, options.version)
- if (options.checkout):
- raise Exception('Checkout %s has %s' % (options.checkout, message))
- else:
- raise Exception('Tag % has %s' % (options.version, message))
+ # Find version from `build.gradle`.
+ for line in open(os.path.join('build.gradle'), 'r'):
+ result = re.match(r'^version = \'(\d+)\.(\d+)\.(\d+)\'', line)
+ if result:
+ break
+ version = '%s.%s.%s' % (result.group(1), result.group(2),
+ result.group(3))
+ if options.version and version != options.version:
+ message = 'version %s, expected version %s' % (version,
+ options.version)
+ if (options.checkout):
+ raise Exception('Checkout %s has %s' %
+ (options.checkout, message))
+ else:
+                raise Exception('Tag %s has %s' % (options.version, message))
- print('Building version: %s' % version)
+ print('Building version: %s' % version)
- # Build release to local Maven repository.
- m2 = os.path.join(temp, 'm2')
- os.mkdir(m2)
- subprocess.check_call(
- ['./gradlew', '-Dmaven.repo.local=%s' % m2 , 'release', 'test', 'publishToMavenLocal'])
- base = os.path.join('com', 'android', 'tools', 'smali')
+ # Build release to local Maven repository.
+ m2 = os.path.join(temp, 'm2')
+ os.mkdir(m2)
+ subprocess.check_call([
+ './gradlew',
+ '-Dmaven.repo.local=%s' % m2, 'release', 'test',
+ 'publishToMavenLocal'
+ ])
+ base = os.path.join('com', 'android', 'tools', 'smali')
- # Check that the local maven repository only has the single version directory in
- # each artifact directory.
- for name in ['smali-util', 'smali-dexlib2', 'smali', 'smali-baksmali']:
- dirnames = next(os.walk(os.path.join(m2, base, name)), (None, None, []))[1]
- if not dirnames or len(dirnames) != 1 or dirnames[0] != version:
- raise Exception('Found unexpected directory %s in %s' % (dirnames, name))
+        # Check that the local maven repository only has the single version
+        # directory in each artifact directory.
+ for name in [
+ 'smali-util', 'smali-dexlib2', 'smali', 'smali-baksmali'
+ ]:
+ dirnames = next(os.walk(os.path.join(m2, base, name)),
+ (None, None, []))[1]
+ if not dirnames or len(dirnames) != 1 or dirnames[0] != version:
+ raise Exception('Found unexpected directory %s in %s' %
+ (dirnames, name))
- # Build an archive with the relevant content of the local maven repository.
- m2_filtered = os.path.join(temp, 'm2_filtered')
- shutil.copytree(m2, m2_filtered, ignore=shutil.ignore_patterns('maven-metadata-local.xml'))
- maven_release_archive = shutil.make_archive(
- 'smali-maven-release-%s' % version, 'zip', m2_filtered, base)
+            # Build an archive with the relevant content of the local maven
+            # repository.
+ m2_filtered = os.path.join(temp, 'm2_filtered')
+ shutil.copytree(
+ m2,
+ m2_filtered,
+ ignore=shutil.ignore_patterns('maven-metadata-local.xml'))
+ maven_release_archive = shutil.make_archive(
+ 'smali-maven-release-%s' % version, 'zip', m2_filtered, base)
- # Collect names of the fat jars.
- fat_jars = list(map(
- lambda prefix: '%s-%s-fat.jar' % (prefix, version),
- ['smali/build/libs/smali', 'baksmali/build/libs/baksmali']))
+ # Collect names of the fat jars.
+ fat_jars = list(
+ map(lambda prefix: '%s-%s-fat.jar' % (prefix, version),
+ ['smali/build/libs/smali', 'baksmali/build/libs/baksmali']))
- # Copy artifacts.
- files = [maven_release_archive]
- files.extend(fat_jars)
- if options.dry_run:
- if dry_run_output:
- print('Dry run, not actually uploading. Copying to %s:' % dry_run_output)
- for file in files:
- destination = os.path.join(dry_run_output, os.path.basename(file))
- shutil.copyfile(file, destination)
- print(" %s" % destination)
- else:
- print('Dry run, not actually uploading. Generated files:')
- for file in files:
- print(" %s" % os.path.basename(file))
- else:
- destination_prefix = 'gs://%s/smali/%s' % (ARCHIVE_BUCKET, version)
- if utils.cloud_storage_exists(destination_prefix):
- raise Exception('Target archive directory %s already exists' % destination_prefix)
- for file in files:
- destination = '%s/%s' % (destination_prefix, os.path.basename(file))
- if utils.cloud_storage_exists(destination):
- raise Exception('Target %s already exists' % destination)
- utils.upload_file_to_cloud_storage(file, destination)
- public_url = 'https://storage.googleapis.com/%s/smali/%s' % (ARCHIVE_BUCKET, version)
- print('Artifacts available at: %s' % public_url)
+ # Copy artifacts.
+ files = [maven_release_archive]
+ files.extend(fat_jars)
+ if options.dry_run:
+ if dry_run_output:
+ print('Dry run, not actually uploading. Copying to %s:' %
+ dry_run_output)
+ for file in files:
+ destination = os.path.join(dry_run_output,
+ os.path.basename(file))
+ shutil.copyfile(file, destination)
+ print(" %s" % destination)
+ else:
+ print('Dry run, not actually uploading. Generated files:')
+ for file in files:
+ print(" %s" % os.path.basename(file))
+ else:
+ destination_prefix = 'gs://%s/smali/%s' % (ARCHIVE_BUCKET,
+ version)
+ if utils.cloud_storage_exists(destination_prefix):
+ raise Exception(
+ 'Target archive directory %s already exists' %
+ destination_prefix)
+ for file in files:
+ destination = '%s/%s' % (destination_prefix,
+ os.path.basename(file))
+ if utils.cloud_storage_exists(destination):
+ raise Exception('Target %s already exists' %
+ destination)
+ utils.upload_file_to_cloud_storage(file, destination)
+ public_url = 'https://storage.googleapis.com/%s/smali/%s' % (
+ ARCHIVE_BUCKET, version)
+ print('Artifacts available at: %s' % public_url)
- print("Done!")
+ print("Done!")
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/as_utils.py b/tools/as_utils.py
index 3c69d10..69f7562 100644
--- a/tools/as_utils.py
+++ b/tools/as_utils.py
@@ -10,229 +10,246 @@
import utils
if utils.is_python3():
- from html.parser import HTMLParser
+ from html.parser import HTMLParser
else:
- from HTMLParser import HTMLParser
+ from HTMLParser import HTMLParser
def add_r8_dependency(checkout_dir, temp_dir, minified):
- build_file = os.path.join(checkout_dir, 'build.gradle')
- assert os.path.isfile(build_file), (
- 'Expected a file to be present at {}'.format(build_file))
+ build_file = os.path.join(checkout_dir, 'build.gradle')
+ assert os.path.isfile(build_file), (
+ 'Expected a file to be present at {}'.format(build_file))
- with open(build_file) as f:
- lines = f.readlines()
+ with open(build_file) as f:
+ lines = f.readlines()
- added_r8_dependency = False
- is_inside_dependencies = False
+ added_r8_dependency = False
+ is_inside_dependencies = False
- with open(build_file, 'w') as f:
- gradle_version = None
- for line in lines:
- stripped = line.strip()
- if stripped == 'dependencies {':
- assert not is_inside_dependencies, (
- 'Unexpected line with \'dependencies {\'')
- is_inside_dependencies = True
- if is_inside_dependencies:
- if '/r8.jar' in stripped or '/r8lib.jar' in stripped:
- # Skip line to avoid dependency on r8.jar
- continue
- elif 'com.android.tools.build:gradle:' in stripped:
- gradle_version = stripped[stripped.rindex(':')+1:-1]
- indent = ''.ljust(line.index('classpath'))
- jar = os.path.join(temp_dir, 'r8lib.jar' if minified else 'r8.jar')
- f.write('{}classpath files(\'{}\')\n'.format(indent, jar))
- added_r8_dependency = True
- elif stripped == '}':
- is_inside_dependencies = False
- f.write(line)
+ with open(build_file, 'w') as f:
+ gradle_version = None
+ for line in lines:
+ stripped = line.strip()
+ if stripped == 'dependencies {':
+ assert not is_inside_dependencies, (
+ 'Unexpected line with \'dependencies {\'')
+ is_inside_dependencies = True
+ if is_inside_dependencies:
+ if '/r8.jar' in stripped or '/r8lib.jar' in stripped:
+ # Skip line to avoid dependency on r8.jar
+ continue
+ elif 'com.android.tools.build:gradle:' in stripped:
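+                # Record the Android Gradle plugin version and add the local
+                # R8 jar to the buildscript classpath.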
+ gradle_version = stripped[stripped.rindex(':') + 1:-1]
+ indent = ''.ljust(line.index('classpath'))
+ jar = os.path.join(temp_dir,
+ 'r8lib.jar' if minified else 'r8.jar')
+ f.write('{}classpath files(\'{}\')\n'.format(indent, jar))
+ added_r8_dependency = True
+ elif stripped == '}':
+ is_inside_dependencies = False
+ f.write(line)
- assert added_r8_dependency, 'Unable to add R8 as a dependency'
- assert gradle_version
- assert LooseVersion(gradle_version) >= LooseVersion('3.2'), (
- 'Unsupported gradle version: {} (must use at least gradle '
- + 'version 3.2)').format(gradle_version)
+ assert added_r8_dependency, 'Unable to add R8 as a dependency'
+ assert gradle_version
+ assert LooseVersion(gradle_version) >= LooseVersion('3.2'), (
+ 'Unsupported gradle version: {} (must use at least gradle ' +
+ 'version 3.2)').format(gradle_version)
+
def add_settings_gradle(checkout_dir, name):
- settings_file = os.path.join(checkout_dir, 'settings.gradle')
- if os.path.isfile(settings_file):
- return
+ settings_file = os.path.join(checkout_dir, 'settings.gradle')
+ if os.path.isfile(settings_file):
+ return
- with open(settings_file, "w+") as f:
- f.write("rootProject.name = '{}'\n".format(name))
+ with open(settings_file, "w+") as f:
+ f.write("rootProject.name = '{}'\n".format(name))
+
def remove_r8_dependency(checkout_dir):
- build_file = os.path.join(checkout_dir, 'build.gradle')
- assert os.path.isfile(build_file), (
- 'Expected a file to be present at {}'.format(build_file))
- with open(build_file) as f:
- lines = f.readlines()
- with open(build_file, 'w') as f:
- for line in lines:
- if ('/r8.jar' not in line) and ('/r8lib.jar' not in line):
- f.write(line)
+ build_file = os.path.join(checkout_dir, 'build.gradle')
+ assert os.path.isfile(build_file), (
+ 'Expected a file to be present at {}'.format(build_file))
+ with open(build_file) as f:
+ lines = f.readlines()
+ with open(build_file, 'w') as f:
+ for line in lines:
+ if ('/r8.jar' not in line) and ('/r8lib.jar' not in line):
+ f.write(line)
+
def GetMinAndCompileSdk(app, checkout_dir, apk_reference):
- compile_sdk = app.compile_sdk
- min_sdk = app.min_sdk
+ compile_sdk = app.compile_sdk
+ min_sdk = app.min_sdk
- if not compile_sdk or not min_sdk:
- build_gradle_file = os.path.join(checkout_dir, app.module, 'build.gradle')
- assert os.path.isfile(build_gradle_file), (
- 'Expected to find build.gradle file at {}'.format(build_gradle_file))
+ if not compile_sdk or not min_sdk:
+ build_gradle_file = os.path.join(checkout_dir, app.module,
+ 'build.gradle')
+ assert os.path.isfile(build_gradle_file), (
+ 'Expected to find build.gradle file at {}'.format(build_gradle_file)
+ )
- # Attempt to find the sdk values from build.gradle.
- with open(build_gradle_file) as f:
- for line in f.readlines():
- stripped = line.strip()
- if stripped.startswith('compileSdkVersion '):
- if not app.compile_sdk:
- assert not compile_sdk
- compile_sdk = int(stripped[len('compileSdkVersion '):])
- elif stripped.startswith('minSdkVersion '):
- if not app.min_sdk:
- assert not min_sdk
- min_sdk = int(stripped[len('minSdkVersion '):])
+ # Attempt to find the sdk values from build.gradle.
+ with open(build_gradle_file) as f:
+ for line in f.readlines():
+ stripped = line.strip()
+ if stripped.startswith('compileSdkVersion '):
+ if not app.compile_sdk:
+ assert not compile_sdk
+ compile_sdk = int(stripped[len('compileSdkVersion '):])
+ elif stripped.startswith('minSdkVersion '):
+ if not app.min_sdk:
+ assert not min_sdk
+ min_sdk = int(stripped[len('minSdkVersion '):])
- assert min_sdk, (
- 'Expected to find `minSdkVersion` in {}'.format(build_gradle_file))
- assert compile_sdk, (
- 'Expected to find `compileSdkVersion` in {}'.format(build_gradle_file))
+ assert min_sdk, (
+ 'Expected to find `minSdkVersion` in {}'.format(build_gradle_file))
+ assert compile_sdk, (
+ 'Expected to find `compileSdkVersion` in {}'.format(build_gradle_file))
- return (min_sdk, compile_sdk)
+ return (min_sdk, compile_sdk)
+
def IsGradleTaskName(x):
- # Check that it is non-empty.
- if not x:
- return False
- # Check that there is no whitespace.
- for c in x:
- if c.isspace():
- return False
- # Check that the first character following an optional ':' is a lower-case
- # alphabetic character.
- c = x[0]
- if c == ':' and len(x) >= 2:
- c = x[1]
- return c.isalpha() and c.islower()
+ # Check that it is non-empty.
+ if not x:
+ return False
+ # Check that there is no whitespace.
+ for c in x:
+ if c.isspace():
+ return False
+ # Check that the first character following an optional ':' is a lower-case
+ # alphabetic character.
+ c = x[0]
+ if c == ':' and len(x) >= 2:
+ c = x[1]
+ return c.isalpha() and c.islower()
+
def IsGradleCompilerTask(x, shrinker):
- if 'r8' in shrinker:
- assert 'transformClassesWithDexBuilderFor' not in x
- assert 'transformDexArchiveWithDexMergerFor' not in x
- return 'transformClassesAndResourcesWithR8For' in x
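+    # R8 dexes as part of its own task, so the separate dex builder/merger
+    # transforms must not appear.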
+ if 'r8' in shrinker:
+ assert 'transformClassesWithDexBuilderFor' not in x
+ assert 'transformDexArchiveWithDexMergerFor' not in x
+ return 'transformClassesAndResourcesWithR8For' in x
- assert shrinker == 'pg'
- return ('transformClassesAndResourcesWithProguard' in x
- or 'transformClassesWithDexBuilderFor' in x
- or 'transformDexArchiveWithDexMergerFor' in x)
+ assert shrinker == 'pg'
+ return ('transformClassesAndResourcesWithProguard' in x or
+ 'transformClassesWithDexBuilderFor' in x or
+ 'transformDexArchiveWithDexMergerFor' in x)
+
def ListFiles(directory, predicate=None):
- files = []
- for root, directories, filenames in os.walk(directory):
- for filename in filenames:
- file = os.path.join(root, filename)
- if predicate is None or predicate(file):
- files.append(file)
- return files
+ files = []
+ for root, directories, filenames in os.walk(directory):
+ for filename in filenames:
+ file = os.path.join(root, filename)
+ if predicate is None or predicate(file):
+ files.append(file)
+ return files
+
def SetPrintConfigurationDirective(app, checkout_dir, destination):
- proguard_config_file = FindProguardConfigurationFile(app, checkout_dir)
- with open(proguard_config_file) as f:
- lines = f.readlines()
- with open(proguard_config_file, 'w') as f:
- for line in lines:
- if '-printconfiguration' not in line:
- f.write(line)
- # Check that there is a line-break at the end of the file or insert one.
- if len(lines) and lines[-1].strip():
- f.write('\n')
- f.write('-printconfiguration {}\n'.format(destination))
+ proguard_config_file = FindProguardConfigurationFile(app, checkout_dir)
+ with open(proguard_config_file) as f:
+ lines = f.readlines()
+ with open(proguard_config_file, 'w') as f:
+ for line in lines:
+ if '-printconfiguration' not in line:
+ f.write(line)
+ # Check that there is a line-break at the end of the file or insert one.
+ if len(lines) and lines[-1].strip():
+ f.write('\n')
+ f.write('-printconfiguration {}\n'.format(destination))
+
def FindProguardConfigurationFile(app, checkout_dir):
- candidates = [
- 'proguard.cfg',
- 'proguard-rules.pro',
- 'proguard-rules.txt',
- 'proguard-project.txt']
- for candidate in candidates:
- proguard_config_file = os.path.join(checkout_dir, app.module, candidate)
- if os.path.isfile(proguard_config_file):
- return proguard_config_file
- # Currently assuming that the Proguard configuration file can be found at
- # one of the predefined locations.
- assert False, 'Unable to find Proguard configuration file'
+ candidates = [
+ 'proguard.cfg', 'proguard-rules.pro', 'proguard-rules.txt',
+ 'proguard-project.txt'
+ ]
+ for candidate in candidates:
+ proguard_config_file = os.path.join(checkout_dir, app.module, candidate)
+ if os.path.isfile(proguard_config_file):
+ return proguard_config_file
+ # Currently assuming that the Proguard configuration file can be found at
+ # one of the predefined locations.
+ assert False, 'Unable to find Proguard configuration file'
+
def Move(src, dst, quiet=False):
- if not quiet:
- print('Moving `{}` to `{}`'.format(src, dst))
- dst_parent = os.path.dirname(dst)
- if not os.path.isdir(dst_parent):
- os.makedirs(dst_parent)
- elif os.path.isdir(dst):
- shutil.rmtree(dst)
- elif os.path.isfile(dst):
- os.remove(dst)
- shutil.move(src, dst)
+ if not quiet:
+ print('Moving `{}` to `{}`'.format(src, dst))
+ dst_parent = os.path.dirname(dst)
+ if not os.path.isdir(dst_parent):
+ os.makedirs(dst_parent)
+ elif os.path.isdir(dst):
+ shutil.rmtree(dst)
+ elif os.path.isfile(dst):
+ os.remove(dst)
+ shutil.move(src, dst)
+
def MoveDir(src, dst, quiet=False):
- assert os.path.isdir(src)
- Move(src, dst, quiet=quiet)
+ assert os.path.isdir(src)
+ Move(src, dst, quiet=quiet)
+
def MoveFile(src, dst, quiet=False):
- assert os.path.isfile(src), "Expected a file to be present at " + src
- Move(src, dst, quiet=quiet)
+ assert os.path.isfile(src), "Expected a file to be present at " + src
+ Move(src, dst, quiet=quiet)
+
def MoveProfileReportTo(dest_dir, build_stdout, quiet=False):
- html_file = None
- profile_message = 'See the profiling report at: '
- # We are not interested in the profiling report for buildSrc.
- for line in build_stdout:
- if (profile_message in line) and ('buildSrc' not in line):
- assert not html_file, "Only one report should be created"
- html_file = line[len(profile_message):]
- if html_file.startswith('file://'):
- html_file = html_file[len('file://'):]
+ html_file = None
+ profile_message = 'See the profiling report at: '
+ # We are not interested in the profiling report for buildSrc.
+ for line in build_stdout:
+ if (profile_message in line) and ('buildSrc' not in line):
+ assert not html_file, "Only one report should be created"
+ html_file = line[len(profile_message):]
+ if html_file.startswith('file://'):
+ html_file = html_file[len('file://'):]
- if not html_file:
- return
+ if not html_file:
+ return
- assert os.path.isfile(html_file), 'Expected to find HTML file at {}'.format(
- html_file)
- MoveFile(html_file, os.path.join(dest_dir, 'index.html'), quiet=quiet)
+ assert os.path.isfile(html_file), 'Expected to find HTML file at {}'.format(
+ html_file)
+ MoveFile(html_file, os.path.join(dest_dir, 'index.html'), quiet=quiet)
- html_dir = os.path.dirname(html_file)
- for dir_name in ['css', 'js']:
- MoveDir(os.path.join(html_dir, dir_name), os.path.join(dest_dir, dir_name),
- quiet=quiet)
+ html_dir = os.path.dirname(html_file)
+ for dir_name in ['css', 'js']:
+ MoveDir(os.path.join(html_dir, dir_name),
+ os.path.join(dest_dir, dir_name),
+ quiet=quiet)
+
def MoveXMLTestResultFileTo(xml_test_result_dest, test_stdout, quiet=False):
- xml_test_result_file = None
- xml_result_reporter_message = 'XML test result file generated at '
- for line in test_stdout:
- if xml_result_reporter_message in line:
- index_from = (
- line.index(xml_result_reporter_message)
- + len(xml_result_reporter_message))
- index_to = line.index('.xml') + len('.xml')
- xml_test_result_file = line[index_from:index_to]
- break
+ xml_test_result_file = None
+ xml_result_reporter_message = 'XML test result file generated at '
+ for line in test_stdout:
+ if xml_result_reporter_message in line:
+ index_from = (line.index(xml_result_reporter_message) +
+ len(xml_result_reporter_message))
+ index_to = line.index('.xml') + len('.xml')
+ xml_test_result_file = line[index_from:index_to]
+ break
- assert os.path.isfile(xml_test_result_file), (
- 'Expected to find XML file at {}'.format(xml_test_result_file))
+ assert os.path.isfile(xml_test_result_file), (
+ 'Expected to find XML file at {}'.format(xml_test_result_file))
- MoveFile(xml_test_result_file, xml_test_result_dest, quiet=quiet)
+ MoveFile(xml_test_result_file, xml_test_result_dest, quiet=quiet)
+
def ParseProfileReport(profile_dir):
- html_file = os.path.join(profile_dir, 'index.html')
- assert os.path.isfile(html_file)
+ html_file = os.path.join(profile_dir, 'index.html')
+ assert os.path.isfile(html_file)
- parser = ProfileReportParser()
- with open(html_file) as f:
- for line in f.readlines():
- parser.feed(line)
- return parser.result
+ parser = ProfileReportParser()
+ with open(html_file) as f:
+ for line in f.readlines():
+ parser.feed(line)
+ return parser.result
+
# A simple HTML parser that recognizes the following pattern:
#
@@ -242,51 +259,50 @@
# <td></td>
# </tr>
class ProfileReportParser(HTMLParser):
- def __init__(self):
- HTMLParser.__init__(self)
- self.entered_table_row = False
- self.entered_task_name_cell = False
- self.entered_duration_cell = False
- self.current_task_name = None
- self.current_duration = None
+ def __init__(self):
+ HTMLParser.__init__(self)
+ self.entered_table_row = False
+ self.entered_task_name_cell = False
+ self.entered_duration_cell = False
- self.result = {}
+ self.current_task_name = None
+ self.current_duration = None
- def handle_starttag(self, tag, attrs):
- entered_table_row_before = self.entered_table_row
- entered_task_name_cell_before = self.entered_task_name_cell
+ self.result = {}
- self.entered_table_row = (tag == 'tr')
- self.entered_task_name_cell = (tag == 'td' and entered_table_row_before)
- self.entered_duration_cell = (
- self.current_task_name
- and tag == 'td'
- and entered_task_name_cell_before)
+ def handle_starttag(self, tag, attrs):
+ entered_table_row_before = self.entered_table_row
+ entered_task_name_cell_before = self.entered_task_name_cell
- def handle_endtag(self, tag):
- if tag == 'tr':
- if self.current_task_name and self.current_duration:
- self.result[self.current_task_name] = self.current_duration
- self.current_task_name = None
- self.current_duration = None
- self.entered_table_row = False
+ self.entered_table_row = (tag == 'tr')
+ self.entered_task_name_cell = (tag == 'td' and entered_table_row_before)
+ self.entered_duration_cell = (self.current_task_name and tag == 'td' and
+ entered_task_name_cell_before)
- def handle_data(self, data):
- stripped = data.strip()
- if not stripped:
- return
- if self.entered_task_name_cell:
- if IsGradleTaskName(stripped):
- self.current_task_name = stripped
- elif self.entered_duration_cell and stripped.endswith('s'):
- duration = stripped[:-1]
- if 'm' in duration:
- tmp = duration.split('m')
- minutes = int(tmp[0])
- seconds = float(tmp[1])
- else:
- minutes = 0
- seconds = float(duration)
- self.current_duration = 60 * minutes + seconds
- self.entered_table_row = False
+ def handle_endtag(self, tag):
+ if tag == 'tr':
+ if self.current_task_name and self.current_duration:
+ self.result[self.current_task_name] = self.current_duration
+ self.current_task_name = None
+ self.current_duration = None
+ self.entered_table_row = False
+
+ def handle_data(self, data):
+ stripped = data.strip()
+ if not stripped:
+ return
+ if self.entered_task_name_cell:
+ if IsGradleTaskName(stripped):
+ self.current_task_name = stripped
+ elif self.entered_duration_cell and stripped.endswith('s'):
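+            # Durations read either '12.345s' or '1m12.345s'; convert to
+            # seconds.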
+ duration = stripped[:-1]
+ if 'm' in duration:
+ tmp = duration.split('m')
+ minutes = int(tmp[0])
+ seconds = float(tmp[1])
+ else:
+ minutes = 0
+ seconds = float(duration)
+ self.current_duration = 60 * minutes + seconds
+ self.entered_table_row = False
diff --git a/tools/asmifier.py b/tools/asmifier.py
index bfb1226..9af1499 100755
--- a/tools/asmifier.py
+++ b/tools/asmifier.py
@@ -13,45 +13,51 @@
ASM_VERSION = '9.5'
ASM_JAR = os.path.join(utils.DEPENDENCIES_DIR, 'org', 'ow2', 'asm', 'asm',
ASM_VERSION, 'asm-' + ASM_VERSION + '.jar')
-ASM_UTIL_JAR = os.path.join(utils.DEPENDENCIES_DIR, 'org', 'ow2', 'asm', 'asm-util',
- ASM_VERSION, 'asm-util-' + ASM_VERSION + '.jar')
+ASM_UTIL_JAR = os.path.join(utils.DEPENDENCIES_DIR, 'org', 'ow2', 'asm',
+ 'asm-util', ASM_VERSION,
+ 'asm-util-' + ASM_VERSION + '.jar')
+
def run(args):
- cmd = []
- cmd.append(jdk.GetJavaExecutable())
- cp = ":".join([ASM_JAR, ASM_UTIL_JAR])
- print(cp)
- cmd.extend(['-cp', cp])
- cmd.append('org.objectweb.asm.util.ASMifier')
- cmd.extend(args)
- utils.PrintCmd(cmd)
- result = subprocess.check_output(cmd).decode('utf-8')
- print(result)
- return result
+ cmd = []
+ cmd.append(jdk.GetJavaExecutable())
+ cp = ":".join([ASM_JAR, ASM_UTIL_JAR])
+ print(cp)
+ cmd.extend(['-cp', cp])
+ cmd.append('org.objectweb.asm.util.ASMifier')
+ cmd.extend(args)
+ utils.PrintCmd(cmd)
+ result = subprocess.check_output(cmd).decode('utf-8')
+ print(result)
+ return result
+
def main():
- help = True
- args = []
- for arg in sys.argv[1:]:
- if arg == "--no-debug":
- args.append("-debug")
- elif arg in ("-help", "--help", "-debug"):
- help = True
- break
- else:
- help = False
- args.append(arg)
- if help:
- print("asmifier.py [--no-debug] <classfile>*")
- print(" --no-debug Don't include local variable information in output.")
- return
- try:
- run(args)
- except subprocess.CalledProcessError as e:
- # In case anything relevant was printed to stdout, normally this is already
- # on stderr.
- print(e.output)
- return e.returncode
+ help = True
+ args = []
+ for arg in sys.argv[1:]:
+ if arg == "--no-debug":
+ args.append("-debug")
+ elif arg in ("-help", "--help", "-debug"):
+ help = True
+ break
+ else:
+ help = False
+ args.append(arg)
+ if help:
+ print("asmifier.py [--no-debug] <classfile>*")
+ print(
+ " --no-debug Don't include local variable information in output."
+ )
+ return
+ try:
+ run(args)
+ except subprocess.CalledProcessError as e:
+ # In case anything relevant was printed to stdout, normally this is already
+ # on stderr.
+ print(e.output)
+ return e.returncode
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
diff --git a/tools/benchmarks/get_deps.py b/tools/benchmarks/get_deps.py
index 716db48..0e178ae 100644
--- a/tools/benchmarks/get_deps.py
+++ b/tools/benchmarks/get_deps.py
@@ -7,15 +7,16 @@
import sys
import main_utils
-utils = main_utils.GetUtils();
+
+utils = main_utils.GetUtils()
ANDROID_EMULATORS = os.path.join(utils.TOOLS_DIR, 'benchmarks',
'android-sdk-linux.tar.gz.sha1')
+
def Main():
- utils.DownloadFromGoogleCloudStorage(ANDROID_EMULATORS)
+ utils.DownloadFromGoogleCloudStorage(ANDROID_EMULATORS)
+
if __name__ == '__main__':
- sys.exit(Main())
-
-
+ sys.exit(Main())
diff --git a/tools/benchmarks/main_utils.py b/tools/benchmarks/main_utils.py
index b3ea703..f419004 100644
--- a/tools/benchmarks/main_utils.py
+++ b/tools/benchmarks/main_utils.py
@@ -5,9 +5,10 @@
import imp
import os
-TOOLS_DIR = os.path.abspath(
- os.path.normpath(os.path.join(__file__, '..', '..')))
+TOOLS_DIR = os.path.abspath(os.path.normpath(os.path.join(__file__, '..',
+ '..')))
+
def GetUtils():
- '''Dynamically load the tools/utils.py python module.'''
- return imp.load_source('utils', os.path.join(TOOLS_DIR, 'utils.py'))
+ '''Dynamically load the tools/utils.py python module.'''
+ return imp.load_source('utils', os.path.join(TOOLS_DIR, 'utils.py'))
diff --git a/tools/build_aosp.py b/tools/build_aosp.py
deleted file mode 100755
index 5475830..0000000
--- a/tools/build_aosp.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2017, the R8 project authors. Please see the AUTHORS file
-# for details. All rights reserved. Use of this source code is governed by a
-# BSD-style license that can be found in the LICENSE file.
-
-from os.path import join
-from glob import glob
-from itertools import chain
-from stat import S_IRWXU
-import argparse
-import multiprocessing
-import os
-import re
-import sys
-
-import gradle
-import utils
-import utils_aosp
-
-J_DEFAULT = multiprocessing.cpu_count() - 2
-
-EXIT_FAILURE = 1
-
-def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'Checkout the AOSP source tree.')
- utils_aosp.add_common_arguments(parser)
- parser.add_argument('--tool',
- choices = ['d8', 'r8', 'default'],
- default = 'd8',
- help='Compiler tool to use. Defaults to d8.')
- parser.add_argument('--mmm',
- action = 'store_true',
- help='Use mmm instead of make')
- parser.add_argument('--mmma',
- action = 'store_true',
- help='Use mmma instead of make')
- parser.add_argument('--show-commands',
- action = 'store_true',
- help='Show commands executed during build.')
- parser.add_argument('-j',
- help='Projects to fetch simultaneously. ' +
- 'Defaults to ' + str(J_DEFAULT) + '.',
- type=int,
- default=-1)
- parser.add_argument('target', nargs='?')
- return parser.parse_args()
-
-def build_aosp(aosp_root, lunch, make, tool,
- concurrency, target, show_commands):
- d8_option = 'USE_D8=false'
- if tool == 'd8' or tool == 'r8' :
- d8_option = 'USE_D8=true'
-
- r8_option = 'USE_R8=false'
- if tool == 'r8':
- r8_option = 'USE_R8=true'
-
- j_option = '-j'
- if concurrency > 0:
- j_option += str(concurrency)
-
- command = [make, j_option]
- if show_commands:
- command.append('showcommands')
- command.extend([d8_option, r8_option])
- if target:
- command.append(target)
-
- print 'Building using: ' + ' '.join(command)
- utils_aosp.run_through_aosp_helper(lunch, command, aosp_root)
-
-def Main():
- args = parse_arguments()
-
- make = 'm'
- if args.mmm:
- make = 'mmm'
- if args.mmma:
- make = 'mmma'
- build_aosp(args.aosp_root, args.lunch, make, args.tool,
- args.j, args.target, args.show_commands)
-
-if __name__ == '__main__':
- sys.exit(Main())
diff --git a/tools/build_sample_apk.py b/tools/build_sample_apk.py
index 2cc95aa..0d577d4 100755
--- a/tools/build_sample_apk.py
+++ b/tools/build_sample_apk.py
@@ -18,9 +18,8 @@
import utils
import uuid
-
-DEFAULT_AAPT = 'aapt' # Assume in path.
-DEFAULT_AAPT2 = 'aapt2' # Assume in path.
+DEFAULT_AAPT = 'aapt' # Assume in path.
+DEFAULT_AAPT2 = 'aapt2' # Assume in path.
DEFAULT_D8 = os.path.join(utils.REPO_ROOT, 'tools', 'd8.py')
DEFAULT_DEXSPLITTER = os.path.join(utils.REPO_ROOT, 'tools', 'dexsplitter.py')
DEFAULT_JAVAC = jdk.GetJavacExecutable()
@@ -30,308 +29,342 @@
STANDARD_ACTIVITY = "R8Activity"
BENCHMARK_ITERATIONS = 30
-SAMPLE_APKS = [
- 'simple',
- 'split'
-]
+SAMPLE_APKS = ['simple', 'split']
+
def parse_options():
- result = optparse.OptionParser()
- result.add_option('--aapt',
- help='aapt executable to use',
- default=DEFAULT_AAPT)
- result.add_option('--aapt2',
- help='aapt2 executable to use',
- default=DEFAULT_AAPT2)
- result.add_option('--api',
- help='Android api level',
- default=21,
- choices=['14', '15', '19', '21', '22', '23', '24', '25',
- '26'])
- result.add_option('--keystore',
- help='Keystore used for signing',
- default=DEFAULT_KEYSTORE)
- result.add_option('--split',
- help='Split the app using the split.spec file',
- default=False, action='store_true')
- result.add_option('--generate-proto-apk',
- help='Use aapt2 to generate the proto version of the apk.',
- default=False, action='store_true')
- result.add_option('--install',
- help='Install the app (including featuresplit)',
- default=False, action='store_true')
- result.add_option('--benchmark',
- help='Benchmark the app on the phone with specialized markers',
- default=False, action='store_true')
- result.add_option('--benchmark-output-dir',
- help='Store benchmark results here.',
- default=None)
- result.add_option('--app',
- help='Which app to build',
- default='simple',
- choices=SAMPLE_APKS)
- return result.parse_args()
+ result = optparse.OptionParser()
+ result.add_option('--aapt',
+ help='aapt executable to use',
+ default=DEFAULT_AAPT)
+ result.add_option('--aapt2',
+ help='aapt2 executable to use',
+ default=DEFAULT_AAPT2)
+ result.add_option(
+ '--api',
+ help='Android api level',
+ default=21,
+ choices=['14', '15', '19', '21', '22', '23', '24', '25', '26'])
+ result.add_option('--keystore',
+ help='Keystore used for signing',
+ default=DEFAULT_KEYSTORE)
+ result.add_option('--split',
+ help='Split the app using the split.spec file',
+ default=False,
+ action='store_true')
+ result.add_option(
+ '--generate-proto-apk',
+ help='Use aapt2 to generate the proto version of the apk.',
+ default=False,
+ action='store_true')
+ result.add_option('--install',
+ help='Install the app (including featuresplit)',
+ default=False,
+ action='store_true')
+ result.add_option(
+ '--benchmark',
+ help='Benchmark the app on the phone with specialized markers',
+ default=False,
+ action='store_true')
+ result.add_option('--benchmark-output-dir',
+ help='Store benchmark results here.',
+ default=None)
+ result.add_option('--app',
+ help='Which app to build',
+ default='simple',
+ choices=SAMPLE_APKS)
+ return result.parse_args()
+
def run_aapt(aapt, args):
- command = [aapt]
- command.extend(args)
- utils.PrintCmd(command)
- subprocess.check_call(command)
-
-def get_build_dir(app):
- return os.path.join(utils.BUILD, 'sampleApks', app)
-
-def get_gen_path(app):
- gen_path = os.path.join(get_build_dir(app), 'gen')
- utils.makedirs_if_needed(gen_path)
- return gen_path
-
-def get_bin_path(app):
- bin_path = os.path.join(get_build_dir(app), 'bin')
- utils.makedirs_if_needed(bin_path)
- return bin_path
-
-
-def get_guava_jar():
- return os.path.join(utils.REPO_ROOT,
- 'third_party/gradle-plugin/com/google/guava/guava/22.0/guava-22.0.jar')
-
-def get_sample_dir(app):
- return os.path.join(utils.REPO_ROOT, 'src', 'test', 'sampleApks', app)
-
-def get_src_path(app):
- return os.path.join(get_sample_dir(app), 'src')
-
-def get_dex_path(app):
- return os.path.join(get_bin_path(app), 'classes.dex')
-
-def get_split_path(app, split):
- return os.path.join(get_bin_path(app), split, 'classes.dex')
-
-def get_package_name(app):
- return '%s.%s' % (PACKAGE_PREFIX, app)
-
-def get_qualified_activity(app):
- # The activity specified to adb start is PACKAGE_NAME/.ACTIVITY
- return '%s/.%s' % (get_package_name(app), STANDARD_ACTIVITY)
-
-def run_aapt_pack(aapt, api, app):
- with utils.ChangedWorkingDirectory(get_sample_dir(app)):
- args = ['package',
- '-v', '-f',
- '-I', utils.get_android_jar(api),
- '-M', 'AndroidManifest.xml',
- '-A', 'assets',
- '-S', 'res',
- '-m',
- '-J', get_gen_path(app),
- '-F', os.path.join(get_bin_path(app), 'resources.ap_'),
- '-G', os.path.join(get_build_dir(app), 'proguard_options')]
- run_aapt(aapt, args)
-
-def run_aapt_split_pack(aapt, api, app):
- with utils.ChangedWorkingDirectory(get_sample_dir(app)):
- args = ['package',
- '-v', '-f',
- '-I', utils.get_android_jar(api),
- '-M', 'split_manifest/AndroidManifest.xml',
- '-S', 'res',
- '-F', os.path.join(get_bin_path(app), 'split_resources.ap_')]
- run_aapt(aapt, args)
-
-def compile_with_javac(api, app):
- with utils.ChangedWorkingDirectory(get_sample_dir(app)):
- files = glob.glob(SRC_LOCATION.format(app=app))
- classpath = '%s:%s' % (utils.get_android_jar(api), get_guava_jar())
- command = [DEFAULT_JAVAC,
- '-classpath', classpath,
- '-sourcepath', '%s:%s:%s' % (
- get_src_path(app),
- get_gen_path(app),
- get_guava_jar()),
- '-d', get_bin_path(app)]
- command.extend(files)
+ command = [aapt]
+ command.extend(args)
utils.PrintCmd(command)
subprocess.check_call(command)
+
+def get_build_dir(app):
+ return os.path.join(utils.BUILD, 'sampleApks', app)
+
+
+def get_gen_path(app):
+ gen_path = os.path.join(get_build_dir(app), 'gen')
+ utils.makedirs_if_needed(gen_path)
+ return gen_path
+
+
+def get_bin_path(app):
+ bin_path = os.path.join(get_build_dir(app), 'bin')
+ utils.makedirs_if_needed(bin_path)
+ return bin_path
+
+
+def get_guava_jar():
+ return os.path.join(
+ utils.REPO_ROOT,
+ 'third_party/gradle-plugin/com/google/guava/guava/22.0/guava-22.0.jar')
+
+
+def get_sample_dir(app):
+ return os.path.join(utils.REPO_ROOT, 'src', 'test', 'sampleApks', app)
+
+
+def get_src_path(app):
+ return os.path.join(get_sample_dir(app), 'src')
+
+
+def get_dex_path(app):
+ return os.path.join(get_bin_path(app), 'classes.dex')
+
+
+def get_split_path(app, split):
+ return os.path.join(get_bin_path(app), split, 'classes.dex')
+
+
+def get_package_name(app):
+ return '%s.%s' % (PACKAGE_PREFIX, app)
+
+
+def get_qualified_activity(app):
+ # The activity specified to adb start is PACKAGE_NAME/.ACTIVITY
+ return '%s/.%s' % (get_package_name(app), STANDARD_ACTIVITY)
+
+
+def run_aapt_pack(aapt, api, app):
+ with utils.ChangedWorkingDirectory(get_sample_dir(app)):
+ args = [
+ 'package', '-v', '-f', '-I',
+ utils.get_android_jar(api), '-M', 'AndroidManifest.xml', '-A',
+ 'assets', '-S', 'res', '-m', '-J',
+ get_gen_path(app), '-F',
+ os.path.join(get_bin_path(app), 'resources.ap_'), '-G',
+ os.path.join(get_build_dir(app), 'proguard_options')
+ ]
+ run_aapt(aapt, args)
+
+
+def run_aapt_split_pack(aapt, api, app):
+ with utils.ChangedWorkingDirectory(get_sample_dir(app)):
+ args = [
+ 'package', '-v', '-f', '-I',
+ utils.get_android_jar(api), '-M',
+ 'split_manifest/AndroidManifest.xml', '-S', 'res', '-F',
+ os.path.join(get_bin_path(app), 'split_resources.ap_')
+ ]
+ run_aapt(aapt, args)
+
+
+def compile_with_javac(api, app):
+ with utils.ChangedWorkingDirectory(get_sample_dir(app)):
+ files = glob.glob(SRC_LOCATION.format(app=app))
+ classpath = '%s:%s' % (utils.get_android_jar(api), get_guava_jar())
+ command = [
+ DEFAULT_JAVAC, '-classpath', classpath, '-sourcepath',
+ '%s:%s:%s' %
+ (get_src_path(app), get_gen_path(app), get_guava_jar()), '-d',
+ get_bin_path(app)
+ ]
+ command.extend(files)
+ utils.PrintCmd(command)
+ subprocess.check_call(command)
+
+
def dex(app, api):
- files = []
- for root, dirnames, filenames in os.walk(get_bin_path(app)):
- for filename in fnmatch.filter(filenames, '*.class'):
- files.append(os.path.join(root, filename))
- command = [DEFAULT_D8, '--',
- '--output', get_bin_path(app),
- '--classpath', utils.get_android_jar(api),
- '--min-api', str(api)]
- command.extend(files)
- if app != 'simple':
- command.append(get_guava_jar())
+ files = []
+ for root, dirnames, filenames in os.walk(get_bin_path(app)):
+ for filename in fnmatch.filter(filenames, '*.class'):
+ files.append(os.path.join(root, filename))
+ command = [
+ DEFAULT_D8, '--', '--output',
+ get_bin_path(app), '--classpath',
+ utils.get_android_jar(api), '--min-api',
+ str(api)
+ ]
+ command.extend(files)
+ if app != 'simple':
+ command.append(get_guava_jar())
- utils.PrintCmd(command)
- subprocess.check_call(command)
-
-def split(app):
- split_spec = os.path.join(get_sample_dir(app), 'split.spec')
- command = [DEFAULT_DEXSPLITTER,
- '--input', get_dex_path(app),
- '--output', get_bin_path(app),
- '--feature-splits', split_spec]
- utils.PrintCmd(command)
- subprocess.check_call(command)
-
-def run_adb(args, ignore_exit=False):
- command = ['adb']
- command.extend(args)
- utils.PrintCmd(command)
- # On M adb install-multiple exits 1 but succeed in installing.
- if ignore_exit:
- subprocess.call(command)
- else:
+ utils.PrintCmd(command)
subprocess.check_call(command)
+
+def split(app):
+ split_spec = os.path.join(get_sample_dir(app), 'split.spec')
+ command = [
+ DEFAULT_DEXSPLITTER, '--input',
+ get_dex_path(app), '--output',
+ get_bin_path(app), '--feature-splits', split_spec
+ ]
+ utils.PrintCmd(command)
+ subprocess.check_call(command)
+
+
+def run_adb(args, ignore_exit=False):
+ command = ['adb']
+ command.extend(args)
+ utils.PrintCmd(command)
+    # On M adb install-multiple exits 1 but succeeds in installing.
+ if ignore_exit:
+ subprocess.call(command)
+ else:
+ subprocess.check_call(command)
+
+
def adb_install(apks):
- args = [
- 'install-multiple' if len(apks) > 1 else 'install',
- '-r',
- '-d']
- args.extend(apks)
- run_adb(args, ignore_exit=True)
+ args = ['install-multiple' if len(apks) > 1 else 'install', '-r', '-d']
+ args.extend(apks)
+ run_adb(args, ignore_exit=True)
+
def create_temp_apk(app, prefix):
- temp_apk_path = os.path.join(get_bin_path(app), '%s.ap_' % app)
- shutil.copyfile(os.path.join(get_bin_path(app), '%sresources.ap_' % prefix),
- temp_apk_path)
- return temp_apk_path
+ temp_apk_path = os.path.join(get_bin_path(app), '%s.ap_' % app)
+ shutil.copyfile(os.path.join(get_bin_path(app), '%sresources.ap_' % prefix),
+ temp_apk_path)
+ return temp_apk_path
+
def aapt_add_dex(aapt, dex, temp_apk_path):
- args = ['add',
- '-k', temp_apk_path,
- dex]
- run_aapt(aapt, args)
+ args = ['add', '-k', temp_apk_path, dex]
+ run_aapt(aapt, args)
+
def kill(app):
- args = ['shell', 'am', 'force-stop', get_package_name(app)]
- run_adb(args)
+ args = ['shell', 'am', 'force-stop', get_package_name(app)]
+ run_adb(args)
+
def start_logcat():
- return subprocess.Popen(['adb', 'logcat'], bufsize=1024*1024, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return subprocess.Popen(['adb', 'logcat'],
+ bufsize=1024 * 1024,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+
def start(app):
- args = ['shell', 'am', 'start', '-n', get_qualified_activity(app)]
- run_adb(args)
+ args = ['shell', 'am', 'start', '-n', get_qualified_activity(app)]
+ run_adb(args)
+
def clear_logcat():
- args = ['logcat', '-c']
- run_adb(args)
+ args = ['logcat', '-c']
+ run_adb(args)
+
def stop_logcat(popen):
- popen.terminate()
- lines = []
- for l in popen.stdout:
- if 'System.out' in l:
- lines.append(l)
- return lines
+ popen.terminate()
+ lines = []
+    for l in popen.stdout:
+        # Popen pipes yield bytes in Python 3; decode before matching.
+        l = l.decode('UTF-8')
+        if 'System.out' in l:
+            lines.append(l)
+ return lines
+
def store_or_print_benchmarks(lines, output):
- results = {}
- overall_total = 0
- # We assume that the total times are
- # prefixed with 'NAME Total: '. The logcat lines looks like:
- # 06-28 12:22:00.991 13698 13698 I System.out: Call Total: 61614
- for l in lines:
- if 'Total: ' in l:
- split = l.split('Total: ')
- time = split[1]
- name = split[0].split()[-1]
- overall_total += int(time)
- print('%s: %s' % (name, time))
- results[name] = time
+ results = {}
+ overall_total = 0
+ # We assume that the total times are
+    # prefixed with 'NAME Total: '. The logcat lines look like:
+ # 06-28 12:22:00.991 13698 13698 I System.out: Call Total: 61614
+ for l in lines:
+ if 'Total: ' in l:
+ split = l.split('Total: ')
+ time = split[1]
+ name = split[0].split()[-1]
+ overall_total += int(time)
+ print('%s: %s' % (name, time))
+ results[name] = time
- print('Total: %s' % overall_total)
- if not output:
+ print('Total: %s' % overall_total)
+ if not output:
+ return overall_total
+ results['total'] = str(overall_total)
+ output_dir = os.path.join(output, str(uuid.uuid4()))
+ os.makedirs(output_dir)
+ written_files = []
+    for name, time in results.items():
+ total_file = os.path.join(output_dir, name)
+ written_files.append(total_file)
+ with open(total_file, 'w') as f:
+ f.write(time)
+
+    print('Results stored in:\n%s' % ('\n'.join(written_files)))
return overall_total
- results['total'] = str(overall_total)
- output_dir = os.path.join(output, str(uuid.uuid4()))
- os.makedirs(output_dir)
- written_files = []
- for name, time in results.iteritems():
- total_file = os.path.join(output_dir, name)
- written_files.append(total_file)
- with open(total_file, 'w') as f:
- f.write(time)
- print('Result stored in: \n%s' % ('\n'.join(written_files)))
- return overall_total
def benchmark(app, output_dir):
- # Ensure app is not running
- kill(app)
- clear_logcat()
- logcat = start_logcat()
- start(app)
- # We could do better here by continiously parsing the logcat for a marker, but
- # this works nicely with the current setup.
- time.sleep(12)
- kill(app)
- return float(store_or_print_benchmarks(stop_logcat(logcat), output_dir))
+ # Ensure app is not running
+ kill(app)
+ clear_logcat()
+ logcat = start_logcat()
+ start(app)
+    # We could do better here by continuously parsing the logcat for a marker,
+    # but this works nicely with the current setup.
+ time.sleep(12)
+ kill(app)
+ return float(store_or_print_benchmarks(stop_logcat(logcat), output_dir))
+
def ensure_no_logcat():
- output = subprocess.check_output(['ps', 'aux'])
- if 'adb logcat' in output:
- raise Exception('You have adb logcat running, please close it and rerun')
+    output = subprocess.check_output(['ps', 'aux']).decode('UTF-8')
+ if 'adb logcat' in output:
+ raise Exception(
+ 'You have adb logcat running, please close it and rerun')
+
def generate_proto_apks(apks, options):
- proto_apks = []
- for apk in apks:
- proto_apk = apk + '.proto'
- cmd = [options.aapt2, 'convert',
- '-o', proto_apk,
- '--output-format', 'proto',
- apk]
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd)
- proto_apks.append(proto_apk)
- return proto_apks
+ proto_apks = []
+ for apk in apks:
+ proto_apk = apk + '.proto'
+ cmd = [
+ options.aapt2, 'convert', '-o', proto_apk, '--output-format',
+ 'proto', apk
+ ]
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd)
+ proto_apks.append(proto_apk)
+ return proto_apks
+
def Main():
- (options, args) = parse_options()
- apks = []
- is_split = options.split
- run_aapt_pack(options.aapt, options.api, options.app)
- if is_split:
- run_aapt_split_pack(options.aapt, options.api, options.app)
- compile_with_javac(options.api, options.app)
- dex(options.app, options.api)
- dex_files = { options.app: get_dex_path(options.app)}
- dex_path = get_dex_path(options.app)
- if is_split:
- split(options.app)
- dex_path = get_split_path(options.app, 'base')
- temp_apk_path = create_temp_apk(options.app, '')
- aapt_add_dex(options.aapt, dex_path, temp_apk_path)
- apk_path = os.path.join(get_bin_path(options.app), '%s.apk' % options.app)
- apk_utils.sign(temp_apk_path, apk_path, options.keystore)
- apks.append(apk_path)
- if is_split:
- split_temp_apk_path = create_temp_apk(options.app, 'split_')
- aapt_add_dex(options.aapt,
- get_split_path(options.app, 'split'),
- temp_apk_path)
- split_apk_path = os.path.join(get_bin_path(options.app), 'featuresplit.apk')
- apk_utils.sign(temp_apk_path, split_apk_path, options.keystore)
- apks.append(split_apk_path)
- if options.generate_proto_apk:
- proto_apks = generate_proto_apks(apks, options)
- print('Generated proto apks available at: %s' % ' '.join(proto_apks))
- print('Generated apks available at: %s' % ' '.join(apks))
- if options.install or options.benchmark:
- adb_install(apks)
- grand_total = 0
- if options.benchmark:
- ensure_no_logcat()
- for _ in range(BENCHMARK_ITERATIONS):
- grand_total += benchmark(options.app, options.benchmark_output_dir)
- print('Combined average: %s' % (grand_total/BENCHMARK_ITERATIONS))
+ (options, args) = parse_options()
+ apks = []
+ is_split = options.split
+ run_aapt_pack(options.aapt, options.api, options.app)
+ if is_split:
+ run_aapt_split_pack(options.aapt, options.api, options.app)
+ compile_with_javac(options.api, options.app)
+ dex(options.app, options.api)
+ dex_files = {options.app: get_dex_path(options.app)}
+ dex_path = get_dex_path(options.app)
+ if is_split:
+ split(options.app)
+ dex_path = get_split_path(options.app, 'base')
+ temp_apk_path = create_temp_apk(options.app, '')
+ aapt_add_dex(options.aapt, dex_path, temp_apk_path)
+ apk_path = os.path.join(get_bin_path(options.app), '%s.apk' % options.app)
+ apk_utils.sign(temp_apk_path, apk_path, options.keystore)
+ apks.append(apk_path)
+ if is_split:
+ split_temp_apk_path = create_temp_apk(options.app, 'split_')
+ aapt_add_dex(options.aapt, get_split_path(options.app, 'split'),
+ temp_apk_path)
+ split_apk_path = os.path.join(get_bin_path(options.app),
+ 'featuresplit.apk')
+ apk_utils.sign(temp_apk_path, split_apk_path, options.keystore)
+ apks.append(split_apk_path)
+ if options.generate_proto_apk:
+ proto_apks = generate_proto_apks(apks, options)
+ print('Generated proto apks available at: %s' % ' '.join(proto_apks))
+ print('Generated apks available at: %s' % ' '.join(apks))
+ if options.install or options.benchmark:
+ adb_install(apks)
+ grand_total = 0
+ if options.benchmark:
+ ensure_no_logcat()
+ for _ in range(BENCHMARK_ITERATIONS):
+ grand_total += benchmark(options.app, options.benchmark_output_dir)
+ print('Combined average: %s' % (grand_total / BENCHMARK_ITERATIONS))
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
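
As a side note on the benchmark flow above, store_or_print_benchmarks keys off
the literal 'Total: ' marker in each logcat line. A minimal sketch of that
parsing, using a hypothetical logcat line in the documented format:

    line = '06-28 12:22:00.991 13698 13698 I System.out: Call Total: 61614'
    if 'Total: ' in line:
        prefix, time = line.split('Total: ')
        name = prefix.split()[-1]  # token just before 'Total: ', here 'Call'
        assert (name, int(time)) == ('Call', 61614)
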
diff --git a/tools/check-cherry-picks.py b/tools/check-cherry-picks.py
index 3d66d3c..3b27b08 100755
--- a/tools/check-cherry-picks.py
+++ b/tools/check-cherry-picks.py
@@ -8,20 +8,23 @@
import re
import r8_release
+
class Branch:
- def __init__(self, name, first, last=None):
- self.name = name
- self.first = first
- self.last = last # optional last for testing purposes.
- def origin(self):
- return "origin/%s" % self.name
+ def __init__(self, name, first, last=None):
+ self.name = name
+ self.first = first
+ self.last = last # optional last for testing purposes.
- def last_or_origin(self):
- return self.last if self.last else self.origin()
+ def origin(self):
+ return "origin/%s" % self.name
- def __str__(self):
- return self.name
+ def last_or_origin(self):
+ return self.last if self.last else self.origin()
+
+ def __str__(self):
+ return self.name
+
# The initial commit is the furthest back we need to search on main.
# Currently, it is the merge point of main onto 4.0.23-dev
@@ -30,155 +33,167 @@
DEV_BRANCH_VERSION = [int(s) for s in r8_release.R8_DEV_BRANCH.split('.')]
# List of change ids that should not be reported.
-IGNORED = [
- 'I92d7bf3afbf609fdea21683941cfd15c90305cf2'
-]
+IGNORED = ['I92d7bf3afbf609fdea21683941cfd15c90305cf2']
VERBOSE = False
+
# Helper to call and decode a shell command.
def run_cmd(cmd):
- if VERBOSE:
- print(' '.join(cmd))
- return subprocess.check_output(cmd).decode('UTF-8')
+ if VERBOSE:
+ print(' '.join(cmd))
+ return subprocess.check_output(cmd).decode('UTF-8')
+
# Comparator on major and minor branch versions.
def branch_version_less_than(b1, b2):
- if b1[0] < b2[0]:
- return True
- if b1[0] == b2[0] and b1[1] < b2[1]:
- return True
- return False
+ if b1[0] < b2[0]:
+ return True
+ if b1[0] == b2[0] and b1[1] < b2[1]:
+ return True
+ return False
+
# Find all release branches between OLDEST_BRANCH and DEV_BRANCH
def get_release_branches():
- # Release branches are assumed to be of the form 'origin/X.Y'
- out = run_cmd(['git', 'branch', '-r', '-l'])
- pattern = re.compile('origin/(\d+).(\d+)')
- releases = []
- for line in out.split('\n'):
- m = pattern.search(line.strip())
- if m:
- major = m.group(1)
- minor = m.group(2)
- if major and minor:
- candidate = (int(major), int(minor))
- if branch_version_less_than(candidate, OLDEST_BRANCH_VERSION):
- continue
- if branch_version_less_than(candidate, DEV_BRANCH_VERSION):
- releases.extend(find_dev_cutoff(candidate))
- return releases
+ # Release branches are assumed to be of the form 'origin/X.Y'
+ out = run_cmd(['git', 'branch', '-r', '-l'])
+    pattern = re.compile(r'origin/(\d+)\.(\d+)')
+ releases = []
+ for line in out.split('\n'):
+ m = pattern.search(line.strip())
+ if m:
+ major = m.group(1)
+ minor = m.group(2)
+ if major and minor:
+ candidate = (int(major), int(minor))
+ if branch_version_less_than(candidate, OLDEST_BRANCH_VERSION):
+ continue
+ if branch_version_less_than(candidate, DEV_BRANCH_VERSION):
+ releases.extend(find_dev_cutoff(candidate))
+ return releases
+
# Find the most recent commit hash that is for a -dev version.
# This is the starting point for the map of commits after cutoff from main.
def find_dev_cutoff(branch_version):
- out = run_cmd([
- 'git',
- 'log',
- 'origin/%d.%d' % branch_version,
- '--grep', 'Version .*-dev',
- '--pretty=oneline',
- ])
- # Format of output is: <hash> Version <version>-dev
- try:
- hash = out[0:out.index(' ')]
- return [Branch('%d.%d' % branch_version, hash)]
- except ValueError:
- throw_error("Failed to find dev cutoff for branch %d.%d" % branch_version)
+ out = run_cmd([
+ 'git',
+ 'log',
+ 'origin/%d.%d' % branch_version,
+ '--grep',
+ 'Version .*-dev',
+ '--pretty=oneline',
+ ])
+ # Format of output is: <hash> Version <version>-dev
+ try:
+ hash = out[0:out.index(' ')]
+ return [Branch('%d.%d' % branch_version, hash)]
+ except ValueError:
+ throw_error("Failed to find dev cutoff for branch %d.%d" %
+ branch_version)
+
# Build a map from each "Change-Id" hash to the hash of its commit.
def get_change_id_map(branch):
- out = run_cmd([
- 'git',
- 'log',
- '%s..%s' % (branch.first, branch.last_or_origin())
- ])
- map = {}
- current_commit = None
- for line in out.split('\n'):
- if line.startswith('commit '):
- current_commit = line[len('commit '):]
- assert len(current_commit) == 40
- elif line.strip().startswith('Change-Id: '):
- change_id = line.strip()[len('Change-Id: '):]
- assert len(change_id) == 41
- map[change_id] = current_commit
- return map
+ out = run_cmd(
+ ['git', 'log',
+ '%s..%s' % (branch.first, branch.last_or_origin())])
+ map = {}
+ current_commit = None
+ for line in out.split('\n'):
+ if line.startswith('commit '):
+ current_commit = line[len('commit '):]
+ assert len(current_commit) == 40
+ elif line.strip().startswith('Change-Id: '):
+ change_id = line.strip()[len('Change-Id: '):]
+ assert len(change_id) == 41
+ map[change_id] = current_commit
+ return map
+
# Check if a specific commit is present on a specific branch.
def is_commit_in(commit, branch):
- out = run_cmd(['git', 'branch', '-r', branch.origin(), '--contains', commit])
- return out.strip() == branch.origin()
+ out = run_cmd(
+ ['git', 'branch', '-r',
+ branch.origin(), '--contains', commit])
+ return out.strip() == branch.origin()
+
def main():
- found_errors = False
- # The main map is all commits back to the "init" point.
- main_map = get_change_id_map(MAIN)
- # Compute the release branches.
- release_branches = get_release_branches()
- # Populate the release maps with all commits after the last -dev point.
- release_maps = {}
- for branch in release_branches:
- release_maps[branch.name] = get_change_id_map(branch)
- # Each branch is then compared forwards with each subsequent branch.
- for i in range(len(release_branches)):
- branch = release_branches[i]
- newer_branches = release_branches[i+1:]
- if (len(newer_branches) == 0):
- print('Last non-dev release branch is %s, nothing to check.' % branch)
- continue
- print('Checking branch %s.' % branch)
- changes = release_maps[branch.name]
- cherry_picks_count = 0
- for change in changes.keys():
- is_cherry_pick = False
- missing_from = None
- commit_on_main = main_map.get(change)
- for newer_branch in newer_branches:
- if change in release_maps[newer_branch.name]:
- is_cherry_pick = True
- # If the change is in the release mappings check for holes.
- if missing_from:
- found_errors |= change_error(
- change,
- 'Error: missing Change-Id %s on branch %s. '
- 'Is present on %s and again on %s.' % (
- change, missing_from, branch, newer_branch,
- ))
- elif commit_on_main:
- is_cherry_pick = True
- # The change is not in the non-dev part of the branch, so we need to
- # check that the fork from main included the change.
- if not is_commit_in(commit_on_main, newer_branch):
- found_errors |= change_error(
- change,
- 'Error: missing Change-Id %s on branch %s. '
- 'Is present on %s and on main as commit %s.' % (
- change, newer_branch, branch, commit_on_main
- ))
- else:
- # The change is not on "main" so we just record for holes on releases.
- missing_from = newer_branch
- if is_cherry_pick:
- cherry_picks_count += 1
- print('Found %d cherry-picks (out of %d commits).' % (
- cherry_picks_count, len(changes)))
+ found_errors = False
+ # The main map is all commits back to the "init" point.
+ main_map = get_change_id_map(MAIN)
+ # Compute the release branches.
+ release_branches = get_release_branches()
+ # Populate the release maps with all commits after the last -dev point.
+ release_maps = {}
+ for branch in release_branches:
+ release_maps[branch.name] = get_change_id_map(branch)
+ # Each branch is then compared forwards with each subsequent branch.
+ for i in range(len(release_branches)):
+ branch = release_branches[i]
+ newer_branches = release_branches[i + 1:]
+ if (len(newer_branches) == 0):
+ print('Last non-dev release branch is %s, nothing to check.' %
+ branch)
+ continue
+ print('Checking branch %s.' % branch)
+ changes = release_maps[branch.name]
+ cherry_picks_count = 0
+ for change in changes.keys():
+ is_cherry_pick = False
+ missing_from = None
+ commit_on_main = main_map.get(change)
+ for newer_branch in newer_branches:
+ if change in release_maps[newer_branch.name]:
+ is_cherry_pick = True
+ # If the change is in the release mappings check for holes.
+ if missing_from:
+ found_errors |= change_error(
+ change, 'Error: missing Change-Id %s on branch %s. '
+ 'Is present on %s and again on %s.' % (
+ change,
+ missing_from,
+ branch,
+ newer_branch,
+ ))
+ elif commit_on_main:
+ is_cherry_pick = True
+ # The change is not in the non-dev part of the branch, so we need to
+ # check that the fork from main included the change.
+ if not is_commit_in(commit_on_main, newer_branch):
+ found_errors |= change_error(
+ change, 'Error: missing Change-Id %s on branch %s. '
+ 'Is present on %s and on main as commit %s.' %
+ (change, newer_branch, branch, commit_on_main))
+ else:
+                # The change is not on "main", so just record it to check
+                # for holes on releases.
+ missing_from = newer_branch
+ if is_cherry_pick:
+ cherry_picks_count += 1
+ print('Found %d cherry-picks (out of %d commits).' %
+ (cherry_picks_count, len(changes)))
- if found_errors:
- return 1
- return 0
+ if found_errors:
+ return 1
+ return 0
+
def change_error(change, msg):
- if change in IGNORED:
- return False
- error(msg)
- return True
+ if change in IGNORED:
+ return False
+ error(msg)
+ return True
+
def throw_error(msg):
- raise ValueError(msg)
+ raise ValueError(msg)
+
def error(msg):
- print(msg, file=sys.stderr)
+ print(msg, file=sys.stderr)
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
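
A note on branch_version_less_than above: for the (major, minor) integer pairs
it is applied to, it matches Python's built-in lexicographic tuple ordering, so
the following sketch behaves identically:

    def branch_version_less_than(b1, b2):
        # Lexicographic comparison of (major, minor) pairs.
        return b1 < b2

    assert branch_version_less_than((3, 9), (4, 0))
    assert branch_version_less_than((4, 0), (4, 1))
    assert not branch_version_less_than((4, 0), (4, 0))
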
diff --git a/tools/checkout_aosp.py b/tools/checkout_aosp.py
index 36a5333..bef9413 100755
--- a/tools/checkout_aosp.py
+++ b/tools/checkout_aosp.py
@@ -14,56 +14,59 @@
import utils
import utils_aosp
-AOSP_MANIFEST_XML = join(utils.REPO_ROOT, 'third_party',
- 'aosp_manifest.xml')
+AOSP_MANIFEST_XML = join(utils.REPO_ROOT, 'third_party', 'aosp_manifest.xml')
AOSP_MANIFEST_URL = 'https://android.googlesource.com/platform/manifest'
J_DEFAULT = multiprocessing.cpu_count() - 2
+
# Checkout AOSP source to the specified directory using the specified manifest.
def checkout_aosp(aosp_root, url, branch, manifest_xml, concurrency, shallow):
- utils.makedirs_if_needed(aosp_root)
- command = ['repo', 'init', '-u', url]
- if (shallow):
- command.extend(['--depth=1'])
- if (branch):
- command.extend(['-b', branch])
- else:
- manifests_dir = join(aosp_root, '.repo', 'manifests')
- utils.makedirs_if_needed(manifests_dir)
- copy2(manifest_xml, manifests_dir)
- command.extend(['-m', basename(manifest_xml)])
- check_call(command, cwd = aosp_root)
+ utils.makedirs_if_needed(aosp_root)
+ command = ['repo', 'init', '-u', url]
+ if (shallow):
+ command.extend(['--depth=1'])
+ if (branch):
+ command.extend(['-b', branch])
+ else:
+ manifests_dir = join(aosp_root, '.repo', 'manifests')
+ utils.makedirs_if_needed(manifests_dir)
+ copy2(manifest_xml, manifests_dir)
+ command.extend(['-m', basename(manifest_xml)])
+ check_call(command, cwd=aosp_root)
- check_call(['repo', 'sync', '-dq', '-j' + concurrency], cwd = aosp_root)
+ check_call(['repo', 'sync', '-dq', '-j' + concurrency], cwd=aosp_root)
+
def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'Checkout the AOSP source tree.')
- utils_aosp.add_root_argument(parser)
- parser.add_argument('--url',
- help='URL the repo. ' +
- 'Defaults to ' + AOSP_MANIFEST_URL + '.',
- default=AOSP_MANIFEST_URL)
- parser.add_argument('--manifest',
- help='Manifest to use for the checkout. ' +
- 'Defaults to ' + AOSP_MANIFEST_XML + '.',
- default=AOSP_MANIFEST_XML)
- parser.add_argument('--branch',
- help='Branch to checkout. This overrides ' +
- 'passing --manifest')
- parser.add_argument('--shallow',
- action = 'store_true',
- help='Shallow checkout.')
- parser.add_argument('-j',
- help='Projects to fetch simultaneously. ' +
- 'Defaults to ' + str(J_DEFAULT) + '.',
- default=str(J_DEFAULT))
- return parser.parse_args()
+ parser = argparse.ArgumentParser(
+ description='Checkout the AOSP source tree.')
+ utils_aosp.add_root_argument(parser)
+ parser.add_argument('--url',
+                        help='URL of the repo. ' + 'Defaults to ' +
+ AOSP_MANIFEST_URL + '.',
+ default=AOSP_MANIFEST_URL)
+ parser.add_argument('--manifest',
+ help='Manifest to use for the checkout. ' +
+ 'Defaults to ' + AOSP_MANIFEST_XML + '.',
+ default=AOSP_MANIFEST_XML)
+ parser.add_argument('--branch',
+ help='Branch to checkout. This overrides ' +
+ 'passing --manifest')
+ parser.add_argument('--shallow',
+ action='store_true',
+ help='Shallow checkout.')
+ parser.add_argument('-j',
+ help='Projects to fetch simultaneously. ' +
+ 'Defaults to ' + str(J_DEFAULT) + '.',
+ default=str(J_DEFAULT))
+ return parser.parse_args()
+
def Main():
- args = parse_arguments()
- checkout_aosp(args.aosp_root, args.url, args.branch, args.manifest,
- args.j, args.shallow)
+ args = parse_arguments()
+ checkout_aosp(args.aosp_root, args.url, args.branch, args.manifest, args.j,
+ args.shallow)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
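
For reference, the command list checkout_aosp assembles for a shallow checkout
of a named branch looks like this (the branch value here is illustrative):

    url = 'https://android.googlesource.com/platform/manifest'
    command = ['repo', 'init', '-u', url]
    command.extend(['--depth=1'])   # added when --shallow is passed
    command.extend(['-b', 'main'])  # added when --branch main is passed
    assert command == ['repo', 'init', '-u', url, '--depth=1', '-b', 'main']
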
diff --git a/tools/cherry-pick.py b/tools/cherry-pick.py
index 6a7109c..6e505c5 100755
--- a/tools/cherry-pick.py
+++ b/tools/cherry-pick.py
@@ -12,137 +12,158 @@
VERSION_FILE = 'src/main/java/com/android/tools/r8/Version.java'
VERSION_PREFIX = 'String LABEL = "'
-def parse_options():
- parser = argparse.ArgumentParser(description='Release r8')
- parser.add_argument('--branch',
- metavar=('<branch>'),
- help='Branch to cherry-pick to')
- parser.add_argument('--current-checkout', '--current_checkout',
- default=False,
- action='store_true',
- help='Perform cherry picks into the current checkout')
- parser.add_argument('--no-upload', '--no_upload',
- default=False,
- action='store_true',
- help='Do not upload to Gerrit')
- parser.add_argument('hashes', metavar='<hash>', nargs='+',
- help='Hashed to merge')
- return parser.parse_args()
+def parse_options():
+ parser = argparse.ArgumentParser(description='Release r8')
+ parser.add_argument('--branch',
+ metavar=('<branch>'),
+ help='Branch to cherry-pick to')
+ parser.add_argument('--current-checkout',
+ '--current_checkout',
+ default=False,
+ action='store_true',
+ help='Perform cherry picks into the current checkout')
+ parser.add_argument('--no-upload',
+ '--no_upload',
+ default=False,
+ action='store_true',
+ help='Do not upload to Gerrit')
+ parser.add_argument('hashes',
+ metavar='<hash>',
+ nargs='+',
+                        help='Hashes to merge')
+
+ return parser.parse_args()
def run(args):
- # Checkout the branch.
- subprocess.check_output(['git', 'checkout', args.branch])
+ # Checkout the branch.
+ subprocess.check_output(['git', 'checkout', args.branch])
- if (args.current_checkout):
- for i in range(len(args.hashes) + 1):
- branch = 'cherry-%d' % (i + 1)
- print('Deleting branch %s' % branch)
- subprocess.run(['git', 'branch', branch, '-D'])
+ if (args.current_checkout):
+ for i in range(len(args.hashes) + 1):
+ branch = 'cherry-%d' % (i + 1)
+ print('Deleting branch %s' % branch)
+ subprocess.run(['git', 'branch', branch, '-D'])
- bugs = set()
+ bugs = set()
- count = 1
- for hash in args.hashes:
+ count = 1
+ for hash in args.hashes:
+ branch = 'cherry-%d' % count
+ print('Cherry-picking %s in %s' % (hash, branch))
+ if (count == 1):
+ subprocess.run([
+ 'git', 'new-branch', branch, '--upstream',
+ 'origin/%s' % args.branch
+ ])
+ else:
+ subprocess.run(['git', 'new-branch', branch, '--upstream-current'])
+
+ subprocess.run(['git', 'cherry-pick', hash])
+ confirm_and_upload(branch, args, bugs)
+ count = count + 1
+
branch = 'cherry-%d' % count
- print('Cherry-picking %s in %s' % (hash, branch))
- if (count == 1):
- subprocess.run(['git', 'new-branch', branch, '--upstream', 'origin/%s' % args.branch])
+ subprocess.run(['git', 'new-branch', branch, '--upstream-current'])
+
+ old_version = 'unknown'
+ for line in open(VERSION_FILE, 'r'):
+ index = line.find(VERSION_PREFIX)
+ if index > 0:
+ index += len(VERSION_PREFIX)
+ subline = line[index:]
+ old_version = subline[:subline.index('"')]
+ break
+
+ new_version = 'unknown'
+ if old_version.find('.') > 0:
+ split_version = old_version.split('.')
+ new_version = '.'.join(split_version[:-1] +
+ [str(int(split_version[-1]) + 1)])
+ subprocess.run([
+ 'sed', '-i',
+ 's/%s/%s/' % (old_version, new_version), VERSION_FILE
+ ])
else:
- subprocess.run(['git', 'new-branch', branch, '--upstream-current'])
+ editor = os.environ.get('VISUAL')
+ if not editor:
+ editor = os.environ.get('EDITOR')
+ if not editor:
+ editor = 'vi'
+ else:
+ print("Opening %s for version update with %s" %
+ (VERSION_FILE, editor))
+ subprocess.run([editor, VERSION_FILE])
- subprocess.run(['git', 'cherry-pick', hash])
- confirm_and_upload(branch, args, bugs)
- count = count + 1
+ message = ("Version %s\n\n" % new_version)
+ for bug in sorted(bugs):
+ message += 'Bug: b/%s\n' % bug
- branch = 'cherry-%d' % count
- subprocess.run(['git', 'new-branch', branch, '--upstream-current'])
+ subprocess.run(['git', 'commit', '-a', '-m', message])
+ confirm_and_upload(branch, args, None)
+ if (not args.current_checkout):
+ while True:
+ try:
+ answer = input(
+ "Type 'delete' to finish and delete checkout in %s: " %
+ os.getcwd())
+ if answer == 'delete':
+ break
+ except KeyboardInterrupt:
+ pass
- old_version = 'unknown'
- for line in open(VERSION_FILE, 'r'):
- index = line.find(VERSION_PREFIX)
- if index > 0:
- index += len(VERSION_PREFIX)
- subline = line[index:]
- old_version = subline[:subline.index('"')]
- break
-
- new_version = 'unknown'
- if old_version.find('.') > 0:
- split_version = old_version.split('.')
- new_version = '.'.join(split_version[:-1] + [str(int(split_version[-1]) + 1)])
- subprocess.run(['sed', '-i', 's/%s/%s/' % (old_version, new_version), VERSION_FILE])
- else:
- editor = os.environ.get('VISUAL')
- if not editor:
- editor = os.environ.get('EDITOR')
- if not editor:
- editor = 'vi'
- else:
- print("Opening %s for version update with %s" % (VERSION_FILE, editor))
- subprocess.run([editor, VERSION_FILE])
-
- message = ("Version %s\n\n" % new_version)
- for bug in sorted(bugs):
- message += 'Bug: b/%s\n' % bug
-
- subprocess.run(['git', 'commit', '-a', '-m', message])
- confirm_and_upload(branch, args, None)
- if (not args.current_checkout):
- while True:
- try:
- answer = input("Type 'delete' to finish and delete checkout in %s: " % os.getcwd())
- if answer == 'delete':
- break
- except KeyboardInterrupt:
- pass
def confirm_and_upload(branch, args, bugs):
- question = ('Ready to continue (cwd %s, will not upload to Gerrit)' % os.getcwd()
- if args.no_upload else
- 'Ready to upload %s (cwd %s)' % (branch, os.getcwd()))
+ question = ('Ready to continue (cwd %s, will not upload to Gerrit)' %
+ os.getcwd() if args.no_upload else
+ 'Ready to upload %s (cwd %s)' % (branch, os.getcwd()))
- while True:
- try:
- answer = input(question + ' [yes/abort]? ')
- if answer == 'yes':
- break
- if answer == 'abort':
- print('Aborting new branch for %s' % branch)
- sys.exit(1)
- except KeyboardInterrupt:
- pass
+ while True:
+ try:
+ answer = input(question + ' [yes/abort]? ')
+ if answer == 'yes':
+ break
+ if answer == 'abort':
+ print('Aborting new branch for %s' % branch)
+ sys.exit(1)
+ except KeyboardInterrupt:
+ pass
- # Compute the set of bug refs from the commit message after confirmation.
- # If done before a conflicting cherry-pick status will potentially include
- # references that are orthogonal to the pick.
- if bugs != None:
- commit_message = subprocess.check_output(['git', 'log', '--format=%B', '-n', '1', 'HEAD'])
- commit_lines = [l.strip() for l in commit_message.decode('UTF-8').split('\n')]
- for line in commit_lines:
- if line.startswith('Bug: '):
- normalized = line.replace('Bug: ', '').replace('b/', '')
- if len(normalized) > 0:
- bugs.add(normalized)
+ # Compute the set of bug refs from the commit message after confirmation.
+    # If done before, a conflicting cherry-pick status will potentially include
+ # references that are orthogonal to the pick.
+    if bugs is not None:
+ commit_message = subprocess.check_output(
+ ['git', 'log', '--format=%B', '-n', '1', 'HEAD'])
+ commit_lines = [
+ l.strip() for l in commit_message.decode('UTF-8').split('\n')
+ ]
+ for line in commit_lines:
+ if line.startswith('Bug: '):
+ normalized = line.replace('Bug: ', '').replace('b/', '')
+ if len(normalized) > 0:
+ bugs.add(normalized)
- if (not args.no_upload):
- subprocess.run(['git', 'cl', 'upload', '--bypass-hooks'])
+ if (not args.no_upload):
+ subprocess.run(['git', 'cl', 'upload', '--bypass-hooks'])
+
def main():
- args = parse_options()
+ args = parse_options()
- if (not args.current_checkout):
- with utils.TempDir() as temp:
- print("Performing cherry-picking in %s" % temp)
- subprocess.check_call(['git', 'clone', utils.REPO_SOURCE, temp])
- with utils.ChangedWorkingDirectory(temp):
+ if (not args.current_checkout):
+ with utils.TempDir() as temp:
+ print("Performing cherry-picking in %s" % temp)
+ subprocess.check_call(['git', 'clone', utils.REPO_SOURCE, temp])
+ with utils.ChangedWorkingDirectory(temp):
+ run(args)
+ else:
+ # Run in current directory.
+ print("Performing cherry-picking in %s" % os.getcwd())
+ subprocess.check_output(['git', 'fetch', 'origin'])
run(args)
- else:
- # Run in current directory.
- print("Performing cherry-picking in %s" % os.getcwd())
- subprocess.check_output(['git', 'fetch', 'origin'])
- run(args)
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
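
The version bump in run() above increments the last dot-separated component of
the version string; a minimal standalone sketch with an illustrative version:

    old_version = '4.0.23'
    split_version = old_version.split('.')
    new_version = '.'.join(split_version[:-1] +
                           [str(int(split_version[-1]) + 1)])
    assert new_version == '4.0.24'
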
diff --git a/tools/chrome_data.py b/tools/chrome_data.py
index 5a7b6c3..8499d72 100644
--- a/tools/chrome_data.py
+++ b/tools/chrome_data.py
@@ -9,50 +9,62 @@
BASE = os.path.join(utils.THIRD_PARTY, 'chrome')
V200430_BASE = os.path.join(BASE, 'chrome_200430')
-V200520_MINIMAL_BASE = os.path.join(
- BASE, 'monochrome_public_minimal_apks', 'chrome_200520')
+V200520_MINIMAL_BASE = os.path.join(BASE, 'monochrome_public_minimal_apks',
+ 'chrome_200520')
VERSIONS = {
- '200430': {
- 'deploy' : {
- 'inputs': [os.path.join(V200430_BASE, 'program.jar')],
- 'pgconf': [os.path.join(V200430_BASE, 'proguard.config')],
- 'libraries': [os.path.join(V200430_BASE, 'library.jar')],
- 'min-api': ANDROID_N_API,
+ '200430': {
+ 'deploy': {
+ 'inputs': [os.path.join(V200430_BASE, 'program.jar')],
+ 'pgconf': [os.path.join(V200430_BASE, 'proguard.config')],
+ 'libraries': [os.path.join(V200430_BASE, 'library.jar')],
+ 'min-api': ANDROID_N_API,
+ },
},
- },
- '200520-monochrome_public_minimal_apks': {
- 'deploy' : {
- 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'program.jar')],
- 'features': [
- { 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-1.jar')] },
- { 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-2.jar')] },
- { 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-3.jar')] },
- { 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-4.jar')] },
- { 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-5.jar')] },
- { 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-6.jar')] },
- { 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-7.jar')] },
- { 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-8.jar')] }
- ],
- 'pgconf': [os.path.join(V200520_MINIMAL_BASE, 'proguard.config'),
- utils.IGNORE_WARNINGS_RULES],
- 'libraries': [os.path.join(V200520_MINIMAL_BASE, 'library.jar')],
- 'min-api': ANDROID_N_API
+ '200520-monochrome_public_minimal_apks': {
+ 'deploy': {
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'program.jar')],
+ 'features': [{
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-1.jar')]
+ }, {
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-2.jar')]
+ }, {
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-3.jar')]
+ }, {
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-4.jar')]
+ }, {
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-5.jar')]
+ }, {
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-6.jar')]
+ }, {
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-7.jar')]
+ }, {
+ 'inputs': [os.path.join(V200520_MINIMAL_BASE, 'feature-8.jar')]
+ }],
+ 'pgconf': [
+ os.path.join(V200520_MINIMAL_BASE, 'proguard.config'),
+ utils.IGNORE_WARNINGS_RULES
+ ],
+ 'libraries': [os.path.join(V200520_MINIMAL_BASE, 'library.jar')],
+ 'min-api': ANDROID_N_API
+ },
},
- },
}
+
def GetLatestVersion():
- return '200520-monochrome_public_minimal_apks'
+ return '200520-monochrome_public_minimal_apks'
+
def GetName():
- return 'chrome'
+ return 'chrome'
+
def GetMemoryData(version):
- assert version == '200520-monochrome_public_minimal_apks'
- return {
- 'find-xmx-min': 600,
- 'find-xmx-max': 700,
- 'find-xmx-range': 16,
- 'oom-threshold': 625,
- }
+ assert version == '200520-monochrome_public_minimal_apks'
+ return {
+ 'find-xmx-min': 600,
+ 'find-xmx-max': 700,
+ 'find-xmx-range': 16,
+ 'oom-threshold': 625,
+ }
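
A hypothetical consumer lookup against the VERSIONS table above (the names used
here refer to the module's own definitions):

    version = GetLatestVersion()
    deploy = VERSIONS[version]['deploy']
    assert deploy['min-api'] == ANDROID_N_API
    assert len(deploy['features']) == 8
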
diff --git a/tools/compare_apk_sizes.py b/tools/compare_apk_sizes.py
index ad75024..6349689 100755
--- a/tools/compare_apk_sizes.py
+++ b/tools/compare_apk_sizes.py
@@ -21,177 +21,190 @@
USAGE = """%prog [options] app1 app2
NOTE: This only makes sense if minification is disabled"""
-MAX_THREADS=40
+MAX_THREADS = 40
+
def parse_options():
- result = optparse.OptionParser(usage=USAGE)
- result.add_option('--no-build',
- help='Run without building first',
- default=False,
- action='store_true')
- result.add_option('--temp',
- help='Temporary directory to store extracted classes in')
- result.add_option('--use_code_size',
- help='Use the size of code segments instead of the full size of the dex.',
- default=False, action='store_true')
- result.add_option('--report',
- help='Print comparison to this location instead of stdout')
- return result.parse_args()
+ result = optparse.OptionParser(usage=USAGE)
+ result.add_option('--no-build',
+ help='Run without building first',
+ default=False,
+ action='store_true')
+ result.add_option('--temp',
+ help='Temporary directory to store extracted classes in')
+ result.add_option(
+ '--use_code_size',
+ help=
+ 'Use the size of code segments instead of the full size of the dex.',
+ default=False,
+ action='store_true')
+ result.add_option(
+ '--report', help='Print comparison to this location instead of stdout')
+ return result.parse_args()
+
def extract_apk(apk, output):
- if os.path.exists(output):
- shutil.rmtree(output)
- zipfile.ZipFile(apk).extractall(output)
- with utils.ChangedWorkingDirectory(output):
- dex = glob.glob('*.dex')
- return [os.path.join(output, dexfile) for dexfile in dex]
+ if os.path.exists(output):
+ shutil.rmtree(output)
+ zipfile.ZipFile(apk).extractall(output)
+ with utils.ChangedWorkingDirectory(output):
+ dex = glob.glob('*.dex')
+ return [os.path.join(output, dexfile) for dexfile in dex]
+
def ensure_exists(files):
- for f in files:
- if not os.path.exists(f):
- raise Exception('%s does not exist' % f)
+ for f in files:
+ if not os.path.exists(f):
+ raise Exception('%s does not exist' % f)
+
def extract_classes(input, output, options):
- if os.path.exists(output):
- shutil.rmtree(output)
- os.makedirs(output)
- args = ['--file-per-class',
- '--output', output]
- if options.no_build:
- args.extend(['--no-build'])
- args.extend(input)
- if toolhelper.run('d8', args) is not 0:
- raise Exception('Failed running d8')
+ if os.path.exists(output):
+ shutil.rmtree(output)
+ os.makedirs(output)
+ args = ['--file-per-class', '--output', output]
+ if options.no_build:
+ args.extend(['--no-build'])
+ args.extend(input)
+    if toolhelper.run('d8', args) != 0:
+ raise Exception('Failed running d8')
+
def get_code_size(path):
- segments = toolhelper.run('dexsegments',
- [path],
- build=False,
- return_stdout=True)
- for line in segments.splitlines():
- if 'Code' in line:
- # The code size line looks like:
- # - Code: 264 / 4
- splits = line.split(' ')
- return int(splits[3])
- # Some classes has no code.
- return 0
+ segments = toolhelper.run('dexsegments', [path],
+ build=False,
+ return_stdout=True)
+ for line in segments.splitlines():
+ if 'Code' in line:
+ # The code size line looks like:
+ # - Code: 264 / 4
+ splits = line.split(' ')
+ return int(splits[3])
+    # Some classes have no code.
+ return 0
+
class FileInfo:
- def __init__(self, path, root):
- self.path = path
- self.full_path = os.path.join(root, path)
- def __eq__(self, other):
- return self.full_path == other.full_path
+ def __init__(self, path, root):
+ self.path = path
+ self.full_path = os.path.join(root, path)
- def set_size(self, use_code_size):
- if use_code_size:
- self.size = get_code_size(self.full_path)
- else:
- self.size = os.path.getsize(self.full_path)
+ def __eq__(self, other):
+ return self.full_path == other.full_path
+
+ def set_size(self, use_code_size):
+ if use_code_size:
+ self.size = get_code_size(self.full_path)
+ else:
+ self.size = os.path.getsize(self.full_path)
+
def generate_file_info(path, options):
- file_info_map = {}
- with utils.ChangedWorkingDirectory(path):
- for root, dirs, files in os.walk('.'):
- for f in files:
- assert f.endswith('dex')
- file_path = os.path.join(root, f)
- entry = FileInfo(file_path, path)
- if not options.use_code_size:
- entry.set_size(False)
- file_info_map[file_path] = entry
- threads = []
- file_infos = file_info_map.values() if options.use_code_size else []
- while len(file_infos) > 0 or len(threads)> 0:
- for t in threads:
- if not t.is_alive():
- threads.remove(t)
- # sleep
- if len(threads) == MAX_THREADS or len(file_infos) == 0:
- time.sleep(0.5)
- while len(threads) < MAX_THREADS and len(file_infos) > 0:
- info = file_infos.pop()
- print('Added %s for size calculation' % info.full_path)
- t = threading.Thread(target=info.set_size, args=(options.use_code_size,))
- threads.append(t)
- t.start()
- print('Missing %s files, threads=%s ' % (len(file_infos), len(threads)))
+ file_info_map = {}
+ with utils.ChangedWorkingDirectory(path):
+ for root, dirs, files in os.walk('.'):
+ for f in files:
+ assert f.endswith('dex')
+ file_path = os.path.join(root, f)
+ entry = FileInfo(file_path, path)
+ if not options.use_code_size:
+ entry.set_size(False)
+ file_info_map[file_path] = entry
+ threads = []
+    # dict views do not support pop(); materialize as a list.
+    file_infos = list(
+        file_info_map.values()) if options.use_code_size else []
+ while len(file_infos) > 0 or len(threads) > 0:
+ for t in threads:
+ if not t.is_alive():
+ threads.remove(t)
+ # sleep
+ if len(threads) == MAX_THREADS or len(file_infos) == 0:
+ time.sleep(0.5)
+ while len(threads) < MAX_THREADS and len(file_infos) > 0:
+ info = file_infos.pop()
+ print('Added %s for size calculation' % info.full_path)
+ t = threading.Thread(target=info.set_size,
+ args=(options.use_code_size,))
+ threads.append(t)
+ t.start()
+ print('Missing %s files, threads=%s ' % (len(file_infos), len(threads)))
- return file_info_map
+ return file_info_map
+
def print_info(app, app_files, only_in_app, bigger_in_app, output):
- output.write('Only in %s\n' % app)
- only_app_sorted = sorted(only_in_app,
- key=lambda a: app_files[a].size,
- reverse=True)
- output.write('\n'.join([' %s %s bytes' %
- (x, app_files[x].size) for x in only_app_sorted]))
- output.write('\n\n')
- output.write('Bigger in %s\n' % app)
- # Sort by the percentage diff compared to size
- percent = lambda a: (0.0 + bigger_in_app.get(a))/app_files.get(a).size * 100
- for bigger in sorted(bigger_in_app, key=percent, reverse=True):
- output.write(' {0:.3f}% {1} bytes {2}\n'.format(percent(bigger),
- bigger_in_app[bigger],
- bigger))
- output.write('\n\n')
+ output.write('Only in %s\n' % app)
+ only_app_sorted = sorted(only_in_app,
+ key=lambda a: app_files[a].size,
+ reverse=True)
+ output.write('\n'.join(
+ [' %s %s bytes' % (x, app_files[x].size) for x in only_app_sorted]))
+ output.write('\n\n')
+ output.write('Bigger in %s\n' % app)
+ # Sort by the percentage diff compared to size
+    percent = lambda a: ((0.0 + bigger_in_app.get(a)) /
+                         app_files.get(a).size * 100)
+ for bigger in sorted(bigger_in_app, key=percent, reverse=True):
+ output.write(' {0:.3f}% {1} bytes {2}\n'.format(
+ percent(bigger), bigger_in_app[bigger], bigger))
+ output.write('\n\n')
def compare(app1_classes_dir, app2_classes_dir, app1, app2, options):
- app1_files = generate_file_info(app1_classes_dir, options)
- app2_files = generate_file_info(app2_classes_dir, options)
- only_in_app1 = [k for k in app1_files if k not in app2_files]
- only_in_app2 = [k for k in app2_files if k not in app1_files]
- in_both = [k for k in app2_files if k in app1_files]
- assert len(app1_files) == len(only_in_app1) + len(in_both)
- assert len(app2_files) == len(only_in_app2) + len(in_both)
- bigger_in_app1 = {}
- bigger_in_app2 = {}
- same_size = []
- for f in in_both:
- app1_entry = app1_files[f]
- app2_entry = app2_files[f]
- if app1_entry.size > app2_entry.size:
- bigger_in_app1[f] = app1_entry.size - app2_entry.size
- elif app2_entry.size > app1_entry.size:
- bigger_in_app2[f] = app2_entry.size - app1_entry.size
- else:
- same_size.append(f)
- output = open(options.report, 'w') if options.report else sys.stdout
- print_info(app1, app1_files, only_in_app1, bigger_in_app1, output)
- print_info(app2, app2_files, only_in_app2, bigger_in_app2, output)
- output.write('Same size\n')
- output.write('\n'.join([' %s' % x for x in same_size]))
- if options.report:
- output.close()
+ app1_files = generate_file_info(app1_classes_dir, options)
+ app2_files = generate_file_info(app2_classes_dir, options)
+ only_in_app1 = [k for k in app1_files if k not in app2_files]
+ only_in_app2 = [k for k in app2_files if k not in app1_files]
+ in_both = [k for k in app2_files if k in app1_files]
+ assert len(app1_files) == len(only_in_app1) + len(in_both)
+ assert len(app2_files) == len(only_in_app2) + len(in_both)
+ bigger_in_app1 = {}
+ bigger_in_app2 = {}
+ same_size = []
+ for f in in_both:
+ app1_entry = app1_files[f]
+ app2_entry = app2_files[f]
+ if app1_entry.size > app2_entry.size:
+ bigger_in_app1[f] = app1_entry.size - app2_entry.size
+ elif app2_entry.size > app1_entry.size:
+ bigger_in_app2[f] = app2_entry.size - app1_entry.size
+ else:
+ same_size.append(f)
+ output = open(options.report, 'w') if options.report else sys.stdout
+ print_info(app1, app1_files, only_in_app1, bigger_in_app1, output)
+ print_info(app2, app2_files, only_in_app2, bigger_in_app2, output)
+ output.write('Same size\n')
+ output.write('\n'.join([' %s' % x for x in same_size]))
+ if options.report:
+ output.close()
+
def Main():
- (options, args) = parse_options()
- if len(args) is not 2:
- print(args)
- print('Takes exactly two arguments, the two apps to compare')
- return 1
- app1 = args[0]
- app2 = args[1]
- ensure_exists([app1, app2])
- with utils.TempDir() as temporary:
- # If a temp dir is passed in, use that instead of the generated temporary
- output = options.temp if options.temp else temporary
- ensure_exists([output])
- app1_input = [app1]
- app2_input = [app2]
- if app1.endswith('apk'):
- app1_input = extract_apk(app1, os.path.join(output, 'app1'))
- if app2.endswith('apk'):
- app2_input = extract_apk(app2, os.path.join(output, 'app2'))
- app1_classes_dir = os.path.join(output, 'app1_classes')
- app2_classes_dir = os.path.join(output, 'app2_classes')
+ (options, args) = parse_options()
+    if len(args) != 2:
+ print(args)
+ print('Takes exactly two arguments, the two apps to compare')
+ return 1
+ app1 = args[0]
+ app2 = args[1]
+ ensure_exists([app1, app2])
+ with utils.TempDir() as temporary:
+ # If a temp dir is passed in, use that instead of the generated temporary
+ output = options.temp if options.temp else temporary
+ ensure_exists([output])
+ app1_input = [app1]
+ app2_input = [app2]
+ if app1.endswith('apk'):
+ app1_input = extract_apk(app1, os.path.join(output, 'app1'))
+ if app2.endswith('apk'):
+ app2_input = extract_apk(app2, os.path.join(output, 'app2'))
+ app1_classes_dir = os.path.join(output, 'app1_classes')
+ app2_classes_dir = os.path.join(output, 'app2_classes')
- extract_classes(app1_input, app1_classes_dir, options)
- extract_classes(app2_input, app2_classes_dir, options)
- compare(app1_classes_dir, app2_classes_dir, app1, app2, options)
+ extract_classes(app1_input, app1_classes_dir, options)
+ extract_classes(app2_input, app2_classes_dir, options)
+ compare(app1_classes_dir, app2_classes_dir, app1, app2, options)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
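
The manual thread throttling in generate_file_info above could equally be
written with the standard library's ThreadPoolExecutor; a minimal sketch under
that assumption (not the tool's actual implementation):

    from concurrent.futures import ThreadPoolExecutor

    def set_sizes(file_infos, use_code_size, max_threads=40):
        # Bounded concurrency; the context manager waits for completion.
        with ThreadPoolExecutor(max_workers=max_threads) as pool:
            for info in file_infos:
                pool.submit(info.set_size, use_code_size)
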
diff --git a/tools/compare_cts_results.py b/tools/compare_cts_results.py
index 0da116d..eed3435 100755
--- a/tools/compare_cts_results.py
+++ b/tools/compare_cts_results.py
@@ -13,127 +13,142 @@
import utils
+
class Module:
- def __init__(self):
- self.test_cases = {}
- self.bf_covered_in_file = 0 # bitfield, one bit per file
- def get_test_case_maybe_create(self, test_case_name):
- return self.test_cases.setdefault(test_case_name, TestCase())
+ def __init__(self):
+ self.test_cases = {}
+ self.bf_covered_in_file = 0 # bitfield, one bit per file
- def set_file_index_present(self, file_idx):
- self.bf_covered_in_file |= (1 << file_idx)
+ def get_test_case_maybe_create(self, test_case_name):
+ return self.test_cases.setdefault(test_case_name, TestCase())
- def report(self, module_name, files, diff_only):
- bf_all_files = self.bf_covered_in_file
- for test_case_name, test_case in self.test_cases.iteritems():
- if test_case.bf_covered_in_file != bf_all_files:
- report_missing_thing('test_case', module_name + '/' + test_case_name,
- test_case.bf_covered_in_file, files)
- for test_case_name, test_case in self.test_cases.iteritems():
- test_case.report(module_name, test_case_name, files, diff_only)
+ def set_file_index_present(self, file_idx):
+ self.bf_covered_in_file |= (1 << file_idx)
+
+ def report(self, module_name, files, diff_only):
+ bf_all_files = self.bf_covered_in_file
+        for test_case_name, test_case in self.test_cases.items():
+ if test_case.bf_covered_in_file != bf_all_files:
+ report_missing_thing('test_case',
+ module_name + '/' + test_case_name,
+ test_case.bf_covered_in_file, files)
+        for test_case_name, test_case in self.test_cases.items():
+ test_case.report(module_name, test_case_name, files, diff_only)
+
class TestCase:
- def __init__(self):
- self.tests = {}
- self.bf_covered_in_file = 0 # bitfield, one bit per file
- def get_test_maybe_create(self, test_name):
- return self.tests.setdefault(test_name, Test())
+ def __init__(self):
+ self.tests = {}
+ self.bf_covered_in_file = 0 # bitfield, one bit per file
- def set_file_index_present(self, file_idx):
- self.bf_covered_in_file |= (1 << file_idx)
+ def get_test_maybe_create(self, test_name):
+ return self.tests.setdefault(test_name, Test())
- def report(self, module_name, test_case_name, files, diff_only):
- bf_all_files = self.bf_covered_in_file
- for test_name, test in self.tests.iteritems():
- do_report = test.bf_passing_in_file != bf_all_files
- if diff_only:
- do_report = do_report and test.bf_failing_in_file != bf_all_files
- if do_report:
- test.report(module_name, test_case_name, test_name, files)
+ def set_file_index_present(self, file_idx):
+ self.bf_covered_in_file |= (1 << file_idx)
+
+ def report(self, module_name, test_case_name, files, diff_only):
+ bf_all_files = self.bf_covered_in_file
+        for test_name, test in self.tests.items():
+ do_report = test.bf_passing_in_file != bf_all_files
+ if diff_only:
+ do_report = do_report and test.bf_failing_in_file != bf_all_files
+ if do_report:
+ test.report(module_name, test_case_name, test_name, files)
+
class Test:
- def __init__(self):
- self.bf_failing_in_file = 0 # bitfields, one bit per file
- self.bf_passing_in_file = 0
- def set_file_index_outcome(self, outcome_is_passed, file_idx):
- bf_value = (1 << file_idx)
- if outcome_is_passed:
- self.bf_passing_in_file |= bf_value
- else:
- self.bf_failing_in_file |= bf_value
+ def __init__(self):
+ self.bf_failing_in_file = 0 # bitfields, one bit per file
+ self.bf_passing_in_file = 0
- # Report test's status in all files: pass/fail/missing
- def report(self, module_name, test_case_name, test_name, files):
- print('Test: {}/{}/{}:'.format(module_name, test_case_name, test_name))
- for file_idx, f in enumerate(files):
- bf_value = 1 << file_idx
- print('\t- {:20}'.format(basename(f)), end = '')
- if self.bf_passing_in_file & bf_value:
- print('PASS')
- elif self.bf_failing_in_file & bf_value:
- print(' FAIL')
- else:
- print(' -- -- (missing)')
+ def set_file_index_outcome(self, outcome_is_passed, file_idx):
+ bf_value = (1 << file_idx)
+ if outcome_is_passed:
+ self.bf_passing_in_file |= bf_value
+ else:
+ self.bf_failing_in_file |= bf_value
+
+ # Report test's status in all files: pass/fail/missing
+ def report(self, module_name, test_case_name, test_name, files):
+ print('Test: {}/{}/{}:'.format(module_name, test_case_name, test_name))
+ for file_idx, f in enumerate(files):
+ bf_value = 1 << file_idx
+ print('\t- {:20}'.format(basename(f)), end='')
+ if self.bf_passing_in_file & bf_value:
+                print(' PASS')
+ elif self.bf_failing_in_file & bf_value:
+ print(' FAIL')
+ else:
+ print(' -- -- (missing)')
+
def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'Compare multiple Android CTS test_result.xml files.')
- parser.add_argument('files', nargs = '+',
- help = 'List of (possibly renamed) test_result.xml files')
- parser.add_argument('--diff-only',
- action = 'store_true',
- help = "Don't list tests that consistently fail in all result files,"
- " list only differences.")
- return parser.parse_args()
+ parser = argparse.ArgumentParser(
+ description='Compare multiple Android CTS test_result.xml files.')
+ parser.add_argument('files',
+ nargs='+',
+ help='List of (possibly renamed) test_result.xml files')
+ parser.add_argument(
+ '--diff-only',
+ action='store_true',
+ help="Don't list tests that consistently fail in all result files,"
+ " list only differences.")
+ return parser.parse_args()
+
# Read CTS test_result.xml from file and merge into result_tree
def add_to_result_tree(result_tree, file_xml, file_idx):
- module = None
- test_case = None
- for x in utils.read_cts_test_result(file_xml):
- if type(x) is utils.CtsModule:
- module = result_tree.setdefault(x.name, Module())
- module.set_file_index_present(file_idx)
- elif type(x) is utils.CtsTestCase:
- test_case = module.get_test_case_maybe_create(x.name)
- test_case.set_file_index_present(file_idx)
- else:
- assert(type(x) is utils.CtsTest)
- v = test_case.get_test_maybe_create(x.name)
- v.set_file_index_outcome(x.outcome, file_idx)
+ module = None
+ test_case = None
+ for x in utils.read_cts_test_result(file_xml):
+ if type(x) is utils.CtsModule:
+ module = result_tree.setdefault(x.name, Module())
+ module.set_file_index_present(file_idx)
+ elif type(x) is utils.CtsTestCase:
+ test_case = module.get_test_case_maybe_create(x.name)
+ test_case.set_file_index_present(file_idx)
+ else:
+ assert (type(x) is utils.CtsTest)
+ v = test_case.get_test_maybe_create(x.name)
+ v.set_file_index_outcome(x.outcome, file_idx)
+
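add_to_result_tree merges each file into one nested Module -> TestCase -> Test tree via the get-or-create helpers. The same merge with plain dicts, on hypothetical records:

    result_tree = {}
    # (module, test_case, test, passed) tuples, as if parsed from one file
    records = [('CtsFooModule', 'FooTest', 'testA', True),
               ('CtsFooModule', 'FooTest', 'testB', False)]
    for module, case, test, passed in records:
        tests = result_tree.setdefault(module, {}).setdefault(case, {})
        tests.setdefault(test, []).append(passed)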
# main tree_report function
def tree_report(result_tree, files, diff_only):
- bf_all_files = (1 << len(files)) - 1
- for module_name, module in result_tree.iteritems():
- if module.bf_covered_in_file != bf_all_files:
- report_missing_thing('module', module_name, module.bf_covered_in_file,
- files)
- for module_name, module in result_tree.iteritems():
- module.report(module_name, files, diff_only)
+ bf_all_files = (1 << len(files)) - 1
+ for module_name, module in result_tree.items():
+ if module.bf_covered_in_file != bf_all_files:
+ report_missing_thing('module', module_name,
+ module.bf_covered_in_file, files)
+ for module_name, module in result_tree.items():
+ module.report(module_name, files, diff_only)
+
def report_missing_thing(thing_type, thing_name, bf_covered_in_file, files):
- print('Missing {}: {}, from:'.format(thing_type, thing_name))
- for file_idx, f in enumerate(files):
- if not (bf_covered_in_file & (1 << file_idx)):
- print('\t- ' + f)
+ print('Missing {}: {}, from:'.format(thing_type, thing_name))
+ for file_idx, f in enumerate(files):
+ if not (bf_covered_in_file & (1 << file_idx)):
+ print('\t- ' + f)
+
def Main():
- m = Module()
- m.get_test_case_maybe_create('qwe')
+ m = Module()
+ m.get_test_case_maybe_create('qwe')
- args = parse_arguments()
+ args = parse_arguments()
- result_tree = {}
- for file_idx, f in enumerate(args.files):
- add_to_result_tree(result_tree, f, file_idx)
+ result_tree = {}
+ for file_idx, f in enumerate(args.files):
+ add_to_result_tree(result_tree, f, file_idx)
- tree_report(result_tree, args.files, args.diff_only)
+ tree_report(result_tree, args.files, args.diff_only)
- return 0
+ return 0
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/compatdx.py b/tools/compatdx.py
index d58b907..777facd 100755
--- a/tools/compatdx.py
+++ b/tools/compatdx.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('compatdx', sys.argv[1:]))
+ sys.exit(toolhelper.run('compatdx', sys.argv[1:]))
diff --git a/tools/compatproguard.py b/tools/compatproguard.py
index 7c56549..4a2e462 100755
--- a/tools/compatproguard.py
+++ b/tools/compatproguard.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('compatproguard', sys.argv[1:]))
+ sys.exit(toolhelper.run('compatproguard', sys.argv[1:]))
diff --git a/tools/compiledump.py b/tools/compiledump.py
index 255f00b..2d1a3a8 100755
--- a/tools/compiledump.py
+++ b/tools/compiledump.py
@@ -17,683 +17,727 @@
def make_parser():
- parser = argparse.ArgumentParser(description = 'Compile a dump artifact.')
- parser.add_argument(
- '--summary',
- help='List a summary of the contents of the dumps.',
- default=False,
- action='store_true')
- parser.add_argument(
- '-d',
- '--dump',
- help='Dump file or directory to compile',
- default=None)
- parser.add_argument(
- '-o',
- '--output',
- help='File to output (defaults to out.jar in temp)',
- default=None)
- parser.add_argument(
- '--temp',
- help='Temp directory to extract the dump to, allows you to rerun the command'
- ' more easily in the terminal with changes',
- default=None)
- parser.add_argument(
- '-c',
- '--compiler',
- help='Compiler to use',
- default=None)
- parser.add_argument(
- '--minify',
- help='Force enable/disable minification'
- ' (defaults to app proguard config)',
- choices=['default', 'force-enable', 'force-disable'],
- default='default')
- parser.add_argument(
- '--optimize',
- help='Force enable/disable optimizations'
- ' (defaults to app proguard config)',
- choices=['default', 'force-enable', 'force-disable'],
- default='default')
- parser.add_argument(
- '--shrink',
- help='Force enable/disable shrinking'
- ' (defaults to app proguard config)',
- choices=['default', 'force-enable', 'force-disable'],
- default='default')
- parser.add_argument(
- '-v',
- '--version',
- help='Compiler version to use (default read from dump version file).'
- 'Valid arguments are:'
- ' "main" to run from your own tree,'
- ' "source" to run from build classes directly,'
- ' "X.Y.Z" to run a specific version, or'
- ' <hash> to run that hash from main.',
- default=None)
- parser.add_argument(
- '--r8-jar',
- help='Path to an R8 jar.',
- default=None)
- parser.add_argument(
- '--r8-flags', '--r8_flags',
- help='Additional option(s) for the compiler.')
- parser.add_argument(
- '--pg-conf', '--pg_conf',
- help='Keep rule file(s).',
- action='append')
- parser.add_argument(
- '--override',
- help='Do not override any extracted dump in temp-dir',
- default=False,
- action='store_true')
- parser.add_argument(
- '--nolib',
- help='Use the non-lib distribution (default uses the lib distribution)',
- default=False,
- action='store_true')
- parser.add_argument(
- '--print-times',
- help='Print timing information from r8',
- default=False,
- action='store_true')
- parser.add_argument(
- '--disable-assertions', '--disable_assertions', '-da',
- help='Disable Java assertions when running the compiler (default enabled)',
- default=False,
- action='store_true')
- parser.add_argument(
- '--classfile',
- help='Run with classfile output',
- default=False,
- action='store_true')
- parser.add_argument(
- '--debug-agent',
- help='Enable Java debug agent and suspend compilation (default disabled)',
- default=False,
- action='store_true')
- parser.add_argument(
- '--xmx',
- help='Set JVM max heap size (-Xmx)',
- default=None)
- parser.add_argument(
- '--threads',
- help='Set the number of threads to use',
- default=None)
- parser.add_argument(
- '--min-api',
- help='Set min-api (default read from dump properties file)',
- default=None)
- parser.add_argument(
- '--desugared-lib',
- help='Set desugared-library (default set from dump)',
- default=None)
- parser.add_argument(
- '--disable-desugared-lib',
- help='Disable desugared-libary if it will be set from dump',
- default=False,
- action='store_true'
- )
- parser.add_argument(
- '--loop',
- help='Run the compilation in a loop',
- default=False,
- action='store_true')
- parser.add_argument(
- '--enable-missing-library-api-modeling',
- help='Run with api modeling',
- default=False,
- action='store_true')
- parser.add_argument(
- '--android-platform-build',
- help='Run as a platform build',
- default=False,
- action='store_true')
- parser.add_argument(
- '--compilation-mode', '--compilation_mode',
- help='Run compilation in specified mode',
- choices=['debug', 'release'],
- default=None)
- return parser
+ parser = argparse.ArgumentParser(description='Compile a dump artifact.')
+ parser.add_argument('--summary',
+ help='List a summary of the contents of the dumps.',
+ default=False,
+ action='store_true')
+ parser.add_argument('-d',
+ '--dump',
+ help='Dump file or directory to compile',
+ default=None)
+ parser.add_argument('-o',
+ '--output',
+ help='File to output (defaults to out.jar in temp)',
+ default=None)
+ parser.add_argument(
+ '--temp',
+ help=
+ 'Temp directory to extract the dump to; allows you to rerun the command'
+ ' more easily in the terminal with changes',
+ default=None)
+ parser.add_argument('-c',
+ '--compiler',
+ help='Compiler to use',
+ default=None)
+ parser.add_argument('--minify',
+ help='Force enable/disable minification'
+ ' (defaults to app proguard config)',
+ choices=['default', 'force-enable', 'force-disable'],
+ default='default')
+ parser.add_argument('--optimize',
+ help='Force enable/disable optimizations'
+ ' (defaults to app proguard config)',
+ choices=['default', 'force-enable', 'force-disable'],
+ default='default')
+ parser.add_argument('--shrink',
+ help='Force enable/disable shrinking'
+ ' (defaults to app proguard config)',
+ choices=['default', 'force-enable', 'force-disable'],
+ default='default')
+ parser.add_argument(
+ '-v',
+ '--version',
+ help='Compiler version to use (default read from dump version file).'
+ 'Valid arguments are:'
+ ' "main" to run from your own tree,'
+ ' "source" to run from build classes directly,'
+ ' "X.Y.Z" to run a specific version, or'
+ ' <hash> to run that hash from main.',
+ default=None)
+ parser.add_argument('--r8-jar', help='Path to an R8 jar.', default=None)
+ parser.add_argument('--r8-flags',
+ '--r8_flags',
+ help='Additional option(s) for the compiler.')
+ parser.add_argument('--pg-conf',
+ '--pg_conf',
+ help='Keep rule file(s).',
+ action='append')
+ parser.add_argument('--override',
+ help='Override any extracted dump in temp-dir',
+ default=False,
+ action='store_true')
+ parser.add_argument(
+ '--nolib',
+ help='Use the non-lib distribution (default uses the lib distribution)',
+ default=False,
+ action='store_true')
+ parser.add_argument('--print-times',
+ help='Print timing information from r8',
+ default=False,
+ action='store_true')
+ parser.add_argument(
+ '--disable-assertions',
+ '--disable_assertions',
+ '-da',
+ help=
+ 'Disable Java assertions when running the compiler (default enabled)',
+ default=False,
+ action='store_true')
+ parser.add_argument('--classfile',
+ help='Run with classfile output',
+ default=False,
+ action='store_true')
+ parser.add_argument(
+ '--debug-agent',
+ help=
+ 'Enable Java debug agent and suspend compilation (default disabled)',
+ default=False,
+ action='store_true')
+ parser.add_argument('--xmx',
+ help='Set JVM max heap size (-Xmx)',
+ default=None)
+ parser.add_argument('--threads',
+ help='Set the number of threads to use',
+ default=None)
+ parser.add_argument(
+ '--min-api',
+ help='Set min-api (default read from dump properties file)',
+ default=None)
+ parser.add_argument('--desugared-lib',
+ help='Set desugared-library (default set from dump)',
+ default=None)
+ parser.add_argument(
+ '--disable-desugared-lib',
+ help='Disable desugared-library if it will be set from dump',
+ default=False,
+ action='store_true')
+ parser.add_argument('--loop',
+ help='Run the compilation in a loop',
+ default=False,
+ action='store_true')
+ parser.add_argument('--enable-missing-library-api-modeling',
+ help='Run with api modeling',
+ default=False,
+ action='store_true')
+ parser.add_argument('--android-platform-build',
+ help='Run as a platform build',
+ default=False,
+ action='store_true')
+ parser.add_argument('--compilation-mode',
+ '--compilation_mode',
+ help='Run compilation in specified mode',
+ choices=['debug', 'release'],
+ default=None)
+ return parser
+
def error(msg):
- print(msg)
- sys.exit(1)
+ print(msg)
+ sys.exit(1)
+
class Dump(object):
- def __init__(self, directory):
- self.directory = directory
+ def __init__(self, directory):
+ self.directory = directory
- def if_exists(self, name):
- f = os.path.join(self.directory, name)
- if os.path.exists(f):
- return f
- return None
+ def if_exists(self, name):
+ f = os.path.join(self.directory, name)
+ if os.path.exists(f):
+ return f
+ return None
- def program_jar(self):
- return self.if_exists('program.jar')
+ def program_jar(self):
+ return self.if_exists('program.jar')
- def feature_jars(self):
- feature_jars = []
- i = 1
- while True:
- feature_jar = self.if_exists('feature-%s.jar' % i)
- if feature_jar:
- feature_jars.append(feature_jar)
- i = i + 1
- else:
- return feature_jars
+ def feature_jars(self):
+ feature_jars = []
+ i = 1
+ while True:
+ feature_jar = self.if_exists('feature-%s.jar' % i)
+ if feature_jar:
+ feature_jars.append(feature_jar)
+ i = i + 1
+ else:
+ return feature_jars
- def library_jar(self):
- return self.if_exists('library.jar')
+ def library_jar(self):
+ return self.if_exists('library.jar')
- def classpath_jar(self):
- return self.if_exists('classpath.jar')
+ def classpath_jar(self):
+ return self.if_exists('classpath.jar')
- def desugared_library_json(self):
- return self.if_exists('desugared-library.json')
+ def desugared_library_json(self):
+ return self.if_exists('desugared-library.json')
- def proguard_input_map(self):
- if self.if_exists('proguard_input.config'):
- print("Unimplemented: proguard_input configuration.")
+ def proguard_input_map(self):
+ if self.if_exists('proguard_input.config'):
+ print("Unimplemented: proguard_input configuration.")
- def main_dex_list_resource(self):
- return self.if_exists('main-dex-list.txt')
+ def main_dex_list_resource(self):
+ return self.if_exists('main-dex-list.txt')
- def main_dex_rules_resource(self):
- return self.if_exists('main-dex-rules.txt')
+ def main_dex_rules_resource(self):
+ return self.if_exists('main-dex-rules.txt')
- def art_profile_resources(self):
- art_profile_resources = []
- while True:
- current_art_profile_index = len(art_profile_resources) + 1
- art_profile_resource = self.if_exists(
- 'art-profile-%s.txt' % current_art_profile_index)
- if art_profile_resource is None:
- return art_profile_resources
- art_profile_resources.append(art_profile_resource)
+ def art_profile_resources(self):
+ art_profile_resources = []
+ while True:
+ current_art_profile_index = len(art_profile_resources) + 1
+ art_profile_resource = self.if_exists('art-profile-%s.txt' %
+ current_art_profile_index)
+ if art_profile_resource is None:
+ return art_profile_resources
+ art_profile_resources.append(art_profile_resource)
- def startup_profile_resources(self):
- startup_profile_resources = []
- while True:
- current_startup_profile_index = len(startup_profile_resources) + 1
- startup_profile_resource = self.if_exists(
- 'startup-profile-%s.txt' % current_startup_profile_index)
- if startup_profile_resource is None:
- return startup_profile_resources
- startup_profile_resources.append(startup_profile_resource)
+ def startup_profile_resources(self):
+ startup_profile_resources = []
+ while True:
+ current_startup_profile_index = len(startup_profile_resources) + 1
+ startup_profile_resource = self.if_exists(
+ 'startup-profile-%s.txt' % current_startup_profile_index)
+ if startup_profile_resource is None:
+ return startup_profile_resources
+ startup_profile_resources.append(startup_profile_resource)
- def build_properties_file(self):
- return self.if_exists('build.properties')
+ def build_properties_file(self):
+ return self.if_exists('build.properties')
- def config_file(self):
- return self.if_exists('proguard.config')
+ def config_file(self):
+ return self.if_exists('proguard.config')
- def version_file(self):
- return self.if_exists('r8-version')
+ def version_file(self):
+ return self.if_exists('r8-version')
- def version(self):
- f = self.version_file()
- if f:
- return open(f).read().split(' ')[0]
- return None
+ def version(self):
+ f = self.version_file()
+ if f:
+ return open(f).read().split(' ')[0]
+ return None
+
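Dump is a thin read-only view over an extracted dump directory: every accessor funnels through if_exists and yields a path or None. Hedged usage, assuming an already extracted directory:

    dump = Dump('/tmp/extracted-dump')  # hypothetical path
    if not dump.program_jar():
        print('dump has no program classes')
    for feature_jar in dump.feature_jars():  # feature-1.jar, feature-2.jar, ...
        print('feature split:', feature_jar)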
def read_dump_from_args(args, temp):
- if args.dump is None:
- error("A dump file or directory must be specified")
- return read_dump(args.dump, temp, args.override)
+ if args.dump is None:
+ error("A dump file or directory must be specified")
+ return read_dump(args.dump, temp, args.override)
+
def read_dump(dump, temp, override=False):
- if os.path.isdir(dump):
- return Dump(dump)
- dump_file = zipfile.ZipFile(os.path.abspath(dump), 'r')
- r8_version_file = os.path.join(temp, 'r8-version')
+ if os.path.isdir(dump):
+ return Dump(dump)
+ dump_file = zipfile.ZipFile(os.path.abspath(dump), 'r')
+ r8_version_file = os.path.join(temp, 'r8-version')
- if override or not os.path.isfile(r8_version_file):
- dump_file.extractall(temp)
- if not os.path.isfile(r8_version_file):
- error("Did not extract into %s. Either the zip file is invalid or the "
- "dump is missing files" % temp)
- return Dump(temp)
+ if override or not os.path.isfile(r8_version_file):
+ dump_file.extractall(temp)
+ if not os.path.isfile(r8_version_file):
+ error(
+ "Did not extract into %s. Either the zip file is invalid or the "
+ "dump is missing files" % temp)
+ return Dump(temp)
+
def determine_build_properties(args, dump):
- build_properties = {}
- build_properties_file = dump.build_properties_file()
- if build_properties_file:
- with open(build_properties_file) as f:
- build_properties_contents = f.readlines()
- for line in build_properties_contents:
- stripped = line.strip()
- if stripped:
- pair = stripped.split('=')
- build_properties[pair[0]] = pair[1]
- if 'mode' not in build_properties:
- build_properties['mode'] = 'release'
- return build_properties
+ build_properties = {}
+ build_properties_file = dump.build_properties_file()
+ if build_properties_file:
+ with open(build_properties_file) as f:
+ build_properties_contents = f.readlines()
+ for line in build_properties_contents:
+ stripped = line.strip()
+ if stripped:
+ pair = stripped.split('=')
+ build_properties[pair[0]] = pair[1]
+ if 'mode' not in build_properties:
+ build_properties['mode'] = 'release'
+ return build_properties
+
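Note that determine_build_properties splits on every '=', so a value that itself contains '=' loses its tail (pair[1] keeps only the first segment). A defensive variant, a sketch only, splits once:

    def parse_build_properties(text):
        props = {}
        for line in text.splitlines():
            stripped = line.strip()
            if stripped and '=' in stripped:
                key, value = stripped.split('=', 1)  # keep '=' inside values
                props[key] = value
        return props

    assert parse_build_properties('a=b=c') == {'a': 'b=c'}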
def determine_version(args, dump):
- if args.version is None:
- return dump.version()
- return args.version
+ if args.version is None:
+ return dump.version()
+ return args.version
+
def determine_compiler(args, build_properties):
- compilers = ['d8', 'r8', 'r8full', 'l8', 'l8d8', 'tracereferences']
- compiler = args.compiler
- if not compiler and 'tool' in build_properties:
- compiler = build_properties.get('tool').lower()
- if compiler == 'r8':
- if not 'force-proguard-compatibility' in build_properties:
- error("Unable to determine R8 compiler variant from build.properties."
- " No value for 'force-proguard-compatibility'.")
- if build_properties.get('force-proguard-compatibility').lower() == 'false':
- compiler = compiler + 'full'
- if compiler == 'TraceReferences':
- compiler = build_properties.get('tool').lower()
- if compiler not in compilers:
- error("Unable to determine a compiler to use. Specified %s,"
- " Valid options: %s" % (args.compiler, ', '.join(compilers)))
- return compiler
+ compilers = ['d8', 'r8', 'r8full', 'l8', 'l8d8', 'tracereferences']
+ compiler = args.compiler
+ if not compiler and 'tool' in build_properties:
+ compiler = build_properties.get('tool').lower()
+ if compiler == 'r8':
+ if not 'force-proguard-compatibility' in build_properties:
+ error(
+ "Unable to determine R8 compiler variant from build.properties."
+ " No value for 'force-proguard-compatibility'.")
+ if build_properties.get(
+ 'force-proguard-compatibility').lower() == 'false':
+ compiler = compiler + 'full'
+ if compiler == 'TraceReferences':
+ compiler = build_properties.get('tool').lower()
+ if compiler not in compilers:
+ error("Unable to determine a compiler to use. Specified %s,"
+ " Valid options: %s" % (args.compiler, ', '.join(compilers)))
+ return compiler
+
def determine_trace_references_commands(build_properties, output):
- trace_ref_consumer = build_properties.get('trace_references_consumer')
- if trace_ref_consumer == 'com.android.tools.r8.tracereferences.TraceReferencesCheckConsumer':
- return ["--check"]
- else:
- assert trace_ref_consumer == 'com.android.tools.r8.tracereferences.TraceReferencesKeepRules'
- args = ['--allowobfuscation'] if build_properties.get('minification') == 'true' else []
- args.extend(['--keep-rules', '--output', output])
- return args
+ trace_ref_consumer = build_properties.get('trace_references_consumer')
+ if trace_ref_consumer == 'com.android.tools.r8.tracereferences.TraceReferencesCheckConsumer':
+ return ["--check"]
+ else:
+ assert trace_ref_consumer == 'com.android.tools.r8.tracereferences.TraceReferencesKeepRules'
+ args = ['--allowobfuscation'
+ ] if build_properties.get('minification') == 'true' else []
+ args.extend(['--keep-rules', '--output', output])
+ return args
+
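determine_trace_references_commands maps the consumer class recorded in build.properties to CLI flags: the check consumer becomes --check, while the keep-rules consumer gets --keep-rules --output (plus --allowobfuscation when minification was on). For instance, with hypothetical properties:

    props = {
        'trace_references_consumer':
            'com.android.tools.r8.tracereferences.TraceReferencesKeepRules',
        'minification': 'true',
    }
    assert determine_trace_references_commands(props, 'keep.rules') == [
        '--allowobfuscation', '--keep-rules', '--output', 'keep.rules'
    ]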
def is_l8_compiler(compiler):
- return compiler.startswith('l8')
+ return compiler.startswith('l8')
+
def is_r8_compiler(compiler):
- return compiler.startswith('r8')
+ return compiler.startswith('r8')
+
def determine_config_files(args, dump, temp):
- if args.pg_conf:
- config_files = []
- for config_file in args.pg_conf:
- dst = os.path.join(temp, 'proguard-%s.config' % len(config_files))
- shutil.copyfile(config_file, dst)
- config_files.append(dst)
- return config_files
- dump_config_file = dump.config_file()
- if dump_config_file:
- return [dump_config_file]
- return []
+ if args.pg_conf:
+ config_files = []
+ for config_file in args.pg_conf:
+ dst = os.path.join(temp, 'proguard-%s.config' % len(config_files))
+ shutil.copyfile(config_file, dst)
+ config_files.append(dst)
+ return config_files
+ dump_config_file = dump.config_file()
+ if dump_config_file:
+ return [dump_config_file]
+ return []
+
def determine_output(args, temp):
- if (args.output):
- return args.output
- return os.path.join(temp, 'out.jar')
+ if args.output:
+ return args.output
+ return os.path.join(temp, 'out.jar')
+
def determine_min_api(args, build_properties):
- if args.min_api:
- return args.min_api
- if 'min-api' in build_properties:
- return build_properties.get('min-api')
- return None
-
-def determine_residual_art_profile_output(art_profile, temp):
- return os.path.join(temp, os.path.basename(art_profile)[:-4] + ".out.txt")
-
-def determine_desugared_lib_pg_conf_output(temp):
- return os.path.join(temp, 'desugared-library-keep-rules.config')
-
-def determine_feature_output(feature_jar, temp):
- return os.path.join(temp, os.path.basename(feature_jar)[:-4] + ".out.jar")
-
-def determine_program_jar(args, dump):
- if hasattr(args, 'program_jar') and args.program_jar:
- return args.program_jar
- return dump.program_jar()
-
-def determine_class_file(args, build_properties):
- return args.classfile \
- or build_properties.get('backend', 'dex').lower() == 'cf'
-
-def determine_android_platform_build(args, build_properties):
- if args.android_platform_build:
- return True
- return build_properties.get('android-platform-build') == 'true'
-
-def determine_enable_missing_library_api_modeling(args, build_properties):
- if args.enable_missing_library_api_modeling:
- return True
- return build_properties.get('enable-missing-library-api-modeling') == 'true'
-
-def determine_compilation_mode(args, build_properties):
- if args.compilation_mode:
- return args.compilation_mode
- return build_properties.get('mode')
-
-def determine_properties(build_properties):
- args = []
- for key, value in build_properties.items():
- # When writing dumps all system properties starting with com.android.tools.r8
- # are written to the build.properties file in the format
- # system-property-com.android.tools.r8.XXX=<value>
- if key.startswith('system-property-'):
- name = key[len('system-property-'):]
- if name.endswith('dumpinputtofile') or name.endswith('dumpinputtodirectory'):
- continue
- if len(value) == 0:
- args.append('-D' + name)
- else:
- args.append('-D' + name + '=' + value)
- return args
-
-def download_distribution(version, args, temp):
- nolib = args.nolib
- if version == 'main':
- return utils.R8_JAR if nolib else utils.R8LIB_JAR
- if version == 'source':
- return '%s:%s' % (utils.BUILD_JAVA_MAIN_DIR, utils.ALL_DEPS_JAR)
- name = 'r8.jar' if nolib else 'r8lib.jar'
- source = archive.GetUploadDestination(version, name, is_hash(version))
- dest = os.path.join(temp, 'r8.jar')
- utils.download_file_from_cloud_storage(source, dest)
- return dest
-
-def clean_configs(files, args):
- for file in files:
- clean_config(file, args)
-
-def clean_config(file, args):
- with open(file) as f:
- lines = f.readlines()
- minify = args.minify
- optimize = args.optimize
- shrink = args.shrink
- with open(file, 'w') as f:
- if minify == 'force-disable':
- print('Adding config line: -dontobfuscate')
- f.write('-dontobfuscate\n')
- if optimize == 'force-disable':
- print('Adding config line: -dontoptimize')
- f.write('-dontoptimize\n')
- if shrink == 'force-disable':
- print('Adding config line: -dontshrink')
- f.write('-dontshrink\n')
- for line in lines:
- if clean_config_line(line, minify, optimize, shrink):
- print('Removing from config line: \n%s' % line)
- else:
- f.write(line)
-
-def clean_config_line(line, minify, optimize, shrink):
- if line.lstrip().startswith('#'):
- return False
- if ('-injars' in line or '-libraryjars' in line or
- '-print' in line or '-applymapping' in line):
- return True
- if minify == 'force-enable' and '-dontobfuscate' in line:
- return True
- if optimize == 'force-enable' and '-dontoptimize' in line:
- return True
- if shrink == 'force-enable' and '-dontshrink' in line:
- return True
- return False
-
-def prepare_r8_wrapper(dist, temp, jdkhome):
- compile_wrapper_with_javac(
- dist,
- temp,
- jdkhome,
- os.path.join(
- utils.REPO_ROOT,
- 'src/main/java/com/android/tools/r8/utils/CompileDumpCompatR8.java'))
-
-def prepare_d8_wrapper(dist, temp, jdkhome):
- compile_wrapper_with_javac(
- dist,
- temp,
- jdkhome,
- os.path.join(
- utils.REPO_ROOT,
- 'src/main/java/com/android/tools/r8/utils/CompileDumpD8.java'))
-
-def compile_wrapper_with_javac(dist, temp, jdkhome, path):
- base_path = os.path.join(
- utils.REPO_ROOT,
- 'src/main/java/com/android/tools/r8/utils/CompileDumpBase.java')
- cmd = [
- jdk.GetJavacExecutable(jdkhome),
- path,
- base_path,
- '-d', temp,
- '-cp', dist,
- ]
- utils.PrintCmd(cmd)
- subprocess.check_output(cmd)
-
-def is_hash(version):
- return len(version) == 40
-
-def run1(out, args, otherargs, jdkhome=None, worker_id=None):
- jvmargs = []
- compilerargs = []
- for arg in otherargs:
- if arg.startswith('-D'):
- jvmargs.append(arg)
- else:
- compilerargs.append(arg)
- with utils.TempDir() as temp:
- if out:
- temp = out
- if not os.path.exists(temp):
- os.makedirs(temp)
- dump = read_dump_from_args(args, temp)
- if not dump.program_jar():
- error("Cannot compile dump with no program classes")
- if not dump.library_jar():
- print("WARNING: Unexpected lack of library classes in dump")
- build_properties = determine_build_properties(args, dump)
- version = determine_version(args, dump)
- compiler = determine_compiler(args, build_properties)
- config_files = determine_config_files(args, dump, temp)
- out = determine_output(args, temp)
- min_api = determine_min_api(args, build_properties)
- classfile = determine_class_file(args, build_properties)
- android_platform_build = determine_android_platform_build(args, build_properties)
- enable_missing_library_api_modeling = determine_enable_missing_library_api_modeling(args, build_properties)
- mode = determine_compilation_mode(args, build_properties)
- jar = args.r8_jar if args.r8_jar else download_distribution(version, args, temp)
- if ':' not in jar and not os.path.exists(jar):
- error("Distribution does not exist: " + jar)
- cmd = [jdk.GetJavaExecutable(jdkhome)]
- cmd.extend(jvmargs)
- if args.debug_agent:
- if not args.nolib:
- print("WARNING: Running debugging agent on r8lib is questionable...")
- cmd.append(
- '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005')
- if args.xmx:
- cmd.append('-Xmx' + args.xmx)
- if not args.disable_assertions:
- cmd.append('-ea')
- cmd.append('-Dcom.android.tools.r8.enableTestAssertions=1')
- if args.print_times:
- cmd.append('-Dcom.android.tools.r8.printtimes=1')
- if args.r8_flags:
- cmd.extend(args.r8_flags.split(' '))
- if hasattr(args, 'properties'):
- cmd.extend(args.properties)
- cmd.extend(determine_properties(build_properties))
- cmd.extend(['-cp', '%s:%s' % (temp, jar)])
- if compiler == 'd8':
- prepare_d8_wrapper(jar, temp, jdkhome)
- cmd.append('com.android.tools.r8.utils.CompileDumpD8')
- if is_l8_compiler(compiler):
- cmd.append('com.android.tools.r8.L8')
- if compiler == 'tracereferences':
- cmd.append('com.android.tools.r8.tracereferences.TraceReferences')
- cmd.extend(determine_trace_references_commands(build_properties, out))
- if compiler.startswith('r8'):
- prepare_r8_wrapper(jar, temp, jdkhome)
- cmd.append('com.android.tools.r8.utils.CompileDumpCompatR8')
- if compiler == 'r8':
- cmd.append('--compat')
- if compiler != 'tracereferences':
- assert mode == 'debug' or mode == 'release'
- cmd.append('--' + mode)
- # For recompilation of dumps run_on_app_dumps pass in a program jar.
- program_jar = determine_program_jar(args, dump)
- if compiler != 'tracereferences':
- cmd.append(program_jar)
- cmd.extend(['--output', out])
- else:
- cmd.extend(['--source', program_jar])
- for feature_jar in dump.feature_jars():
- cmd.extend(['--feature-jar', feature_jar,
- determine_feature_output(feature_jar, temp)])
- if dump.library_jar():
- cmd.extend(['--lib', dump.library_jar()])
- if dump.classpath_jar() and not is_l8_compiler(compiler):
- cmd.extend(
- ['--target' if compiler == 'tracereferences' else '--classpath',
- dump.classpath_jar()])
- if dump.desugared_library_json() and not args.disable_desugared_lib:
- cmd.extend(['--desugared-lib', dump.desugared_library_json()])
- if not is_l8_compiler(compiler):
- cmd.extend([
- '--desugared-lib-pg-conf-output',
- determine_desugared_lib_pg_conf_output(temp)])
- if (is_r8_compiler(compiler) or compiler == 'l8') and config_files:
- if hasattr(args, 'config_files_consumer') and args.config_files_consumer:
- args.config_files_consumer(config_files)
- else:
- # If we get a dump from the wild we can't use -injars, -libraryjars or
- # -print{mapping,usage}
- clean_configs(config_files, args)
- for config_file in config_files:
- cmd.extend(['--pg-conf', config_file])
- cmd.extend(['--pg-map-output', '%s.map' % out])
- if dump.main_dex_list_resource():
- cmd.extend(['--main-dex-list', dump.main_dex_list_resource()])
- if dump.main_dex_rules_resource():
- cmd.extend(['--main-dex-rules', dump.main_dex_rules_resource()])
- for art_profile_resource in dump.art_profile_resources():
- residual_art_profile_output = \
- determine_residual_art_profile_output(art_profile_resource, temp)
- cmd.extend([
- '--art-profile', art_profile_resource, residual_art_profile_output])
- for startup_profile_resource in dump.startup_profile_resources():
- cmd.extend(['--startup-profile', startup_profile_resource])
- if min_api:
- cmd.extend(['--min-api', min_api])
- if classfile:
- cmd.extend(['--classfile'])
- if android_platform_build:
- cmd.extend(['--android-platform-build'])
- if enable_missing_library_api_modeling:
- cmd.extend(['--enable-missing-library-api-modeling'])
- if args.threads:
- cmd.extend(['--threads', args.threads])
- cmd.extend(compilerargs)
- utils.PrintCmd(cmd, worker_id=worker_id)
- try:
- print(subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode('utf-8'))
- return 0
- except subprocess.CalledProcessError as e:
- if args.nolib \
- or version == 'source' \
- or not try_retrace_output(e, version, temp):
- print(e.output.decode('UTF-8'))
- return 1
-
-def try_retrace_output(e, version, temp):
- try:
- stacktrace = os.path.join(temp, 'stacktrace')
- open(stacktrace, 'w+').write(e.output.decode('UTF-8'))
- print("=" * 80)
- print(" RETRACED OUTPUT")
- print("=" * 80)
- retrace.run(get_map_file(version, temp), stacktrace, None, no_r8lib=False)
- return True
- except Exception as e2:
- print("Failed to retrace for version: %s" % version)
- print(e2)
- return False
-
-def get_map_file(version, temp):
- if version == 'main':
- return utils.R8LIB_MAP
- download_path = archive.GetUploadDestination(
- version,
- 'r8lib.jar.map',
- is_hash(version))
- if utils.file_exists_on_cloud_storage(download_path):
- map_path = os.path.join(temp, 'mapping.map')
- utils.download_file_from_cloud_storage(download_path, map_path)
- return map_path
- else:
- print('Could not find map file from argument: %s.' % version)
+ if args.min_api:
+ return args.min_api
+ if 'min-api' in build_properties:
+ return build_properties.get('min-api')
return None
-def summarize_dump_files(dumpfiles):
- if len(dumpfiles) == 0:
- error('Summary command expects a list of dumps to summarize')
- for f in dumpfiles:
- print(f + ':')
+
+def determine_residual_art_profile_output(art_profile, temp):
+ return os.path.join(temp, os.path.basename(art_profile)[:-4] + ".out.txt")
+
+
+def determine_desugared_lib_pg_conf_output(temp):
+ return os.path.join(temp, 'desugared-library-keep-rules.config')
+
+
+def determine_feature_output(feature_jar, temp):
+ return os.path.join(temp, os.path.basename(feature_jar)[:-4] + ".out.jar")
+
+
+def determine_program_jar(args, dump):
+ if hasattr(args, 'program_jar') and args.program_jar:
+ return args.program_jar
+ return dump.program_jar()
+
+
+def determine_class_file(args, build_properties):
+ return args.classfile \
+ or build_properties.get('backend', 'dex').lower() == 'cf'
+
+
+def determine_android_platform_build(args, build_properties):
+ if args.android_platform_build:
+ return True
+ return build_properties.get('android-platform-build') == 'true'
+
+
+def determine_enable_missing_library_api_modeling(args, build_properties):
+ if args.enable_missing_library_api_modeling:
+ return True
+ return build_properties.get('enable-missing-library-api-modeling') == 'true'
+
+
+def determine_compilation_mode(args, build_properties):
+ if args.compilation_mode:
+ return args.compilation_mode
+ return build_properties.get('mode')
+
+
+def determine_properties(build_properties):
+ args = []
+ for key, value in build_properties.items():
+ # When writing dumps, all system properties starting with com.android.tools.r8
+ # are written to the build.properties file in the format
+ # system-property-com.android.tools.r8.XXX=<value>
+ if key.startswith('system-property-'):
+ name = key[len('system-property-'):]
+ if name.endswith('dumpinputtofile') or name.endswith(
+ 'dumpinputtodirectory'):
+ continue
+ if len(value) == 0:
+ args.append('-D' + name)
+ else:
+ args.append('-D' + name + '=' + value)
+ return args
+
+
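determine_properties turns the recorded system properties back into -D JVM flags, skipping the two dump-triggering ones. Example with hypothetical values:

    props = {
        'mode': 'release',  # not a system property: ignored
        'system-property-com.android.tools.r8.printtimes': '',
        'system-property-com.android.tools.r8.dumpinputtofile': '/tmp/d.zip',
    }
    assert determine_properties(props) == ['-Dcom.android.tools.r8.printtimes']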
+def download_distribution(version, args, temp):
+ nolib = args.nolib
+ if version == 'main':
+ return utils.R8_JAR if nolib else utils.R8LIB_JAR
+ if version == 'source':
+ return '%s:%s' % (utils.BUILD_JAVA_MAIN_DIR, utils.ALL_DEPS_JAR)
+ name = 'r8.jar' if nolib else 'r8lib.jar'
+ source = archive.GetUploadDestination(version, name, is_hash(version))
+ dest = os.path.join(temp, 'r8.jar')
+ utils.download_file_from_cloud_storage(source, dest)
+ return dest
+
+
+def clean_configs(files, args):
+ for file in files:
+ clean_config(file, args)
+
+
+def clean_config(file, args):
+ with open(file) as f:
+ lines = f.readlines()
+ minify = args.minify
+ optimize = args.optimize
+ shrink = args.shrink
+ with open(file, 'w') as f:
+ if minify == 'force-disable':
+ print('Adding config line: -dontobfuscate')
+ f.write('-dontobfuscate\n')
+ if optimize == 'force-disable':
+ print('Adding config line: -dontoptimize')
+ f.write('-dontoptimize\n')
+ if shrink == 'force-disable':
+ print('Adding config line: -dontshrink')
+ f.write('-dontshrink\n')
+ for line in lines:
+ if clean_config_line(line, minify, optimize, shrink):
+ print('Removing from config line: \n%s' % line)
+ else:
+ f.write(line)
+
+
+def clean_config_line(line, minify, optimize, shrink):
+ if line.lstrip().startswith('#'):
+ return False
+ if ('-injars' in line or '-libraryjars' in line or '-print' in line or
+ '-applymapping' in line):
+ return True
+ if minify == 'force-enable' and '-dontobfuscate' in line:
+ return True
+ if optimize == 'force-enable' and '-dontoptimize' in line:
+ return True
+ if shrink == 'force-enable' and '-dontshrink' in line:
+ return True
+ return False
+
+
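clean_config rewrites a dump's Proguard config for replay: force-disable prepends the matching -dont* directive, force-enable drops it, and path-dependent directives are always dropped. Illustrative calls against clean_config_line:

    # Path-dependent directives go away regardless of the force flags.
    assert clean_config_line('-injars app.jar', 'default', 'default', 'default')
    # -dontobfuscate is only dropped when minification is forced on.
    assert not clean_config_line('-dontobfuscate', 'default', 'default', 'default')
    assert clean_config_line('-dontobfuscate', 'force-enable', 'default', 'default')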
+def prepare_r8_wrapper(dist, temp, jdkhome):
+ compile_wrapper_with_javac(
+ dist, temp, jdkhome,
+ os.path.join(
+ utils.REPO_ROOT,
+ 'src/main/java/com/android/tools/r8/utils/CompileDumpCompatR8.java')
+ )
+
+
+def prepare_d8_wrapper(dist, temp, jdkhome):
+ compile_wrapper_with_javac(
+ dist, temp, jdkhome,
+ os.path.join(
+ utils.REPO_ROOT,
+ 'src/main/java/com/android/tools/r8/utils/CompileDumpD8.java'))
+
+
+def compile_wrapper_with_javac(dist, temp, jdkhome, path):
+ base_path = os.path.join(
+ utils.REPO_ROOT,
+ 'src/main/java/com/android/tools/r8/utils/CompileDumpBase.java')
+ cmd = [
+ jdk.GetJavacExecutable(jdkhome),
+ path,
+ base_path,
+ '-d',
+ temp,
+ '-cp',
+ dist,
+ ]
+ utils.PrintCmd(cmd)
+ subprocess.check_output(cmd)
+
+
+def is_hash(version):
+ return len(version) == 40
+
+
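is_hash treats any 40-character version string as a commit hash. A stricter check, a sketch only, would also require hex digits:

    import re

    def is_hash_strict(version):
        # Exactly 40 lowercase hex digits, i.e. a full git SHA-1.
        return re.fullmatch(r'[0-9a-f]{40}', version) is not None

    assert is_hash_strict('0' * 40)
    assert not is_hash_strict('x' * 40)  # right length, not hex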
+def run1(out, args, otherargs, jdkhome=None, worker_id=None):
+ jvmargs = []
+ compilerargs = []
+ for arg in otherargs:
+ if arg.startswith('-D'):
+ jvmargs.append(arg)
+ else:
+ compilerargs.append(arg)
+ with utils.TempDir() as temp:
+ if out:
+ temp = out
+ if not os.path.exists(temp):
+ os.makedirs(temp)
+ dump = read_dump_from_args(args, temp)
+ if not dump.program_jar():
+ error("Cannot compile dump with no program classes")
+ if not dump.library_jar():
+ print("WARNING: Unexpected lack of library classes in dump")
+ build_properties = determine_build_properties(args, dump)
+ version = determine_version(args, dump)
+ compiler = determine_compiler(args, build_properties)
+ config_files = determine_config_files(args, dump, temp)
+ out = determine_output(args, temp)
+ min_api = determine_min_api(args, build_properties)
+ classfile = determine_class_file(args, build_properties)
+ android_platform_build = determine_android_platform_build(
+ args, build_properties)
+ enable_missing_library_api_modeling = determine_enable_missing_library_api_modeling(
+ args, build_properties)
+ mode = determine_compilation_mode(args, build_properties)
+ jar = args.r8_jar if args.r8_jar else download_distribution(
+ version, args, temp)
+ if ':' not in jar and not os.path.exists(jar):
+ error("Distribution does not exist: " + jar)
+ cmd = [jdk.GetJavaExecutable(jdkhome)]
+ cmd.extend(jvmargs)
+ if args.debug_agent:
+ if not args.nolib:
+ print(
+ "WARNING: Running debugging agent on r8lib is questionable..."
+ )
+ cmd.append(
+ '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
+ )
+ if args.xmx:
+ cmd.append('-Xmx' + args.xmx)
+ if not args.disable_assertions:
+ cmd.append('-ea')
+ cmd.append('-Dcom.android.tools.r8.enableTestAssertions=1')
+ if args.print_times:
+ cmd.append('-Dcom.android.tools.r8.printtimes=1')
+ if args.r8_flags:
+ cmd.extend(args.r8_flags.split(' '))
+ if hasattr(args, 'properties'):
+ cmd.extend(args.properties)
+ cmd.extend(determine_properties(build_properties))
+ cmd.extend(['-cp', '%s:%s' % (temp, jar)])
+ if compiler == 'd8':
+ prepare_d8_wrapper(jar, temp, jdkhome)
+ cmd.append('com.android.tools.r8.utils.CompileDumpD8')
+ if is_l8_compiler(compiler):
+ cmd.append('com.android.tools.r8.L8')
+ if compiler == 'tracereferences':
+ cmd.append('com.android.tools.r8.tracereferences.TraceReferences')
+ cmd.extend(
+ determine_trace_references_commands(build_properties, out))
+ if compiler.startswith('r8'):
+ prepare_r8_wrapper(jar, temp, jdkhome)
+ cmd.append('com.android.tools.r8.utils.CompileDumpCompatR8')
+ if compiler == 'r8':
+ cmd.append('--compat')
+ if compiler != 'tracereferences':
+ assert mode == 'debug' or mode == 'release'
+ cmd.append('--' + mode)
+ # For recompilation of dumps, run_on_app_dumps passes in a program jar.
+ program_jar = determine_program_jar(args, dump)
+ if compiler != 'tracereferences':
+ cmd.append(program_jar)
+ cmd.extend(['--output', out])
+ else:
+ cmd.extend(['--source', program_jar])
+ for feature_jar in dump.feature_jars():
+ cmd.extend([
+ '--feature-jar', feature_jar,
+ determine_feature_output(feature_jar, temp)
+ ])
+ if dump.library_jar():
+ cmd.extend(['--lib', dump.library_jar()])
+ if dump.classpath_jar() and not is_l8_compiler(compiler):
+ cmd.extend([
+ '--target' if compiler == 'tracereferences' else '--classpath',
+ dump.classpath_jar()
+ ])
+ if dump.desugared_library_json() and not args.disable_desugared_lib:
+ cmd.extend(['--desugared-lib', dump.desugared_library_json()])
+ if not is_l8_compiler(compiler):
+ cmd.extend([
+ '--desugared-lib-pg-conf-output',
+ determine_desugared_lib_pg_conf_output(temp)
+ ])
+ if (is_r8_compiler(compiler) or compiler == 'l8') and config_files:
+ if hasattr(args,
+ 'config_files_consumer') and args.config_files_consumer:
+ args.config_files_consumer(config_files)
+ else:
+ # If we get a dump from the wild, we can't use -injars, -libraryjars or
+ # -print{mapping,usage}
+ clean_configs(config_files, args)
+ for config_file in config_files:
+ cmd.extend(['--pg-conf', config_file])
+ cmd.extend(['--pg-map-output', '%s.map' % out])
+ if dump.main_dex_list_resource():
+ cmd.extend(['--main-dex-list', dump.main_dex_list_resource()])
+ if dump.main_dex_rules_resource():
+ cmd.extend(['--main-dex-rules', dump.main_dex_rules_resource()])
+ for art_profile_resource in dump.art_profile_resources():
+ residual_art_profile_output = \
+ determine_residual_art_profile_output(art_profile_resource, temp)
+ cmd.extend([
+ '--art-profile', art_profile_resource,
+ residual_art_profile_output
+ ])
+ for startup_profile_resource in dump.startup_profile_resources():
+ cmd.extend(['--startup-profile', startup_profile_resource])
+ if min_api:
+ cmd.extend(['--min-api', min_api])
+ if classfile:
+ cmd.extend(['--classfile'])
+ if android_platform_build:
+ cmd.extend(['--android-platform-build'])
+ if enable_missing_library_api_modeling:
+ cmd.extend(['--enable-missing-library-api-modeling'])
+ if args.threads:
+ cmd.extend(['--threads', args.threads])
+ cmd.extend(compilerargs)
+ utils.PrintCmd(cmd, worker_id=worker_id)
+ try:
+ print(
+ subprocess.check_output(
+ cmd, stderr=subprocess.STDOUT).decode('utf-8'))
+ return 0
+ except subprocess.CalledProcessError as e:
+ if args.nolib \
+ or version == 'source' \
+ or not try_retrace_output(e, version, temp):
+ print(e.output.decode('UTF-8'))
+ return 1
+
+
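For orientation, the command run1 assembles for an r8full dump has roughly this shape (every value below is hypothetical):

    cmd = [
        'java', '-ea', '-Dcom.android.tools.r8.enableTestAssertions=1',
        '-cp', '<temp>:<r8lib.jar>',
        'com.android.tools.r8.utils.CompileDumpCompatR8',
        '--release', 'program.jar', '--output', 'out.jar',
        '--lib', 'library.jar', '--pg-conf', 'proguard-0.config',
        '--pg-map-output', 'out.jar.map', '--min-api', '21',
    ]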
+def try_retrace_output(e, version, temp):
try:
- with utils.TempDir() as temp:
- dump = read_dump(f, temp)
- summarize_dump(dump)
- except IOError as e:
- print("Error: " + str(e))
- except zipfile.BadZipfile as e:
- print("Error: " + str(e))
+ stacktrace = os.path.join(temp, 'stacktrace')
+ open(stacktrace, 'w+').write(e.output.decode('UTF-8'))
+ print("=" * 80)
+ print(" RETRACED OUTPUT")
+ print("=" * 80)
+ retrace.run(get_map_file(version, temp),
+ stacktrace,
+ None,
+ no_r8lib=False)
+ return True
+ except Exception as e2:
+ print("Failed to retrace for version: %s" % version)
+ print(e2)
+ return False
+
+
+def get_map_file(version, temp):
+ if version == 'main':
+ return utils.R8LIB_MAP
+ download_path = archive.GetUploadDestination(version, 'r8lib.jar.map',
+ is_hash(version))
+ if utils.file_exists_on_cloud_storage(download_path):
+ map_path = os.path.join(temp, 'mapping.map')
+ utils.download_file_from_cloud_storage(download_path, map_path)
+ return map_path
+ else:
+ print('Could not find map file from argument: %s.' % version)
+ return None
+
+
+def summarize_dump_files(dumpfiles):
+ if len(dumpfiles) == 0:
+ error('Summary command expects a list of dumps to summarize')
+ for f in dumpfiles:
+ print(f + ':')
+ try:
+ with utils.TempDir() as temp:
+ dump = read_dump(f, temp)
+ summarize_dump(dump)
+ except IOError as e:
+ print("Error: " + str(e))
+ except zipfile.BadZipfile as e:
+ print("Error: " + str(e))
+
def summarize_dump(dump):
- version = dump.version()
- if not version:
- print('No dump version info')
- return
- print('version=' + version)
- props = dump.build_properties_file()
- if props:
- with open(props) as props_file:
- print(props_file.read())
- if dump.library_jar():
- print('library.jar present')
- if dump.classpath_jar():
- print('classpath.jar present')
- prog = dump.program_jar()
- if prog:
- print('program.jar content:')
- summarize_jar(prog)
+ version = dump.version()
+ if not version:
+ print('No dump version info')
+ return
+ print('version=' + version)
+ props = dump.build_properties_file()
+ if props:
+ with open(props) as props_file:
+ print(props_file.read())
+ if dump.library_jar():
+ print('library.jar present')
+ if dump.classpath_jar():
+ print('classpath.jar present')
+ prog = dump.program_jar()
+ if prog:
+ print('program.jar content:')
+ summarize_jar(prog)
+
def summarize_jar(jar):
- with zipfile.ZipFile(jar) as zip:
- pkgs = {}
- for info in zip.infolist():
- if info.filename.endswith('.class'):
- pkg, clazz = os.path.split(info.filename)
- count = pkgs.get(pkg, 0)
- pkgs[pkg] = count + 1
- sorted = list(pkgs.keys())
- sorted.sort()
- for p in sorted:
- print(' ' + p + ': ' + str(pkgs[p]))
+ with zipfile.ZipFile(jar) as zf:
+ pkgs = {}
+ for info in zf.infolist():
+ if info.filename.endswith('.class'):
+ pkg, clazz = os.path.split(info.filename)
+ count = pkgs.get(pkg, 0)
+ pkgs[pkg] = count + 1
+ # Sort package names without shadowing the zip() and sorted() builtins.
+ for p in sorted(pkgs.keys()):
+ print(' ' + p + ': ' + str(pkgs[p]))
+
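summarize_jar tallies .class entries per package directory; the same count reads more directly with collections.Counter (a sketch, not the file's code):

    import os
    import zipfile
    from collections import Counter

    def classes_per_package(jar):
        with zipfile.ZipFile(jar) as zf:
            return Counter(os.path.dirname(info.filename)
                           for info in zf.infolist()
                           if info.filename.endswith('.class'))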
def run(args, otherargs):
- if args.summary:
- summarize_dump_files(otherargs)
- elif args.loop:
- count = 1
- while True:
- print('Iteration {:03d}'.format(count))
- out = args.temp
- if out:
- out = os.path.join(out, '{:03d}'.format(count))
- run1(out, args, otherargs)
- count += 1
- else:
- run1(args.temp, args, otherargs)
+ if args.summary:
+ summarize_dump_files(otherargs)
+ elif args.loop:
+ count = 1
+ while True:
+ print('Iteration {:03d}'.format(count))
+ out = args.temp
+ if out:
+ out = os.path.join(out, '{:03d}'.format(count))
+ run1(out, args, otherargs)
+ count += 1
+ else:
+ run1(args.temp, args, otherargs)
+
if __name__ == '__main__':
- (args, otherargs) = make_parser().parse_known_args(sys.argv[1:])
- sys.exit(run(args, otherargs))
+ (args, otherargs) = make_parser().parse_known_args(sys.argv[1:])
+ sys.exit(run(args, otherargs))
diff --git a/tools/create_art_tests.py b/tools/create_art_tests.py
index 0c59ae9..99e9085 100755
--- a/tools/create_art_tests.py
+++ b/tools/create_art_tests.py
@@ -15,7 +15,7 @@
]
TOOLS = ["r8", "d8", "r8cf"]
TEMPLATE = Template(
-"""// Copyright (c) 2016, the R8 project authors. Please see the AUTHORS file
+ """// Copyright (c) 2016, the R8 project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
package com.android.tools.r8.art.$testGeneratingToolchain.$compilerUnderTest;
@@ -61,43 +61,47 @@
def get_test_configurations():
- for toolchain, source_dir in TOOLCHAINS:
- for tool in TOOLS:
- if tool == "d8" and toolchain == "none":
- tool_enum = 'R8_AFTER_D8'
- else:
- tool_enum = tool.upper()
- if tool == "r8cf":
- if toolchain != "none":
- continue
- tool_enum = 'D8_AFTER_R8CF'
- output_dir = os.path.join(OUTPUT_DIR, toolchain, tool)
- yield (tool_enum, tool, toolchain, source_dir, output_dir)
+ for toolchain, source_dir in TOOLCHAINS:
+ for tool in TOOLS:
+ if tool == "d8" and toolchain == "none":
+ tool_enum = 'R8_AFTER_D8'
+ else:
+ tool_enum = tool.upper()
+ if tool == "r8cf":
+ if toolchain != "none":
+ continue
+ tool_enum = 'D8_AFTER_R8CF'
+ output_dir = os.path.join(OUTPUT_DIR, toolchain, tool)
+ yield (tool_enum, tool, toolchain, source_dir, output_dir)
def create_tests():
- for tool_enum, tool, toolchain, source_dir, output_dir in get_test_configurations():
- test_cases = [d for d in os.listdir(source_dir)
- if os.path.isdir(os.path.join(source_dir, d))]
- if os.path.exists(output_dir):
- shutil.rmtree(output_dir)
- os.makedirs(output_dir)
- for test_case in test_cases:
- class_name = "Art" + test_case.replace("-", "_") + "Test"
- contents = TEMPLATE.substitute(
- name=test_case,
- compilerUnderTestEnum=tool_enum,
- compilerUnderTest=tool,
- testGeneratingToolchain=toolchain,
- testGeneratingToolchainEnum=toolchain.upper(),
- testClassName=class_name)
- with open(os.path.join(output_dir, class_name + ".java"), "w") as fp:
- fp.write(contents)
+ for tool_enum, tool, toolchain, source_dir, output_dir in get_test_configurations(
+ ):
+ test_cases = [
+ d for d in os.listdir(source_dir)
+ if os.path.isdir(os.path.join(source_dir, d))
+ ]
+ if os.path.exists(output_dir):
+ shutil.rmtree(output_dir)
+ os.makedirs(output_dir)
+ for test_case in test_cases:
+ class_name = "Art" + test_case.replace("-", "_") + "Test"
+ contents = TEMPLATE.substitute(
+ name=test_case,
+ compilerUnderTestEnum=tool_enum,
+ compilerUnderTest=tool,
+ testGeneratingToolchain=toolchain,
+ testGeneratingToolchainEnum=toolchain.upper(),
+ testClassName=class_name)
+ with open(os.path.join(output_dir, class_name + ".java"),
+ "w") as fp:
+ fp.write(contents)
def main():
- create_tests()
+ create_tests()
if __name__ == "__main__":
- main()
+ main()
diff --git a/tools/create_dx_replay.py b/tools/create_dx_replay.py
index 8dfb0a0..1648eb0 100755
--- a/tools/create_dx_replay.py
+++ b/tools/create_dx_replay.py
@@ -27,61 +27,66 @@
import utils
-IN_SUBDIR = 'in' # subdirectory for the local copy of the input files
-OUT_SUBDIR = 'out' # subdirectory prefix for the output of DX
+IN_SUBDIR = 'in' # subdirectory for the local copy of the input files
+OUT_SUBDIR = 'out' # subdirectory prefix for the output of DX
REPLAY_SCRIPT_NAME = 'replay_script.py'
+
# This function will be called with arguments of the original DX invocation. It
# copies the original input files into the local input directory and replaces
# the references in orig_args to the local input files.
# Returns the new line to be appended to the replay script.
def process_line(out_dir, input_counter, orig_args):
- args = []
- inputs = []
- for arg in orig_args:
- if arg.startswith('--output='):
- continue # nothing to do, just skip this arg
- if arg.startswith('--'):
- args.append(arg)
- else:
- # 'arg' is the path of an input file: copy arg to local dir with
- # a new, unique name
- if isdir(arg):
- raise IOError("Adding directories ('{}') to the replay script is not"
- " implemented.".format(arg))
- elif not exists(arg):
- print("The input file to DX does not exist: '{}'.".format(arg))
+ args = []
+ inputs = []
+ for arg in orig_args:
+ if arg.startswith('--output='):
+ continue # nothing to do, just skip this arg
+ if arg.startswith('--'):
+ args.append(arg)
+ else:
+ # 'arg' is the path of an input file: copy arg to local dir with
+ # a new, unique name
+ if isdir(arg):
+ raise IOError(
+ "Adding directories ('{}') to the replay script is not"
+ " implemented.".format(arg))
+ elif not exists(arg):
+ print("The input file to DX does not exist: '{}'.".format(arg))
- input_file = '{}_{}'.format(input_counter, basename(arg))
+ input_file = '{}_{}'.format(input_counter, basename(arg))
- copy2(arg, join(out_dir, join(IN_SUBDIR, input_file)))
- inputs.append(input_file)
+ copy2(arg, join(out_dir, join(IN_SUBDIR, input_file)))
+ inputs.append(input_file)
- return 'call_dx({}, {}, {})\n'.format(input_counter, args, inputs)
+ return 'call_dx({}, {}, {})\n'.format(input_counter, args, inputs)
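Each original DX invocation thus becomes one call_dx(...) line in the replay script, with inputs rewritten to local copies. For example, with hypothetical arguments:

    # process_line(out_dir, 3, ['--dex', '--output=classes.dex', 'a.jar'])
    # copies a.jar to <out_dir>/in/3_a.jar and returns the script line:
    #   call_dx(3, ['--dex'], ['3_a.jar'])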
def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'Creates a self-contained directory for playing back a '
- ' sequence of DX calls.')
- parser.add_argument('dx_call_log',
- help = 'File containing tab-separated arguments for a DX call on each'
- ' line.')
- parser.add_argument('output_dir',
- help = 'Target path the create the self-contained directory at.')
- return parser.parse_args()
+ parser = argparse.ArgumentParser(
+ description='Creates a self-contained directory for playing back a '
+ 'sequence of DX calls.')
+ parser.add_argument(
+ 'dx_call_log',
+ help='File containing tab-separated arguments for a DX call on each'
+ ' line.')
+ parser.add_argument(
+ 'output_dir',
+ help='Target path to create the self-contained directory at.')
+ return parser.parse_args()
+
def Main():
- args = parse_arguments()
+ args = parse_arguments()
- if isdir(args.output_dir):
- rmdir(args.output_dir) # make sure to write only to empty out dir
+ if isdir(args.output_dir):
+ rmdir(args.output_dir) # make sure to write only to empty out dir
- utils.makedirs_if_needed(join(args.output_dir, IN_SUBDIR))
+ utils.makedirs_if_needed(join(args.output_dir, IN_SUBDIR))
- # create the first lines of the replay script
- replay_script = \
-"""#!/usr/bin/env python3
+ # create the first lines of the replay script
+ replay_script = \
+ """#!/usr/bin/env python3
import os
import shutil
import subprocess
@@ -108,22 +113,24 @@
""".format(IN_SUBDIR, OUT_SUBDIR)
- with open(args.dx_call_log) as f:
- lines = f.read().splitlines()
+ with open(args.dx_call_log) as f:
+ lines = f.read().splitlines()
- input_counter = 1
- for line in lines:
- replay_script += \
- process_line(args.output_dir, input_counter, line.split('\t'))
- input_counter += 1
+ input_counter = 1
+ for line in lines:
+ replay_script += \
+ process_line(args.output_dir, input_counter, line.split('\t'))
+ input_counter += 1
- script_file = join(args.output_dir, REPLAY_SCRIPT_NAME)
- with open(script_file, 'w') as f:
- f.write(replay_script)
+ script_file = join(args.output_dir, REPLAY_SCRIPT_NAME)
+ with open(script_file, 'w') as f:
+ f.write(replay_script)
- # chmod +x for script_file
- st = os.stat(script_file)
- os.chmod(script_file, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
+ # chmod +x for script_file
+ st = os.stat(script_file)
+ os.chmod(script_file,
+ st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/create_local_maven_with_dependencies.py b/tools/create_local_maven_with_dependencies.py
index 76caf6b..4ae9d25 100755
--- a/tools/create_local_maven_with_dependencies.py
+++ b/tools/create_local_maven_with_dependencies.py
@@ -15,9 +15,9 @@
# prefix with X- to control the order, as many dependencies are present
# in several repositories.
REPOSITORIES = [
- 'A-Google=https://maven.google.com/',
- 'B-Maven Central=https://repo1.maven.org/maven2/',
- "C-Gradle Plugins=https://plugins.gradle.org/m2/",
+ 'A-Google=https://maven.google.com/',
+ 'B-Maven Central=https://repo1.maven.org/maven2/',
+ "C-Gradle Plugins=https://plugins.gradle.org/m2/",
]
ANDRDID_SUPPORT_VERSION = '25.4.0'
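Since each entry is an 'X-Name=url' string, a consumer presumably sorts the list and then strips the ordering prefix; a sketch of that split (helper name hypothetical):

    def repository_name_and_url(entry):
        name, url = entry.split('=', 1)
        return name.split('-', 1)[1], url  # drop the 'A-'/'B-' ordering prefix

    assert repository_name_and_url('A-Google=https://maven.google.com/') == (
        'Google', 'https://maven.google.com/')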
@@ -42,125 +42,134 @@
STUDIO_SDK_VERSION = '31.2.0-alpha10'
BUILD_DEPENDENCIES = [
- 'com.google.code.gson:gson:{version}'.format(version = GSON_VERSION),
- 'com.google.guava:guava:{version}'.format(version = GUAVA_VERSION),
- 'it.unimi.dsi:fastutil:{version}'.format(version = FASTUTIL_VERSION),
- 'org.jetbrains.kotlinx:kotlinx-metadata-jvm:{version}'.format(version = KOTLIN_METADATA_VERSION),
- 'org.ow2.asm:asm:{version}'.format(version = ASM_VERSION),
- 'org.ow2.asm:asm-util:{version}'.format(version = ASM_VERSION),
- 'org.ow2.asm:asm-commons:{version}'.format(version = ASM_VERSION),
+ 'com.google.code.gson:gson:{version}'.format(version=GSON_VERSION),
+ 'com.google.guava:guava:{version}'.format(version=GUAVA_VERSION),
+ 'it.unimi.dsi:fastutil:{version}'.format(version=FASTUTIL_VERSION),
+ 'org.jetbrains.kotlinx:kotlinx-metadata-jvm:{version}'.format(
+ version=KOTLIN_METADATA_VERSION),
+ 'org.ow2.asm:asm:{version}'.format(version=ASM_VERSION),
+ 'org.ow2.asm:asm-util:{version}'.format(version=ASM_VERSION),
+ 'org.ow2.asm:asm-commons:{version}'.format(version=ASM_VERSION),
]
TEST_DEPENDENCIES = [
- 'junit:junit:{version}'.format(version = JUNIT_VERSION),
- 'com.android.tools.smali:smali:{version}'.format(version = SMALI_VERSION),
- 'com.android.tools.smali:smali-util:{version}'.format(version = SMALI_VERSION),
- 'com.google.errorprone:error_prone_core:{version}'.format(version = ERROR_PRONE_VERSION),
- 'org.javassist:javassist:{version}'.format(version = JAVASSIST_VERSION),
- 'org.jetbrains.kotlin:kotlin-stdlib:{version}'.format(version = KOTLIN_VERSION),
- 'org.jetbrains.kotlin:kotlin-reflect:{version}'.format(version = KOTLIN_VERSION),
- 'org.mockito:mockito-core:{version}'.format(version = MOCKITO_VERSION),
- 'org.testng:testng:{version}'.format(version = TESTNG_VERSION),
+ 'junit:junit:{version}'.format(version=JUNIT_VERSION),
+ 'com.android.tools.smali:smali:{version}'.format(version=SMALI_VERSION),
+ 'com.android.tools.smali:smali-util:{version}'.format(
+ version=SMALI_VERSION),
+ 'com.google.errorprone:error_prone_core:{version}'.format(
+ version=ERROR_PRONE_VERSION),
+ 'org.javassist:javassist:{version}'.format(version=JAVASSIST_VERSION),
+ 'org.jetbrains.kotlin:kotlin-stdlib:{version}'.format(
+ version=KOTLIN_VERSION),
+ 'org.jetbrains.kotlin:kotlin-reflect:{version}'.format(
+ version=KOTLIN_VERSION),
+ 'org.mockito:mockito-core:{version}'.format(version=MOCKITO_VERSION),
+ 'org.testng:testng:{version}'.format(version=TESTNG_VERSION),
]
NEW_DEPENDENCIES = [
- 'com.google.guava:guava:{version}'.format(version = GUAVA_VERSION_NEW),
- 'org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:4.0.6',
- 'org.jetbrains.kotlin:kotlin-gradle-plugin-api:1.8.10',
- 'org.jetbrains.kotlin:kotlin-gradle-plugin-idea:1.8.10',
- 'org.jetbrains.kotlin:kotlin-reflect:1.6.10',
- 'org.jetbrains.kotlin:kotlin-reflect:1.8.10',
- 'org.jetbrains.kotlin:kotlin-script-runtime:1.8.10',
- 'org.jetbrains.kotlin:kotlin-tooling-core:1.8.10',
- 'net.ltgt.errorprone:net.ltgt.errorprone.gradle.plugin:3.0.1',
- 'com.google.errorprone:javac:9+181-r4173-1',
- # Gradle 8.3
- 'org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:4.1.0',
- 'org.jetbrains.kotlin:kotlin-assignment-compiler-plugin-embeddable:1.9.0',
- 'org.jetbrains.kotlin:kotlin-gradle-plugin-api:1.9.0',
- 'org.jetbrains.kotlin:kotlin-reflect:1.9.0',
- 'org.jetbrains.kotlin:kotlin-script-runtime:1.9.0',
- 'org.jetbrains.kotlin:kotlin-sam-with-receiver-compiler-plugin-embeddable:1.9.0',
- # Resource shrinker
- 'com.android.tools.build:aapt2-proto:{version}'.format(version = AAPT2_PROTO_VERSION),
- 'com.android.tools.layoutlib:layoutlib-api:{version}'.format(version = STUDIO_SDK_VERSION),
- 'com.android.tools:common:{version}'.format(version = STUDIO_SDK_VERSION),
- 'com.android.tools:sdk-common:{version}'.format(version = STUDIO_SDK_VERSION),
- 'com.google.protobuf:protobuf-java:{version}'.format(version = PROTOBUF_VERSION),
+ 'com.google.guava:guava:{version}'.format(version=GUAVA_VERSION_NEW),
+ 'org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:4.0.6',
+ 'org.jetbrains.kotlin:kotlin-gradle-plugin-api:1.8.10',
+ 'org.jetbrains.kotlin:kotlin-gradle-plugin-idea:1.8.10',
+ 'org.jetbrains.kotlin:kotlin-reflect:1.6.10',
+ 'org.jetbrains.kotlin:kotlin-reflect:1.8.10',
+ 'org.jetbrains.kotlin:kotlin-script-runtime:1.8.10',
+ 'org.jetbrains.kotlin:kotlin-tooling-core:1.8.10',
+ 'net.ltgt.errorprone:net.ltgt.errorprone.gradle.plugin:3.0.1',
+ 'com.google.errorprone:javac:9+181-r4173-1',
+ # Gradle 8.3
+ 'org.gradle.kotlin.kotlin-dsl:org.gradle.kotlin.kotlin-dsl.gradle.plugin:4.1.0',
+ 'org.jetbrains.kotlin:kotlin-assignment-compiler-plugin-embeddable:1.9.0',
+ 'org.jetbrains.kotlin:kotlin-gradle-plugin-api:1.9.0',
+ 'org.jetbrains.kotlin:kotlin-reflect:1.9.0',
+ 'org.jetbrains.kotlin:kotlin-script-runtime:1.9.0',
+ 'org.jetbrains.kotlin:kotlin-sam-with-receiver-compiler-plugin-embeddable:1.9.0',
+ # Resource shrinker
+ 'com.android.tools.build:aapt2-proto:{version}'.format(
+ version=AAPT2_PROTO_VERSION),
+ 'com.android.tools.layoutlib:layoutlib-api:{version}'.format(
+ version=STUDIO_SDK_VERSION),
+ 'com.android.tools:common:{version}'.format(version=STUDIO_SDK_VERSION),
+ 'com.android.tools:sdk-common:{version}'.format(version=STUDIO_SDK_VERSION),
+ 'com.google.protobuf:protobuf-java:{version}'.format(
+ version=PROTOBUF_VERSION),
]
+
def dependencies_tar(dependencies_path):
- return os.path.join(
- os.path.dirname(dependencies_path),
- os.path.basename(dependencies_path) + '.tar.gz')
+ return os.path.join(os.path.dirname(dependencies_path),
+ os.path.basename(dependencies_path) + '.tar.gz')
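+# For illustration: dependencies_tar('third_party/dependencies') yields
+# 'third_party/dependencies.tar.gz', the archive stored next to the
+# repository directory itself.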
+
def dependencies_tar_sha1(dependencies_path):
- return os.path.join(
- os.path.dirname(dependencies_path),
- os.path.basename(dependencies_path) + '.tar.gz.sha1')
+ return os.path.join(os.path.dirname(dependencies_path),
+ os.path.basename(dependencies_path) + '.tar.gz.sha1')
+
def remove_local_maven_repository(dependencies_path):
- if os.path.exists(dependencies_path):
- shutil.rmtree(dependencies_path)
- tar = dependencies_tar(dependencies_path)
- if os.path.exists(tar):
- os.remove(tar)
- sha1 = dependencies_tar_sha1(dependencies_path)
- if os.path.exists(sha1):
- os.remove(sha1)
+ if os.path.exists(dependencies_path):
+ shutil.rmtree(dependencies_path)
+ tar = dependencies_tar(dependencies_path)
+ if os.path.exists(tar):
+ os.remove(tar)
+ sha1 = dependencies_tar_sha1(dependencies_path)
+ if os.path.exists(sha1):
+ os.remove(sha1)
-def create_local_maven_repository(args, dependencies_path, repositories, dependencies):
- with utils.ChangedWorkingDirectory(args.studio):
- cmd = [
- os.path.join('tools', 'base', 'bazel', 'bazel'),
- 'run',
- '//tools/base/bazel:local_maven_repository_generator_cli',
- '--',
- '--repo-path',
- dependencies_path,
- '--fetch']
- for repository in repositories:
- cmd.extend(['--remote-repo', repository])
- for dependency in dependencies:
- cmd.append(dependency)
- subprocess.check_call(cmd)
+
+def create_local_maven_repository(args, dependencies_path, repositories,
+ dependencies):
+ with utils.ChangedWorkingDirectory(args.studio):
+ cmd = [
+ os.path.join('tools', 'base', 'bazel', 'bazel'), 'run',
+ '//tools/base/bazel:local_maven_repository_generator_cli', '--',
+ '--repo-path', dependencies_path, '--fetch'
+ ]
+ for repository in repositories:
+ cmd.extend(['--remote-repo', repository])
+ for dependency in dependencies:
+ cmd.append(dependency)
+ subprocess.check_call(cmd)
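+# The assembled command is roughly (illustrative shape, not verbatim output):
+#   tools/base/bazel/bazel run \
+#     //tools/base/bazel:local_maven_repository_generator_cli -- \
+#     --repo-path <path> --fetch --remote-repo <repo>... <dependency>...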
+
def parse_options():
- result = argparse.ArgumentParser(
- description='Create local Maven repository woth dependencies')
- result.add_argument('--studio',
- metavar=('<path>'),
- required=True,
- help='Path to a studio-main checkout (to get the tool '
- '//tools/base/bazel:local_maven_repository_generator_cli)')
- return result.parse_args()
+ result = argparse.ArgumentParser(
+        description='Create local Maven repository with dependencies')
+ result.add_argument(
+ '--studio',
+ metavar=('<path>'),
+ required=True,
+ help='Path to a studio-main checkout (to get the tool '
+ '//tools/base/bazel:local_maven_repository_generator_cli)')
+ return result.parse_args()
def main():
- args = parse_options()
+ args = parse_options()
- dependencies_path = os.path.join(utils.THIRD_PARTY, 'dependencies')
- print("Downloading to " + dependencies_path)
- remove_local_maven_repository(dependencies_path)
- create_local_maven_repository(
- args, dependencies_path, REPOSITORIES, BUILD_DEPENDENCIES + TEST_DEPENDENCIES)
+ dependencies_path = os.path.join(utils.THIRD_PARTY, 'dependencies')
+ print("Downloading to " + dependencies_path)
+ remove_local_maven_repository(dependencies_path)
+ create_local_maven_repository(args, dependencies_path, REPOSITORIES,
+ BUILD_DEPENDENCIES + TEST_DEPENDENCIES)
- dependencies_new_path = os.path.join(utils.THIRD_PARTY, 'dependencies_new')
- print("Downloading to " + dependencies_new_path)
- remove_local_maven_repository(dependencies_new_path)
- create_local_maven_repository(
- args, dependencies_new_path, REPOSITORIES, NEW_DEPENDENCIES)
+ dependencies_new_path = os.path.join(utils.THIRD_PARTY, 'dependencies_new')
+ print("Downloading to " + dependencies_new_path)
+ remove_local_maven_repository(dependencies_new_path)
+ create_local_maven_repository(args, dependencies_new_path, REPOSITORIES,
+ NEW_DEPENDENCIES)
- print("Uploading to Google Cloud Storage:")
- with utils.ChangedWorkingDirectory(utils.THIRD_PARTY):
- for dependency in ['dependencies', 'dependencies_new']:
- cmd = [
- 'upload_to_google_storage.py',
- '-a',
- '--bucket',
- 'r8-deps',
- dependency]
- subprocess.check_call(cmd)
+ print("Uploading to Google Cloud Storage:")
+ with utils.ChangedWorkingDirectory(utils.THIRD_PARTY):
+ for dependency in ['dependencies', 'dependencies_new']:
+ cmd = [
+ 'upload_to_google_storage.py', '-a', '--bucket', 'r8-deps',
+ dependency
+ ]
+ subprocess.check_call(cmd)
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
diff --git a/tools/create_maven_release.py b/tools/create_maven_release.py
index ac7aa85..d90ea42 100755
--- a/tools/create_maven_release.py
+++ b/tools/create_maven_release.py
@@ -20,16 +20,14 @@
import gradle
import utils
-LICENSETEMPLATE = Template(
-"""
+LICENSETEMPLATE = Template("""
<license>
<name>$name</name>
<url>$url</url>
<distribution>repo</distribution>
</license>""")
-R8_POMTEMPLATE = Template(
-"""<project
+R8_POMTEMPLATE = Template("""<project
xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -68,8 +66,7 @@
</project>
""")
-DESUGAR_CONFIGUATION_POMTEMPLATE = Template(
-"""<project
+DESUGAR_CONFIGUATION_POMTEMPLATE = Template("""<project
xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
@@ -106,174 +103,189 @@
</project>
""")
+
def parse_options(argv):
- result = argparse.ArgumentParser()
- result.add_argument('--out', help='The zip file to output')
- group = result.add_mutually_exclusive_group()
- group.add_argument('--desugar-configuration', action='store_true',
- help='Build desugar library configuration (original JDK-8)')
- group.add_argument('--desugar-configuration-jdk8', action='store_true',
- help='Build desugar library configuration (original JDK-8)')
- group.add_argument('--desugar-configuration-jdk11-legacy', action='store_true',
- help='Build desugar library configuration (JDK-11 legacy)')
- group.add_argument('--desugar-configuration-jdk11-minimal', action='store_true',
- help='Build desugar library configuration (JDK-11 minimal)')
- group.add_argument('--desugar-configuration-jdk11', action='store_true',
- help='Build desugar library configuration (JDK-11)')
- group.add_argument('--desugar-configuration-jdk11-nio', action='store_true',
- help='Build desugar library configuration (JDK-11 nio)')
- return result.parse_args(argv)
+ result = argparse.ArgumentParser()
+ result.add_argument('--out', help='The zip file to output')
+ group = result.add_mutually_exclusive_group()
+ group.add_argument(
+ '--desugar-configuration',
+ action='store_true',
+ help='Build desugar library configuration (original JDK-8)')
+ group.add_argument(
+ '--desugar-configuration-jdk8',
+ action='store_true',
+ help='Build desugar library configuration (original JDK-8)')
+ group.add_argument(
+ '--desugar-configuration-jdk11-legacy',
+ action='store_true',
+ help='Build desugar library configuration (JDK-11 legacy)')
+ group.add_argument(
+ '--desugar-configuration-jdk11-minimal',
+ action='store_true',
+ help='Build desugar library configuration (JDK-11 minimal)')
+ group.add_argument('--desugar-configuration-jdk11',
+ action='store_true',
+ help='Build desugar library configuration (JDK-11)')
+ group.add_argument('--desugar-configuration-jdk11-nio',
+ action='store_true',
+ help='Build desugar library configuration (JDK-11 nio)')
+ return result.parse_args(argv)
+
def determine_version():
- version_file = join(
- utils.SRC_ROOT, 'com', 'android', 'tools', 'r8', 'Version.java')
- with open(version_file, 'r') as file:
- for line in file:
- if 'final String LABEL ' in line:
- result = line[line.find('"') + 1:]
- result = result[:result.find('"')]
- return result
- raise Exception('Unable to determine version.')
+ version_file = join(utils.SRC_ROOT, 'com', 'android', 'tools', 'r8',
+ 'Version.java')
+ with open(version_file, 'r') as file:
+ for line in file:
+ if 'final String LABEL ' in line:
+ result = line[line.find('"') + 1:]
+ result = result[:result.find('"')]
+ return result
+ raise Exception('Unable to determine version.')
+
def generate_library_licenses():
- artifact_prefix = '- artifact: '
- license_prefix = 'license: '
- licenses = []
- license_url_prefix = 'licenseUrl: '
- license_urls = []
- # The ./LIBRARY-LICENSE file is a simple yaml file, which for each dependency
- # has the following information:
- #
- # - artifact: <maven artifact> // in the form <group-id>:<artifact-id>:+
- # name: <name of dependency>
- # copyrightHolder: <name of copyright holder>
- # license: <license name>
- # licenseUrl: <url to license test>
- #
- # E.g. for Guava:
- #
- # - artifact: com.google.guava:guava:+
- # name: Guava Google Core Libraries for Java
- # copyrightHolder: The Guava Authors
- # license: The Apache Software License, Version 2.0
- # licenseUrl: http://www.apache.org/licenses/LICENSE-2.0.txt
- #
- # This file should always be up to date as the build will fail if it
- # is does not have information for all dependencies.
- with open('LIBRARY-LICENSE', 'r') as file:
- name = None
- url = None
- for line in file:
- trimmed = line.strip()
- # Collect license name and url for each artifact. They must come in
- # pairs for each artifact.
- if trimmed.startswith(artifact_prefix):
- assert not name
- assert not url
- if trimmed.startswith(license_prefix):
- name = trimmed[len(license_prefix):]
- if trimmed.startswith(license_url_prefix):
- url = trimmed[len(license_url_prefix):]
- # Licenses come in name/url pairs. When both are present add pair
- # to collected licenses if either name or url has not been recorded yet,
- # as some licenses with slightly different names point to the same url.
- if name and url:
- if (not name in licenses) or (not url in license_urls):
- licenses.append(name)
- license_urls.append(url)
+ artifact_prefix = '- artifact: '
+ license_prefix = 'license: '
+ licenses = []
+ license_url_prefix = 'licenseUrl: '
+ license_urls = []
+ # The ./LIBRARY-LICENSE file is a simple yaml file, which for each dependency
+ # has the following information:
+ #
+ # - artifact: <maven artifact> // in the form <group-id>:<artifact-id>:+
+ # name: <name of dependency>
+ # copyrightHolder: <name of copyright holder>
+ # license: <license name>
+    #   licenseUrl: <url to license text>
+ #
+ # E.g. for Guava:
+ #
+ # - artifact: com.google.guava:guava:+
+ # name: Guava Google Core Libraries for Java
+ # copyrightHolder: The Guava Authors
+ # license: The Apache Software License, Version 2.0
+ # licenseUrl: http://www.apache.org/licenses/LICENSE-2.0.txt
+ #
+    # This file should always be up to date, as the build will fail if it
+    # does not have information for all dependencies.
+ with open('LIBRARY-LICENSE', 'r') as file:
name = None
url = None
- assert len(licenses) == len(license_urls)
- result = ''
- for i in range(len(licenses)):
- name = licenses[i]
- url = license_urls[i]
- result += LICENSETEMPLATE.substitute(name=name, url=url)
- return result
+ for line in file:
+ trimmed = line.strip()
+ # Collect license name and url for each artifact. They must come in
+ # pairs for each artifact.
+ if trimmed.startswith(artifact_prefix):
+ assert not name
+ assert not url
+ if trimmed.startswith(license_prefix):
+ name = trimmed[len(license_prefix):]
+ if trimmed.startswith(license_url_prefix):
+ url = trimmed[len(license_url_prefix):]
+ # Licenses come in name/url pairs. When both are present add pair
+ # to collected licenses if either name or url has not been recorded yet,
+ # as some licenses with slightly different names point to the same url.
+ if name and url:
+ if (not name in licenses) or (not url in license_urls):
+ licenses.append(name)
+ license_urls.append(url)
+ name = None
+ url = None
+ assert len(licenses) == len(license_urls)
+ result = ''
+ for i in range(len(licenses)):
+ name = licenses[i]
+ url = license_urls[i]
+ result += LICENSETEMPLATE.substitute(name=name, url=url)
+ return result
+
def write_default_r8_pom_file(pom_file, version):
- write_pom_file(R8_POMTEMPLATE, pom_file, version)
+ write_pom_file(R8_POMTEMPLATE, pom_file, version)
-def write_pom_file(
- template, pom_file, version, artifact_id=None, library_licenses=''):
- version_pom = (
- template.substitute(
- artifactId=artifact_id,
- version=version,
- library_licenses=library_licenses)
- if artifact_id else
- template.substitute(
- version=version, library_licenses=library_licenses))
- with open(pom_file, 'w') as file:
- file.write(version_pom)
+
+def write_pom_file(template,
+ pom_file,
+ version,
+ artifact_id=None,
+ library_licenses=''):
+ version_pom = (template.substitute(artifactId=artifact_id,
+ version=version,
+ library_licenses=library_licenses)
+ if artifact_id else template.substitute(
+ version=version, library_licenses=library_licenses))
+ with open(pom_file, 'w') as file:
+ file.write(version_pom)
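+# For illustration: write_pom_file(R8_POMTEMPLATE, 'r8.pom', '8.2.42')
+# substitutes $version and $library_licenses in the template; $artifactId is
+# only substituted when an artifact_id is passed, as for the desugar
+# configuration poms ('8.2.42' is a made-up example version).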
+
def hash_for(file, hash):
- with open(file, 'rb') as f:
- while True:
- # Read chunks of 1MB
- chunk = f.read(2 ** 20)
- if not chunk:
- break
- hash.update(chunk)
- return hash.hexdigest()
+ with open(file, 'rb') as f:
+ while True:
+ # Read chunks of 1MB
+ chunk = f.read(2**20)
+ if not chunk:
+ break
+ hash.update(chunk)
+ return hash.hexdigest()
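+# hash_for streams the file through the digest in 1MB chunks, so arbitrarily
+# large jars are hashed in constant memory, e.g. hash_for(path, hashlib.sha1()).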
+
def write_md5_for(file):
- hexdigest = hash_for(file, hashlib.md5())
- with (open(file + '.md5', 'w')) as file:
- file.write(hexdigest)
+ hexdigest = hash_for(file, hashlib.md5())
+    with open(file + '.md5', 'w') as f:
+        f.write(hexdigest)
+
def write_sha1_for(file):
- hexdigest = hash_for(file, hashlib.sha1())
- with (open(file + '.sha1', 'w')) as file:
- file.write(hexdigest)
+ hexdigest = hash_for(file, hashlib.sha1())
+    with open(file + '.sha1', 'w') as f:
+        f.write(hexdigest)
+
def generate_maven_zip(name, version, pom_file, jar_file, out):
- with utils.TempDir() as tmp_dir:
- # Create the base maven version directory
- version_dir = join(tmp_dir, utils.get_maven_path(name, version))
- makedirs(version_dir)
- # Write the pom file.
- pom_file_location = join(version_dir, name + '-' + version + '.pom')
- copyfile(pom_file, pom_file_location)
- # Write the jar file.
- jar_file_location = join(version_dir, name + '-' + version + '.jar')
- copyfile(jar_file, jar_file_location)
- # Create check sums.
- write_md5_for(jar_file_location)
- write_md5_for(pom_file_location)
- write_sha1_for(jar_file_location)
- write_sha1_for(pom_file_location)
- # Zip it up - make_archive will append zip to the file, so remove.
- assert out.endswith('.zip')
- base_no_zip = out[0:len(out)-4]
- make_archive(base_no_zip, 'zip', tmp_dir)
+ with utils.TempDir() as tmp_dir:
+ # Create the base maven version directory
+ version_dir = join(tmp_dir, utils.get_maven_path(name, version))
+ makedirs(version_dir)
+ # Write the pom file.
+ pom_file_location = join(version_dir, name + '-' + version + '.pom')
+ copyfile(pom_file, pom_file_location)
+ # Write the jar file.
+ jar_file_location = join(version_dir, name + '-' + version + '.jar')
+ copyfile(jar_file, jar_file_location)
+        # Create checksums.
+ write_md5_for(jar_file_location)
+ write_md5_for(pom_file_location)
+ write_sha1_for(jar_file_location)
+ write_sha1_for(pom_file_location)
+        # Zip it up - make_archive appends '.zip' to the name, so strip it first.
+ assert out.endswith('.zip')
+ base_no_zip = out[0:len(out) - 4]
+ make_archive(base_no_zip, 'zip', tmp_dir)
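+# The zip contains a standard Maven repository fragment (assuming
+# utils.get_maven_path returns the usual group/artifact/version layout):
+#   <group path>/<name>/<version>/<name>-<version>.{pom,jar}
+# plus the .md5/.sha1 checksum files written above.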
+
def generate_r8_maven_zip(out, version_file=None, skip_gradle_build=False):
- if not skip_gradle_build:
- gradle.RunGradle([utils.GRADLE_TASK_R8LIB, '-Pno_internal'])
- version = determine_version()
- with utils.TempDir() as tmp_dir:
- file_copy = join(tmp_dir, 'copy_of_jar.jar')
- copyfile(utils.R8LIB_JAR, file_copy)
+ if not skip_gradle_build:
+ gradle.RunGradle([utils.GRADLE_TASK_R8LIB, '-Pno_internal'])
+ version = determine_version()
+ with utils.TempDir() as tmp_dir:
+ file_copy = join(tmp_dir, 'copy_of_jar.jar')
+ copyfile(utils.R8LIB_JAR, file_copy)
- if version_file:
- with zipfile.ZipFile(file_copy, 'a') as zip:
- zip.write(version_file, basename(version_file))
+ if version_file:
+ with zipfile.ZipFile(file_copy, 'a') as zip:
+ zip.write(version_file, basename(version_file))
- # Generate the pom file.
- pom_file = join(tmp_dir, 'r8.pom')
- write_pom_file(
- R8_POMTEMPLATE,
- pom_file,
- version,
- library_licenses=generate_library_licenses())
- # Write the maven zip file.
- generate_maven_zip(
- 'r8',
- version,
- pom_file,
- file_copy,
- out)
+ # Generate the pom file.
+ pom_file = join(tmp_dir, 'r8.pom')
+ write_pom_file(R8_POMTEMPLATE,
+ pom_file,
+ version,
+ library_licenses=generate_library_licenses())
+ # Write the maven zip file.
+ generate_maven_zip('r8', version, pom_file, file_copy, out)
+
# Write the desugaring configuration of a jar file with the following content:
# java/
@@ -287,113 +299,107 @@
# desugar.json
# lint/
# <lint files>
-def generate_jar_with_desugar_configuration(
- configuration, implementation, conversions, destination):
- with utils.TempDir() as tmp_dir:
- # Add conversion classes.
- with zipfile.ZipFile(conversions, 'r') as conversions_zip:
- conversions_zip.extractall(tmp_dir)
+def generate_jar_with_desugar_configuration(configuration, implementation,
+ conversions, destination):
+ with utils.TempDir() as tmp_dir:
+ # Add conversion classes.
+ with zipfile.ZipFile(conversions, 'r') as conversions_zip:
+ conversions_zip.extractall(tmp_dir)
- # Add configuration.
- configuration_dir = join(tmp_dir, 'META-INF', 'desugar', 'd8')
- makedirs(configuration_dir)
- copyfile(configuration, join(configuration_dir, 'desugar.json'))
+ # Add configuration.
+ configuration_dir = join(tmp_dir, 'META-INF', 'desugar', 'd8')
+ makedirs(configuration_dir)
+ copyfile(configuration, join(configuration_dir, 'desugar.json'))
- # Add lint configuartion.
- lint_dir = join(configuration_dir, 'lint')
- makedirs(lint_dir)
+        # Add lint configuration.
+ lint_dir = join(configuration_dir, 'lint')
+ makedirs(lint_dir)
+ cmd = [
+ jdk.GetJavaExecutable(), '-cp', utils.R8_JAR,
+ 'com.android.tools.r8.ir.desugar.desugaredlibrary.lint.GenerateDesugaredLibraryLintFiles',
+ configuration, implementation, lint_dir,
+ utils.get_android_jar(34)
+ ]
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd)
+
+ # Add LICENSE file.
+ copyfile(join(utils.REPO_ROOT, 'LICENSE'), join(tmp_dir, 'LICENSE'))
+
+ make_archive(destination, 'zip', tmp_dir)
+ move(destination + '.zip', destination)
+
+
+def convert_desugar_configuration(configuration, conversions, implementation,
+ machine_configuration):
cmd = [
- jdk.GetJavaExecutable(),
- '-cp',
- utils.R8_JAR,
- 'com.android.tools.r8.ir.desugar.desugaredlibrary.lint.GenerateDesugaredLibraryLintFiles',
- configuration,
- implementation,
- lint_dir,
- utils.get_android_jar(34)]
- utils.PrintCmd(cmd)
+ jdk.GetJavaExecutable(), '-cp', utils.R8_JAR,
+ 'com.android.tools.r8.ir.desugar.desugaredlibrary.specificationconversion.DesugaredLibraryConverter',
+ configuration, implementation, conversions,
+ utils.get_android_jar(33), machine_configuration
+ ]
subprocess.check_call(cmd)
- # Add LICENSE file.
- copyfile(join(utils.REPO_ROOT, 'LICENSE'), join(tmp_dir, 'LICENSE'))
-
- make_archive(destination, 'zip', tmp_dir)
- move(destination + '.zip', destination)
-
-def convert_desugar_configuration(
- configuration, conversions, implementation, machine_configuration):
- cmd = [jdk.GetJavaExecutable(),
- '-cp',
- utils.R8_JAR,
- 'com.android.tools.r8.ir.desugar.desugaredlibrary.specificationconversion.DesugaredLibraryConverter',
- configuration,
- implementation,
- conversions,
- utils.get_android_jar(33),
- machine_configuration]
- subprocess.check_call(cmd)
# Generate the maven zip for the configuration to desugar desugar_jdk_libs.
-def generate_desugar_configuration_maven_zip(
- out, configuration, implementation, conversions):
- with utils.TempDir() as tmp_dir:
- (name, version) = utils.desugar_configuration_name_and_version(configuration, False)
+def generate_desugar_configuration_maven_zip(out, configuration, implementation,
+ conversions):
+ with utils.TempDir() as tmp_dir:
+ (name, version) = utils.desugar_configuration_name_and_version(
+ configuration, False)
- if (not version.startswith("1.")):
- machine_configuration = join(tmp_dir, "machine.json")
- convert_desugar_configuration(configuration, conversions, implementation, machine_configuration)
- configuration = machine_configuration
+ if (not version.startswith("1.")):
+ machine_configuration = join(tmp_dir, "machine.json")
+ convert_desugar_configuration(configuration, conversions,
+ implementation, machine_configuration)
+ configuration = machine_configuration
- # Generate the pom file.
- pom_file = join(tmp_dir, 'desugar_configuration.pom')
- write_pom_file(DESUGAR_CONFIGUATION_POMTEMPLATE, pom_file, version, artifact_id=name)
- # Generate the jar with the configuration file.
- jar_file = join(tmp_dir, 'desugar_configuration.jar')
- generate_jar_with_desugar_configuration(
- configuration,
- implementation,
- conversions,
- jar_file)
- # Write the maven zip file.
- generate_maven_zip(name, version, pom_file, jar_file, out)
+ # Generate the pom file.
+ pom_file = join(tmp_dir, 'desugar_configuration.pom')
+ write_pom_file(DESUGAR_CONFIGUATION_POMTEMPLATE,
+ pom_file,
+ version,
+ artifact_id=name)
+ # Generate the jar with the configuration file.
+ jar_file = join(tmp_dir, 'desugar_configuration.jar')
+ generate_jar_with_desugar_configuration(configuration, implementation,
+ conversions, jar_file)
+ # Write the maven zip file.
+ generate_maven_zip(name, version, pom_file, jar_file, out)
+
def main(argv):
- options = parse_options(argv)
- if options.out == None:
- raise Exception(
- 'Need to supply output zip with --out.')
- if options.desugar_configuration or options.desugar_configuration_jdk8:
- generate_desugar_configuration_maven_zip(
- options.out,
- utils.DESUGAR_CONFIGURATION,
- utils.DESUGAR_IMPLEMENTATION,
- utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP)
- elif options.desugar_configuration_jdk11_legacy:
- generate_desugar_configuration_maven_zip(
- options.out,
- utils.DESUGAR_CONFIGURATION_JDK11_LEGACY,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP)
- elif options.desugar_configuration_jdk11_minimal:
- generate_desugar_configuration_maven_zip(
- options.out,
- utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
- elif options.desugar_configuration_jdk11:
- generate_desugar_configuration_maven_zip(
- options.out,
- utils.DESUGAR_CONFIGURATION_JDK11,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
- elif options.desugar_configuration_jdk11_nio:
- generate_desugar_configuration_maven_zip(
- options.out,
- utils.DESUGAR_CONFIGURATION_JDK11_NIO,
- utils.DESUGAR_IMPLEMENTATION_JDK11,
- utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
- else:
- generate_r8_maven_zip(options.out)
+ options = parse_options(argv)
+    if options.out is None:
+ raise Exception('Need to supply output zip with --out.')
+ if options.desugar_configuration or options.desugar_configuration_jdk8:
+ generate_desugar_configuration_maven_zip(
+ options.out, utils.DESUGAR_CONFIGURATION,
+ utils.DESUGAR_IMPLEMENTATION,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP)
+ elif options.desugar_configuration_jdk11_legacy:
+ generate_desugar_configuration_maven_zip(
+ options.out, utils.DESUGAR_CONFIGURATION_JDK11_LEGACY,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP)
+ elif options.desugar_configuration_jdk11_minimal:
+ generate_desugar_configuration_maven_zip(
+ options.out, utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
+ elif options.desugar_configuration_jdk11:
+ generate_desugar_configuration_maven_zip(
+ options.out, utils.DESUGAR_CONFIGURATION_JDK11,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
+ elif options.desugar_configuration_jdk11_nio:
+ generate_desugar_configuration_maven_zip(
+ options.out, utils.DESUGAR_CONFIGURATION_JDK11_NIO,
+ utils.DESUGAR_IMPLEMENTATION_JDK11,
+ utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP)
+ else:
+ generate_r8_maven_zip(options.out)
+
if __name__ == "__main__":
- exit(main(sys.argv[1:]))
+    sys.exit(main(sys.argv[1:]))
diff --git a/tools/create_r8lib.py b/tools/create_r8lib.py
index a778ab3..93bfcbd 100755
--- a/tools/create_r8lib.py
+++ b/tools/create_r8lib.py
@@ -20,112 +20,105 @@
}
"""
+
def parse_options():
- parser = argparse.ArgumentParser(description='Tag R8 Versions')
- parser.add_argument(
- '--classpath',
- action='append',
- help='Dependencies to add to classpath')
- parser.add_argument(
- '--debug-agent',
- action='store_true',
- default=False,
- help='Create a socket for debugging')
- parser.add_argument(
- '--excldeps-variant',
- action='store_true',
- default=False,
- help='Mark this artifact as an "excldeps" variant of the compiler')
- parser.add_argument(
- '--debug-variant',
- action='store_true',
- default=False,
- help='Compile with debug flag')
- parser.add_argument(
- '--lib',
- action='append',
- help='Additional libraries (JDK 1.8 rt.jar already included)')
- parser.add_argument(
- '--output',
- required=True,
- help='The output path for the r8lib')
- parser.add_argument(
- '--pg-conf',
- action='append',
- help='Keep configuration')
- parser.add_argument(
- '--pg-map',
- default=None,
- help='Input map for distribution and composition')
- parser.add_argument(
- '--r8jar',
- required=True,
- help='The R8 jar to compile')
- parser.add_argument(
- '--r8compiler',
- default='build/libs/r8_with_deps.jar',
- help='The R8 compiler to use')
- return parser.parse_args()
+    parser = argparse.ArgumentParser(description='Create r8lib')
+ parser.add_argument('--classpath',
+ action='append',
+ help='Dependencies to add to classpath')
+ parser.add_argument('--debug-agent',
+ action='store_true',
+ default=False,
+ help='Create a socket for debugging')
+ parser.add_argument(
+ '--excldeps-variant',
+ action='store_true',
+ default=False,
+ help='Mark this artifact as an "excldeps" variant of the compiler')
+ parser.add_argument('--debug-variant',
+ action='store_true',
+ default=False,
+ help='Compile with debug flag')
+ parser.add_argument(
+ '--lib',
+ action='append',
+ help='Additional libraries (JDK 1.8 rt.jar already included)')
+ parser.add_argument('--output',
+ required=True,
+ help='The output path for the r8lib')
+ parser.add_argument('--pg-conf', action='append', help='Keep configuration')
+ parser.add_argument('--pg-map',
+ default=None,
+ help='Input map for distribution and composition')
+ parser.add_argument('--r8jar', required=True, help='The R8 jar to compile')
+ parser.add_argument('--r8compiler',
+ default='build/libs/r8_with_deps.jar',
+ help='The R8 compiler to use')
+ return parser.parse_args()
+
def get_r8_version(r8jar):
- with utils.TempDir() as temp:
- name = os.path.join(temp, "VersionExtractor.java")
- fd = open(name, 'w')
- fd.write(VERSION_EXTRACTOR)
- fd.close()
- cmd = [jdk.GetJavacExecutable(), '-cp', r8jar, name]
- print(' '.join(cmd))
- cp_separator = ';' if utils.IsWindows() else ':'
- subprocess.check_call(cmd)
- output = subprocess.check_output([
- jdk.GetJavaExecutable(),
- '-cp',
- cp_separator.join([r8jar, os.path.dirname(name)]),
- 'VersionExtractor'
- ]).decode('UTF-8').strip()
- if output == 'main':
- return subprocess.check_output(
- ['git', 'rev-parse', 'HEAD']).decode('UTF-8').strip()
- else:
- return output
+ with utils.TempDir() as temp:
+ name = os.path.join(temp, "VersionExtractor.java")
+        with open(name, 'w') as fd:
+            fd.write(VERSION_EXTRACTOR)
+ cmd = [jdk.GetJavacExecutable(), '-cp', r8jar, name]
+ print(' '.join(cmd))
+ cp_separator = ';' if utils.IsWindows() else ':'
+ subprocess.check_call(cmd)
+ output = subprocess.check_output([
+ jdk.GetJavaExecutable(), '-cp',
+ cp_separator.join([r8jar, os.path.dirname(name)]),
+ 'VersionExtractor'
+ ]).decode('UTF-8').strip()
+ if output == 'main':
+ return subprocess.check_output(['git', 'rev-parse',
+ 'HEAD']).decode('UTF-8').strip()
+ else:
+ return output
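+# get_r8_version note: a jar built from the main branch reports 'main', in
+# which case the current git HEAD hash is substituted so that map ids below
+# stay unique per build.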
+
def main():
- args = parse_options()
- if not os.path.exists(args.r8jar):
- print("Could not find jar: " + args.r8jar)
- return 1
- version = get_r8_version(args.r8jar)
- variant = '+excldeps' if args.excldeps_variant else ''
- map_id_template = version + variant
- source_file_template = 'R8_%MAP_ID_%MAP_HASH'
- # TODO(b/139725780): See if we can remove or lower the heap size (-Xmx8g).
- cmd = [jdk.GetJavaExecutable(), '-Xmx8g', '-ea']
- if args.debug_agent:
- cmd.extend(['-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'])
- cmd.extend(['-cp', args.r8compiler, 'com.android.tools.r8.R8'])
- cmd.append(args.r8jar)
- if args.debug_variant:
- cmd.append('--debug')
- cmd.append('--classfile')
- cmd.extend(['--map-id-template', map_id_template])
- cmd.extend(['--source-file-template', source_file_template])
- cmd.extend(['--output', args.output])
- cmd.extend(['--pg-map-output', args.output + '.map'])
- cmd.extend(['--partition-map-output', args.output + '_map.zip'])
- cmd.extend(['--lib', jdk.GetJdkHome()])
- if args.pg_conf:
- for pgconf in args.pg_conf:
- cmd.extend(['--pg-conf', pgconf])
- if args.lib:
- for lib in args.lib:
- cmd.extend(['--lib', lib])
- if args.classpath:
- for cp in args.classpath:
- cmd.extend(['--classpath', cp])
- if args.pg_map:
- cmd.extend(['--pg-map', args.pg_map])
- print(' '.join(cmd))
- subprocess.check_call(cmd)
+ args = parse_options()
+ if not os.path.exists(args.r8jar):
+ print("Could not find jar: " + args.r8jar)
+ return 1
+ version = get_r8_version(args.r8jar)
+ variant = '+excldeps' if args.excldeps_variant else ''
+ map_id_template = version + variant
+ source_file_template = 'R8_%MAP_ID_%MAP_HASH'
+ # TODO(b/139725780): See if we can remove or lower the heap size (-Xmx8g).
+ cmd = [jdk.GetJavaExecutable(), '-Xmx8g', '-ea']
+ if args.debug_agent:
+ cmd.extend([
+ '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
+ ])
+ cmd.extend(['-cp', args.r8compiler, 'com.android.tools.r8.R8'])
+ cmd.append(args.r8jar)
+ if args.debug_variant:
+ cmd.append('--debug')
+ cmd.append('--classfile')
+ cmd.extend(['--map-id-template', map_id_template])
+ cmd.extend(['--source-file-template', source_file_template])
+ cmd.extend(['--output', args.output])
+ cmd.extend(['--pg-map-output', args.output + '.map'])
+ cmd.extend(['--partition-map-output', args.output + '_map.zip'])
+ cmd.extend(['--lib', jdk.GetJdkHome()])
+ if args.pg_conf:
+ for pgconf in args.pg_conf:
+ cmd.extend(['--pg-conf', pgconf])
+ if args.lib:
+ for lib in args.lib:
+ cmd.extend(['--lib', lib])
+ if args.classpath:
+ for cp in args.classpath:
+ cmd.extend(['--classpath', cp])
+ if args.pg_map:
+ cmd.extend(['--pg-map', args.pg_map])
+ print(' '.join(cmd))
+ subprocess.check_call(cmd)
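+# The final command is roughly (illustrative shape):
+#   java -Xmx8g -ea -cp <r8compiler> com.android.tools.r8.R8 <r8jar> \
+#     --classfile --map-id-template <id> --source-file-template <template> \
+#     --output <out> --pg-map-output <out>.map --lib <jdk home> [...]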
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
diff --git a/tools/d8.py b/tools/d8.py
index c2dfe87..aab63ed 100755
--- a/tools/d8.py
+++ b/tools/d8.py
@@ -9,45 +9,41 @@
import sys
import toolhelper
+
def ParseOptions(argv):
- parser = optparse.OptionParser(usage='%prog [options] -- [D8 options]')
- parser.add_option(
- '-c',
- '--commit-hash',
- '--commit_hash',
- help='Commit hash of D8 to use.',
- default=None)
- parser.add_option(
- '--print-runtimeraw', '--print_runtimeraw',
- metavar='BENCHMARKNAME',
- help='Print the line \'<BENCHMARKNAME>(RunTimeRaw):' +
- ' <elapsed> ms\' at the end where <elapsed> is' +
- ' the elapsed time in milliseconds.')
- parser.add_option(
- '--version',
- help='Version of D8 to use.',
- default=None)
- parser.add_option(
- '--tag',
- help='Tag of D8 to use.',
- default=None)
- return parser.parse_args(argv)
+ parser = optparse.OptionParser(usage='%prog [options] -- [D8 options]')
+ parser.add_option('-c',
+ '--commit-hash',
+ '--commit_hash',
+ help='Commit hash of D8 to use.',
+ default=None)
+ parser.add_option('--print-runtimeraw',
+ '--print_runtimeraw',
+ metavar='BENCHMARKNAME',
+ help='Print the line \'<BENCHMARKNAME>(RunTimeRaw):' +
+ ' <elapsed> ms\' at the end where <elapsed> is' +
+ ' the elapsed time in milliseconds.')
+ parser.add_option('--version', help='Version of D8 to use.', default=None)
+ parser.add_option('--tag', help='Tag of D8 to use.', default=None)
+ return parser.parse_args(argv)
+
def main(argv):
- (options, args) = ParseOptions(sys.argv)
- d8_args = args[1:]
- time_consumer = lambda duration : print_duration(duration, options)
- return toolhelper.run(
- 'd8',
- d8_args,
- jar=utils.find_r8_jar_from_options(options),
- main='com.android.tools.r8.D8',
- time_consumer=time_consumer)
+ (options, args) = ParseOptions(sys.argv)
+ d8_args = args[1:]
+ time_consumer = lambda duration: print_duration(duration, options)
+ return toolhelper.run('d8',
+ d8_args,
+ jar=utils.find_r8_jar_from_options(options),
+ main='com.android.tools.r8.D8',
+ time_consumer=time_consumer)
+
def print_duration(duration, options):
- benchmark_name = options.print_runtimeraw
- if benchmark_name:
- print('%s-Total(RunTimeRaw): %s ms' % (benchmark_name, duration))
+ benchmark_name = options.print_runtimeraw
+ if benchmark_name:
+ print('%s-Total(RunTimeRaw): %s ms' % (benchmark_name, duration))
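+# Example output (illustrative): 'MyBenchmark-Total(RunTimeRaw): 1234 ms'.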
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/d8logger.py b/tools/d8logger.py
index b523294..73d4a9e 100755
--- a/tools/d8logger.py
+++ b/tools/d8logger.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('d8logger', sys.argv[1:]))
+ sys.exit(toolhelper.run('d8logger', sys.argv[1:]))
diff --git a/tools/defines.py b/tools/defines.py
index 6fdd24f..8f2cecc 100644
--- a/tools/defines.py
+++ b/tools/defines.py
@@ -12,11 +12,14 @@
REPO_ROOT = os.path.realpath(os.path.join(TOOLS_DIR, '..'))
THIRD_PARTY = os.path.join(REPO_ROOT, 'third_party')
+
def IsWindows():
- return sys.platform.startswith('win')
+ return sys.platform.startswith('win')
+
def IsLinux():
- return sys.platform.startswith('linux')
+ return sys.platform.startswith('linux')
+
def IsOsX():
- return sys.platform.startswith('darwin')
\ No newline at end of file
+ return sys.platform.startswith('darwin')
diff --git a/tools/desugar_jdk_libs_repository.py b/tools/desugar_jdk_libs_repository.py
index d128bc6..d2ebeac 100755
--- a/tools/desugar_jdk_libs_repository.py
+++ b/tools/desugar_jdk_libs_repository.py
@@ -17,6 +17,7 @@
import create_maven_release
import archive_desugar_jdk_libs
+
class Variant(Enum):
jdk8 = 'jdk8'
jdk11_legacy = 'jdk11_legacy'
@@ -27,259 +28,274 @@
def __str__(self):
return self.value
+
def parse_options():
- parser = argparse.ArgumentParser(
- description='Local desugared library repository for desugared library configurations')
- parser.add_argument('--repo-root', '--repo_root',
- default='/tmp/repo',
- metavar=('<path>'),
- help='Location for Maven repository.')
- parser.add_argument('--clear-repo', '--clear_repo',
- default=False,
- action='store_true',
- help='Clear the Maven repository so it only has one version present')
- parser.add_argument('--variant', type=Variant, choices=list(Variant))
- parser.add_argument('--desugar-jdk-libs-checkout', '--desugar_jdk_libs_checkout',
- default=None,
- metavar=('<path>'),
- help='Use existing checkout of github.com/google/desugar_jdk_libs.')
- parser.add_argument('--desugar-jdk-libs-revision', '--desugar_jdk_libs_revision',
- default=None,
- metavar=('<revision>'),
- help='Revision of github.com/google/desugar_jdk_libs to use.')
- parser.add_argument('--release-version', '--release_version',
- metavar=('<version>'),
- help='The desugared library release version to use. This will pull from the archived releases')
- args = parser.parse_args()
- return args
+ parser = argparse.ArgumentParser(
+ description=
+ 'Local desugared library repository for desugared library configurations'
+ )
+ parser.add_argument('--repo-root',
+ '--repo_root',
+ default='/tmp/repo',
+ metavar=('<path>'),
+ help='Location for Maven repository.')
+ parser.add_argument(
+ '--clear-repo',
+ '--clear_repo',
+ default=False,
+ action='store_true',
+ help='Clear the Maven repository so it only has one version present')
+ parser.add_argument('--variant', type=Variant, choices=list(Variant))
+ parser.add_argument(
+ '--desugar-jdk-libs-checkout',
+ '--desugar_jdk_libs_checkout',
+ default=None,
+ metavar=('<path>'),
+ help='Use existing checkout of github.com/google/desugar_jdk_libs.')
+ parser.add_argument(
+ '--desugar-jdk-libs-revision',
+ '--desugar_jdk_libs_revision',
+ default=None,
+ metavar=('<revision>'),
+ help='Revision of github.com/google/desugar_jdk_libs to use.')
+ parser.add_argument(
+ '--release-version',
+ '--release_version',
+ metavar=('<version>'),
+ help=
+        'The desugared library release version to use. This will pull from the archived releases.'
+ )
+ args = parser.parse_args()
+ return args
+
def jar_or_pom_file(unzip_dir, artifact, version, extension):
- return join(
- unzip_dir,
- 'com',
- 'android',
- 'tools',
- artifact,
- version,
- artifact + '-' + version + '.' + extension)
+ return join(unzip_dir, 'com', 'android', 'tools', artifact, version,
+ artifact + '-' + version + '.' + extension)
+
def jar_file(unzip_dir, artifact, version):
- return jar_or_pom_file(unzip_dir, artifact, version, 'jar')
+ return jar_or_pom_file(unzip_dir, artifact, version, 'jar')
+
def pom_file(unzip_dir, artifact, version):
- return jar_or_pom_file(unzip_dir, artifact, version, 'pom')
+ return jar_or_pom_file(unzip_dir, artifact, version, 'pom')
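+# For illustration: jar_file(dir, 'desugar_jdk_libs', '2.0.4') resolves to
+# dir/com/android/tools/desugar_jdk_libs/2.0.4/desugar_jdk_libs-2.0.4.jar
+# ('2.0.4' is a made-up example version), matching the layout consumed by
+# `mvn deploy:deploy-file` below.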
+
def run(args):
- artifact = None
- configuration_artifact = None
- configuration = None
- conversions = None
- implementation = None
- version_file = None
- implementation_build_target = None
- implementation_maven_zip = None
- release_archive_location = None
- match args.variant:
- case Variant.jdk8:
- artifact = 'desugar_jdk_libs'
- configuration_artifact = 'desugar_jdk_libs_configuration'
- configuration = utils.DESUGAR_CONFIGURATION
- conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP
- implementation = utils.DESUGAR_IMPLEMENTATION
- version_file = 'VERSION.txt'
- implementation_build_target = ':maven_release'
- implementation_maven_zip = 'desugar_jdk_libs.zip'
- release_archive_location = 'desugar_jdk_libs'
- case Variant.jdk11_legacy:
- artifact = 'desugar_jdk_libs'
- configuration_artifact = 'desugar_jdk_libs_configuration'
- configuration = utils.DESUGAR_CONFIGURATION_JDK11_LEGACY
- conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP
- implementation = utils.DESUGAR_IMPLEMENTATION_JDK11
- version_file = 'VERSION_JDK11_LEGACY.txt'
- implementation_build_target = ':maven_release_jdk11_legacy'
- implementation_maven_zip = 'desugar_jdk_libs_jdk11_legacy.zip'
- release_archive_location = 'desugar_jdk_libs'
- case Variant.jdk11_minimal:
- artifact = 'desugar_jdk_libs_minimal'
- configuration_artifact = 'desugar_jdk_libs_configuration_minimal'
- configuration = utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL
- conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP
- implementation = utils.DESUGAR_IMPLEMENTATION_JDK11
- version_file = 'VERSION_JDK11_MINIMAL.txt'
- implementation_build_target = ':maven_release_jdk11_minimal'
- implementation_maven_zip = 'desugar_jdk_libs_jdk11_minimal.zip'
- release_archive_location = 'desugar_jdk_libs_minimal'
- case Variant.jdk11:
- artifact = 'desugar_jdk_libs'
- configuration_artifact = 'desugar_jdk_libs_configuration'
- configuration = utils.DESUGAR_CONFIGURATION_JDK11
- conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP
- implementation = utils.DESUGAR_IMPLEMENTATION_JDK11
- version_file = 'VERSION_JDK11.txt'
- implementation_build_target = ':maven_release_jdk11'
- implementation_maven_zip = 'desugar_jdk_libs_jdk11.zip'
- release_archive_location = 'desugar_jdk_libs'
- case Variant.jdk11_nio:
- artifact = 'desugar_jdk_libs_nio'
- configuration_artifact = 'desugar_jdk_libs_configuration_nio'
- configuration = utils.DESUGAR_CONFIGURATION_JDK11_NIO
- conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP
- implementation = utils.DESUGAR_IMPLEMENTATION_JDK11
- version_file = 'VERSION_JDK11_NIO.txt'
- implementation_build_target = ':maven_release_jdk11_nio'
- implementation_maven_zip = 'desugar_jdk_libs_jdk11_nio.zip'
- release_archive_location = 'desugar_jdk_libs_nio'
- implementation_build_output = join('bazel-bin', implementation_maven_zip)
- gradle.RunGradle([utils.GRADLE_TASK_R8])
- with utils.TempDir() as tmp_dir:
- (name, configuration_version) = utils.desugar_configuration_name_and_version(configuration, False)
- if (args.release_version != None and args.release_version != configuration_version):
- raise Exception(
- 'Configuration version %s is different for specified version %s'
- % (configuration_version, version))
- version = configuration_version
- print("Name: %s" % name)
- print("Version: %s" % version)
- # Checkout desugar_jdk_libs from GitHub
- use_existing_checkout = args.desugar_jdk_libs_checkout != None
- checkout_dir = (args.desugar_jdk_libs_checkout
- if use_existing_checkout
- else join(tmp_dir, 'desugar_jdk_libs'))
- if (not args.release_version and not use_existing_checkout):
- subprocess.check_call(
- ['git', 'clone', 'https://github.com/google/desugar_jdk_libs.git', checkout_dir])
- if (args.desugar_jdk_libs_revision):
- subprocess.check_call(
- ['git', '-C', checkout_dir, 'checkout', args.desugar_jdk_libs_revision])
- with utils.ChangedWorkingDirectory(checkout_dir):
- with open(version_file) as version_file:
- version_file_lines = version_file.readlines()
- for line in version_file_lines:
- if not line.startswith('#'):
- desugar_jdk_libs_version = line.strip()
- if (version != desugar_jdk_libs_version):
- raise Exception(
- "Version mismatch. Configuration has version '"
- + version
- + "', and desugar_jdk_libs has version '"
- + desugar_jdk_libs_version
- + "'")
+ artifact = None
+ configuration_artifact = None
+ configuration = None
+ conversions = None
+ implementation = None
+ version_file = None
+ implementation_build_target = None
+ implementation_maven_zip = None
+ release_archive_location = None
+ match args.variant:
+ case Variant.jdk8:
+ artifact = 'desugar_jdk_libs'
+ configuration_artifact = 'desugar_jdk_libs_configuration'
+ configuration = utils.DESUGAR_CONFIGURATION
+ conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP
+ implementation = utils.DESUGAR_IMPLEMENTATION
+ version_file = 'VERSION.txt'
+ implementation_build_target = ':maven_release'
+ implementation_maven_zip = 'desugar_jdk_libs.zip'
+ release_archive_location = 'desugar_jdk_libs'
+ case Variant.jdk11_legacy:
+ artifact = 'desugar_jdk_libs'
+ configuration_artifact = 'desugar_jdk_libs_configuration'
+ configuration = utils.DESUGAR_CONFIGURATION_JDK11_LEGACY
+ conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_LEGACY_ZIP
+ implementation = utils.DESUGAR_IMPLEMENTATION_JDK11
+ version_file = 'VERSION_JDK11_LEGACY.txt'
+ implementation_build_target = ':maven_release_jdk11_legacy'
+ implementation_maven_zip = 'desugar_jdk_libs_jdk11_legacy.zip'
+ release_archive_location = 'desugar_jdk_libs'
+ case Variant.jdk11_minimal:
+ artifact = 'desugar_jdk_libs_minimal'
+ configuration_artifact = 'desugar_jdk_libs_configuration_minimal'
+ configuration = utils.DESUGAR_CONFIGURATION_JDK11_MINIMAL
+ conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP
+ implementation = utils.DESUGAR_IMPLEMENTATION_JDK11
+ version_file = 'VERSION_JDK11_MINIMAL.txt'
+ implementation_build_target = ':maven_release_jdk11_minimal'
+ implementation_maven_zip = 'desugar_jdk_libs_jdk11_minimal.zip'
+ release_archive_location = 'desugar_jdk_libs_minimal'
+ case Variant.jdk11:
+ artifact = 'desugar_jdk_libs'
+ configuration_artifact = 'desugar_jdk_libs_configuration'
+ configuration = utils.DESUGAR_CONFIGURATION_JDK11
+ conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP
+ implementation = utils.DESUGAR_IMPLEMENTATION_JDK11
+ version_file = 'VERSION_JDK11.txt'
+ implementation_build_target = ':maven_release_jdk11'
+ implementation_maven_zip = 'desugar_jdk_libs_jdk11.zip'
+ release_archive_location = 'desugar_jdk_libs'
+ case Variant.jdk11_nio:
+ artifact = 'desugar_jdk_libs_nio'
+ configuration_artifact = 'desugar_jdk_libs_configuration_nio'
+ configuration = utils.DESUGAR_CONFIGURATION_JDK11_NIO
+ conversions = utils.LIBRARY_DESUGAR_CONVERSIONS_ZIP
+ implementation = utils.DESUGAR_IMPLEMENTATION_JDK11
+ version_file = 'VERSION_JDK11_NIO.txt'
+ implementation_build_target = ':maven_release_jdk11_nio'
+ implementation_maven_zip = 'desugar_jdk_libs_jdk11_nio.zip'
+ release_archive_location = 'desugar_jdk_libs_nio'
+ implementation_build_output = join('bazel-bin', implementation_maven_zip)
+ gradle.RunGradle([utils.GRADLE_TASK_R8])
+ with utils.TempDir() as tmp_dir:
+ (name,
+ configuration_version) = utils.desugar_configuration_name_and_version(
+ configuration, False)
+        if (args.release_version is not None and
+                args.release_version != configuration_version):
+            raise Exception(
+                'Configuration version %s is different for specified version %s'
+                % (configuration_version, args.release_version))
+ version = configuration_version
+ print("Name: %s" % name)
+ print("Version: %s" % version)
+ # Checkout desugar_jdk_libs from GitHub
+        use_existing_checkout = args.desugar_jdk_libs_checkout is not None
+ checkout_dir = (args.desugar_jdk_libs_checkout if use_existing_checkout
+ else join(tmp_dir, 'desugar_jdk_libs'))
+ if (not args.release_version and not use_existing_checkout):
+ subprocess.check_call([
+ 'git', 'clone',
+ 'https://github.com/google/desugar_jdk_libs.git', checkout_dir
+ ])
+ if (args.desugar_jdk_libs_revision):
+ subprocess.check_call([
+ 'git', '-C', checkout_dir, 'checkout',
+ args.desugar_jdk_libs_revision
+ ])
+ with utils.ChangedWorkingDirectory(checkout_dir):
+        with open(version_file) as vf:
+            version_file_lines = vf.readlines()
+ for line in version_file_lines:
+ if not line.startswith('#'):
+ desugar_jdk_libs_version = line.strip()
+ if (version != desugar_jdk_libs_version):
+ raise Exception(
+ "Version mismatch. Configuration has version '"
+ + version +
+ "', and desugar_jdk_libs has version '" +
+ desugar_jdk_libs_version + "'")
- # Build desugared library configuration.
- print("Building desugared library configuration " + version)
- maven_zip = join(tmp_dir, 'desugar_configuration.zip')
- create_maven_release.generate_desugar_configuration_maven_zip(
- maven_zip,
- configuration,
- implementation,
- conversions)
- unzip_dir = join(tmp_dir, 'desugar_jdk_libs_configuration_unzipped')
- cmd = ['unzip', '-q', maven_zip, '-d', unzip_dir]
- subprocess.check_call(cmd)
- cmd = [
- 'mvn',
- 'deploy:deploy-file',
- '-Durl=file:' + args.repo_root,
- '-DrepositoryId=someName',
- '-Dfile=' + jar_file(unzip_dir, configuration_artifact, version),
- '-DpomFile=' + pom_file(unzip_dir, configuration_artifact, version)]
- subprocess.check_call(cmd)
+ # Build desugared library configuration.
+ print("Building desugared library configuration " + version)
+ maven_zip = join(tmp_dir, 'desugar_configuration.zip')
+ create_maven_release.generate_desugar_configuration_maven_zip(
+ maven_zip, configuration, implementation, conversions)
+ unzip_dir = join(tmp_dir, 'desugar_jdk_libs_configuration_unzipped')
+ cmd = ['unzip', '-q', maven_zip, '-d', unzip_dir]
+ subprocess.check_call(cmd)
+ cmd = [
+ 'mvn', 'deploy:deploy-file', '-Durl=file:' + args.repo_root,
+ '-DrepositoryId=someName',
+ '-Dfile=' + jar_file(unzip_dir, configuration_artifact, version),
+ '-DpomFile=' + pom_file(unzip_dir, configuration_artifact, version)
+ ]
+ subprocess.check_call(cmd)
- undesugared_if_needed = None
- if not args.release_version:
- # Build desugared library.
- print("Building desugared library " + version)
- with utils.ChangedWorkingDirectory(checkout_dir):
- subprocess.check_call([
- 'bazel',
- '--bazelrc=/dev/null',
- 'build',
- '--spawn_strategy=local',
- '--verbose_failures',
- implementation_build_target])
+ undesugared_if_needed = None
+ if not args.release_version:
+ # Build desugared library.
+ print("Building desugared library " + version)
+ with utils.ChangedWorkingDirectory(checkout_dir):
+ subprocess.check_call([
+ 'bazel', '--bazelrc=/dev/null', 'build',
+ '--spawn_strategy=local', '--verbose_failures',
+ implementation_build_target
+ ])
- # Undesugar desugared library if needed.
- undesugared_if_needed = join(checkout_dir, implementation_build_output)
- if (args.variant == Variant.jdk11_minimal
- or args.variant == Variant.jdk11
- or args.variant == Variant.jdk11_nio):
- undesugared_if_needed = join(tmp_dir, 'undesugared.zip')
- archive_desugar_jdk_libs.Undesugar(
- str(args.variant),
- join(checkout_dir, implementation_build_output),
- version,
- undesugared_if_needed)
- else:
- # Download the already built and undesugared library from release archive.
- undesugared_if_needed = join(tmp_dir, implementation_maven_zip)
- urllib.request.urlretrieve(
- ('https://storage.googleapis.com/r8-releases/raw/%s/%s/%s'
- % (release_archive_location, version, implementation_maven_zip)),
- undesugared_if_needed)
+ # Undesugar desugared library if needed.
+ undesugared_if_needed = join(checkout_dir,
+ implementation_build_output)
+ if (args.variant == Variant.jdk11_minimal or
+ args.variant == Variant.jdk11 or
+ args.variant == Variant.jdk11_nio):
+ undesugared_if_needed = join(tmp_dir, 'undesugared.zip')
+ archive_desugar_jdk_libs.Undesugar(
+ str(args.variant),
+ join(checkout_dir, implementation_build_output), version,
+ undesugared_if_needed)
+ else:
+ # Download the already built and undesugared library from release archive.
+ undesugared_if_needed = join(tmp_dir, implementation_maven_zip)
+ urllib.request.urlretrieve(
+ ('https://storage.googleapis.com/r8-releases/raw/%s/%s/%s' %
+ (release_archive_location, version, implementation_maven_zip)),
+ undesugared_if_needed)
- unzip_dir = join(tmp_dir, 'desugar_jdk_libs_unzipped')
- cmd = [
- 'unzip',
- '-q',
- undesugared_if_needed,
- '-d',
- unzip_dir]
- subprocess.check_call(cmd)
- cmd = [
- 'mvn',
- 'deploy:deploy-file',
- '-Durl=file:' + args.repo_root,
- '-DrepositoryId=someName',
- '-Dfile=' + jar_file(unzip_dir, artifact, version),
- '-DpomFile=' + pom_file(unzip_dir, artifact, version)]
- subprocess.check_call(cmd)
+ unzip_dir = join(tmp_dir, 'desugar_jdk_libs_unzipped')
+ cmd = ['unzip', '-q', undesugared_if_needed, '-d', unzip_dir]
+ subprocess.check_call(cmd)
+ cmd = [
+ 'mvn', 'deploy:deploy-file', '-Durl=file:' + args.repo_root,
+ '-DrepositoryId=someName',
+ '-Dfile=' + jar_file(unzip_dir, artifact, version),
+ '-DpomFile=' + pom_file(unzip_dir, artifact, version)
+ ]
+ subprocess.check_call(cmd)
- print()
- print("Artifacts:")
- print(" com.android.tools:%s:%s" % (configuration_artifact, version))
- print(" com.android.tools:%s:%s" % (artifact, version))
- print()
- print("deployed to Maven repository at " + args.repo_root + ".")
- print()
- print("Add")
- print()
- print(" maven {")
- print(" url uri('file://" + args.repo_root + "')")
- print(" }")
- print()
- print("to dependencyResolutionManagement.repositories in settings.gradle, and use")
- print('the "changing" property of the coreLibraryDesugaring dependency:')
- print()
- print(" coreLibraryDesugaring('com.android.tools:%s:%s') {" % (artifact, version))
- print(" changing = true")
- print(" }")
- print()
- print('If not using the "changing" propertyRemember to run gradle with '
- + " --refresh-dependencies (./gradlew --refresh-dependencies ...) "
- + "to ensure the cache is not used when the same version is published."
- + "multiple times.")
+ print()
+ print("Artifacts:")
+ print(" com.android.tools:%s:%s" % (configuration_artifact, version))
+ print(" com.android.tools:%s:%s" % (artifact, version))
+ print()
+ print("deployed to Maven repository at " + args.repo_root + ".")
+ print()
+ print("Add")
+ print()
+ print(" maven {")
+ print(" url uri('file://" + args.repo_root + "')")
+ print(" }")
+ print()
+ print(
+ "to dependencyResolutionManagement.repositories in settings.gradle, and use"
+ )
+ print(
+ 'the "changing" property of the coreLibraryDesugaring dependency:')
+ print()
+ print(" coreLibraryDesugaring('com.android.tools:%s:%s') {" %
+ (artifact, version))
+ print(" changing = true")
+ print(" }")
+ print()
+        print(
+            'If not using the "changing" property, remember to run gradle '
+            'with --refresh-dependencies (./gradlew --refresh-dependencies '
+            '...) to ensure the cache is not used when the same version is '
+            'published multiple times.')
+
def main():
- args = parse_options()
- if args.desugar_jdk_libs_checkout and args.release_version:
- raise Exception(
- 'Options --desugar-jdk-libs-checkout and --release-version are mutually exclusive')
- if args.desugar_jdk_libs_revision and args.release_version:
- raise Exception(
- 'Options --desugar-jdk-libs-revision and --release-version are mutually exclusive')
- if args.desugar_jdk_libs_checkout and args.desugar_jdk_libs_revision:
- raise Exception(
- 'Options --desugar-jdk-libs-checkout and --desugar-jdk-libs-revision are mutually exclusive')
- if args.clear_repo:
- shutil.rmtree(args.repo_root, ignore_errors=True)
- utils.makedirs_if_needed(args.repo_root)
- if (args.variant):
- run(args)
- else:
- for v in Variant:
- args.variant = v
- run(args)
+ args = parse_options()
+ if args.desugar_jdk_libs_checkout and args.release_version:
+ raise Exception(
+ 'Options --desugar-jdk-libs-checkout and --release-version are mutually exclusive'
+ )
+ if args.desugar_jdk_libs_revision and args.release_version:
+ raise Exception(
+ 'Options --desugar-jdk-libs-revision and --release-version are mutually exclusive'
+ )
+ if args.desugar_jdk_libs_checkout and args.desugar_jdk_libs_revision:
+ raise Exception(
+ 'Options --desugar-jdk-libs-checkout and --desugar-jdk-libs-revision are mutually exclusive'
+ )
+ if args.clear_repo:
+ shutil.rmtree(args.repo_root, ignore_errors=True)
+ utils.makedirs_if_needed(args.repo_root)
+ if (args.variant):
+ run(args)
+ else:
+ for v in Variant:
+ args.variant = v
+ run(args)
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
diff --git a/tools/desugar_jdk_libs_update.py b/tools/desugar_jdk_libs_update.py
index c1aa5df..3514cf8 100755
--- a/tools/desugar_jdk_libs_update.py
+++ b/tools/desugar_jdk_libs_update.py
@@ -14,108 +14,107 @@
import utils
+
def sed(pattern, replace, path):
- with open(path, "r") as sources:
- lines = sources.readlines()
- with open(path, "w") as sources:
- for line in lines:
- sources.write(re.sub(pattern, replace, line))
+ with open(path, "r") as sources:
+ lines = sources.readlines()
+ with open(path, "w") as sources:
+ for line in lines:
+ sources.write(re.sub(pattern, replace, line))
+
def GetGitHash(checkout_dir):
- return subprocess.check_output(
- ['git', '-C', checkout_dir, 'rev-parse', 'HEAD']).decode('utf-8').strip()
+ return subprocess.check_output(
+ ['git', '-C', checkout_dir, 'rev-parse',
+ 'HEAD']).decode('utf-8').strip()
+
def run(args):
- with utils.TempDir() as tmp_dir:
- use_existing_checkout = args.desugar_jdk_libs_checkout != None
- checkout_dir = (args.desugar_jdk_libs_checkout
- if use_existing_checkout
- else join(tmp_dir, 'desugar_jdk_libs'))
- if (not use_existing_checkout):
- subprocess.check_call(
- ['git', 'clone', 'https://github.com/google/desugar_jdk_libs.git', checkout_dir])
- if (args.desugar_jdk_libs_revision):
- subprocess.check_call(
- ['git', '-C', checkout_dir, 'checkout', args.desugar_jdk_libs_revision])
- print("Hack to workaround b/256723819")
- os.remove(
- join(
- checkout_dir,
- "jdk11",
- "src",
- "java.base",
- "share",
- "classes",
- "java",
- "time",
- "format",
- "DesugarDateTimeFormatterBuilder.java"))
- print("Building desugared library")
- bazel = os.path.join(utils.BAZEL_TOOL, 'lib', 'bazel', 'bin', 'bazel')
- with utils.ChangedWorkingDirectory(checkout_dir):
- for target in [':desugar_jdk_libs_jdk11', '//jdk11/src:java_base_chm_only']:
- subprocess.check_call([
- bazel,
- '--bazelrc=/dev/null',
- 'build',
- '--spawn_strategy=local',
- '--verbose_failures',
- target])
+ with utils.TempDir() as tmp_dir:
+        use_existing_checkout = args.desugar_jdk_libs_checkout is not None
+ checkout_dir = (args.desugar_jdk_libs_checkout if use_existing_checkout
+ else join(tmp_dir, 'desugar_jdk_libs'))
+ if (not use_existing_checkout):
+ subprocess.check_call([
+ 'git', 'clone',
+ 'https://github.com/google/desugar_jdk_libs.git', checkout_dir
+ ])
+ if (args.desugar_jdk_libs_revision):
+ subprocess.check_call([
+ 'git', '-C', checkout_dir, 'checkout',
+ args.desugar_jdk_libs_revision
+ ])
+ print("Hack to workaround b/256723819")
+ os.remove(
+ join(checkout_dir, "jdk11", "src", "java.base", "share", "classes",
+ "java", "time", "format",
+ "DesugarDateTimeFormatterBuilder.java"))
+ print("Building desugared library")
+ bazel = os.path.join(utils.BAZEL_TOOL, 'lib', 'bazel', 'bin', 'bazel')
+ with utils.ChangedWorkingDirectory(checkout_dir):
+ for target in [
+ ':desugar_jdk_libs_jdk11', '//jdk11/src:java_base_chm_only'
+ ]:
+ subprocess.check_call([
+ bazel, '--bazelrc=/dev/null', 'build',
+ '--spawn_strategy=local', '--verbose_failures', target
+ ])
- openjdk_dir = join('third_party', 'openjdk')
- openjdk_subdir = 'desugar_jdk_libs_11'
- dest_dir = join(openjdk_dir, openjdk_subdir)
- src_dir = join(checkout_dir, 'bazel-bin', 'jdk11', 'src')
+ openjdk_dir = join('third_party', 'openjdk')
+ openjdk_subdir = 'desugar_jdk_libs_11'
+ dest_dir = join(openjdk_dir, openjdk_subdir)
+ src_dir = join(checkout_dir, 'bazel-bin', 'jdk11', 'src')
- metadata_files = ('LICENSE', 'README.google')
- for f in metadata_files:
- shutil.copyfile(join(dest_dir, f), join(tmp_dir, f))
- shutil.rmtree(dest_dir)
- os.remove(join(openjdk_dir, openjdk_subdir + '.tar.gz'))
- os.remove(join(openjdk_dir, openjdk_subdir + '.tar.gz.sha1'))
- os.mkdir(dest_dir)
- for s in [
- (join(src_dir, 'd8_java_base_selected_with_addon.jar'),
- join(dest_dir, 'desugar_jdk_libs.jar')),
- (join(src_dir, 'java_base_chm_only.jar'),
- join(dest_dir, 'desugar_jdk_libs_chm_only.jar'))]:
- shutil.copyfile(s[0], s[1])
- for f in metadata_files:
- shutil.copyfile(join(tmp_dir, f), join(dest_dir, f))
- desugar_jdk_libs_hash = os.path.join(dest_dir, 'desugar_jdk_libs_hash')
- with open(desugar_jdk_libs_hash, 'w') as desugar_jdk_libs_hash_writer:
- desugar_jdk_libs_hash_writer.write(GetGitHash(checkout_dir))
- sed('^Version: [0-9a-f]{40}$',
- 'Version: %s' % GetGitHash(checkout_dir),
- join(dest_dir, 'README.google'))
- sed('^Date: .*$',
- 'Date: %s' % datetime.today().strftime('%Y-%m-%d'),
- join(dest_dir, 'README.google'))
+ metadata_files = ('LICENSE', 'README.google')
+ for f in metadata_files:
+ shutil.copyfile(join(dest_dir, f), join(tmp_dir, f))
+ shutil.rmtree(dest_dir)
+ os.remove(join(openjdk_dir, openjdk_subdir + '.tar.gz'))
+ os.remove(join(openjdk_dir, openjdk_subdir + '.tar.gz.sha1'))
+ os.mkdir(dest_dir)
+ for s in [(join(src_dir, 'd8_java_base_selected_with_addon.jar'),
+ join(dest_dir, 'desugar_jdk_libs.jar')),
+ (join(src_dir, 'java_base_chm_only.jar'),
+ join(dest_dir, 'desugar_jdk_libs_chm_only.jar'))]:
+ shutil.copyfile(s[0], s[1])
+ for f in metadata_files:
+ shutil.copyfile(join(tmp_dir, f), join(dest_dir, f))
+ desugar_jdk_libs_hash = os.path.join(dest_dir, 'desugar_jdk_libs_hash')
+ with open(desugar_jdk_libs_hash, 'w') as desugar_jdk_libs_hash_writer:
+ desugar_jdk_libs_hash_writer.write(GetGitHash(checkout_dir))
+ sed('^Version: [0-9a-f]{40}$', 'Version: %s' % GetGitHash(checkout_dir),
+ join(dest_dir, 'README.google'))
+ sed('^Date: .*$', 'Date: %s' % datetime.today().strftime('%Y-%m-%d'),
+ join(dest_dir, 'README.google'))
- print('Now run')
- print(' (cd %s; upload_to_google_storage.py -a --bucket r8-deps %s)'
- % (openjdk_dir, openjdk_subdir))
-
+ print('Now run')
+ print(' (cd %s; upload_to_google_storage.py -a --bucket r8-deps %s)' %
+ (openjdk_dir, openjdk_subdir))
def main():
- args = parse_options()
- run(args)
+ args = parse_options()
+ run(args)
+
def parse_options():
- parser = argparse.ArgumentParser(
- description='Script for updating third_party/openjdk/desugar_jdk_libs*')
- parser.add_argument('--desugar-jdk-libs-checkout', '--desugar_jdk_libs_checkout',
- default=None,
- metavar=('<path>'),
- help='Use existing checkout of github.com/google/desugar_jdk_libs.')
- parser.add_argument('--desugar-jdk-libs-revision', '--desugar_jdk_libs_revision',
- default=None,
- metavar=('<revision>'),
- help='Revision of github.com/google/desugar_jdk_libs to use.')
- args = parser.parse_args()
- return args
+ parser = argparse.ArgumentParser(
+ description='Script for updating third_party/openjdk/desugar_jdk_libs*')
+ parser.add_argument(
+ '--desugar-jdk-libs-checkout',
+ '--desugar_jdk_libs_checkout',
+ default=None,
+ metavar=('<path>'),
+ help='Use existing checkout of github.com/google/desugar_jdk_libs.')
+ parser.add_argument(
+ '--desugar-jdk-libs-revision',
+ '--desugar_jdk_libs_revision',
+ default=None,
+ metavar=('<revision>'),
+ help='Revision of github.com/google/desugar_jdk_libs to use.')
+ args = parser.parse_args()
+ return args
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
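
The sed() helper above is a whole-file re.sub rewrite, used to refresh the
Version and Date lines of README.google. A self-contained usage sketch against
a scratch file (paths and values here are illustrative):

    import os
    import re
    import tempfile

    def sed(pattern, replace, path):
        # Same shape as the helper above: read all lines, rewrite in place.
        with open(path, 'r') as sources:
            lines = sources.readlines()
        with open(path, 'w') as sources:
            for line in lines:
                sources.write(re.sub(pattern, replace, line))

    with tempfile.NamedTemporaryFile('w', delete=False) as f:
        f.write('Version: ' + '0' * 40 + '\n' + 'Date: 2020-01-01\n')
        path = f.name
    sed('^Version: [0-9a-f]{40}$', 'Version: ' + 'f' * 40, path)
    sed('^Date: .*$', 'Date: 2023-11-01', path)
    print(open(path).read())
    os.unlink(path)
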
diff --git a/tools/dex2oat.py b/tools/dex2oat.py
index 52082cc..8df8f86 100755
--- a/tools/dex2oat.py
+++ b/tools/dex2oat.py
@@ -15,172 +15,179 @@
LATEST = '12.0.0'
VERSIONS = [
- '12.0.0',
- # TODO(b/258170524): Fix the broken dex2oat versions.
- # 'default',
- # '9.0.0',
- # '8.1.0',
- # '7.0.0',
- '6.0.1',
- # '5.1.1',
+ '12.0.0',
+ # TODO(b/258170524): Fix the broken dex2oat versions.
+ # 'default',
+ # '9.0.0',
+ # '8.1.0',
+ # '7.0.0',
+ '6.0.1',
+ # '5.1.1',
]
DIRS = {
- '12.0.0': 'host/art-12.0.0-beta4',
- 'default': 'art',
- '9.0.0': 'art-9.0.0',
- '8.1.0': 'art-8.1.0',
- '7.0.0': 'art-7.0.0',
- '6.0.1': 'art-6.0.1',
- '5.1.1': 'art-5.1.1',
+ '12.0.0': 'host/art-12.0.0-beta4',
+ 'default': 'art',
+ '9.0.0': 'art-9.0.0',
+ '8.1.0': 'art-8.1.0',
+ '7.0.0': 'art-7.0.0',
+ '6.0.1': 'art-6.0.1',
+ '5.1.1': 'art-5.1.1',
}
PRODUCTS = {
- '12.0.0': 'redfin',
- 'default': 'angler',
- '9.0.0': 'marlin',
- '8.1.0': 'marlin',
- '7.0.0': 'angler',
- '6.0.1': 'angler',
- '5.1.1': 'mako',
+ '12.0.0': 'redfin',
+ 'default': 'angler',
+ '9.0.0': 'marlin',
+ '8.1.0': 'marlin',
+ '7.0.0': 'angler',
+ '6.0.1': 'angler',
+ '5.1.1': 'mako',
}
ARCHS = {
- '12.0.0': 'x86_64',
- 'default': 'arm64',
- '9.0.0': 'arm64',
- '8.1.0': 'arm64',
- '7.0.0': 'arm64',
- '6.0.1': 'arm64',
- '5.1.1': 'arm',
+ '12.0.0': 'x86_64',
+ 'default': 'arm64',
+ '9.0.0': 'arm64',
+ '8.1.0': 'arm64',
+ '7.0.0': 'arm64',
+ '6.0.1': 'arm64',
+ '5.1.1': 'arm',
}
VERBOSE_OPTIONS = [
- 'verifier',
- 'compiler',
- 'gc',
- 'jit',
- 'jni',
- 'class',
- 'all',
+ 'verifier',
+ 'compiler',
+ 'gc',
+ 'jit',
+ 'jni',
+ 'class',
+ 'all',
]
-BOOT_IMAGE = {
- '12.0.0': 'apex/art_boot_images/javalib/boot.art'
-}
+BOOT_IMAGE = {'12.0.0': 'apex/art_boot_images/javalib/boot.art'}
+
def ParseOptions():
- parser = optparse.OptionParser()
- parser.add_option('--version',
- help='Version of dex2oat. (defaults to latest: ' + LATEST + ').',
- choices=VERSIONS,
- default=LATEST)
- parser.add_option('--device',
- help='Run dex2oat on this device (this is passed as the -s SERIAL.')
- parser.add_option('--all',
- help='Run dex2oat on all possible versions',
- default=False,
- action='store_true')
- parser.add_option('--output',
- help='Where to place the output oat (defaults to no output / temp file).',
- default=None)
- parser.add_option('--verbose',
- help='Enable verbose dex2oat logging.',
- choices=VERBOSE_OPTIONS,
- default=None)
- return parser.parse_args()
+ parser = optparse.OptionParser()
+ parser.add_option('--version',
+                      help='Version of dex2oat (defaults to latest: ' +
+ LATEST + ').',
+ choices=VERSIONS,
+ default=LATEST)
+ parser.add_option(
+ '--device',
+        help='Run dex2oat on this device (this is passed as the -s SERIAL).')
+ parser.add_option('--all',
+ help='Run dex2oat on all possible versions',
+ default=False,
+ action='store_true')
+ parser.add_option(
+ '--output',
+ help=
+ 'Where to place the output oat (defaults to no output / temp file).',
+ default=None)
+ parser.add_option('--verbose',
+ help='Enable verbose dex2oat logging.',
+ choices=VERBOSE_OPTIONS,
+ default=None)
+ return parser.parse_args()
+
def Main():
- (options, args) = ParseOptions()
- if len(args) != 1:
- print("Can only take a single dex/zip/jar/apk file as input.")
- return 1
- if (options.device):
- return run_device_dex2oat(options, args)
- else:
- return run_host_dex2oat(options, args)
+ (options, args) = ParseOptions()
+ if len(args) != 1:
+ print("Can only take a single dex/zip/jar/apk file as input.")
+ return 1
+ if (options.device):
+ return run_device_dex2oat(options, args)
+ else:
+ return run_host_dex2oat(options, args)
+
def run_host_dex2oat(options, args):
- if options.all and options.output:
- print("Can't write output when running all versions.")
- return 1
- dexfile = args[0]
- oatfile = options.output
- versions = VERSIONS if options.all else [options.version]
- for version in versions:
- run(options, dexfile, oatfile, version)
- print("")
- return 0
+ if options.all and options.output:
+ print("Can't write output when running all versions.")
+ return 1
+ dexfile = args[0]
+ oatfile = options.output
+ versions = VERSIONS if options.all else [options.version]
+ for version in versions:
+ run(options, dexfile, oatfile, version)
+ print("")
+ return 0
+
def adb_cmd(serial, *args):
- cmd = ['adb', '-s', serial]
- cmd.extend(args)
- return cmd
+ cmd = ['adb', '-s', serial]
+ cmd.extend(args)
+ return cmd
+
def append_dex2oat_verbose_flags(options, cmd):
- verbose = [options.verbose] if options.verbose else []
- if 'all' in verbose:
- verbose = [x for x in VERBOSE_OPTIONS if x != 'all']
- for flag in verbose:
- cmd += ['--runtime-arg', '-verbose:' + flag]
- return cmd
+ verbose = [options.verbose] if options.verbose else []
+ if 'all' in verbose:
+ verbose = [x for x in VERBOSE_OPTIONS if x != 'all']
+ for flag in verbose:
+ cmd += ['--runtime-arg', '-verbose:' + flag]
+ return cmd
+
def run_device_dex2oat(options, args):
- serial = options.device
- dexfile = args[0]
- device_dexfile = '/data/local/tmp/' + os.path.basename(dexfile)
- device_oatfile = '/data/local/tmp/unused.oat'
- cmd = adb_cmd(serial, 'shell', 'rm', '-f', device_dexfile, device_oatfile)
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd)
- cmd = adb_cmd(serial, 'push', dexfile, device_dexfile)
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd)
- cmd = adb_cmd(serial, 'logcat', '-c')
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd)
- cmd = adb_cmd(
- serial,
- 'shell',
- 'dex2oat',
- '--dex-file=' + device_dexfile,
- '--oat-file=/data/local/tmp/unused.oat')
- append_dex2oat_verbose_flags(options, cmd)
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd)
- cmd = adb_cmd(serial, 'logcat', '-d', '-s', 'dex2oat')
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd)
- return 0
+ serial = options.device
+ dexfile = args[0]
+ device_dexfile = '/data/local/tmp/' + os.path.basename(dexfile)
+ device_oatfile = '/data/local/tmp/unused.oat'
+ cmd = adb_cmd(serial, 'shell', 'rm', '-f', device_dexfile, device_oatfile)
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd)
+ cmd = adb_cmd(serial, 'push', dexfile, device_dexfile)
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd)
+ cmd = adb_cmd(serial, 'logcat', '-c')
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd)
+ cmd = adb_cmd(serial, 'shell', 'dex2oat', '--dex-file=' + device_dexfile,
+ '--oat-file=/data/local/tmp/unused.oat')
+ append_dex2oat_verbose_flags(options, cmd)
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd)
+ cmd = adb_cmd(serial, 'logcat', '-d', '-s', 'dex2oat')
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd)
+ return 0
+
def run(options, dexfile, oatfile=None, version=None):
- if not version:
- version = LATEST
- # dex2oat accepts non-existent dex files, check here instead
- if not os.path.exists(dexfile):
- raise Exception('DEX file not found: "{}"'.format(dexfile))
- with utils.TempDir() as temp:
- if not oatfile:
- oatfile = os.path.join(temp, "out.oat")
- base = os.path.join(LINUX_DIR, DIRS[version])
- product = PRODUCTS[version]
- arch = ARCHS[version]
- cmd = [
- os.path.join(base, 'bin', 'dex2oat'),
- '--android-root=' + os.path.join(base, 'product', product, 'system'),
- '--runtime-arg',
- '-Xnorelocate',
- '--dex-file=' + dexfile,
- '--oat-file=' + oatfile,
- '--instruction-set=' + arch,
- ]
- append_dex2oat_verbose_flags(options, cmd)
- if version in BOOT_IMAGE:
- cmd += ['--boot-image=' + BOOT_IMAGE[version]]
- env = {"LD_LIBRARY_PATH": os.path.join(base, 'lib')}
- utils.PrintCmd(cmd)
- with utils.ChangedWorkingDirectory(base):
- subprocess.check_call(cmd, env = env)
+ if not version:
+ version = LATEST
+ # dex2oat accepts non-existent dex files, check here instead
+ if not os.path.exists(dexfile):
+ raise Exception('DEX file not found: "{}"'.format(dexfile))
+ with utils.TempDir() as temp:
+ if not oatfile:
+ oatfile = os.path.join(temp, "out.oat")
+ base = os.path.join(LINUX_DIR, DIRS[version])
+ product = PRODUCTS[version]
+ arch = ARCHS[version]
+ cmd = [
+ os.path.join(base, 'bin', 'dex2oat'),
+ '--android-root=' +
+ os.path.join(base, 'product', product, 'system'),
+ '--runtime-arg',
+ '-Xnorelocate',
+ '--dex-file=' + dexfile,
+ '--oat-file=' + oatfile,
+ '--instruction-set=' + arch,
+ ]
+ append_dex2oat_verbose_flags(options, cmd)
+ if version in BOOT_IMAGE:
+ cmd += ['--boot-image=' + BOOT_IMAGE[version]]
+ env = {"LD_LIBRARY_PATH": os.path.join(base, 'lib')}
+ utils.PrintCmd(cmd)
+ with utils.ChangedWorkingDirectory(base):
+ subprocess.check_call(cmd, env=env)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
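
append_dex2oat_verbose_flags() above fans a single --verbose choice out into
repeated '--runtime-arg -verbose:<flag>' pairs, with 'all' expanding to every
concrete option. The expansion in isolation:

    VERBOSE_OPTIONS = [
        'verifier', 'compiler', 'gc', 'jit', 'jni', 'class', 'all'
    ]

    def append_verbose_flags(choice, cmd):
        verbose = [choice] if choice else []
        if 'all' in verbose:
            # 'all' is a meta-option: replace it with every concrete flag.
            verbose = [x for x in VERBOSE_OPTIONS if x != 'all']
        for flag in verbose:
            cmd += ['--runtime-arg', '-verbose:' + flag]
        return cmd

    print(append_verbose_flags('gc', ['dex2oat']))
    # ['dex2oat', '--runtime-arg', '-verbose:gc']
    print(append_verbose_flags('all', ['dex2oat']))
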
diff --git a/tools/dexfilemerger.py b/tools/dexfilemerger.py
index de4e6ae..c14a65b 100755
--- a/tools/dexfilemerger.py
+++ b/tools/dexfilemerger.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('dexfilemerger', sys.argv[1:]))
+ sys.exit(toolhelper.run('dexfilemerger', sys.argv[1:]))
diff --git a/tools/dexsegments.py b/tools/dexsegments.py
index 7b901de..737688b 100755
--- a/tools/dexsegments.py
+++ b/tools/dexsegments.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('dexsegments', sys.argv[1:]))
+ sys.exit(toolhelper.run('dexsegments', sys.argv[1:]))
diff --git a/tools/disasm.py b/tools/disasm.py
index e37ac4c..e341557 100755
--- a/tools/disasm.py
+++ b/tools/disasm.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('disasm', sys.argv[1:], debug=False))
+ sys.exit(toolhelper.run('disasm', sys.argv[1:], debug=False))
diff --git a/tools/download_from_x20.py b/tools/download_from_x20.py
index 5796823..b8eba4e 100755
--- a/tools/download_from_x20.py
+++ b/tools/download_from_x20.py
@@ -16,36 +16,40 @@
GMSCORE_DEPS = '/google/data/ro/teams/r8/deps'
+
def parse_options():
- return optparse.OptionParser().parse_args()
+ return optparse.OptionParser().parse_args()
+
def download(src, dest):
- print('Downloading %s to %s' % (src, dest))
- shutil.copyfile(src, dest)
- utils.unpack_archive(dest)
+ print('Downloading %s to %s' % (src, dest))
+ shutil.copyfile(src, dest)
+ utils.unpack_archive(dest)
+
def Main():
- (options, args) = parse_options()
- assert len(args) == 1
- sha1_file = args[0]
- dest = sha1_file[:-5]
- print('Ensuring %s' % dest)
- with open(sha1_file, 'r') as input_sha:
- sha1 = input_sha.readline()
- if os.path.exists(dest) and utils.get_sha1(dest) == sha1:
- print('sha1 matches, not downloading')
- dest_dir = utils.extract_dir(dest)
- if os.path.exists(dest_dir):
- print('destination directory exists, no extraction')
- else:
- utils.unpack_archive(dest)
- return
- src = os.path.join(GMSCORE_DEPS, sha1)
- if not os.path.exists(src):
- print('File (%s) does not exist on x20' % src)
- print('Maybe pass -Pno_internal to your gradle invocation')
- return 42
- download(src, dest)
+ (options, args) = parse_options()
+ assert len(args) == 1
+ sha1_file = args[0]
+ dest = sha1_file[:-5]
+ print('Ensuring %s' % dest)
+ with open(sha1_file, 'r') as input_sha:
+ sha1 = input_sha.readline()
+ if os.path.exists(dest) and utils.get_sha1(dest) == sha1:
+ print('sha1 matches, not downloading')
+ dest_dir = utils.extract_dir(dest)
+ if os.path.exists(dest_dir):
+ print('destination directory exists, no extraction')
+ else:
+ utils.unpack_archive(dest)
+ return
+ src = os.path.join(GMSCORE_DEPS, sha1)
+ if not os.path.exists(src):
+ print('File (%s) does not exist on x20' % src)
+ print('Maybe pass -Pno_internal to your gradle invocation')
+ return 42
+ download(src, dest)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
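
Main() above keys its cache on the .sha1 companion file: dest is the sha1 path
minus its suffix, and the copy is skipped when the on-disk checksum already
matches. A sketch of that check, assuming utils.get_sha1 is a plain hex SHA-1
of the file bytes:

    import hashlib
    import os

    def get_sha1(path):
        # Assumed equivalent of utils.get_sha1.
        h = hashlib.sha1()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(1 << 16), b''):
                h.update(chunk)
        return h.hexdigest()

    def ensure(sha1_file):
        dest = sha1_file[:-5]  # strip the trailing '.sha1'
        with open(sha1_file, 'r') as input_sha:
            sha1 = input_sha.readline().strip()
        if os.path.exists(dest) and get_sha1(dest) == sha1:
            print('sha1 matches, not downloading')
            return
        print('would download %s' % dest)
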
diff --git a/tools/download_kotlin_dev.py b/tools/download_kotlin_dev.py
index 3bcf9b7..aa73ba8 100755
--- a/tools/download_kotlin_dev.py
+++ b/tools/download_kotlin_dev.py
@@ -5,13 +5,13 @@
import utils
if utils.is_python3():
- from html.parser import HTMLParser
- import urllib.request
- url_request = urllib.request
+ from html.parser import HTMLParser
+ import urllib.request
+ url_request = urllib.request
else:
- from HTMLParser import HTMLParser
- import urllib
- url_request = urllib
+ from HTMLParser import HTMLParser
+ import urllib
+ url_request = urllib
import os
import sys
@@ -19,69 +19,75 @@
"kotlin/bootstrap/org/jetbrains/kotlin/"
KOTLIN_RELEASE_URL = JETBRAINS_KOTLIN_MAVEN_URL + "kotlin-compiler/"
+
def download_newest():
- response = url_request.urlopen(KOTLIN_RELEASE_URL)
- if response.getcode() != 200:
- raise Exception('Url: %s \n returned %s'
- % (KOTLIN_RELEASE_URL, response.getcode()))
- content = str(response.read())
- release_candidates = []
+ response = url_request.urlopen(KOTLIN_RELEASE_URL)
+ if response.getcode() != 200:
+ raise Exception('Url: %s \n returned %s' %
+ (KOTLIN_RELEASE_URL, response.getcode()))
+ content = str(response.read())
+ release_candidates = []
- class HTMLContentParser(HTMLParser):
- def handle_data(self, data):
- if ('-dev-' in data):
- release_candidates.append(data)
+ class HTMLContentParser(HTMLParser):
- parser = HTMLContentParser()
- parser.feed(content)
+ def handle_data(self, data):
+ if ('-dev-' in data):
+ release_candidates.append(data)
- top_most_version = (0, 0, 0, 0)
- top_most_version_and_build = None
+ parser = HTMLContentParser()
+ parser.feed(content)
- for version in release_candidates:
- # The compiler version is on the form <major>.<minor>.<revision>-dev-<build>/
- version = version.replace('/', '')
- version_build_args = version.split('-')
- version_components = version_build_args[0].split('.')
- version_components.append(version_build_args[2])
- current_version = tuple(map(int, version_components))
- if (current_version > top_most_version):
- top_most_version = current_version
- top_most_version_and_build = version
+ top_most_version = (0, 0, 0, 0)
+ top_most_version_and_build = None
- if (top_most_version_and_build is None):
- raise Exception('Url: %s \n returned %s'
- % (KOTLIN_RELEASE_URL, response.getcode()))
+ for version in release_candidates:
+            # The compiler version is of the form <major>.<minor>.<revision>-dev-<build>/
+ version = version.replace('/', '')
+ version_build_args = version.split('-')
+ version_components = version_build_args[0].split('.')
+ version_components.append(version_build_args[2])
+ current_version = tuple(map(int, version_components))
+ if (current_version > top_most_version):
+ top_most_version = current_version
+ top_most_version_and_build = version
- # We can now download all files related to the kotlin compiler version.
- print("Downloading version: " + top_most_version_and_build)
+ if (top_most_version_and_build is None):
+ raise Exception('Url: %s \n returned %s' %
+ (KOTLIN_RELEASE_URL, response.getcode()))
- kotlinc_lib = os.path.join(
- utils.THIRD_PARTY, "kotlin", "kotlin-compiler-dev", "kotlinc", "lib")
+ # We can now download all files related to the kotlin compiler version.
+ print("Downloading version: " + top_most_version_and_build)
- utils.DownloadFromGoogleCloudStorage(
- os.path.join(
- utils.THIRD_PARTY, "kotlin", "kotlin-compiler-dev.tar.gz.sha1"))
+ kotlinc_lib = os.path.join(utils.THIRD_PARTY, "kotlin",
+ "kotlin-compiler-dev", "kotlinc", "lib")
- download_and_save(
- JETBRAINS_KOTLIN_MAVEN_URL + "kotlin-compiler/{0}/kotlin-compiler-{0}.jar"
- .format(top_most_version_and_build), kotlinc_lib, "kotlin-compiler.jar")
- download_and_save(
- JETBRAINS_KOTLIN_MAVEN_URL + "kotlin-stdlib/{0}/kotlin-stdlib-{0}.jar"
- .format(top_most_version_and_build), kotlinc_lib, "kotlin-stdlib.jar")
- download_and_save(
- JETBRAINS_KOTLIN_MAVEN_URL + "kotlin-reflect/{0}/kotlin-reflect-{0}.jar"
- .format(top_most_version_and_build), kotlinc_lib, "kotlin-reflect.jar")
- download_and_save(
- JETBRAINS_KOTLIN_MAVEN_URL + "kotlin-script-runtime/{0}/kotlin-script-runtime-{0}.jar"
- .format(top_most_version_and_build), kotlinc_lib, "kotlin-script-runtime.jar")
+ utils.DownloadFromGoogleCloudStorage(
+ os.path.join(utils.THIRD_PARTY, "kotlin",
+ "kotlin-compiler-dev.tar.gz.sha1"))
+
+ download_and_save(
+ JETBRAINS_KOTLIN_MAVEN_URL +
+ "kotlin-compiler/{0}/kotlin-compiler-{0}.jar".format(
+ top_most_version_and_build), kotlinc_lib, "kotlin-compiler.jar")
+ download_and_save(
+ JETBRAINS_KOTLIN_MAVEN_URL +
+ "kotlin-stdlib/{0}/kotlin-stdlib-{0}.jar".format(
+ top_most_version_and_build), kotlinc_lib, "kotlin-stdlib.jar")
+ download_and_save(
+ JETBRAINS_KOTLIN_MAVEN_URL +
+ "kotlin-reflect/{0}/kotlin-reflect-{0}.jar".format(
+ top_most_version_and_build), kotlinc_lib, "kotlin-reflect.jar")
+ download_and_save(
+ JETBRAINS_KOTLIN_MAVEN_URL +
+ "kotlin-script-runtime/{0}/kotlin-script-runtime-{0}.jar".format(
+ top_most_version_and_build), kotlinc_lib,
+ "kotlin-script-runtime.jar")
def download_and_save(url, path, name):
- print('Downloading: ' + url)
- url_request.urlretrieve(url, os.path.join(path, name))
+ print('Downloading: ' + url)
+ url_request.urlretrieve(url, os.path.join(path, name))
if __name__ == '__main__':
- sys.exit(download_newest())
-
+ sys.exit(download_newest())
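
download_newest() above picks the newest '<major>.<minor>.<revision>-dev-<build>'
entry by comparing integer tuples, so a build number like 6976 beats 764 even
though it sorts lower as a string. The parsing step on hypothetical directory
names:

    candidates = ['1.8.20-dev-1451/', '1.9.0-dev-764/', '1.9.0-dev-6976/']

    top_most_version = (0, 0, 0, 0)
    top_most_version_and_build = None
    for version in candidates:
        version = version.replace('/', '')
        version_build_args = version.split('-')  # e.g. ['1.9.0', 'dev', '6976']
        version_components = version_build_args[0].split('.')
        version_components.append(version_build_args[2])  # add the build number
        current_version = tuple(map(int, version_components))
        if current_version > top_most_version:
            top_most_version = current_version
            top_most_version_and_build = version

    print(top_most_version_and_build)  # 1.9.0-dev-6976
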
diff --git a/tools/emulator_aosp.py b/tools/emulator_aosp.py
deleted file mode 100755
index d5f80dc..0000000
--- a/tools/emulator_aosp.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2017, the R8 project authors. Please see the AUTHORS file
-# for details. All rights reserved. Use of this source code is governed by a
-# BSD-style license that can be found in the LICENSE file.
-
-import argparse
-import sys
-
-import utils
-import utils_aosp
-
-def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'Checkout the AOSP source tree.')
- utils_aosp.add_common_arguments(parser)
- return parser.parse_args()
-
-def emulator_aosp(aosp_root, lunch):
- print "Running AOSP emulator in " + aosp_root
-
- utils_aosp.run_through_aosp_helper(lunch, ['emulator_fg',
- '-partition-size', '4096', '-wipe-data'], cwd = aosp_root)
-
-def Main():
- args = parse_arguments()
-
- emulator_aosp(args.aosp_root, args.lunch)
-
-if __name__ == '__main__':
- sys.exit(Main())
diff --git a/tools/extractmarker.py b/tools/extractmarker.py
index d8348dd..522be29 100755
--- a/tools/extractmarker.py
+++ b/tools/extractmarker.py
@@ -7,26 +7,32 @@
import sys
import toolhelper
+
def extractmarker(apk_or_dex, build=True):
- stdout = toolhelper.run('extractmarker', [apk_or_dex], build=build, return_stdout=True)
- return stdout
+ stdout = toolhelper.run('extractmarker', [apk_or_dex],
+ build=build,
+ return_stdout=True)
+ return stdout
+
def parse_options(argv):
- result = argparse.ArgumentParser(
- description='Relayout a given APK using a startup profile.')
- result.add_argument('--no-build',
- action='store_true',
- default=False,
- help='To disable building using gradle')
- options, args = result.parse_known_args(argv)
- return options, args
+ result = argparse.ArgumentParser(
+        description='Extract markers from dex/apk inputs.')
+ result.add_argument('--no-build',
+ action='store_true',
+ default=False,
+ help='To disable building using gradle')
+ options, args = result.parse_known_args(argv)
+ return options, args
+
def main(argv):
- options, args = parse_options(argv)
- build = not options.no_build
- for apk_or_dex in args:
- print(extractmarker(apk_or_dex, build=build))
- build = False
+ options, args = parse_options(argv)
+ build = not options.no_build
+ for apk_or_dex in args:
+ print(extractmarker(apk_or_dex, build=build))
+ build = False
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
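
main() in extractmarker.py above passes build=True only for the first input, so
gradle builds the tool once and later inputs reuse it. The same build-once loop
in miniature, with extract() standing in for toolhelper.run:

    def extract(apk_or_dex, build):
        # Stand-in for toolhelper.run('extractmarker', ...).
        return '%s (build=%s)' % (apk_or_dex, build)

    def process(paths, no_build=False):
        build = not no_build
        for apk_or_dex in paths:
            print(extract(apk_or_dex, build=build))
            build = False  # subsequent inputs reuse the first build

    process(['a.apk', 'b.dex'])
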
diff --git a/tools/find_haning_test.py b/tools/find_haning_test.py
deleted file mode 100755
index 3c65317..0000000
--- a/tools/find_haning_test.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2019, the R8 project authors. Please see the AUTHORS file
-# for details. All rights reserved. Use of this source code is governed by a
-# BSD-style license that can be found in the LICENSE file.
-
-import argparse
-import sys
-
-
-def ParseOptions():
- parser = argparse.ArgumentParser(
- description = 'Find tests started but not done from bot stdout.')
- return parser.parse_known_args()
-
-def get_started(stdout):
- # Lines look like:
- # Start executing test runBigInteger_ZERO_A01 [com.android.tools.r8.x.r8cf.math.BigInteger.ZERO.BigInteger_ZERO_A01]
- start_lines = []
- for line in stdout:
- if line.startswith('Start executing test'):
- split = line.split(' ')
- start_lines.append('%s %s' % (split[3], split[4].strip()))
- return start_lines
-
-def get_ended(stdout):
- # Lines look like:
- # Done executing test runBigInteger_subtract_A01 [com.android.tools.r8.x.r8cf.math.BigInteger.subtractLjava_math_BigInteger.BigInteger_subtract_A01] with result: SUCCESS
- done_lines = []
- for line in stdout:
- if line.startswith('Done executing test'):
- split = line.split(' ')
- done_lines.append('%s %s' % (split[3], split[4].strip()))
- return done_lines
-
-def Main():
- (options, args) = ParseOptions()
- if len(args) != 1:
- raise "fail"
-
- with open(args[0], 'r') as f:
- lines = f.readlines()
- started = get_started(lines)
- ended = get_ended(lines)
- for test in started:
- if not test in ended:
- print 'Test %s started but did not end' % test
-
-
-if __name__ == '__main__':
- sys.exit(Main())
diff --git a/tools/git_sync_cl_chain.py b/tools/git_sync_cl_chain.py
index 0fd65c6..f89bde6 100755
--- a/tools/git_sync_cl_chain.py
+++ b/tools/git_sync_cl_chain.py
@@ -32,165 +32,189 @@
REPO_ROOT = defines.REPO_ROOT
+
class Repo(object):
- def __init__(self, name, is_current, upstream):
- self.name = name
- self.is_current = is_current
- self.upstream = upstream
+
+ def __init__(self, name, is_current, upstream):
+ self.name = name
+ self.is_current = is_current
+ self.upstream = upstream
+
def ParseOptions(argv):
- result = optparse.OptionParser()
- result.add_option('--bypass-hooks',
- help='Bypass presubmit hooks',
- action='store_true')
- result.add_option('--delete', '-d',
- help='Delete closed branches',
- choices=['y', 'n', 'ask'],
- default='ask')
- result.add_option('--from_branch', '-f',
- help='Uppermost upstream to sync from',
- default='main')
- result.add_option('--leave_upstream', '--leave-upstream',
- help='To not update the upstream of the first open branch',
- action='store_true')
- result.add_option('--message', '-m',
- help='Message for patchset', default='Sync')
- result.add_option('--rebase',
- help='To use `git pull --rebase` instead of `git pull`',
- action='store_true')
- result.add_option('--no_upload', '--no-upload',
- help='Disable uploading to Gerrit', action='store_true')
- result.add_option('--skip_main', '--skip-main',
- help='Disable syncing for main',
- action='store_true')
- (options, args) = result.parse_args(argv)
- options.upload = not options.no_upload
- assert options.delete != 'y' or not options.leave_upstream, (
- 'Inconsistent options: cannot leave the upstream of the first open ' +
- 'branch (--leave_upstream) and delete the closed branches at the same ' +
- 'time (--delete).')
- assert options.message, 'A message for the patchset is required.'
- assert len(args) == 0
- return options
+ result = optparse.OptionParser()
+ result.add_option('--bypass-hooks',
+ help='Bypass presubmit hooks',
+ action='store_true')
+ result.add_option('--delete',
+ '-d',
+ help='Delete closed branches',
+ choices=['y', 'n', 'ask'],
+ default='ask')
+ result.add_option('--from_branch',
+ '-f',
+ help='Uppermost upstream to sync from',
+ default='main')
+ result.add_option(
+ '--leave_upstream',
+ '--leave-upstream',
+ help='To not update the upstream of the first open branch',
+ action='store_true')
+ result.add_option('--message',
+ '-m',
+ help='Message for patchset',
+ default='Sync')
+ result.add_option('--rebase',
+ help='To use `git pull --rebase` instead of `git pull`',
+ action='store_true')
+ result.add_option('--no_upload',
+ '--no-upload',
+ help='Disable uploading to Gerrit',
+ action='store_true')
+ result.add_option('--skip_main',
+ '--skip-main',
+ help='Disable syncing for main',
+ action='store_true')
+ (options, args) = result.parse_args(argv)
+ options.upload = not options.no_upload
+ assert options.delete != 'y' or not options.leave_upstream, (
+ 'Inconsistent options: cannot leave the upstream of the first open ' +
+ 'branch (--leave_upstream) and delete the closed branches at the same '
+ + 'time (--delete).')
+ assert options.message, 'A message for the patchset is required.'
+ assert len(args) == 0
+ return options
+
def main(argv):
- options = ParseOptions(argv)
- with utils.ChangedWorkingDirectory(REPO_ROOT, quiet=True):
- branches = [
- parse(line)
- for line in utils.RunCmd(['git', 'branch', '-vv'], quiet=True)]
+ options = ParseOptions(argv)
+ with utils.ChangedWorkingDirectory(REPO_ROOT, quiet=True):
+ branches = [
+ parse(line)
+ for line in utils.RunCmd(['git', 'branch', '-vv'], quiet=True)
+ ]
- current_branch = None
- for branch in branches:
- if branch.is_current:
- current_branch = branch
- break
- assert current_branch is not None
+ current_branch = None
+ for branch in branches:
+ if branch.is_current:
+ current_branch = branch
+ break
+ assert current_branch is not None
- if is_root_branch(current_branch, options):
- print('Nothing to sync')
- return
+ if is_root_branch(current_branch, options):
+ print('Nothing to sync')
+ return
- stack = []
- while current_branch:
- stack.append(current_branch)
- if is_root_branch(current_branch, options):
- break
- current_branch = get_branch_with_name(current_branch.upstream, branches)
+ stack = []
+ while current_branch:
+ stack.append(current_branch)
+ if is_root_branch(current_branch, options):
+ break
+ current_branch = get_branch_with_name(current_branch.upstream,
+ branches)
- closed_branches = []
- has_seen_local_branch = False # A branch that is not uploaded.
- has_seen_open_branch = False # A branch that is not closed.
- while len(stack) > 0:
- branch = stack.pop()
+ closed_branches = []
+ has_seen_local_branch = False # A branch that is not uploaded.
+ has_seen_open_branch = False # A branch that is not closed.
+ while len(stack) > 0:
+ branch = stack.pop()
- utils.RunCmd(['git', 'checkout', branch.name], quiet=True)
+ utils.RunCmd(['git', 'checkout', branch.name], quiet=True)
- status = get_status_for_current_branch(branch)
- print('Syncing %s (status: %s)' % (branch.name, status))
+ status = get_status_for_current_branch(branch)
+ print('Syncing %s (status: %s)' % (branch.name, status))
- pull_for_current_branch(branch, options)
+ pull_for_current_branch(branch, options)
- if branch.name == 'main':
- continue
+ if branch.name == 'main':
+ continue
- if status == 'closed':
- assert not has_seen_local_branch, (
- 'Unexpected closed branch %s after new branch' % branch.name)
- assert not has_seen_open_branch, (
- 'Unexpected closed branch %s after open branch' % branch.name)
- closed_branches.append(branch.name)
- continue
+ if status == 'closed':
+ assert not has_seen_local_branch, (
+ 'Unexpected closed branch %s after new branch' %
+ branch.name)
+ assert not has_seen_open_branch, (
+ 'Unexpected closed branch %s after open branch' %
+ branch.name)
+ closed_branches.append(branch.name)
+ continue
- if not options.leave_upstream:
- if not has_seen_open_branch and len(closed_branches) > 0:
- print(
- 'Setting upstream for first open branch %s to main'
- % branch.name)
- set_upstream_for_current_branch_to_main()
+ if not options.leave_upstream:
+ if not has_seen_open_branch and len(closed_branches) > 0:
+ print('Setting upstream for first open branch %s to main' %
+ branch.name)
+ set_upstream_for_current_branch_to_main()
- has_seen_open_branch = True
- has_seen_local_branch = has_seen_local_branch or (status == 'None')
+ has_seen_open_branch = True
+ has_seen_local_branch = has_seen_local_branch or (status == 'None')
- if options.upload and status != 'closed':
- if has_seen_local_branch:
- print(
- 'Cannot upload branch %s since it comes after a local branch'
- % branch.name)
- else:
- upload_cmd = ['git', 'cl', 'upload', '-m', options.message]
- if options.bypass_hooks:
- upload_cmd.append('--bypass-hooks')
- utils.RunCmd(upload_cmd, quiet=True)
+ if options.upload and status != 'closed':
+ if has_seen_local_branch:
+ print(
+ 'Cannot upload branch %s since it comes after a local branch'
+ % branch.name)
+ else:
+ upload_cmd = ['git', 'cl', 'upload', '-m', options.message]
+ if options.bypass_hooks:
+ upload_cmd.append('--bypass-hooks')
+ utils.RunCmd(upload_cmd, quiet=True)
- if get_delete_branches_option(closed_branches, options):
- delete_branches(closed_branches)
+ if get_delete_branches_option(closed_branches, options):
+ delete_branches(closed_branches)
- utils.RunCmd(['git', 'cl', 'issue'])
+ utils.RunCmd(['git', 'cl', 'issue'])
+
def delete_branches(branches):
- assert len(branches) > 0
- cmd = ['git', 'branch', '-D']
- cmd.extend(branches)
- utils.RunCmd(cmd, quiet=True)
+ assert len(branches) > 0
+ cmd = ['git', 'branch', '-D']
+ cmd.extend(branches)
+ utils.RunCmd(cmd, quiet=True)
+
def get_branch_with_name(name, branches):
- for branch in branches:
- if branch.name == name:
- return branch
- return None
+ for branch in branches:
+ if branch.name == name:
+ return branch
+ return None
+
def get_delete_branches_option(closed_branches, options):
- if len(closed_branches) == 0:
- return False
- if options.leave_upstream:
- return False
- if options.delete == 'y':
- return True
- if options.delete == 'n':
- return False
- assert options.delete == 'ask'
- print('Delete closed branches: %s (Y/N)?' % ", ".join(closed_branches))
- answer = sys.stdin.read(1)
- return answer.lower() == 'y'
+ if len(closed_branches) == 0:
+ return False
+ if options.leave_upstream:
+ return False
+ if options.delete == 'y':
+ return True
+ if options.delete == 'n':
+ return False
+ assert options.delete == 'ask'
+ print('Delete closed branches: %s (Y/N)?' % ", ".join(closed_branches))
+ answer = sys.stdin.read(1)
+ return answer.lower() == 'y'
+
def get_status_for_current_branch(current_branch):
- if current_branch.name == 'main':
- return 'main'
- return utils.RunCmd(['git', 'cl', 'status', '--field', 'status'], quiet=True)[0].strip()
+ if current_branch.name == 'main':
+ return 'main'
+ return utils.RunCmd(['git', 'cl', 'status', '--field', 'status'],
+ quiet=True)[0].strip()
+
def is_root_branch(branch, options):
- return branch.name == options.from_branch or branch.upstream is None
+ return branch.name == options.from_branch or branch.upstream is None
+
def pull_for_current_branch(branch, options):
- if branch.name == 'main' and options.skip_main:
- return
- rebase_args = ['--rebase'] if options.rebase else []
- utils.RunCmd(['git', 'pull'] + rebase_args, quiet=True)
+ if branch.name == 'main' and options.skip_main:
+ return
+ rebase_args = ['--rebase'] if options.rebase else []
+ utils.RunCmd(['git', 'pull'] + rebase_args, quiet=True)
def set_upstream_for_current_branch_to_main():
- utils.RunCmd(['git', 'cl', 'upstream', 'main'], quiet=True)
+ utils.RunCmd(['git', 'cl', 'upstream', 'main'], quiet=True)
+
# Parses a line from the output of `git branch -vv`.
#
@@ -203,29 +227,30 @@
# feature_prereq_a xxxxxxxxx [main: ...] ...
# main xxxxxxxxx [origin/main] ...
def parse(line):
- is_current = False
- if line.startswith('*'):
- is_current = True
- line = line[1:].lstrip()
- else:
- line = line.lstrip()
+ is_current = False
+ if line.startswith('*'):
+ is_current = True
+ line = line[1:].lstrip()
+ else:
+ line = line.lstrip()
- name_end_index = line.index(' ')
- name = line[:name_end_index]
- line = line[name_end_index:].lstrip()
+ name_end_index = line.index(' ')
+ name = line[:name_end_index]
+ line = line[name_end_index:].lstrip()
- if '[' in line:
- line = line[line.index('[')+1:]
+ if '[' in line:
+ line = line[line.index('[') + 1:]
- if ':' in line:
- upstream = line[:line.index(':')]
- return Repo(name, is_current, upstream)
+ if ':' in line:
+ upstream = line[:line.index(':')]
+ return Repo(name, is_current, upstream)
- if ']' in line:
- upstream = line[:line.index(']')]
- return Repo(name, is_current, upstream)
+ if ']' in line:
+ upstream = line[:line.index(']')]
+ return Repo(name, is_current, upstream)
- return Repo(name, is_current, None)
+ return Repo(name, is_current, None)
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
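
parse() above recognizes the three 'git branch -vv' shapes listed in its
comment: a tracking branch with ':', a plain '[upstream]', and no bracket at
all. Running a condensed copy of the same logic over those sample lines:

    class Repo(object):

        def __init__(self, name, is_current, upstream):
            self.name = name
            self.is_current = is_current
            self.upstream = upstream

    def parse(line):
        # Condensed version of the parse() above.
        is_current = line.startswith('*')
        line = (line[1:] if is_current else line).lstrip()
        name = line[:line.index(' ')]
        line = line[line.index(' '):].lstrip()
        if '[' in line:
            line = line[line.index('[') + 1:]
            if ':' in line:
                return Repo(name, is_current, line[:line.index(':')])
            if ']' in line:
                return Repo(name, is_current, line[:line.index(']')])
        return Repo(name, is_current, None)

    for sample in [
            '* feature_final xxxxxxxxx [feature_prereq_a: ...] ...',
            '  feature_prereq_a xxxxxxxxx [main: ...] ...',
            '  main xxxxxxxxx [origin/main] ...',
    ]:
        repo = parse(sample)
        print(repo.name, repo.is_current, repo.upstream)
    # feature_final True feature_prereq_a
    # feature_prereq_a False main
    # main False origin/main
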
diff --git a/tools/git_utils.py b/tools/git_utils.py
index 542947a..764039d 100644
--- a/tools/git_utils.py
+++ b/tools/git_utils.py
@@ -6,20 +6,23 @@
import utils
import subprocess
-def GitClone(url, checkout_dir):
- cmd = ['git', 'clone', url, checkout_dir]
- utils.PrintCmd(cmd)
- return subprocess.check_call(cmd)
-def GitCheckout(revision, checkout_dir):
- with utils.ChangedWorkingDirectory(checkout_dir):
- cmd = ['git', 'checkout', revision]
+def GitClone(url, checkout_dir):
+ cmd = ['git', 'clone', url, checkout_dir]
utils.PrintCmd(cmd)
return subprocess.check_call(cmd)
+
+def GitCheckout(revision, checkout_dir):
+ with utils.ChangedWorkingDirectory(checkout_dir):
+ cmd = ['git', 'checkout', revision]
+ utils.PrintCmd(cmd)
+ return subprocess.check_call(cmd)
+
+
def GetHeadRevision(checkout_dir, use_main=False):
- revision_from = 'origin/main' if use_main else 'HEAD'
- cmd = ['git', 'rev-parse', revision_from]
- utils.PrintCmd(cmd)
- with utils.ChangedWorkingDirectory(checkout_dir):
- return subprocess.check_output(cmd).strip().decode('utf-8')
+ revision_from = 'origin/main' if use_main else 'HEAD'
+ cmd = ['git', 'rev-parse', revision_from]
+ utils.PrintCmd(cmd)
+ with utils.ChangedWorkingDirectory(checkout_dir):
+ return subprocess.check_output(cmd).strip().decode('utf-8')
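
A hedged usage sketch tying the three helpers above together; the URL is a
placeholder, and this assumes git, network access, and running from the tools/
directory so the module imports:

    import tempfile

    import git_utils

    with tempfile.TemporaryDirectory() as tmp:
        checkout = tmp + '/checkout'
        git_utils.GitClone('https://example.com/some/repo.git', checkout)
        print(git_utils.GetHeadRevision(checkout))
        # Pin to a specific revision (placeholder):
        # git_utils.GitCheckout('<revision>', checkout)
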
diff --git a/tools/gmail_data.py b/tools/gmail_data.py
index 6cb0322..6170a66 100644
--- a/tools/gmail_data.py
+++ b/tools/gmail_data.py
@@ -16,29 +16,32 @@
ANDROID_JAR = utils.get_android_jar(25)
VERSIONS = {
- '180826.15': {
- 'dex' : {
- 'flags': '--no-desugaring',
- 'inputs': [os.path.join(V180826_15_BASE, 'Gmail_release_unsigned.apk')],
- 'main-dex-list': os.path.join(V180826_15_BASE, 'main_dex_list.txt') ,
- 'pgmap': '%s_proguard.map' % V180826_15_PREFIX,
- 'libraries' : [ANDROID_JAR],
+ '180826.15': {
+ 'dex': {
+ 'flags': '--no-desugaring',
+ 'inputs': [
+ os.path.join(V180826_15_BASE, 'Gmail_release_unsigned.apk')
+ ],
+ 'main-dex-list': os.path.join(V180826_15_BASE, 'main_dex_list.txt'),
+ 'pgmap': '%s_proguard.map' % V180826_15_PREFIX,
+ 'libraries': [ANDROID_JAR],
+ },
+ 'deploy': {
+ 'flags': '--no-desugaring',
+ 'inputs': ['%s_deploy.jar' % V180826_15_PREFIX],
+ 'pgconf': [
+ '%s_proguard.config' % V180826_15_PREFIX,
+ '%s/proguardsettings/Gmail_proguard.config' % utils.THIRD_PARTY,
+ utils.IGNORE_WARNINGS_RULES
+ ],
+ 'min-api': ANDROID_L_API,
+ 'allow-type-errors': 1,
+ },
+ 'proguarded': {
+ 'flags': '--no-desugaring',
+ 'inputs': ['%s_proguard.jar' % V180826_15_PREFIX],
+ 'main-dex-list': os.path.join(V180826_15_BASE, 'main_dex_list.txt'),
+ 'pgmap': '%s_proguard.map' % V180826_15_PREFIX,
+ }
},
- 'deploy' : {
- 'flags': '--no-desugaring',
- 'inputs': ['%s_deploy.jar' % V180826_15_PREFIX],
- 'pgconf': [
- '%s_proguard.config' % V180826_15_PREFIX,
- '%s/proguardsettings/Gmail_proguard.config' % utils.THIRD_PARTY,
- utils.IGNORE_WARNINGS_RULES],
- 'min-api' : ANDROID_L_API,
- 'allow-type-errors' : 1,
- },
- 'proguarded' : {
- 'flags': '--no-desugaring',
- 'inputs': ['%s_proguard.jar' % V180826_15_PREFIX],
- 'main-dex-list': os.path.join(V180826_15_BASE, 'main_dex_list.txt') ,
- 'pgmap': '%s_proguard.map' % V180826_15_PREFIX,
- }
- },
}
diff --git a/tools/gmaven.py b/tools/gmaven.py
index 1ad96c0..ce52e48 100644
--- a/tools/gmaven.py
+++ b/tools/gmaven.py
@@ -7,53 +7,55 @@
import subprocess
GMAVEN_PUBLISHER = '/google/bin/releases/android-devtools/gmaven/publisher/gmaven-publisher'
-GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN = re.compile('Release ID = ([0-9a-f\-]+)')
+GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN = re.compile(
+    r'Release ID = ([0-9a-f\-]+)')
-def publisher_stage(gfiles, dry_run = False):
- if dry_run:
- print('Dry-run, would have staged %s' % gfiles)
- return 'dry-run-release-id'
+def publisher_stage(gfiles, dry_run=False):
+ if dry_run:
+ print('Dry-run, would have staged %s' % gfiles)
+ return 'dry-run-release-id'
- print("Staging: %s" % ', '.join(gfiles))
- print("")
+ print("Staging: %s" % ', '.join(gfiles))
+ print("")
- cmd = [GMAVEN_PUBLISHER, 'stage', '--gfile', ','.join(gfiles)]
- output = subprocess.check_output(cmd)
+ cmd = [GMAVEN_PUBLISHER, 'stage', '--gfile', ','.join(gfiles)]
+ output = subprocess.check_output(cmd)
- # Expect output to contain:
- # [INFO] 06/19/2020 09:35:12 CEST: >>>>>>>>>> Staged
- # [INFO] 06/19/2020 09:35:12 CEST: Release ID = 9171d015-18f6-4a90-9984-1c362589dc1b
- # [INFO] 06/19/2020 09:35:12 CEST: Stage Path = /bigstore/studio_staging/maven2/sgjesse/9171d015-18f6-4a90-9984-1c362589dc1b
+ # Expect output to contain:
+ # [INFO] 06/19/2020 09:35:12 CEST: >>>>>>>>>> Staged
+ # [INFO] 06/19/2020 09:35:12 CEST: Release ID = 9171d015-18f6-4a90-9984-1c362589dc1b
+ # [INFO] 06/19/2020 09:35:12 CEST: Stage Path = /bigstore/studio_staging/maven2/sgjesse/9171d015-18f6-4a90-9984-1c362589dc1b
- matches = GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN.findall(output.decode("utf-8"))
- if matches == None or len(matches) > 1:
- print("Could not determine the release ID from the gmaven_publisher " +
- "output. Expected a line with 'Release ID = <release id>'.")
- print("Output was:")
+ matches = GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN.findall(
+ output.decode("utf-8"))
+    if not matches or len(matches) > 1:
+ print("Could not determine the release ID from the gmaven_publisher " +
+ "output. Expected a line with 'Release ID = <release id>'.")
+ print("Output was:")
+ print(output)
+ sys.exit(1)
+
print(output)
- sys.exit(1)
- print(output)
-
- release_id = matches[0]
- return release_id
+ release_id = matches[0]
+ return release_id
def publisher_stage_redir_test_info(release_id, artifact, dst):
- redir_command = ("/google/data/ro/teams/android-devtools-infra/tools/redir "
- + "--alsologtostderr "
- + "--gcs_bucket_path=/bigstore/gmaven-staging/${USER}/%s "
- + "--port=1480") % release_id
+ redir_command = ("/google/data/ro/teams/android-devtools-infra/tools/redir "
+ + "--alsologtostderr " +
+ "--gcs_bucket_path=/bigstore/gmaven-staging/${USER}/%s " +
+ "--port=1480") % release_id
- get_command = ("mvn org.apache.maven.plugins:maven-dependency-plugin:2.4:get "
- + "-Dmaven.repo.local=/tmp/maven_repo_local "
- + "-DremoteRepositories=http://localhost:1480 "
- + "-Dartifact=%s "
- + "-Ddest=%s") % (artifact, dst)
+ get_command = (
+ "mvn org.apache.maven.plugins:maven-dependency-plugin:2.4:get " +
+ "-Dmaven.repo.local=/tmp/maven_repo_local " +
+ "-DremoteRepositories=http://localhost:1480 " + "-Dartifact=%s " +
+ "-Ddest=%s") % (artifact, dst)
- print("""To test the staged content with 'redir' run:
+ print("""To test the staged content with 'redir' run:
%s
@@ -84,10 +86,10 @@
""" % (redir_command, artifact, get_command))
-def publisher_publish(release_id, dry_run = False):
- if dry_run:
- print('Dry-run, would have published %s' % release_id)
- return
+def publisher_publish(release_id, dry_run=False):
+ if dry_run:
+ print('Dry-run, would have published %s' % release_id)
+ return
- cmd = [GMAVEN_PUBLISHER, 'publish', release_id]
- output = subprocess.check_output(cmd)
+ cmd = [GMAVEN_PUBLISHER, 'publish', release_id]
+ output = subprocess.check_output(cmd)
diff --git a/tools/golem_build.py b/tools/golem_build.py
index 4f668d6..ae09558 100755
--- a/tools/golem_build.py
+++ b/tools/golem_build.py
@@ -13,15 +13,18 @@
GRADLE_ARGS = ['--no-daemon', '-Pno_internal']
+
def lower(items):
- return [ item.lower() for item in items ]
+ return [item.lower() for item in items]
+
def Main():
- targets = set()
- targets.update(lower(run_benchmark.GOLEM_BUILD_TARGETS))
- targets.update(lower(run_on_app_dump.GOLEM_BUILD_TARGETS))
- cmd = GRADLE_ARGS + [target for target in targets]
- gradle.RunGradle(cmd)
+ targets = set()
+ targets.update(lower(run_benchmark.GOLEM_BUILD_TARGETS))
+ targets.update(lower(run_on_app_dump.GOLEM_BUILD_TARGETS))
+ cmd = GRADLE_ARGS + [target for target in targets]
+ gradle.RunGradle(cmd)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/google-java-format-diff.py b/tools/google-java-format-diff.py
index c9c3dc3..c3d89ca 100755
--- a/tools/google-java-format-diff.py
+++ b/tools/google-java-format-diff.py
@@ -8,7 +8,6 @@
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
-
"""
google-java-format Diff Reformatter
============================
@@ -36,122 +35,145 @@
import sys
from shutil import which
+
def main():
- parser = argparse.ArgumentParser(description=
- 'Reformat changed lines in diff. Without -i '
- 'option just output the diff that would be '
- 'introduced.')
- parser.add_argument('-i', action='store_true', default=False,
- help='apply edits to files instead of displaying a diff')
+ parser = argparse.ArgumentParser(
+ description='Reformat changed lines in diff. Without -i '
+ 'option just output the diff that would be '
+ 'introduced.')
+ parser.add_argument(
+ '-i',
+ action='store_true',
+ default=False,
+ help='apply edits to files instead of displaying a diff')
- parser.add_argument('-p', metavar='NUM', default=0,
- help='strip the smallest prefix containing P slashes')
- parser.add_argument('-regex', metavar='PATTERN', default=None,
- help='custom pattern selecting file paths to reformat '
- '(case sensitive, overrides -iregex)')
- parser.add_argument('-iregex', metavar='PATTERN', default=r'.*\.java',
- help='custom pattern selecting file paths to reformat '
- '(case insensitive, overridden by -regex)')
- parser.add_argument('-v', '--verbose', action='store_true',
- help='be more verbose, ineffective without -i')
- parser.add_argument('-a', '--aosp', action='store_true',
- help='use AOSP style instead of Google Style (4-space indentation)')
- parser.add_argument('--skip-sorting-imports', action='store_true',
- help='do not fix the import order')
- parser.add_argument('--skip-removing-unused-imports', action='store_true',
- help='do not remove ununsed imports')
- parser.add_argument(
- '--skip-javadoc-formatting',
- action='store_true',
- default=False,
- help='do not reformat javadoc')
- parser.add_argument('-b', '--binary', help='path to google-java-format binary')
- parser.add_argument('--google-java-format-jar', metavar='ABSOLUTE_PATH', default=None,
- help='use a custom google-java-format jar')
+ parser.add_argument('-p',
+ metavar='NUM',
+ default=0,
+ help='strip the smallest prefix containing P slashes')
+ parser.add_argument('-regex',
+ metavar='PATTERN',
+ default=None,
+ help='custom pattern selecting file paths to reformat '
+ '(case sensitive, overrides -iregex)')
+ parser.add_argument('-iregex',
+ metavar='PATTERN',
+ default=r'.*\.java',
+ help='custom pattern selecting file paths to reformat '
+ '(case insensitive, overridden by -regex)')
+ parser.add_argument('-v',
+ '--verbose',
+ action='store_true',
+ help='be more verbose, ineffective without -i')
+ parser.add_argument(
+ '-a',
+ '--aosp',
+ action='store_true',
+ help='use AOSP style instead of Google Style (4-space indentation)')
+ parser.add_argument('--skip-sorting-imports',
+ action='store_true',
+ help='do not fix the import order')
+ parser.add_argument('--skip-removing-unused-imports',
+ action='store_true',
+                        help='do not remove unused imports')
+ parser.add_argument('--skip-javadoc-formatting',
+ action='store_true',
+ default=False,
+ help='do not reformat javadoc')
+ parser.add_argument('-b',
+ '--binary',
+ help='path to google-java-format binary')
+ parser.add_argument('--google-java-format-jar',
+ metavar='ABSOLUTE_PATH',
+ default=None,
+ help='use a custom google-java-format jar')
- args = parser.parse_args()
+ args = parser.parse_args()
- # Extract changed lines for each file.
- filename = None
- lines_by_file = {}
+ # Extract changed lines for each file.
+ filename = None
+ lines_by_file = {}
- for line in sys.stdin:
- match = re.search('^\+\+\+\ (.*?/){%s}(\S*)' % args.p, line)
- if match:
- filename = match.group(2)
- if filename == None:
- continue
+ for line in sys.stdin:
+        match = re.search(r'^\+\+\+\ (.*?/){%s}(\S*)' % args.p, line)
+ if match:
+ filename = match.group(2)
+        if filename is None:
+ continue
- if args.regex is not None:
- if not re.match('^%s$' % args.regex, filename):
- continue
+ if args.regex is not None:
+ if not re.match('^%s$' % args.regex, filename):
+ continue
+ else:
+ if not re.match('^%s$' % args.iregex, filename, re.IGNORECASE):
+ continue
+
+        match = re.search(r'^@@.*\+(\d+)(,(\d+))?', line)
+ if match:
+ start_line = int(match.group(1))
+ line_count = 1
+ if match.group(3):
+ line_count = int(match.group(3))
+ if line_count == 0:
+ continue
+ end_line = start_line + line_count - 1
+ lines_by_file.setdefault(filename, []).extend(
+ ['-lines', str(start_line) + ':' + str(end_line)])
+
+ if args.binary:
+ base_command = [args.binary]
+ elif args.google_java_format_jar:
+ base_command = [
+ os.path.join('third_party', 'openjdk', 'jdk-17', 'linux', 'bin',
+ 'java'), '-jar',
+ '--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
+ '--add-opens=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
+ '--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
+ '--add-opens=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED',
+ '--add-opens=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
+ '--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
+ args.google_java_format_jar
+ ]
else:
- if not re.match('^%s$' % args.iregex, filename, re.IGNORECASE):
- continue
+ binary = which('google-java-format') or '/usr/bin/google-java-format'
+ base_command = [binary]
- match = re.search('^@@.*\+(\d+)(,(\d+))?', line)
- if match:
- start_line = int(match.group(1))
- line_count = 1
- if match.group(3):
- line_count = int(match.group(3))
- if line_count == 0:
- continue
- end_line = start_line + line_count - 1;
- lines_by_file.setdefault(filename, []).extend(
- ['-lines', str(start_line) + ':' + str(end_line)])
+ # Reformat files containing changes in place.
+ for filename, lines in lines_by_file.items():
+ if args.i and args.verbose:
+ print('Formatting', filename)
+ command = base_command[:]
+ if args.i:
+ command.append('-i')
+ if args.aosp:
+ command.append('--aosp')
+ if args.skip_sorting_imports:
+ command.append('--skip-sorting-imports')
+ if args.skip_removing_unused_imports:
+ command.append('--skip-removing-unused-imports')
+ if args.skip_javadoc_formatting:
+ command.append('--skip-javadoc-formatting')
+ command.extend(lines)
+ command.append(filename)
+ p = subprocess.Popen(command,
+ stdout=subprocess.PIPE,
+ stderr=None,
+ stdin=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ if p.returncode != 0:
+ sys.exit(p.returncode)
- if args.binary:
- base_command = [args.binary]
- elif args.google_java_format_jar:
- base_command = [
- os.path.join(
- 'third_party', 'openjdk', 'jdk-17', 'linux', 'bin', 'java'),
- '-jar',
- '--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
- '--add-opens=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
- '--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
- '--add-opens=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED',
- '--add-opens=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
- '--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
- args.google_java_format_jar]
- else:
- binary = which('google-java-format') or '/usr/bin/google-java-format'
- base_command = [binary]
+ if not args.i:
+ with open(filename) as f:
+ code = f.readlines()
+ formatted_code = io.StringIO(stdout.decode('utf-8')).readlines()
+ diff = difflib.unified_diff(code, formatted_code, filename,
+ filename, '(before formatting)',
+ '(after formatting)')
+ diff_string = ''.join(diff)
+ if len(diff_string) > 0:
+ sys.stdout.write(diff_string)
- # Reformat files containing changes in place.
- for filename, lines in lines_by_file.items():
- if args.i and args.verbose:
- print('Formatting', filename)
- command = base_command[:]
- if args.i:
- command.append('-i')
- if args.aosp:
- command.append('--aosp')
- if args.skip_sorting_imports:
- command.append('--skip-sorting-imports')
- if args.skip_removing_unused_imports:
- command.append('--skip-removing-unused-imports')
- if args.skip_javadoc_formatting:
- command.append('--skip-javadoc-formatting')
- command.extend(lines)
- command.append(filename)
- p = subprocess.Popen(command, stdout=subprocess.PIPE,
- stderr=None, stdin=subprocess.PIPE)
- stdout, stderr = p.communicate()
- if p.returncode != 0:
- sys.exit(p.returncode);
-
- if not args.i:
- with open(filename) as f:
- code = f.readlines()
- formatted_code = io.StringIO(stdout.decode('utf-8')).readlines()
- diff = difflib.unified_diff(code, formatted_code,
- filename, filename,
- '(before formatting)', '(after formatting)')
- diff_string = ''.join(diff)
- if len(diff_string) > 0:
- sys.stdout.write(diff_string)
if __name__ == '__main__':
- main()
+ main()
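
The hunk loop above derives the '-lines start:end' arguments passed to
google-java-format from unified-diff headers. That step in isolation, with the
filename hardcoded where the real script takes it from the preceding '+++'
line:

    import re

    diff_lines = [
        '@@ -10,4 +12,6 @@ class Example {',
        '@@ -40,0 +50 @@ class Example {',
    ]

    lines_by_file = {}
    filename = 'src/Example.java'  # illustrative; parsed from '+++' normally
    for line in diff_lines:
        match = re.search(r'^@@.*\+(\d+)(,(\d+))?', line)
        if match:
            start_line = int(match.group(1))
            line_count = int(match.group(3)) if match.group(3) else 1
            if line_count == 0:
                continue  # a hunk that adds no new lines
            end_line = start_line + line_count - 1
            lines_by_file.setdefault(filename, []).extend(
                ['-lines', str(start_line) + ':' + str(end_line)])

    print(lines_by_file)
    # {'src/Example.java': ['-lines', '12:17', '-lines', '50:50']}
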
diff --git a/tools/gradle.py b/tools/gradle.py
index 8ce123f..76039f9 100755
--- a/tools/gradle.py
+++ b/tools/gradle.py
@@ -19,116 +19,134 @@
GRADLE8_SHA1 = os.path.join(GRADLE_DIR, 'gradle-8.3.tar.gz.sha1')
GRADLE8_TGZ = os.path.join(GRADLE_DIR, 'gradle-8.3.tar.gz')
+
def get_gradle():
- gradle_dir = 'gradle-8.3'
- if utils.IsWindows():
- return os.path.join(GRADLE_DIR, gradle_dir, 'bin', 'gradle.bat')
- else:
- return os.path.join(GRADLE_DIR, gradle_dir, 'bin', 'gradle')
+ gradle_dir = 'gradle-8.3'
+ if utils.IsWindows():
+ return os.path.join(GRADLE_DIR, gradle_dir, 'bin', 'gradle.bat')
+ else:
+ return os.path.join(GRADLE_DIR, gradle_dir, 'bin', 'gradle')
+
def ParseOptions():
- parser = argparse.ArgumentParser(description = 'Call gradle.')
- parser.add_argument('--exclude-deps', '--exclude_deps',
- help='Build without internalized dependencies.',
- default=False, action='store_true')
- parser.add_argument('--no-internal', '--no_internal',
- help='Do not build with support for Google internal tests.',
- default=False, action='store_true')
- parser.add_argument('--java-home', '--java_home',
- help='Use a custom java version to run gradle.')
- parser.add_argument('--worktree',
- help='Gradle is running in a worktree and may lock up '
- 'the gradle caches.',
- action='store_true',
- default=False)
- return parser.parse_known_args()
+ parser = argparse.ArgumentParser(description='Call gradle.')
+ parser.add_argument('--exclude-deps',
+ '--exclude_deps',
+ help='Build without internalized dependencies.',
+ default=False,
+ action='store_true')
+ parser.add_argument(
+ '--no-internal',
+ '--no_internal',
+ help='Do not build with support for Google internal tests.',
+ default=False,
+ action='store_true')
+ parser.add_argument('--java-home',
+ '--java_home',
+ help='Use a custom java version to run gradle.')
+ parser.add_argument('--worktree',
+ help='Gradle is running in a worktree and may lock up '
+ 'the gradle caches.',
+ action='store_true',
+ default=False)
+ return parser.parse_known_args()
+
def GetJavaEnv(env):
- java_env = dict(env if env else os.environ, JAVA_HOME = jdk.GetJdkHome())
- java_env['PATH'] = java_env['PATH'] + os.pathsep + os.path.join(jdk.GetJdkHome(), 'bin')
- java_env['GRADLE_OPTS'] = '-Xmx1g'
- return java_env
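+ # Pin JAVA_HOME to the checked-in JDK and append its bin directory to
+ # PATH so gradle resolves the pinned JDK; GRADLE_OPTS caps the gradle
+ # JVM heap at 1g.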
+ java_env = dict(env if env else os.environ, JAVA_HOME=jdk.GetJdkHome())
+ java_env['PATH'] = java_env['PATH'] + os.pathsep + os.path.join(
+ jdk.GetJdkHome(), 'bin')
+ java_env['GRADLE_OPTS'] = '-Xmx1g'
+ return java_env
+
def PrintCmd(s):
- if type(s) is list:
- s = ' '.join(s)
- print('Running: %s' % s)
- # I know this will hit os on windows eventually if we don't do this.
- sys.stdout.flush()
+ if type(s) is list:
+ s = ' '.join(s)
+ print('Running: %s' % s)
+ # I know this will hit us on Windows eventually if we don't do this.
+ sys.stdout.flush()
+
def EnsureGradle():
- utils.EnsureDepFromGoogleCloudStorage(
- get_gradle(), GRADLE8_TGZ, GRADLE8_SHA1, 'Gradle binary')
+ utils.EnsureDepFromGoogleCloudStorage(get_gradle(), GRADLE8_TGZ,
+ GRADLE8_SHA1, 'Gradle binary')
+
def EnsureJdk():
- # Gradle in the new setup will use the jdks in the evaluation - fetch
- # all beforehand.
- for root in jdk.GetAllJdkDirs():
- jdkTgz = root + '.tar.gz'
- jdkSha1 = jdkTgz + '.sha1'
- utils.EnsureDepFromGoogleCloudStorage(root, jdkTgz, jdkSha1, root)
+ # Gradle in the new setup will use the jdks in the evaluation - fetch
+ # all beforehand.
+ for root in jdk.GetAllJdkDirs():
+ jdkTgz = root + '.tar.gz'
+ jdkSha1 = jdkTgz + '.sha1'
+ utils.EnsureDepFromGoogleCloudStorage(root, jdkTgz, jdkSha1, root)
+
def EnsureDeps():
- EnsureGradle()
- EnsureJdk()
+ EnsureGradle()
+ EnsureJdk()
+
def RunGradleIn(gradleCmd, args, cwd, throw_on_failure=True, env=None):
- EnsureDeps()
- cmd = [gradleCmd]
- args.extend(['--offline', '-c=d8_r8/settings.gradle.kts'])
- cmd.extend(args)
- utils.PrintCmd(cmd)
- with utils.ChangedWorkingDirectory(cwd):
- return_value = subprocess.call(cmd, env=GetJavaEnv(env))
- if throw_on_failure and return_value != 0:
- raise Exception('Failed to execute gradle')
- return return_value
+ EnsureDeps()
+ cmd = [gradleCmd]
+ args.extend(['--offline', '-c=d8_r8/settings.gradle.kts'])
+ cmd.extend(args)
+ utils.PrintCmd(cmd)
+ with utils.ChangedWorkingDirectory(cwd):
+ return_value = subprocess.call(cmd, env=GetJavaEnv(env))
+ if throw_on_failure and return_value != 0:
+ raise Exception('Failed to execute gradle')
+ return return_value
+
def RunGradleWrapperIn(args, cwd, throw_on_failure=True, env=None):
- return RunGradleIn('./gradlew', args, cwd, throw_on_failure, env=env)
+ return RunGradleIn('./gradlew', args, cwd, throw_on_failure, env=env)
+
def RunGradle(args, throw_on_failure=True, env=None):
- return RunGradleIn(
- get_gradle(),
- args,
- utils.REPO_ROOT,
- throw_on_failure,
- env=env)
+ return RunGradleIn(get_gradle(),
+ args,
+ utils.REPO_ROOT,
+ throw_on_failure,
+ env=env)
+
def RunGradleExcludeDeps(args, throw_on_failure=True, env=None):
- EnsureDeps()
- args.append('-Pexclude_deps')
- return RunGradle(args, throw_on_failure, env=env)
+ EnsureDeps()
+ args.append('-Pexclude_deps')
+ return RunGradle(args, throw_on_failure, env=env)
+
def RunGradleInGetOutput(gradleCmd, args, cwd, env=None):
- EnsureDeps()
- cmd = [gradleCmd]
- cmd.extend(args)
- utils.PrintCmd(cmd)
- with utils.ChangedWorkingDirectory(cwd):
- return subprocess.check_output(cmd, env=GetJavaEnv(env)).decode('utf-8')
+ EnsureDeps()
+ cmd = [gradleCmd]
+ cmd.extend(args)
+ utils.PrintCmd(cmd)
+ with utils.ChangedWorkingDirectory(cwd):
+ return subprocess.check_output(cmd, env=GetJavaEnv(env)).decode('utf-8')
+
def RunGradleWrapperInGetOutput(args, cwd, env=None):
- return RunGradleInGetOutput('./gradlew', args, cwd, env=env)
+ return RunGradleInGetOutput('./gradlew', args, cwd, env=env)
+
def RunGradleGetOutput(args, env=None):
- return RunGradleInGetOutput(
- get_gradle(),
- args,
- utils.REPO_ROOT,
- env=env)
+ return RunGradleInGetOutput(get_gradle(), args, utils.REPO_ROOT, env=env)
+
def Main():
- (options, args) = ParseOptions()
- if options.java_home:
- args.append('-Dorg.gradle.java.home=' + options.java_home)
- if options.no_internal:
- args.append('-Pno_internal')
- if options.exclude_deps:
- args.append('-Pexclude_deps')
- if options.worktree:
- args.append('-g=' + os.path.join(utils.REPO_ROOT, ".gradle_user_home"))
- return RunGradle(args)
+ (options, args) = ParseOptions()
+ if options.java_home:
+ args.append('-Dorg.gradle.java.home=' + options.java_home)
+ if options.no_internal:
+ args.append('-Pno_internal')
+ if options.exclude_deps:
+ args.append('-Pexclude_deps')
+ if options.worktree:
+ args.append('-g=' + os.path.join(utils.REPO_ROOT, ".gradle_user_home"))
+ return RunGradle(args)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/historic_memory_usage.py b/tools/historic_memory_usage.py
index 3ef2268..a40cf2a 100755
--- a/tools/historic_memory_usage.py
+++ b/tools/historic_memory_usage.py
@@ -20,60 +20,64 @@
APPS = ['gmscore', 'nest', 'youtube', 'gmail', 'chrome']
COMPILERS = ['d8', 'r8']
+
def ParseOptions(argv):
- result = optparse.OptionParser()
- result.add_option('--compiler',
- help='The compiler to use',
- default='d8',
- choices=COMPILERS)
- result.add_option('--app',
- help='What app to run on',
- default='gmail',
- choices=APPS)
- result.add_option('--top',
- default=historic_run.top_or_default(),
- help='The most recent commit to test')
- result.add_option('--bottom',
- help='The oldest commit to test')
- result.add_option('--output',
- default='build',
- help='Directory where to output results')
- result.add_option('--timeout',
- type=int,
- default=0,
- help='Set timeout instead of waiting for OOM.')
- return result.parse_args(argv)
+ result = optparse.OptionParser()
+ result.add_option('--compiler',
+ help='The compiler to use',
+ default='d8',
+ choices=COMPILERS)
+ result.add_option('--app',
+ help='What app to run on',
+ default='gmail',
+ choices=APPS)
+ result.add_option('--top',
+ default=historic_run.top_or_default(),
+ help='The most recent commit to test')
+ result.add_option('--bottom', help='The oldest commit to test')
+ result.add_option('--output',
+ default='build',
+ help='Directory where to output results')
+ result.add_option('--timeout',
+ type=int,
+ default=0,
+ help='Set timeout instead of waiting for OOM.')
+ return result.parse_args(argv)
+
def make_run_on_app_command(options):
- return lambda commit: run_on_app(options, commit)
+ return lambda commit: run_on_app(options, commit)
+
def run_on_app(options, commit):
- app = options.app
- compiler = options.compiler
- cmd = ['tools/run_on_app.py',
- '--app', app,
- '--compiler', compiler,
- '--timeout', str(options.timeout),
- '--no-build', '--find-min-xmx']
- stdout = subprocess.check_output(cmd)
- output_path = options.output or 'build'
- time_commit = '%s_%s' % (commit.timestamp, commit.git_hash)
- time_commit_path = os.path.join(output_path, time_commit)
- if not os.path.exists(time_commit_path):
- os.makedirs(time_commit_path)
- stdout_path = os.path.join(time_commit_path, 'stdout')
- with open(stdout_path, 'w') as f:
- f.write(stdout)
- print('Wrote stdout to: %s' % stdout_path)
+ app = options.app
+ compiler = options.compiler
+ cmd = [
+ 'tools/run_on_app.py', '--app', app, '--compiler', compiler,
+ '--timeout',
+ str(options.timeout), '--no-build', '--find-min-xmx'
+ ]
+ stdout = subprocess.check_output(cmd)
+ output_path = options.output or 'build'
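+ # One output directory per commit, named <timestamp>_<githash>, so the
+ # memory numbers can be correlated with history afterwards.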
+ time_commit = '%s_%s' % (commit.timestamp, commit.git_hash)
+ time_commit_path = os.path.join(output_path, time_commit)
+ if not os.path.exists(time_commit_path):
+ os.makedirs(time_commit_path)
+ stdout_path = os.path.join(time_commit_path, 'stdout')
+ with open(stdout_path, 'w') as f:
+ f.write(stdout.decode('utf-8'))  # check_output returns bytes
+ print('Wrote stdout to: %s' % stdout_path)
+
def main(argv):
- (options, args) = ParseOptions(argv)
- if not options.app:
- raise Exception('Please specify an app')
- top = historic_run.top_or_default(options.top)
- bottom = historic_run.bottom_or_default(options.bottom)
- command = make_run_on_app_command(options)
- historic_run.run(command, top, bottom)
+ (options, args) = ParseOptions(argv)
+ if not options.app:
+ raise Exception('Please specify an app')
+ top = historic_run.top_or_default(options.top)
+ bottom = historic_run.bottom_or_default(options.bottom)
+ command = make_run_on_app_command(options)
+ historic_run.run(command, top, bottom)
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/historic_run.py b/tools/historic_run.py
index 75819bb..9a035b0 100755
--- a/tools/historic_run.py
+++ b/tools/historic_run.py
@@ -17,171 +17,183 @@
MASTER_COMMITS = 'gs://r8-releases/raw/master'
+
def ParseOptions(argv):
- result = optparse.OptionParser()
- result.add_option(
- '--cmd',
- help='Command to run')
- result.add_option(
- '--top',
- default=top_or_default(),
- help='The most recent commit to test')
- result.add_option(
- '--bottom',
- help='The oldest commit to test')
- result.add_option(
- '--dry-run',
- help='Do not download or run the command, but print the actions',
- default=False,
- action='store_true')
- result.add_option(
- '--output',
- default='build',
- help='Directory where to output results')
- return result.parse_args(argv)
+ result = optparse.OptionParser()
+ result.add_option('--cmd', help='Command to run')
+ result.add_option('--top',
+ default=top_or_default(),
+ help='The most recent commit to test')
+ result.add_option('--bottom', help='The oldest commit to test')
+ result.add_option(
+ '--dry-run',
+ help='Do not download or run the command, but print the actions',
+ default=False,
+ action='store_true')
+ result.add_option('--output',
+ default='build',
+ help='Directory where to output results')
+ return result.parse_args(argv)
class GitCommit(object):
- def __init__(self, git_hash, destination_dir, destination, timestamp):
- self.git_hash = git_hash
- self.destination_dir = destination_dir
- self.destination = destination
- self.timestamp = timestamp
- def __str__(self):
- return '%s : %s (%s)' % (self.git_hash, self.destination, self.timestamp)
+ def __init__(self, git_hash, destination_dir, destination, timestamp):
+ self.git_hash = git_hash
+ self.destination_dir = destination_dir
+ self.destination = destination
+ self.timestamp = timestamp
- def __repr__(self):
- return self.__str__()
+ def __str__(self):
+ return '%s : %s (%s)' % (self.git_hash, self.destination,
+ self.timestamp)
+
+ def __repr__(self):
+ return self.__str__()
+
def git_commit_from_hash(hash):
- commit_timestamp = subprocess.check_output(['git', 'show', '--no-patch',
- '--no-notes', '--pretty=\'%ct\'',
- hash]).strip().strip('\'')
- destination_dir = '%s/%s/' % (MASTER_COMMITS, hash)
- destination = '%s%s' % (destination_dir, 'r8.jar')
- commit = GitCommit(hash, destination_dir, destination, commit_timestamp)
- return commit
+ commit_timestamp = subprocess.check_output(
+ ['git', 'show', '--no-patch', '--no-notes', '--pretty=\'%ct\'',
+ hash]).decode('utf-8').strip().strip('\'')
+ destination_dir = '%s/%s/' % (MASTER_COMMITS, hash)
+ destination = '%s%s' % (destination_dir, 'r8.jar')
+ commit = GitCommit(hash, destination_dir, destination, commit_timestamp)
+ return commit
+
def enumerate_git_commits(top, bottom):
- output = subprocess.check_output(['git', 'rev-list', '--first-parent', top])
- found_bottom = False
- commits = []
- for c in output.splitlines():
- commit_hash = c.strip()
- commits.append(git_commit_from_hash(commit_hash))
- if commit_hash == bottom:
- found_bottom = True
- break
- if not found_bottom:
- raise Exception('Bottom not found, did you not use a merge commit')
- return commits
+ output = subprocess.check_output(
+ ['git', 'rev-list', '--first-parent', top]).decode('utf-8')
+ found_bottom = False
+ commits = []
+ for c in output.splitlines():
+ commit_hash = c.strip()
+ commits.append(git_commit_from_hash(commit_hash))
+ if commit_hash == bottom:
+ found_bottom = True
+ break
+ if not found_bottom:
+ raise Exception('Bottom not found; is it on the first-parent history of top?')
+ return commits
+
def get_available_commits(commits):
- cloud_commits = subprocess.check_output(
- ['gsutil.py', 'ls', MASTER_COMMITS]).splitlines()
- available_commits = []
- for commit in commits:
- if commit.destination_dir in cloud_commits:
- available_commits.append(commit)
- return available_commits
+ cloud_commits = subprocess.check_output(['gsutil.py', 'ls',
+ MASTER_COMMITS]).decode('utf-8').splitlines()
+ available_commits = []
+ for commit in commits:
+ if commit.destination_dir in cloud_commits:
+ available_commits.append(commit)
+ return available_commits
+
def print_commits(commits):
- for commit in commits:
- print(commit)
+ for commit in commits:
+ print(commit)
+
def permutate_range(start, end):
- diff = end - start
- assert diff >= 0
- if diff == 1:
- return [start, end]
- if diff == 0:
- return [start]
- half = end - (diff / 2)
- numbers = [half]
- first_half = permutate_range(start, half - 1)
- second_half = permutate_range(half + 1, end)
- for index in range(len(first_half)):
- numbers.append(first_half[index])
- if index < len(second_half):
- numbers.append(second_half[index])
- return numbers
+ diff = end - start
+ assert diff >= 0
+ if diff == 1:
+ return [start, end]
+ if diff == 0:
+ return [start]
+ half = end - (diff // 2)  # integer division: results are used as list indices
+ numbers = [half]
+ first_half = permutate_range(start, half - 1)
+ second_half = permutate_range(half + 1, end)
+ for index in range(len(first_half)):
+ numbers.append(first_half[index])
+ if index < len(second_half):
+ numbers.append(second_half[index])
+ return numbers
+
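+ # permutate_range produces a bisection ordering: e.g. permutate_range(0, 6)
+ # returns [3, 1, 5, 0, 4, 2, 6], so the benchmark samples the commit range
+ # coarsely first and then fills in the gaps.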
def permutate(number_of_commits):
- assert number_of_commits > 0
- numbers = permutate_range(0, number_of_commits - 1)
- assert all(n in numbers for n in range(number_of_commits))
- return numbers
+ assert number_of_commits > 0
+ numbers = permutate_range(0, number_of_commits - 1)
+ assert all(n in numbers for n in range(number_of_commits))
+ return numbers
+
def pull_r8_from_cloud(commit):
- utils.download_file_from_cloud_storage(commit.destination, utils.R8_JAR)
+ utils.download_file_from_cloud_storage(commit.destination, utils.R8_JAR)
+
def benchmark(commits, command, dryrun=False):
- commit_permutations = permutate(len(commits))
- count = 0
- for index in commit_permutations:
- count += 1
- print('Running commit %s out of %s' % (count, len(commits)))
- commit = commits[index]
- if not utils.cloud_storage_exists(commit.destination):
- # We may have a directory, but no r8.jar
- continue
- if not dryrun:
- pull_r8_from_cloud(commit)
- print('Running for commit: %s' % commit.git_hash)
- command(commit)
+ commit_permutations = permutate(len(commits))
+ count = 0
+ for index in commit_permutations:
+ count += 1
+ print('Running commit %s out of %s' % (count, len(commits)))
+ commit = commits[index]
+ if not utils.cloud_storage_exists(commit.destination):
+ # We may have a directory, but no r8.jar
+ continue
+ if not dryrun:
+ pull_r8_from_cloud(commit)
+ print('Running for commit: %s' % commit.git_hash)
+ command(commit)
+
def top_or_default(top=None):
- return top if top else utils.get_HEAD_sha1()
+ return top if top else utils.get_HEAD_sha1()
+
def bottom_or_default(bottom=None):
- # TODO(ricow): if not set, search back 1000
- if not bottom:
- raise Exception('No bottom specified')
- return bottom
+ # TODO(ricow): if not set, search back 1000
+ if not bottom:
+ raise Exception('No bottom specified')
+ return bottom
+
def run(command, top, bottom, dryrun=False):
- commits = enumerate_git_commits(top, bottom)
- available_commits = get_available_commits(commits)
- print('Running for:')
- print_commits(available_commits)
- benchmark(available_commits, command, dryrun=dryrun)
+ commits = enumerate_git_commits(top, bottom)
+ available_commits = get_available_commits(commits)
+ print('Running for:')
+ print_commits(available_commits)
+ benchmark(available_commits, command, dryrun=dryrun)
+
def make_cmd(options):
- return lambda commit: run_cmd(options, commit)
+ return lambda commit: run_cmd(options, commit)
+
def run_cmd(options, commit):
- cmd = [options.cmd, commit.git_hash]
- output_path = options.output or 'build'
- time_commit = '%s_%s' % (commit.timestamp, commit.git_hash)
- time_commit_path = os.path.join(output_path, time_commit)
- print(' '.join(cmd))
- if not options.dry_run:
- if not os.path.exists(time_commit_path):
- os.makedirs(time_commit_path)
- stdout_path = os.path.join(time_commit_path, 'stdout')
- stderr_path = os.path.join(time_commit_path, 'stderr')
- with open(stdout_path, 'w') as stdout:
- with open(stderr_path, 'w') as stderr:
- process = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
- timeout = 1000
- while process.poll() is None and timeout > 0:
- time.sleep(1)
- timeout -= 1
- if process.poll() is None:
- process.kill()
- print("Task timed out")
- stderr.write("timeout\n")
- print('Wrote outputs to: %s' % time_commit_path)
+ cmd = [options.cmd, commit.git_hash]
+ output_path = options.output or 'build'
+ time_commit = '%s_%s' % (commit.timestamp, commit.git_hash)
+ time_commit_path = os.path.join(output_path, time_commit)
+ print(' '.join(cmd))
+ if not options.dry_run:
+ if not os.path.exists(time_commit_path):
+ os.makedirs(time_commit_path)
+ stdout_path = os.path.join(time_commit_path, 'stdout')
+ stderr_path = os.path.join(time_commit_path, 'stderr')
+ with open(stdout_path, 'w') as stdout:
+ with open(stderr_path, 'w') as stderr:
+ process = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
+ timeout = 1000
+ while process.poll() is None and timeout > 0:
+ time.sleep(1)
+ timeout -= 1
+ if process.poll() is None:
+ process.kill()
+ print("Task timed out")
+ stderr.write("timeout\n")
+ print('Wrote outputs to: %s' % time_commit_path)
+
def main(argv):
- (options, args) = ParseOptions(argv)
- if not options.cmd:
- raise Exception('Please specify a command')
- top = top_or_default(options.top)
- bottom = bottom_or_default(options.bottom)
- command = make_cmd(options)
- run(command, top, bottom, dryrun=options.dry_run)
+ (options, args) = ParseOptions(argv)
+ if not options.cmd:
+ raise Exception('Please specify a command')
+ top = top_or_default(options.top)
+ bottom = bottom_or_default(options.bottom)
+ command = make_cmd(options)
+ run(command, top, bottom, dryrun=options.dry_run)
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/internal_test.py b/tools/internal_test.py
index 47844c8..129b087 100755
--- a/tools/internal_test.py
+++ b/tools/internal_test.py
@@ -62,52 +62,47 @@
DEPENDENT_PYTHON_FILES = [gradle, utils, run_on_app]
+
def find_min_xmx_command(app_data):
- record = app_data.GetMemoryData(app_data.GetLatestVersion())
- assert record['find-xmx-min'] < record['find-xmx-max']
- assert record['find-xmx-range'] < record['find-xmx-max'] - record['find-xmx-min']
- return [
- 'tools/run_on_app.py',
- '--compiler=r8',
- '--compiler-build=lib',
- '--app=%s' % app_data.GetName(),
- '--version=%s' % app_data.GetLatestVersion(),
- '--no-debug',
- '--no-build',
- '--find-min-xmx',
- '--find-min-xmx-min-memory=%s' % record['find-xmx-min'],
- '--find-min-xmx-max-memory=%s' % record['find-xmx-max'],
- '--find-min-xmx-range-size=%s' % record['find-xmx-range'],
- '--find-min-xmx-archive']
+ record = app_data.GetMemoryData(app_data.GetLatestVersion())
+ assert record['find-xmx-min'] < record['find-xmx-max']
+ assert record[
+ 'find-xmx-range'] < record['find-xmx-max'] - record['find-xmx-min']
+ return [
+ 'tools/run_on_app.py', '--compiler=r8', '--compiler-build=lib',
+ '--app=%s' % app_data.GetName(),
+ '--version=%s' % app_data.GetLatestVersion(), '--no-debug',
+ '--no-build', '--find-min-xmx',
+ '--find-min-xmx-min-memory=%s' % record['find-xmx-min'],
+ '--find-min-xmx-max-memory=%s' % record['find-xmx-max'],
+ '--find-min-xmx-range-size=%s' % record['find-xmx-range'],
+ '--find-min-xmx-archive'
+ ]
+
def compile_with_memory_max_command(app_data):
- # TODO(b/152939233): Remove this special handling when fixed.
- factor = 1.25 if app_data.GetName() == 'chrome' else 1.15
- record = app_data.GetMemoryData(app_data.GetLatestVersion())
- return [] if 'skip-find-xmx-max' in record else [
- 'tools/run_on_app.py',
- '--compiler=r8',
- '--compiler-build=lib',
- '--app=%s' % app_data.GetName(),
- '--version=%s' % app_data.GetLatestVersion(),
- '--no-debug',
- '--no-build',
- '--max-memory=%s' % int(record['oom-threshold'] * factor)
- ]
+ # TODO(b/152939233): Remove this special handling when fixed.
+ factor = 1.25 if app_data.GetName() == 'chrome' else 1.15
+ record = app_data.GetMemoryData(app_data.GetLatestVersion())
+ return [] if 'skip-find-xmx-max' in record else [
+ 'tools/run_on_app.py', '--compiler=r8', '--compiler-build=lib',
+ '--app=%s' % app_data.GetName(),
+ '--version=%s' %
+ app_data.GetLatestVersion(), '--no-debug', '--no-build',
+ '--max-memory=%s' % int(record['oom-threshold'] * factor)
+ ]
+
def compile_with_memory_min_command(app_data):
- record = app_data.GetMemoryData(app_data.GetLatestVersion())
- return [
- 'tools/run_on_app.py',
- '--compiler=r8',
- '--compiler-build=lib',
- '--app=%s' % app_data.GetName(),
- '--version=%s' % app_data.GetLatestVersion(),
- '--no-debug',
- '--no-build',
- '--expect-oom',
- '--max-memory=%s' % int(record['oom-threshold'] * 0.85)
- ]
+ record = app_data.GetMemoryData(app_data.GetLatestVersion())
+ return [
+ 'tools/run_on_app.py', '--compiler=r8', '--compiler-build=lib',
+ '--app=%s' % app_data.GetName(),
+ '--version=%s' % app_data.GetLatestVersion(), '--no-debug',
+ '--no-build', '--expect-oom',
+ '--max-memory=%s' % int(record['oom-threshold'] * 0.85)
+ ]
+
CLEAN_COMMANDS = [
# Make sure we have a clean build to not be polluted by old test files
@@ -117,8 +112,10 @@
# TODO(b/210982978): Enable testing of min xmx again
TEST_COMMANDS = [
# Run test.py internal testing.
- ['tools/test.py', '--only_internal', '--slow_tests',
- '--java_max_memory_size=8G'],
+ [
+ 'tools/test.py', '--only_internal', '--slow_tests',
+ '--java_max_memory_size=8G'
+ ],
# Ensure that all internal apps compile.
['tools/run_on_app.py', '--run-all', '--out=out', '--workers', '4'],
]
@@ -127,259 +124,292 @@
RUN_TIMEOUT = 3600 * 7
BOT_RUN_TIMEOUT = RUN_TIMEOUT * len(TEST_COMMANDS)
+
def log(str):
- print("%s: %s" % (time.strftime("%c"), str))
- sys.stdout.flush()
+ print("%s: %s" % (time.strftime("%c"), str))
+ sys.stdout.flush()
+
def ParseOptions():
- result = optparse.OptionParser()
- result.add_option('--continuous',
- help='Continuously run internal tests and post results to GCS.',
- default=False, action='store_true')
- result.add_option('--print_logs',
- help='Fetch logs from gcs and print them, takes the commit to print for.',
- default=None)
- result.add_option('--bot',
- help='Run in bot mode, i.e., scheduling runs.',
- default=False, action='store_true')
- result.add_option('--archive',
- help='Post result to GCS, implied by --continuous',
- default=False, action='store_true')
- return result.parse_args()
+ result = optparse.OptionParser()
+ result.add_option(
+ '--continuous',
+ help='Continuously run internal tests and post results to GCS.',
+ default=False,
+ action='store_true')
+ result.add_option(
+ '--print_logs',
+ help=
+ 'Fetch logs from GCS and print them; takes the commit to print logs for.',
+ default=None)
+ result.add_option('--bot',
+ help='Run in bot mode, i.e., scheduling runs.',
+ default=False,
+ action='store_true')
+ result.add_option('--archive',
+ help='Post result to GCS, implied by --continuous',
+ default=False,
+ action='store_true')
+ return result.parse_args()
+
def ensure_git_clean():
- # Ensure clean git repo.
- diff = subprocess.check_output(['git', 'diff']).decode('utf-8')
- if len(diff) > 0:
- log('Local modifications to the git repo, exiting')
- sys.exit(1)
+ # Ensure clean git repo.
+ diff = subprocess.check_output(['git', 'diff']).decode('utf-8')
+ if len(diff) > 0:
+ log('Local modifications to the git repo, exiting')
+ sys.exit(1)
+
def git_pull():
- ensure_git_clean()
- subprocess.check_call(['git', 'checkout', 'main'])
- subprocess.check_call(['git', 'pull'])
- return utils.get_HEAD_sha1()
+ ensure_git_clean()
+ subprocess.check_call(['git', 'checkout', 'main'])
+ subprocess.check_call(['git', 'pull'])
+ return utils.get_HEAD_sha1()
+
def git_checkout(git_hash):
- ensure_git_clean()
- # Ensure that we are up to date to get the commit.
- git_pull()
- exitcode = subprocess.call(['git', 'checkout', git_hash])
- if exitcode != 0:
- return None
- return utils.get_HEAD_sha1()
+ ensure_git_clean()
+ # Ensure that we are up to date to get the commit.
+ git_pull()
+ exitcode = subprocess.call(['git', 'checkout', git_hash])
+ if exitcode != 0:
+ return None
+ return utils.get_HEAD_sha1()
+
def get_test_result_dir():
- return os.path.join(utils.R8_INTERNAL_TEST_RESULTS_BUCKET, TEST_RESULT_DIR)
+ return os.path.join(utils.R8_INTERNAL_TEST_RESULTS_BUCKET, TEST_RESULT_DIR)
+
def get_sha_destination(sha):
- return os.path.join(get_test_result_dir(), sha)
+ return os.path.join(get_test_result_dir(), sha)
+
def archive_status(failed):
- gs_destination = 'gs://%s' % get_sha_destination(utils.get_HEAD_sha1())
- utils.archive_value('status', gs_destination, failed)
+ gs_destination = 'gs://%s' % get_sha_destination(utils.get_HEAD_sha1())
+ utils.archive_value('status', gs_destination, failed)
+
def get_status(sha):
- gs_destination = 'gs://%s/status' % get_sha_destination(sha)
- return utils.cat_file_on_cloud_storage(gs_destination)
+ gs_destination = 'gs://%s/status' % get_sha_destination(sha)
+ return utils.cat_file_on_cloud_storage(gs_destination)
+
def archive_log(stdout, stderr, exitcode, timed_out, cmd):
- sha = utils.get_HEAD_sha1()
- cmd_dir = cmd.replace(' ', '_').replace('/', '_')
- destination = os.path.join(get_sha_destination(sha), cmd_dir)
- gs_destination = 'gs://%s' % destination
- url = 'https://storage.cloud.google.com/%s' % destination
- log('Archiving logs to: %s' % gs_destination)
- utils.archive_value(EXITCODE, gs_destination, exitcode)
- utils.archive_value(TIMED_OUT, gs_destination, timed_out)
- utils.archive_file(STDOUT, gs_destination, stdout)
- utils.archive_file(STDERR, gs_destination, stderr)
- log('Logs available at: %s' % url)
+ sha = utils.get_HEAD_sha1()
+ cmd_dir = cmd.replace(' ', '_').replace('/', '_')
+ destination = os.path.join(get_sha_destination(sha), cmd_dir)
+ gs_destination = 'gs://%s' % destination
+ url = 'https://storage.cloud.google.com/%s' % destination
+ log('Archiving logs to: %s' % gs_destination)
+ utils.archive_value(EXITCODE, gs_destination, exitcode)
+ utils.archive_value(TIMED_OUT, gs_destination, timed_out)
+ utils.archive_file(STDOUT, gs_destination, stdout)
+ utils.archive_file(STDERR, gs_destination, stderr)
+ log('Logs available at: %s' % url)
+
def get_magic_file_base_path():
- return 'gs://%s/magic' % get_test_result_dir()
+ return 'gs://%s/magic' % get_test_result_dir()
+
def get_magic_file_gs_path(name):
- return '%s/%s' % (get_magic_file_base_path(), name)
+ return '%s/%s' % (get_magic_file_base_path(), name)
+
def get_magic_file_exists(name):
- return utils.file_exists_on_cloud_storage(get_magic_file_gs_path(name))
+ return utils.file_exists_on_cloud_storage(get_magic_file_gs_path(name))
+
def delete_magic_file(name):
- utils.delete_file_from_cloud_storage(get_magic_file_gs_path(name))
+ utils.delete_file_from_cloud_storage(get_magic_file_gs_path(name))
+
def put_magic_file(name, sha):
- utils.archive_value(name, get_magic_file_base_path(), sha)
+ utils.archive_value(name, get_magic_file_base_path(), sha)
+
def get_magic_file_content(name, ignore_errors=False):
- return utils.cat_file_on_cloud_storage(get_magic_file_gs_path(name),
- ignore_errors=ignore_errors)
+ return utils.cat_file_on_cloud_storage(get_magic_file_gs_path(name),
+ ignore_errors=ignore_errors)
+
def print_magic_file_state():
- log('Magic file status:')
- for magic in ALL_MAGIC:
- if get_magic_file_exists(magic):
- content = get_magic_file_content(magic, ignore_errors=True)
- log('%s content: %s' % (magic, content))
+ log('Magic file status:')
+ for magic in ALL_MAGIC:
+ if get_magic_file_exists(magic):
+ content = get_magic_file_content(magic, ignore_errors=True)
+ log('%s content: %s' % (magic, content))
+
def fetch_and_print_logs(hash):
- gs_base = 'gs://%s' % get_sha_destination(hash)
- listing = utils.ls_files_on_cloud_storage(gs_base).strip().split('\n')
- for entry in listing:
- if not entry.endswith('/status'): # Ignore the overall status file
- for to_print in [EXITCODE, TIMED_OUT, STDERR, STDOUT]:
- gs_location = '%s%s' % (entry, to_print)
- value = utils.cat_file_on_cloud_storage(gs_location)
- print('\n\n%s had value:\n%s' % (to_print, value))
- print("\n\nPrinting find-min-xmx ranges for apps")
- run_on_app.print_min_xmx_ranges_for_hash(hash, 'r8', 'lib')
+ gs_base = 'gs://%s' % get_sha_destination(hash)
+ listing = utils.ls_files_on_cloud_storage(gs_base).strip().split('\n')
+ for entry in listing:
+ if not entry.endswith('/status'): # Ignore the overall status file
+ for to_print in [EXITCODE, TIMED_OUT, STDERR, STDOUT]:
+ gs_location = '%s%s' % (entry, to_print)
+ value = utils.cat_file_on_cloud_storage(gs_location)
+ print('\n\n%s had value:\n%s' % (to_print, value))
+ print("\n\nPrinting find-min-xmx ranges for apps")
+ run_on_app.print_min_xmx_ranges_for_hash(hash, 'r8', 'lib')
+
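+ # The bot and the internal test runner coordinate through "magic" marker
+ # files in GCS: the bot uploads READY_FOR_TESTING with the git hash to test,
+ # the runner replaces it with TESTING while executing, and writes
+ # TESTING_COMPLETE when done so the bot can fetch the archived status.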
def run_bot():
- print_magic_file_state()
- # Ensure that there is nothing currently scheduled (broken/stopped run)
- for magic in ALL_MAGIC:
- if get_magic_file_exists(magic):
- log('ERROR: Synchronizing file %s exists, cleaning up' % magic)
- delete_magic_file(magic)
- print_magic_file_state()
- assert not get_magic_file_exists(READY_FOR_TESTING)
- git_hash = utils.get_HEAD_sha1()
- put_magic_file(READY_FOR_TESTING, git_hash)
- begin = time.time()
- while True:
- if time.time() - begin > BOT_RUN_TIMEOUT:
- log('Timeout exceeded: http://go/internal-r8-doc')
- raise Exception('Bot timeout')
- if get_magic_file_exists(TESTING_COMPLETE):
- if get_magic_file_content(TESTING_COMPLETE) == git_hash:
- break
- else:
- raise Exception('Non matching git hashes %s and %s' % (
- get_magic_file_content(TESTING_COMPLETE), git_hash))
- log('Still waiting for test result')
print_magic_file_state()
- time.sleep(PULL_DELAY)
- total_time = time.time()-begin
- log('Done running test for %s in %ss' % (git_hash, total_time))
- test_status = get_status(git_hash)
- delete_magic_file(TESTING_COMPLETE)
- log('Test status is: %s' % test_status)
- if test_status != '0':
- print('Tests failed, you can print the logs by running(googlers only):')
- print(' tools/internal_test.py --print_logs %s' % git_hash)
- return 1
+ # Ensure that there is nothing currently scheduled (broken/stopped run)
+ for magic in ALL_MAGIC:
+ if get_magic_file_exists(magic):
+ log('ERROR: Synchronizing file %s exists, cleaning up' % magic)
+ delete_magic_file(magic)
+ print_magic_file_state()
+ assert not get_magic_file_exists(READY_FOR_TESTING)
+ git_hash = utils.get_HEAD_sha1()
+ put_magic_file(READY_FOR_TESTING, git_hash)
+ begin = time.time()
+ while True:
+ if time.time() - begin > BOT_RUN_TIMEOUT:
+ log('Timeout exceeded: http://go/internal-r8-doc')
+ raise Exception('Bot timeout')
+ if get_magic_file_exists(TESTING_COMPLETE):
+ if get_magic_file_content(TESTING_COMPLETE) == git_hash:
+ break
+ else:
+ raise Exception(
+ 'Non matching git hashes %s and %s' %
+ (get_magic_file_content(TESTING_COMPLETE), git_hash))
+ log('Still waiting for test result')
+ print_magic_file_state()
+ time.sleep(PULL_DELAY)
+ total_time = time.time() - begin
+ log('Done running test for %s in %ss' % (git_hash, total_time))
+ test_status = get_status(git_hash)
+ delete_magic_file(TESTING_COMPLETE)
+ log('Test status is: %s' % test_status)
+ if test_status != '0':
+ print('Tests failed, you can print the logs by running (Googlers only):')
+ print(' tools/internal_test.py --print_logs %s' % git_hash)
+ return 1
+
def run_continuously():
- while True:
- print_magic_file_state()
- if get_magic_file_exists(READY_FOR_TESTING):
- git_hash = get_magic_file_content(READY_FOR_TESTING)
- checked_out = git_checkout(git_hash)
- if not checked_out:
- # Gerrit change, we don't run these on internal.
- archive_status(0)
- put_magic_file(TESTING_COMPLETE, git_hash)
- delete_magic_file(READY_FOR_TESTING)
- continue
- # Sanity check, if this does not succeed stop.
- if checked_out != git_hash:
- log('Inconsistent state: %s %s' % (git_hash, checked_out))
- sys.exit(1)
- put_magic_file(TESTING, git_hash)
- delete_magic_file(READY_FOR_TESTING)
- log('Running with hash: %s' % git_hash)
- exitcode = run_external()
- log('Running finished with exit code %s' % exitcode)
- # If the bot timed out or something else triggered the bot to fail, don't
- # put up the result (it will not be displayed anywhere, and we can't
- # remove the magic file if the bot cleaned up).
- if get_magic_file_exists(TESTING):
- put_magic_file(TESTING_COMPLETE, git_hash)
- # There is still a potential race here (we check, bot deletes, we try to
- # delete) - this is unlikely and we ignore it (restart if it happens).
- delete_magic_file(TESTING)
- time.sleep(PULL_DELAY)
+ while True:
+ print_magic_file_state()
+ if get_magic_file_exists(READY_FOR_TESTING):
+ git_hash = get_magic_file_content(READY_FOR_TESTING)
+ checked_out = git_checkout(git_hash)
+ if not checked_out:
+ # Gerrit change, we don't run these on internal.
+ archive_status(0)
+ put_magic_file(TESTING_COMPLETE, git_hash)
+ delete_magic_file(READY_FOR_TESTING)
+ continue
+ # Sanity check; stop if the checked-out hash does not match.
+ if checked_out != git_hash:
+ log('Inconsistent state: %s %s' % (git_hash, checked_out))
+ sys.exit(1)
+ put_magic_file(TESTING, git_hash)
+ delete_magic_file(READY_FOR_TESTING)
+ log('Running with hash: %s' % git_hash)
+ exitcode = run_external()
+ log('Running finished with exit code %s' % exitcode)
+ # If the bot timed out or something else triggered the bot to fail, don't
+ # put up the result (it will not be displayed anywhere, and we can't
+ # remove the magic file if the bot cleaned up).
+ if get_magic_file_exists(TESTING):
+ put_magic_file(TESTING_COMPLETE, git_hash)
+ # There is still a potential race here (we check, bot deletes, we try to
+ # delete) - this is unlikely and we ignore it (restart if it happens).
+ delete_magic_file(TESTING)
+ time.sleep(PULL_DELAY)
+
def run_external():
- return subprocess.call([sys.executable, "tools/internal_test.py", "--archive"])
+ return subprocess.call(
+ [sys.executable, "tools/internal_test.py", "--archive"])
+
def handle_output(archive, stderr, stdout, exitcode, timed_out, cmd):
- if archive:
- archive_log(stdout, stderr, exitcode, timed_out, cmd)
- else:
- print('Execution of %s resulted in:' % cmd)
- print('exit code: %s ' % exitcode)
- print('timeout: %s ' % timed_out)
- with open(stderr, 'r') as f:
- print('stderr: %s' % f.read())
- with open(stdout, 'r') as f:
- print('stdout: %s' % f.read())
+ if archive:
+ archive_log(stdout, stderr, exitcode, timed_out, cmd)
+ else:
+ print('Execution of %s resulted in:' % cmd)
+ print('exit code: %s' % exitcode)
+ print('timeout: %s' % timed_out)
+ with open(stderr, 'r') as f:
+ print('stderr: %s' % f.read())
+ with open(stdout, 'r') as f:
+ print('stdout: %s' % f.read())
+
def execute(cmd, archive, env=None):
- if cmd == []:
- return
+ if cmd == []:
+ return
- assert(cmd[0].endswith('.py'))
- cmd = [sys.executable] + cmd
+ assert (cmd[0].endswith('.py'))
+ cmd = [sys.executable] + cmd
+ utils.PrintCmd(cmd)
+ with utils.TempDir() as temp:
+ try:
+ stderr_fd = None
+ stdout_fd = None
+ exitcode = 0
+ stderr = os.path.join(temp, 'stderr')
+ stderr_fd = open(stderr, 'w')
+ stdout = os.path.join(temp, 'stdout')
+ stdout_fd = open(stdout, 'w')
+ popen = subprocess.Popen(cmd,
+ bufsize=1024 * 1024 * 10,
+ stdout=stdout_fd,
+ stderr=stderr_fd,
+ env=env)
+ begin = time.time()
+ timed_out = False
+ while popen.poll() is None:
+ if time.time() - begin > RUN_TIMEOUT:
+ popen.terminate()
+ timed_out = True
+ time.sleep(2)
+ exitcode = popen.returncode
+ finally:
+ if stderr_fd:
+ stderr_fd.close()
+ if stdout_fd:
+ stdout_fd.close()
+ if exitcode != 0:
+ handle_output(archive, stderr, stdout, popen.returncode,
+ timed_out, ' '.join(cmd))
+ return exitcode
- utils.PrintCmd(cmd)
- with utils.TempDir() as temp:
- try:
- stderr_fd = None
- stdout_fd = None
- exitcode = 0
- stderr = os.path.join(temp, 'stderr')
- stderr_fd = open(stderr, 'w')
- stdout = os.path.join(temp, 'stdout')
- stdout_fd = open(stdout, 'w')
- popen = subprocess.Popen(cmd,
- bufsize=1024*1024*10,
- stdout=stdout_fd,
- stderr=stderr_fd,
- env=env)
- begin = time.time()
- timed_out = False
- while popen.poll() == None:
- if time.time() - begin > RUN_TIMEOUT:
- popen.terminate()
- timed_out = True
- time.sleep(2)
- exitcode = popen.returncode
- finally:
- if stderr_fd:
- stderr_fd.close()
- if stdout_fd:
- stdout_fd.close()
- if exitcode != 0:
- handle_output(archive, stderr, stdout, popen.returncode,
- timed_out, ' '.join(cmd))
- return exitcode
def run_once(archive):
- git_hash = utils.get_HEAD_sha1()
- log('Running once with hash %s' % git_hash)
- env = os.environ.copy()
- # Bot does not have a lot of memory.
- env['R8_GRADLE_CORES_PER_FORK'] = '5'
- if archive:
- [execute(cmd, archive, env) for cmd in CLEAN_COMMANDS]
- failed = any([execute(cmd, archive, env) for cmd in TEST_COMMANDS])
- # Gradle daemon occasionally leaks memory, stop it.
- gradle.RunGradle(['--stop'])
- archive_status(1 if failed else 0)
- return failed
+ git_hash = utils.get_HEAD_sha1()
+ log('Running once with hash %s' % git_hash)
+ env = os.environ.copy()
+ # Bot does not have a lot of memory.
+ env['R8_GRADLE_CORES_PER_FORK'] = '5'
+ if archive:
+ [execute(cmd, archive, env) for cmd in CLEAN_COMMANDS]
+ failed = any([execute(cmd, archive, env) for cmd in TEST_COMMANDS])
+ # Gradle daemon occasionally leaks memory, stop it.
+ gradle.RunGradle(['--stop'])
+ archive_status(1 if failed else 0)
+ return failed
+
def Main():
- (options, args) = ParseOptions()
- if options.continuous:
- run_continuously()
- elif options.bot:
- return run_bot()
- elif options.print_logs:
- return fetch_and_print_logs(options.print_logs)
- else:
- return run_once(options.archive)
+ (options, args) = ParseOptions()
+ if options.continuous:
+ run_continuously()
+ elif options.bot:
+ return run_bot()
+ elif options.print_logs:
+ return fetch_and_print_logs(options.print_logs)
+ else:
+ return run_once(options.archive)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/jardiff.py b/tools/jardiff.py
index 18587ab..afb462e 100755
--- a/tools/jardiff.py
+++ b/tools/jardiff.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('jardiff', sys.argv[1:]))
+ sys.exit(toolhelper.run('jardiff', sys.argv[1:]))
diff --git a/tools/java.py b/tools/java.py
deleted file mode 100755
index fb0ef13..0000000
--- a/tools/java.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2019, the R8 project authors. Please see the AUTHORS file
-# for details. All rights reserved. Use of this source code is governed by a
-# BSD-style license that can be found in the LICENSE file.
-
-import jdk
-import utils
-import subprocess
-import sys
-
-def run(args):
- cmd = [jdk.GetJavaExecutable()] + args
- utils.PrintCmd(cmd)
- result = subprocess.check_output(cmd)
- print result
- return result
-
-def main():
- try:
- run(sys.argv[1:])
- except subprocess.CalledProcessError as e:
- # In case anything relevant was printed to stdout, normally this is already
- # on stderr.
- print e.output
- return e.returncode
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/javac.py b/tools/javac.py
deleted file mode 100755
index 242fac0..0000000
--- a/tools/javac.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2019, the R8 project authors. Please see the AUTHORS file
-# for details. All rights reserved. Use of this source code is governed by a
-# BSD-style license that can be found in the LICENSE file.
-
-import jdk
-import utils
-import subprocess
-import sys
-
-def run(args):
- cmd = [jdk.GetJavacExecutable()] + args
- utils.PrintCmd(cmd)
- result = subprocess.check_output(cmd)
- print result
- return result
-
-def main():
- try:
- run(sys.argv[1:])
- except subprocess.CalledProcessError as e:
- # In case anything relevant was printed to stdout, normally this is already
- # on stderr.
- print e.output
- return e.returncode
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/tools/jdk.py b/tools/jdk.py
index f37d272..ba7ddfb 100755
--- a/tools/jdk.py
+++ b/tools/jdk.py
@@ -10,80 +10,92 @@
JDK_DIR = os.path.join(defines.THIRD_PARTY, 'openjdk')
-ALL_JDKS = ['openjdk-9.0.4', 'jdk-11', 'jdk-15', 'jdk-16', 'jdk-17',
- 'jdk-18', 'jdk-20']
+ALL_JDKS = [
+ 'openjdk-9.0.4', 'jdk-11', 'jdk-15', 'jdk-16', 'jdk-17', 'jdk-18', 'jdk-20'
+]
def GetJdkHome():
- return GetJdk11Home()
+ return GetJdk11Home()
+
def GetJdkRoot():
- return GetJdk11Root()
+ return GetJdk11Root()
+
def GetJdk11Root():
- root = os.path.join(JDK_DIR, 'jdk-11')
- os_root = GetOSPath(root)
- return os_root if os_root else os.environ['JAVA_HOME']
+ root = os.path.join(JDK_DIR, 'jdk-11')
+ os_root = GetOSPath(root)
+ return os_root if os_root else os.environ['JAVA_HOME']
+
def GetOSPath(root):
- if defines.IsLinux():
- return os.path.join(root, 'linux')
- elif defines.IsOsX():
- return os.path.join(root, 'osx')
- elif defines.IsWindows():
- return os.path.join(root, 'windows')
- else:
- return None
+ if defines.IsLinux():
+ return os.path.join(root, 'linux')
+ elif defines.IsOsX():
+ return os.path.join(root, 'osx')
+ elif defines.IsWindows():
+ return os.path.join(root, 'windows')
+ else:
+ return None
+
def GetAllJdkDirs():
- dirs = []
- for jdk in ALL_JDKS:
- root = GetOSPath(os.path.join(JDK_DIR, jdk))
- # Some jdks are not available on windows, don't try to get these.
- if os.path.exists(root + '.tar.gz.sha1'):
- dirs.append(root)
- return dirs
+ dirs = []
+ for jdk in ALL_JDKS:
+ root = GetOSPath(os.path.join(JDK_DIR, jdk))
+ # Some jdks are not available on windows, don't try to get these.
+ if root and os.path.exists(root + '.tar.gz.sha1'):
+ dirs.append(root)
+ return dirs
+
def GetJdk11Home():
- root = GetJdk11Root()
- # osx has the home inside Contents/Home in the bundle
- if defines.IsOsX():
- return os.path.join(root,'Contents', 'Home')
- else:
- return root
+ root = GetJdk11Root()
+ # osx has the home inside Contents/Home in the bundle
+ if defines.IsOsX():
+ return os.path.join(root, 'Contents', 'Home')
+ else:
+ return root
+
def GetJdk9Home():
- root = os.path.join(JDK_DIR, 'openjdk-9.0.4')
- if defines.IsLinux():
- return os.path.join(root, 'linux')
- elif defines.IsOsX():
- return os.path.join(root, 'osx')
- elif defines.IsWindows():
- return os.path.join(root, 'windows')
- else:
- return os.environ['JAVA_HOME']
+ root = os.path.join(JDK_DIR, 'openjdk-9.0.4')
+ if defines.IsLinux():
+ return os.path.join(root, 'linux')
+ elif defines.IsOsX():
+ return os.path.join(root, 'osx')
+ elif defines.IsWindows():
+ return os.path.join(root, 'windows')
+ else:
+ return os.environ['JAVA_HOME']
+
def GetJdk8Home():
- root = os.path.join(JDK_DIR, 'jdk8')
- if defines.IsLinux():
- return os.path.join(root, 'linux-x86')
- elif defines.IsOsX():
- return os.path.join(root, 'darwin-x86')
- else:
- return os.environ['JAVA_HOME']
+ root = os.path.join(JDK_DIR, 'jdk8')
+ if defines.IsLinux():
+ return os.path.join(root, 'linux-x86')
+ elif defines.IsOsX():
+ return os.path.join(root, 'darwin-x86')
+ else:
+ return os.environ['JAVA_HOME']
+
def GetJavaExecutable(jdkHome=None):
- jdkHome = jdkHome if jdkHome else GetJdkHome()
- executable = 'java.exe' if defines.IsWindows() else 'java'
- return os.path.join(jdkHome, 'bin', executable) if jdkHome else executable
+ jdkHome = jdkHome if jdkHome else GetJdkHome()
+ executable = 'java.exe' if defines.IsWindows() else 'java'
+ return os.path.join(jdkHome, 'bin', executable) if jdkHome else executable
+
def GetJavacExecutable(jdkHome=None):
- jdkHome = jdkHome if jdkHome else GetJdkHome()
- executable = 'javac.exe' if defines.IsWindows() else 'javac'
- return os.path.join(jdkHome, 'bin', executable) if jdkHome else executable
+ jdkHome = jdkHome if jdkHome else GetJdkHome()
+ executable = 'javac.exe' if defines.IsWindows() else 'javac'
+ return os.path.join(jdkHome, 'bin', executable) if jdkHome else executable
+
def Main():
- print(GetJdkHome())
+ print(GetJdkHome())
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/keeprule_benchmark.py b/tools/keeprule_benchmark.py
deleted file mode 100755
index 1401084..0000000
--- a/tools/keeprule_benchmark.py
+++ /dev/null
@@ -1,241 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2020, the R8 project authors. Please see the AUTHORS file
-# for details. All rights reserved. Use of this source code is governed by a
-# BSD-style license that can be found in the LICENSE file.
-
-import argparse
-import os
-import subprocess
-import sys
-import time
-
-import jdk
-import proguard
-import toolhelper
-import utils
-
-SHRINKERS = ['r8'] + proguard.getVersions()
-
-INPUT_PROGRAM = utils.PINNED_R8_JAR
-
-ANNO = 'com.android.tools.r8.com.google.common.annotations.VisibleForTesting'
-
-R8_OPTIONS = [
- 'printTimes',
- 'passthroughDexCode',
- 'enableClassMerging',
- 'enableDevirtualization',
- 'enableNonNullTracking',
- 'enableInlining',
- 'enableSwitchMapRemoval',
- 'enableValuePropagation',
- 'useSmaliSyntax',
- 'verbose',
- 'quiet',
- 'invalidDebugInfoFatal',
- 'intermediate',
- 'enableLambdaMerging',
- 'enableDesugaring',
- 'enableMainDexListCheck',
- 'enableTreeShaking',
- 'printCfg',
- 'ignoreMissingClasses',
- 'forceProguardCompatibility',
- 'enableMinification',
- 'disableAssertions',
- 'debugKeepRules',
- 'debug',
- 'minimalMainDex',
- 'skipReadingDexCode',
-]
-
-R8_CLASSES = [
- 'com.android.tools.r8.code.Format11x',
- 'com.android.tools.r8.code.MoveFrom16',
- 'com.android.tools.r8.code.AddLong2Addr',
- 'com.android.tools.r8.code.AgetByte',
- 'com.android.tools.r8.code.SubDouble',
- 'com.android.tools.r8.code.Sput',
- 'com.android.tools.r8.code.Format10x',
- 'com.android.tools.r8.code.RemInt',
- 'com.android.tools.r8.code.ConstWide',
- 'com.android.tools.r8.code.SgetWide',
- 'com.android.tools.r8.code.OrInt2Addr',
- 'com.android.tools.r8.code.Iget',
- 'com.android.tools.r8.code.Instruction',
- 'com.android.tools.r8.code.SubInt2Addr',
- 'com.android.tools.r8.code.SwitchPayload',
- 'com.android.tools.r8.code.Const4',
- 'com.android.tools.r8.code.ShrIntLit8',
- 'com.android.tools.r8.code.ConstWide16',
- 'com.android.tools.r8.code.NegInt',
- 'com.android.tools.r8.code.SgetBoolean',
- 'com.android.tools.r8.code.Format22x',
- 'com.android.tools.r8.code.InvokeVirtualRange',
- 'com.android.tools.r8.code.Format45cc',
- 'com.android.tools.r8.code.DivFloat2Addr',
- 'com.android.tools.r8.code.MulIntLit16',
- 'com.android.tools.r8.code.BytecodeStream',
-]
-
-KEEP_MAIN = \
- '-keep class com.android.tools.r8.R8 { void main(java.lang.String[]); }'
-
-BENCHMARKS = [
- # Baseline compile just keeps R8.main (implicitly kept for all benchmarks).
- ('KeepBaseline', ''),
-
- # Mirror default keep getters/setters, but independent of hierarchy.
- ('KeepGetters',
- '-keepclassmembers class * { *** get*(); }'),
- ('KeepGettersIf',
- '-if class * { *** get*(); } -keep class <1> { *** get<2>(); }'),
-
- # Mirror default keep getters/setters below View (here a class with a B).
- ('KeepSubGetters',
- '-keepclassmembers class * extends **.*B* { *** get*(); }'),
- ('KeepSubGettersIf',
- '-if class * extends **.*B* -keep class <1> { *** get*(); }'),
-
- # General keep rule to keep annotated members.
- ('KeepAnnoMethod',
- '-keepclasseswithmembers class * { @%s *** *(...); }' % ANNO),
- ('KeepAnnoMethodCond',
- '-keepclassmembers class * { @%s *** *(...); }' % ANNO),
- ('KeepAnnoMethodIf',
- '-if class * { @%s *** *(...); } -keep class <1> { @%s *** <2>(...); }' \
- % (ANNO, ANNO)),
-
- # Large collection of rules mirroring AAPT conditional rules on R fields.
- ('KeepAaptFieldIf',
- '\n'.join([
- '-if class **.InternalOptions { boolean %s; }'
- ' -keep class %s { <init>(...); }' % (f, c)
- for (f, c) in zip(R8_OPTIONS, R8_CLASSES) * 1 #100
- ])),
-
- # If rules with predicates that will never by true, but will need
- # consideration. The CodeSize of these should be equal to the baseline run.
- ('KeepIfNonExistingClass',
- '-if class **.*A*B*C*D*E*F* -keep class %s' % ANNO),
- ('KeepIfNonExistingMember',
- '-if class **.*A* { *** *a*b*c*d*e*f*(...); } -keep class %s' % ANNO)
-]
-
-def parse_arguments(argv):
- parser = argparse.ArgumentParser(
- description = 'Run keep-rule benchmarks.')
- parser.add_argument('--ignore-java-version',
- help='Do not check java version',
- default=False,
- action='store_true')
- parser.add_argument('--shrinker',
- help='The shrinker to use',
- choices=SHRINKERS,
- default=SHRINKERS[0])
- parser.add_argument('--runs',
- help='Number of runs to average out time on',
- type=int,
- default=3)
- parser.add_argument('--benchmark',
- help='Benchmark to run (default all)',
- choices=map(lambda (x,y): x, BENCHMARKS),
- default=None)
- options = parser.parse_args(argv)
- return options
-
-class BenchmarkResult:
- def __init__(self, name, size, runs):
- self.name = name
- self.size = size
- self.runs = runs
-
-def isPG(shrinker):
- return proguard.isValidVersion(shrinker)
-
-def shrinker_args(shrinker, keepfile, output):
- if shrinker == 'r8':
- return [
- jdk.GetJavaExecutable(),
- '-cp', utils.R8LIB_JAR,
- 'com.android.tools.r8.R8',
- INPUT_PROGRAM,
- '--lib', utils.RT_JAR,
- '--output', output,
- '--min-api', '10000',
- '--pg-conf', keepfile,
- ]
- elif isPG(shrinker):
- return proguard.getCmd([
- '-injars', INPUT_PROGRAM,
- '-libraryjars', utils.RT_JAR,
- '-outjars', output,
- '-dontwarn', '**',
- '-optimizationpasses', '2',
- '@' + keepfile,
- ],
- version=shrinker)
- else:
- assert False, "Unexpected shrinker " + shrinker
-
-def dex(input, output):
- toolhelper.run(
- 'd8',
- [
- input,
- '--lib', utils.RT_JAR,
- '--min-api', '10000',
- '--output', output
- ],
- build=False,
- debug=False)
-
-def run_shrinker(options, temp):
- benchmarks = BENCHMARKS
- if options.benchmark:
- for (name, rules) in BENCHMARKS:
- if name == options.benchmark:
- benchmarks = [(name, rules)]
- break
- assert len(benchmarks) == 1, "Unexpected benchmark " + options.benchmark
-
- run_count = options.runs
- benchmark_results = []
- for (name, rule) in benchmarks:
- benchmark_keep = os.path.join(temp, '%s-keep.txt' % name)
- with open(benchmark_keep, 'w') as fp:
- fp.write(KEEP_MAIN)
- fp.write('\n')
- fp.write(rule)
-
- benchmark_runs = []
- benchmark_size = 0
- for i in range(run_count):
- out = os.path.join(temp, '%s-out%d.jar' % (name, i))
- cmd = shrinker_args(options.shrinker, benchmark_keep, out)
- utils.PrintCmd(cmd)
- t0 = time.time()
- subprocess.check_output(cmd)
- t1 = time.time()
- benchmark_runs.append(t1 - t0)
- if isPG(options.shrinker):
- dexout = os.path.join(temp, '%s-out%d-dex.jar' % (name, i))
- dex(out, dexout)
- benchmark_size = utils.uncompressed_size(dexout)
- else:
- benchmark_size = utils.uncompressed_size(out)
- benchmark_results.append(
- BenchmarkResult(name, benchmark_size, benchmark_runs))
-
- print 'Runs:', options.runs
- for result in benchmark_results:
- benchmark_avg = sum(result.runs) / run_count
- print '%s(CodeSize): %d' % (result.name, result.size)
- print '%s(RunTimeRaw): %d ms' % (result.name, 1000.0 * benchmark_avg)
-
-if __name__ == '__main__':
- options = parse_arguments(sys.argv[1:])
- if not options.ignore_java_version:
- utils.check_java_version()
- with utils.TempDir() as temp:
- run_shrinker(options, temp)
diff --git a/tools/maindex.py b/tools/maindex.py
index 735ced5..f157498 100755
--- a/tools/maindex.py
+++ b/tools/maindex.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('maindex', sys.argv[1:]))
+ sys.exit(toolhelper.run('maindex', sys.argv[1:]))
diff --git a/tools/minify_tool.py b/tools/minify_tool.py
index cef72e5..2356ac1 100755
--- a/tools/minify_tool.py
+++ b/tools/minify_tool.py
@@ -2,7 +2,6 @@
# Copyright (c) 2018, the R8 project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
-
'''
Run R8 (with the class-file backend) to optimize a command-line program.
@@ -31,93 +30,114 @@
parser = argparse.ArgumentParser(description=__doc__.strip(),
formatter_class=argparse.RawTextHelpFormatter)
+parser.add_argument('-i',
+ '--input-jar',
+ default=utils.R8_JAR,
+ help='Input JAR to use (default: build/libs/r8.jar)')
parser.add_argument(
- '-i', '--input-jar', default=utils.R8_JAR,
- help='Input JAR to use (default: build/libs/r8.jar)')
-parser.add_argument(
- '-o', '--output-jar',
+ '-o',
+ '--output-jar',
help='Path to output JAR (default: build/libs/<MainClass>-min.jar)')
+parser.add_argument('-l',
+ '--lib',
+ default=utils.RT_JAR,
+ help='Path to rt.jar to use instead of OpenJDK 1.8')
parser.add_argument(
- '-l', '--lib', default=utils.RT_JAR,
- help='Path to rt.jar to use instead of OpenJDK 1.8')
-parser.add_argument(
- '-m', '--mainclass',
+ '-m',
+ '--mainclass',
help='Create/overwrite MANIFEST.MF with the given Main-Class')
-parser.add_argument(
- '-O', '--no-debug', dest='debug', action='store_false',
- help='Disable assertions when running R8')
-parser.add_argument(
- '--benchmark-name',
- help='Print benchmarks with the given name')
+parser.add_argument('-O',
+ '--no-debug',
+ dest='debug',
+ action='store_false',
+ help='Disable assertions when running R8')
+parser.add_argument('--benchmark-name',
+ help='Print benchmarks with the given name')
+
def generate_output_name(input_jar, mainclass):
- if not mainclass:
- input_base, input_ext = os.path.splitext(input_jar)
- return '%s-min%s' % (input_base, input_ext)
- base = mainclass[mainclass.rindex('.')+1:] if '.' in mainclass else mainclass
- return os.path.join(utils.LIBS, '%s-min.jar' % base)
+ if not mainclass:
+ input_base, input_ext = os.path.splitext(input_jar)
+ return '%s-min%s' % (input_base, input_ext)
+ base = mainclass[mainclass.rindex('.') +
+ 1:] if '.' in mainclass else mainclass
+ return os.path.join(utils.LIBS, '%s-min.jar' % base)
+
def repackage(input_jar, output_jar, mainclass):
- print("Repackaging %s to %s with Main-Class: %s..." %
- (input_jar, output_jar, mainclass))
- manifest = MANIFEST % mainclass
- with zipfile.ZipFile(input_jar, 'r') as input_zf:
- with zipfile.ZipFile(output_jar, 'w') as output_zf:
- for zipinfo in input_zf.infolist():
- if zipinfo.filename.upper() == MANIFEST_PATH:
- assert manifest is not None
- output_zf.writestr(MANIFEST_PATH, manifest)
- manifest = None
- else:
- output_zf.writestr(zipinfo, input_zf.read(zipinfo))
- if manifest is not None:
- output_zf.writestr(MANIFEST_PATH, manifest)
+ print("Repackaging %s to %s with Main-Class: %s..." %
+ (input_jar, output_jar, mainclass))
+ manifest = MANIFEST % mainclass
+ with zipfile.ZipFile(input_jar, 'r') as input_zf:
+ with zipfile.ZipFile(output_jar, 'w') as output_zf:
+ for zipinfo in input_zf.infolist():
+ if zipinfo.filename.upper() == MANIFEST_PATH:
+ assert manifest is not None
+ output_zf.writestr(MANIFEST_PATH, manifest)
+ manifest = None
+ else:
+ output_zf.writestr(zipinfo, input_zf.read(zipinfo))
+ if manifest is not None:
+ output_zf.writestr(MANIFEST_PATH, manifest)
+
def extract_mainclass(input_jar):
- with zipfile.ZipFile(input_jar, 'r') as input_zf:
- try:
- manifest = input_zf.getinfo(MANIFEST_PATH)
- except KeyError:
- raise SystemExit('No --mainclass specified and no manifest in input JAR.')
- mo = re.search(MANIFEST_PATTERN, input_zf.read(manifest))
- if not mo:
- raise SystemExit(
- 'No --mainclass specified and no Main-Class in input JAR manifest.')
- return mo.group(1)
+ with zipfile.ZipFile(input_jar, 'r') as input_zf:
+ try:
+ manifest = input_zf.getinfo(MANIFEST_PATH)
+ except KeyError:
+ raise SystemExit(
+ 'No --mainclass specified and no manifest in input JAR.')
+ mo = re.search(MANIFEST_PATTERN, input_zf.read(manifest))
+ if not mo:
+ raise SystemExit(
+ 'No --mainclass specified and no Main-Class in input JAR manifest.'
+ )
+ return mo.group(1)
-def minify_tool(mainclass=None, input_jar=utils.R8_JAR, output_jar=None,
- lib=utils.RT_JAR, debug=True, build=True, benchmark_name=None,
- track_memory_file=None, additional_args=[], java_args=[]):
- if output_jar is None:
- output_jar = generate_output_name(input_jar, mainclass)
- with utils.TempDir() as path:
- if mainclass:
- tmp_input_path = os.path.join(path, 'input.jar')
- repackage(input_jar, tmp_input_path, mainclass)
- else:
- tmp_input_path = input_jar
- mainclass = extract_mainclass(input_jar)
- keep_path = os.path.join(path, 'keep.txt')
- with open(keep_path, 'w') as fp:
- fp.write(KEEP % mainclass)
- args = ['--lib', lib,
- '--classfile',
- '--output', output_jar,
- '--pg-conf', keep_path,
- '--release',
- tmp_input_path] + additional_args
- start_time = time.time()
- return_code = toolhelper.run('r8', args, debug=debug, build=build,
- track_memory_file=track_memory_file,
- extra_args=java_args)
- if benchmark_name:
- elapsed_ms = 1000 * (time.time() - start_time)
- print('%s(RunTimeRaw): %s ms' % (benchmark_name, elapsed_ms))
- if track_memory_file:
- print('%s(MemoryUse): %s' %
- (benchmark_name, utils.grep_memoryuse(track_memory_file)))
- return return_code
+def minify_tool(mainclass=None,
+ input_jar=utils.R8_JAR,
+ output_jar=None,
+ lib=utils.RT_JAR,
+ debug=True,
+ build=True,
+ benchmark_name=None,
+ track_memory_file=None,
+ additional_args=[],
+ java_args=[]):
+ if output_jar is None:
+ output_jar = generate_output_name(input_jar, mainclass)
+ with utils.TempDir() as path:
+ if mainclass:
+ tmp_input_path = os.path.join(path, 'input.jar')
+ repackage(input_jar, tmp_input_path, mainclass)
+ else:
+ tmp_input_path = input_jar
+ mainclass = extract_mainclass(input_jar)
+ keep_path = os.path.join(path, 'keep.txt')
+ with open(keep_path, 'w') as fp:
+ fp.write(KEEP % mainclass)
+ args = [
+ '--lib', lib, '--classfile', '--output', output_jar, '--pg-conf',
+ keep_path, '--release', tmp_input_path
+ ] + additional_args
+ start_time = time.time()
+ return_code = toolhelper.run('r8',
+ args,
+ debug=debug,
+ build=build,
+ track_memory_file=track_memory_file,
+ extra_args=java_args)
+ if benchmark_name:
+ elapsed_ms = 1000 * (time.time() - start_time)
+ print('%s(RunTimeRaw): %s ms' % (benchmark_name, elapsed_ms))
+ if track_memory_file:
+ print('%s(MemoryUse): %s' %
+ (benchmark_name, utils.grep_memoryuse(track_memory_file)))
+
+ return return_code
+
if __name__ == '__main__':
- sys.exit(minify_tool(**vars(parser.parse_args())))
+ sys.exit(minify_tool(**vars(parser.parse_args())))
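For orientation, the reformatted minify_tool keeps its programmatic entry point unchanged. A minimal usage sketch, assuming tools/ is on sys.path; the jar path and main class below are purely illustrative:

    import sys
    import minify_tool

    # Repackage the manifest with an explicit Main-Class, shrink with R8's
    # class-file backend, and print a benchmark line when done.
    sys.exit(
        minify_tool.minify_tool(
            mainclass='com.example.Main',       # illustrative class name
            input_jar='build/libs/mytool.jar',  # illustrative; defaults to r8.jar
            benchmark_name='MyToolMinify'))     # MyToolMinify(RunTimeRaw): ... ms
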
diff --git a/tools/nest_data.py b/tools/nest_data.py
index 5724e43..bb9d709 100644
--- a/tools/nest_data.py
+++ b/tools/nest_data.py
@@ -15,23 +15,30 @@
ANDROID_JAR = utils.get_android_jar(25)
VERSIONS = {
- '20180926': {
- 'dex' : {
- 'inputs': [os.path.join(V20180926_BASE, 'obsidian-development-debug.apk')],
- 'libraries' : [ANDROID_JAR],
- 'min-api' : ANDROID_L_API,
+ '20180926': {
+ 'dex': {
+ 'inputs': [
+ os.path.join(V20180926_BASE, 'obsidian-development-debug.apk')
+ ],
+ 'libraries': [ANDROID_JAR],
+ 'min-api': ANDROID_L_API,
+ },
+ 'deploy': {
+ 'inputs': [
+ os.path.join(V20180926_BASE, 'obsidian-development-debug.jar')
+ ],
+ 'libraries': [ANDROID_JAR],
+ 'allow-type-errors': 1,
+ 'pgconf': [
+ os.path.join(V20180926_BASE, 'proguard', 'proguard.cfg'),
+ os.path.join(V20180926_BASE, 'proguard',
+ 'proguard-no-optimizations.cfg'),
+ os.path.join(V20180926_BASE, 'proguard',
+ 'proguard-ignore-warnings.cfg'),
+ utils.IGNORE_WARNINGS_RULES
+ ],
+ # Build for native multi dex
+ 'min-api': ANDROID_L_API,
+ }
},
- 'deploy' : {
- 'inputs': [os.path.join(V20180926_BASE, 'obsidian-development-debug.jar')],
- 'libraries' : [ANDROID_JAR],
- 'allow-type-errors' : 1,
- 'pgconf': [
- os.path.join(V20180926_BASE, 'proguard', 'proguard.cfg'),
- os.path.join(V20180926_BASE, 'proguard', 'proguard-no-optimizations.cfg'),
- os.path.join(V20180926_BASE, 'proguard', 'proguard-ignore-warnings.cfg'),
- utils.IGNORE_WARNINGS_RULES],
- # Build for native multi dex
- 'min-api' : ANDROID_L_API,
- }
- },
}
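The VERSIONS table is keyed first by drop date and then by build flavor ('dex' or 'deploy'). A sketch of resolving one entry, assuming nest_data is importable like the other tools/ modules:

    import nest_data

    config = nest_data.VERSIONS['20180926']['deploy']
    inputs = config['inputs']          # input jar/apk paths
    libraries = config['libraries']    # android.jar for API 25
    pgconf = config.get('pgconf', [])  # ProGuard configs; absent for 'dex'
    min_api = config['min-api']        # Android L, for native multidex
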
diff --git a/tools/notify.py b/tools/notify.py
index 34676ba..4dfceb3 100644
--- a/tools/notify.py
+++ b/tools/notify.py
@@ -4,17 +4,18 @@
# BSD-style license that can be found in the LICENSE file.
try:
- import gi
- gi.require_version('Notify', '0.7')
- from gi.repository import Notify
- Notify.init('R8 build tools')
+ import gi
+ gi.require_version('Notify', '0.7')
+ from gi.repository import Notify
+ Notify.init('R8 build tools')
- def notify(message):
- try:
- Notify.Notification.new('R8 build tools', message).show()
- except:
- return
+ def notify(message):
+ try:
+ Notify.Notification.new('R8 build tools', message).show()
+ except Exception:
+ return
except (ImportError, ValueError):
- def notify(message):
- return
+
+ def notify(message):
+ return
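Behavior is unchanged by the reindentation: notify() is always defined, and it degrades to a no-op when the gi bindings are missing or showing the notification fails. A usage sketch:

    from notify import notify

    # Safe to call unconditionally; silently does nothing without gi/Notify.
    notify('R8 build tools: compilation finished')
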
diff --git a/tools/performance_try.py b/tools/performance_try.py
index 0c09e70..5f1b4b3 100755
--- a/tools/performance_try.py
+++ b/tools/performance_try.py
@@ -11,12 +11,16 @@
SCRIPT = '/google/data/ro/teams/dart/golem/bin/golem4.dart'
DART = os.path.join(utils.THIRD_PARTY, 'dart-sdk', 'bin', 'dart')
+
def Main():
- args = sys.argv[1:]
- if len(args) != 1 or '--help' in args:
- print('Performance tracking takes exactly one argument, the name for display')
- return 1
- subprocess.check_call([DART, SCRIPT, args[0]])
+ args = sys.argv[1:]
+ if len(args) != 1 or '--help' in args:
+ print(
+ 'Performance tracking takes exactly one argument, the name for display'
+ )
+ return 1
+ subprocess.check_call([DART, SCRIPT, args[0]])
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
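Main() forwards a single display name to the Golem tracking script. A sketch of invoking the wrapper from another script, run from the repository root on a Unix-like system (the benchmark name is illustrative):

    import subprocess

    # Requires access to the internal golem4.dart script referenced above.
    subprocess.check_call(['tools/performance_try.py', 'MyExperiment'])
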
diff --git a/tools/printseeds.py b/tools/printseeds.py
index 6c0ee8c..4ec2e9e 100755
--- a/tools/printseeds.py
+++ b/tools/printseeds.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('printseeds', sys.argv[1:]))
+ sys.exit(toolhelper.run('printseeds', sys.argv[1:]))
diff --git a/tools/printuses.py b/tools/printuses.py
index 5253796..c18432f 100755
--- a/tools/printuses.py
+++ b/tools/printuses.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('printuses', sys.argv[1:]))
+ sys.exit(toolhelper.run('printuses', sys.argv[1:]))
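printseeds.py and printuses.py share the same two-line shape: forward argv to toolhelper.run, which dispatches to the named tool. A wrapper for another tool would look identical; 'printkept' below is an invented name used only for illustration:

    #!/usr/bin/env python3
    import sys
    import toolhelper

    if __name__ == '__main__':
        # 'printkept' is hypothetical; real wrappers name an existing tool.
        sys.exit(toolhelper.run('printkept', sys.argv[1:]))
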
diff --git a/tools/proguard.py b/tools/proguard.py
index 4a3ae7d..312533e 100755
--- a/tools/proguard.py
+++ b/tools/proguard.py
@@ -18,55 +18,72 @@
DEFAULT = 'pg6'
DEFAULT_ALIAS = 'pg'
VERSIONS = {
- 'pg5': os.path.join(PG_DIR, 'proguard5.2.1', 'lib', 'proguard.jar'),
- 'pg6': os.path.join(PG_DIR, 'proguard6.0.1', 'lib', 'proguard.jar'),
- 'pg7': os.path.join(PG_DIR, 'proguard-7.0.0', 'lib', 'proguard.jar'),
- 'pg_internal': os.path.join(
- PG_DIR, 'proguard_internal_159423826', 'ProGuard_deploy.jar'),
+ 'pg5':
+ os.path.join(PG_DIR, 'proguard5.2.1', 'lib', 'proguard.jar'),
+ 'pg6':
+ os.path.join(PG_DIR, 'proguard6.0.1', 'lib', 'proguard.jar'),
+ 'pg7':
+ os.path.join(PG_DIR, 'proguard-7.0.0', 'lib', 'proguard.jar'),
+ 'pg_internal':
+ os.path.join(PG_DIR, 'proguard_internal_159423826',
+ 'ProGuard_deploy.jar'),
}
# Add alias for the default version.
VERSIONS[DEFAULT_ALIAS] = VERSIONS[DEFAULT]
+
# Get versions sorted (nice for argument lists)
def getVersions():
- versions = list(VERSIONS.keys())
- versions.sort()
- return versions
+ versions = list(VERSIONS.keys())
+ versions.sort()
+ return versions
+
def isValidVersion(version):
- return version in VERSIONS
+ return version in VERSIONS
+
def getValidatedVersion(version):
- if not isValidVersion(version):
- raise ValueError("Invalid PG version: '%s'" % version)
- return version
+ if not isValidVersion(version):
+ raise ValueError("Invalid PG version: '%s'" % version)
+ return version
+
def getJar(version=DEFAULT):
- return VERSIONS[getValidatedVersion(version)]
+ return VERSIONS[getValidatedVersion(version)]
+
def getRetraceJar(version=DEFAULT):
- if version == 'pg_internal':
- raise ValueError("No retrace in internal distribution")
- return getJar().replace('proguard.jar', 'retrace.jar')
+ if version == 'pg_internal':
+ raise ValueError("No retrace in internal distribution")
+ return getJar(version).replace('proguard.jar', 'retrace.jar')
+
def getCmd(args, version=DEFAULT, jvmArgs=None):
- cmd = []
- if jvmArgs:
- cmd.extend(jvmArgs)
- cmd.extend([jdk.GetJavaExecutable(), '-jar', getJar(version)])
- cmd.extend(args)
- return cmd
+ cmd = [jdk.GetJavaExecutable()]
+ if jvmArgs:
+ cmd.extend(jvmArgs)
+ cmd.extend(['-jar', getJar(version)])
+ cmd.extend(args)
+ return cmd
-def run(args, version=DEFAULT, track_memory_file=None, stdout=None, stderr=None):
- cmd = []
- if track_memory_file:
- cmd.extend(['tools/track_memory.sh', track_memory_file])
- cmd.extend(getCmd(args, version))
- utils.PrintCmd(cmd)
- subprocess.call(cmd, stdout=stdout, stderr=stderr)
+
+def run(args,
+ version=DEFAULT,
+ track_memory_file=None,
+ stdout=None,
+ stderr=None):
+ cmd = []
+ if track_memory_file:
+ cmd.extend(['tools/track_memory.sh', track_memory_file])
+ cmd.extend(getCmd(args, version))
+ utils.PrintCmd(cmd)
+ subprocess.call(cmd, stdout=stdout, stderr=stderr)
+
def Main():
- run(sys.argv[1:])
+ run(sys.argv[1:])
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
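getCmd assembles the full java invocation and run() optionally wraps it in track_memory.sh. A sketch with illustrative injars/outjars paths:

    import proguard

    # Build the command for ProGuard 7 with a 4 GB heap; the list starts with
    # the JVM executable, then the jvmArgs, then -jar and the tool jar.
    cmd = proguard.getCmd(['-injars', 'in.jar', '-outjars', 'out.jar'],
                          version='pg7',
                          jvmArgs=['-Xmx4g'])

    # Or run it directly, streaming output to the console.
    proguard.run(['-injars', 'in.jar', '-outjars', 'out.jar'], version='pg7')
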
diff --git a/tools/r8.py b/tools/r8.py
index 0a9e09a..5a2f5e3 100755
--- a/tools/r8.py
+++ b/tools/r8.py
@@ -9,77 +9,73 @@
import toolhelper
import utils
+
def ParseOptions(argv):
- parser = optparse.OptionParser(usage='%prog [options] -- [R8 options]')
- parser.add_option(
- '-c',
- '--commit-hash',
- '--commit_hash',
- help='Commit hash of R8 to use.',
- default=None)
- parser.add_option(
- '--debug-agent',
- help='Enable Java debug agent and suspend compilation (default disabled)',
- default=False,
- action='store_true')
- parser.add_option(
- '--ea',
- help='Enable Java assertions when running the compiler (default disabled)',
- default=False,
- action='store_true')
- parser.add_option(
- '--lib-android',
- help='Add the android.jar for the given API level',
- default=None,
- type=int)
- parser.add_option(
- '--lib-rt',
- help='Add rt.jar from openjdk-1.8',
- default=False,
- action='store_true')
- parser.add_option(
- '--no-build', '--no_build',
- help='Do not build R8',
- default=False,
- action='store_true')
- parser.add_option(
- '--print-runtimeraw', '--print_runtimeraw',
- metavar='BENCHMARKNAME',
- help='Print the line \'<BENCHMARKNAME>(RunTimeRaw):' +
- ' <elapsed> ms\' at the end where <elapsed> is' +
- ' the elapsed time in milliseconds.')
- parser.add_option(
- '--tag',
- help='Tag of R8 to use.',
- default=None)
- parser.add_option(
- '--version',
- help='Version of R8 to use.',
- default=None)
- return parser.parse_args(argv)
+ parser = optparse.OptionParser(usage='%prog [options] -- [R8 options]')
+ parser.add_option('-c',
+ '--commit-hash',
+ '--commit_hash',
+ help='Commit hash of R8 to use.',
+ default=None)
+ parser.add_option(
+ '--debug-agent',
+ help=
+ 'Enable Java debug agent and suspend compilation (default disabled)',
+ default=False,
+ action='store_true')
+ parser.add_option(
+ '--ea',
+ help=
+ 'Enable Java assertions when running the compiler (default disabled)',
+ default=False,
+ action='store_true')
+ parser.add_option('--lib-android',
+ help='Add the android.jar for the given API level',
+ default=None,
+ type=int)
+ parser.add_option('--lib-rt',
+ help='Add rt.jar from openjdk-1.8',
+ default=False,
+ action='store_true')
+ parser.add_option('--no-build',
+ '--no_build',
+ help='Do not build R8',
+ default=False,
+ action='store_true')
+ parser.add_option('--print-runtimeraw',
+ '--print_runtimeraw',
+ metavar='BENCHMARKNAME',
+ help='Print the line \'<BENCHMARKNAME>(RunTimeRaw):' +
+ ' <elapsed> ms\' at the end where <elapsed> is' +
+ ' the elapsed time in milliseconds.')
+ parser.add_option('--tag', help='Tag of R8 to use.', default=None)
+ parser.add_option('--version', help='Version of R8 to use.', default=None)
+ return parser.parse_args(argv)
+
def main(argv):
- (options, args) = ParseOptions(sys.argv)
- r8_args = args[1:]
- if options.lib_android:
- r8_args.extend(['--lib', utils.get_android_jar(options.lib_android)])
- if options.lib_rt:
- r8_args.extend(['--lib', utils.RT_JAR])
- time_consumer = lambda duration : print_duration(duration, options)
- return toolhelper.run(
- 'r8',
- r8_args,
- build=not options.no_build,
- debug=options.ea,
- debug_agent=options.debug_agent,
- jar=utils.find_r8_jar_from_options(options),
- main='com.android.tools.r8.R8',
- time_consumer=time_consumer)
+ (options, args) = ParseOptions(sys.argv)
+ r8_args = args[1:]
+ if options.lib_android:
+ r8_args.extend(['--lib', utils.get_android_jar(options.lib_android)])
+ if options.lib_rt:
+ r8_args.extend(['--lib', utils.RT_JAR])
+ time_consumer = lambda duration: print_duration(duration, options)
+ return toolhelper.run('r8',
+ r8_args,
+ build=not options.no_build,
+ debug=options.ea,
+ debug_agent=options.debug_agent,
+ jar=utils.find_r8_jar_from_options(options),
+ main='com.android.tools.r8.R8',
+ time_consumer=time_consumer)
+
def print_duration(duration, options):
- benchmark_name = options.print_runtimeraw
- if benchmark_name:
- print('%s-Total(RunTimeRaw): %s ms' % (benchmark_name, duration))
+ benchmark_name = options.print_runtimeraw
+ if benchmark_name:
+ print('%s-Total(RunTimeRaw): %s ms' % (benchmark_name, duration))
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
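The wrapper splits its own options from R8's at '--'. A sketch of driving it from another script, run from the repository root on a Unix-like system (in/out paths are illustrative):

    import subprocess

    # Adds android.jar for API 33 as --lib and prints a
    # 'MyBench-Total(RunTimeRaw): <elapsed> ms' line when done.
    subprocess.check_call([
        'tools/r8.py', '--lib-android', '33',
        '--print-runtimeraw', 'MyBench',
        '--', '--release', '--output', 'out', 'input.jar'
    ])
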
diff --git a/tools/r8_get.py b/tools/r8_get.py
index cc11e90..44cc9f1 100755
--- a/tools/r8_get.py
+++ b/tools/r8_get.py
@@ -7,26 +7,29 @@
import argparse
import compiledump
+
def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'Helper to fetch r8.jar from cloudstorage.')
- parser.add_argument(
- '-v',
- '--version',
- help='Version or commit-hash to download '
- '(e.g., 3.3.50 or 33ae86d80351efc4d632452331d06cb97e42f2a7).',
- required=True)
- parser.add_argument(
- '--outdir',
- help='Output directory to place the r8.jar in (default cwd).',
- default=None)
- return parser.parse_args()
+ parser = argparse.ArgumentParser(
+ description='Helper to fetch r8.jar from cloudstorage.')
+ parser.add_argument(
+ '-v',
+ '--version',
+ help='Version or commit-hash to download '
+ '(e.g., 3.3.50 or 33ae86d80351efc4d632452331d06cb97e42f2a7).',
+ required=True)
+ parser.add_argument(
+ '--outdir',
+ help='Output directory to place the r8.jar in (default cwd).',
+ default=None)
+ return parser.parse_args()
+
def main():
- args = parse_arguments()
- outdir = args.outdir if args.outdir else ''
- print(compiledump.download_distribution(args.version, True, outdir))
- return 0
+ args = parse_arguments()
+ outdir = args.outdir if args.outdir else ''
+ print(compiledump.download_distribution(args.version, True, outdir))
+ return 0
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
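main() resolves the requested version through compiledump. The same fetch done directly, mirroring the wrapper's positional call (the version is the one from the help text; the output directory is illustrative):

    import compiledump

    # Prints the path of the downloaded r8.jar.
    print(compiledump.download_distribution('3.3.50', True, 'out'))
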
diff --git a/tools/r8_release.py b/tools/r8_release.py
index 0491a0d..8aaca29 100755
--- a/tools/r8_release.py
+++ b/tools/r8_release.py
@@ -17,8 +17,8 @@
import utils
R8_DEV_BRANCH = '8.3'
-R8_VERSION_FILE = os.path.join(
- 'src', 'main', 'java', 'com', 'android', 'tools', 'r8', 'Version.java')
+R8_VERSION_FILE = os.path.join('src', 'main', 'java', 'com', 'android', 'tools',
+ 'r8', 'Version.java')
THIS_FILE_RELATIVE = os.path.join('tools', 'r8_release.py')
GMAVEN_PUBLISHER = '/google/bin/releases/android-devtools/gmaven/publisher/gmaven-publisher'
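prepare_release below bumps the -dev patch number found in Version.java. The core of that computation, isolated as a runnable sketch using the same regex and format strings as the function (the LABEL line is illustrative):

    import re

    R8_DEV_BRANCH = '8.3'
    line = '  public static final String LABEL = "8.3.12-dev";'
    result = re.match(r'.*LABEL = "%s\.(\d+)\-dev";' % R8_DEV_BRANCH, line)
    patch_version = int(result.group(1))
    old_version = '%s.%s-dev' % (R8_DEV_BRANCH, patch_version)  # 8.3.12-dev
    version = '%s.%s-dev' % (R8_DEV_BRANCH, patch_version + 1)  # 8.3.13-dev
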
@@ -28,255 +28,265 @@
GITHUB_DESUGAR_JDK_LIBS = 'https://github.com/google/desugar_jdk_libs'
+
def install_gerrit_change_id_hook(checkout_dir):
- with utils.ChangedWorkingDirectory(checkout_dir):
- # Fancy way of getting the string ".git".
- git_dir = subprocess.check_output(
- ['git', 'rev-parse', '--git-dir']).decode('utf-8').strip()
- commit_msg_hooks = '%s/hooks/commit-msg' % git_dir
- if not os.path.exists(os.path.dirname(commit_msg_hooks)):
- os.mkdir(os.path.dirname(commit_msg_hooks))
- # Install commit hook to generate Gerrit 'Change-Id:'.
- urllib.request.urlretrieve(
- 'https://gerrit-review.googlesource.com/tools/hooks/commit-msg',
- commit_msg_hooks)
- st = os.stat(commit_msg_hooks)
- os.chmod(
- commit_msg_hooks,
- st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
+ with utils.ChangedWorkingDirectory(checkout_dir):
+ # Fancy way of getting the string ".git".
+ git_dir = subprocess.check_output(['git', 'rev-parse', '--git-dir'
+ ]).decode('utf-8').strip()
+ commit_msg_hooks = '%s/hooks/commit-msg' % git_dir
+ if not os.path.exists(os.path.dirname(commit_msg_hooks)):
+ os.mkdir(os.path.dirname(commit_msg_hooks))
+ # Install commit hook to generate Gerrit 'Change-Id:'.
+ urllib.request.urlretrieve(
+ 'https://gerrit-review.googlesource.com/tools/hooks/commit-msg',
+ commit_msg_hooks)
+ st = os.stat(commit_msg_hooks)
+ os.chmod(commit_msg_hooks,
+ st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
+
def checkout_r8(temp, branch):
- subprocess.check_call(['git', 'clone', utils.REPO_SOURCE, temp])
- with utils.ChangedWorkingDirectory(temp):
- subprocess.check_call([
- 'git',
- 'new-branch',
- '--upstream',
- 'origin/%s' % branch,
- 'dev-release'])
- install_gerrit_change_id_hook(temp)
- return temp
+ subprocess.check_call(['git', 'clone', utils.REPO_SOURCE, temp])
+ with utils.ChangedWorkingDirectory(temp):
+ subprocess.check_call([
+ 'git', 'new-branch', '--upstream',
+ 'origin/%s' % branch, 'dev-release'
+ ])
+ install_gerrit_change_id_hook(temp)
+ return temp
def prepare_release(args):
- if args.version:
- print("Cannot manually specify version when making a dev release.")
- sys.exit(1)
+ if args.version:
+ print("Cannot manually specify version when making a dev release.")
+ sys.exit(1)
- def make_release(args):
- commithash = args.dev_release
+ def make_release(args):
+ commithash = args.dev_release
- with utils.TempDir() as temp:
- with utils.ChangedWorkingDirectory(checkout_r8(temp, R8_DEV_BRANCH)):
- # Compute the current and new version on the branch.
- result = None
- for line in open(R8_VERSION_FILE, 'r'):
- result = re.match(
- r'.*LABEL = "%s\.(\d+)\-dev";' % R8_DEV_BRANCH, line)
- if result:
- break
- if not result or not result.group(1):
- print('Failed to find version label matching %s(\d+)-dev'\
- % R8_DEV_BRANCH)
- sys.exit(1)
- try:
- patch_version = int(result.group(1))
- except ValueError:
- print('Failed to convert version to integer: %s' % result.group(1))
+ with utils.TempDir() as temp:
+ with utils.ChangedWorkingDirectory(checkout_r8(temp,
+ R8_DEV_BRANCH)):
+ # Compute the current and new version on the branch.
+ result = None
+ for line in open(R8_VERSION_FILE, 'r'):
+ result = re.match(
+ r'.*LABEL = "%s\.(\d+)\-dev";' % R8_DEV_BRANCH, line)
+ if result:
+ break
+ if not result or not result.group(1):
+ print(r'Failed to find version label matching %s(\d+)-dev' %
+ R8_DEV_BRANCH)
+ sys.exit(1)
+ try:
+ patch_version = int(result.group(1))
+ except ValueError:
+ print('Failed to convert version to integer: %s' %
+ result.group(1))
- old_version = '%s.%s-dev' % (R8_DEV_BRANCH, patch_version)
- version = '%s.%s-dev' % (R8_DEV_BRANCH, patch_version + 1)
+ old_version = '%s.%s-dev' % (R8_DEV_BRANCH, patch_version)
+ version = '%s.%s-dev' % (R8_DEV_BRANCH, patch_version + 1)
- # Verify that the merge point from main is not empty.
- merge_diff_output = subprocess.check_output([
- 'git', 'diff', 'HEAD..%s' % commithash]).decode('utf-8')
- other_diff = version_change_diff(
- merge_diff_output, old_version, "main")
- if not other_diff:
- print('Merge point from main (%s)' % commithash, \
- 'is the same as exiting release (%s).' % old_version)
- sys.exit(1)
+ # Verify that the merge point from main is not empty.
+ merge_diff_output = subprocess.check_output(
+ ['git', 'diff', 'HEAD..%s' % commithash]).decode('utf-8')
+ other_diff = version_change_diff(merge_diff_output, old_version,
+ "main")
+ if not other_diff:
+ print('Merge point from main (%s)' % commithash, \
+ 'is the same as the existing release (%s).' % old_version)
+ sys.exit(1)
- subprocess.check_call([
- 'git', 'cl', 'new-branch', 'release-%s' % version])
+ subprocess.check_call(
+ ['git', 'cl', 'new-branch',
+ 'release-%s' % version])
- if args.dev_pre_cherry_pick:
- for pre_commit in args.dev_pre_cherry_pick:
- subprocess.check_call([
- 'git', 'cherry-pick', '--no-edit', pre_commit])
+ if args.dev_pre_cherry_pick:
+ for pre_commit in args.dev_pre_cherry_pick:
+ subprocess.check_call(
+ ['git', 'cherry-pick', '--no-edit', pre_commit])
- # Merge the desired commit from main on to the branch.
- subprocess.check_call(['git', 'merge', '--no-ff', '--no-edit', commithash])
+ # Merge the desired commit from main on to the branch.
+ subprocess.check_call(
+ ['git', 'merge', '--no-ff', '--no-edit', commithash])
- # Rewrite the version, commit and validate.
- sed(old_version, version, R8_VERSION_FILE)
+ # Rewrite the version, commit and validate.
+ sed(old_version, version, R8_VERSION_FILE)
- subprocess.check_call([
- 'git', 'commit', '-a', '-m', 'Version %s' % version])
+ subprocess.check_call(
+ ['git', 'commit', '-a', '-m',
+ 'Version %s' % version])
- version_diff_output = subprocess.check_output([
- 'git', 'diff', '%s..HEAD' % commithash]).decode('utf-8')
+ version_diff_output = subprocess.check_output(
+ ['git', 'diff', '%s..HEAD' % commithash]).decode('utf-8')
- validate_version_change_diff(version_diff_output, "main", version)
+ validate_version_change_diff(version_diff_output, "main",
+ version)
- cmd = ['git', 'cl', 'upload', '--no-squash']
- if args.bypass_hooks:
- cmd.append('--bypass-hooks')
- maybe_check_call(args, cmd)
+ cmd = ['git', 'cl', 'upload', '--no-squash']
+ if args.bypass_hooks:
+ cmd.append('--bypass-hooks')
+ maybe_check_call(args, cmd)
- if args.dry_run:
- input(
- 'DryRun: check %s for content of version %s [enter to continue]:'
- % (temp, version))
+ if args.dry_run:
+ input(
+ 'DryRun: check %s for content of version %s [enter to continue]:'
+ % (temp, version))
- return "%s dev version %s from hash %s for review" % (
- 'DryRun: omitted upload of' if args.dry_run else 'Uploaded',
- version,
- commithash)
+ return "%s dev version %s from hash %s for review" % (
+ 'DryRun: omitted upload of' if args.dry_run else 'Uploaded',
+ version, commithash)
- return make_release
+ return make_release
def maybe_tag(args, version):
- maybe_check_call(args, [
- 'git', 'tag', '-a', version, '-m', '"%s"' % version])
- maybe_check_call(args, [
- 'git', 'push', 'origin', 'refs/tags/%s' % version])
+ maybe_check_call(args,
+ ['git', 'tag', '-a', version, '-m',
+ '"%s"' % version])
+ maybe_check_call(args, ['git', 'push', 'origin', 'refs/tags/%s' % version])
def version_change_diff(diff, old_version, new_version):
- invalid_line = None
- for line in str(diff).splitlines():
- if line.startswith('- ') and \
- line != '- public static final String LABEL = "%s";' % old_version:
- invalid_line = line
- elif line.startswith('+ ') and \
- line != '+ public static final String LABEL = "%s";' % new_version:
- invalid_line = line
- return invalid_line
+ invalid_line = None
+ for line in str(diff).splitlines():
+ if line.startswith('- ') and \
+ line != '- public static final String LABEL = "%s";' % old_version:
+ invalid_line = line
+ elif line.startswith('+ ') and \
+ line != '+ public static final String LABEL = "%s";' % new_version:
+ invalid_line = line
+ return invalid_line
def validate_version_change_diff(version_diff_output, old_version, new_version):
- invalid = version_change_diff(version_diff_output, old_version, new_version)
- if invalid:
- print("Unexpected diff:")
- print("=" * 80)
- print(version_diff_output)
- print("=" * 80)
- accept_string = 'THE DIFF IS OK!'
- answer = input(
- "Accept the additonal diff as part of the release? "
- "Type '%s' to accept: " % accept_string)
- if answer != accept_string:
- print("You did not type '%s'" % accept_string)
- print('Aborting dev release for %s' % version)
- sys.exit(1)
+ invalid = version_change_diff(version_diff_output, old_version, new_version)
+ if invalid:
+ print("Unexpected diff:")
+ print("=" * 80)
+ print(version_diff_output)
+ print("=" * 80)
+ accept_string = 'THE DIFF IS OK!'
+ answer = input("Accept the additonal diff as part of the release? "
+ "Type '%s' to accept: " % accept_string)
+ if answer != accept_string:
+ print("You did not type '%s'" % accept_string)
+ print('Aborting dev release for %s' % version)
+ sys.exit(1)
def maybe_check_call(args, cmd):
- if args.dry_run:
- print('DryRun:', ' '.join(cmd))
- else:
- print(' '.join(cmd))
- return subprocess.check_call(cmd)
+ if args.dry_run:
+ print('DryRun:', ' '.join(cmd))
+ else:
+ print(' '.join(cmd))
+ return subprocess.check_call(cmd)
def update_prebuilds(r8_checkout, version, checkout, keepanno=False):
- path = os.path.join(r8_checkout, 'tools', 'update_prebuilds_in_android.py')
- commit_arg = '--commit_hash=' if len(version) == 40 else '--version='
- cmd = [path, '--targets=lib', '--maps', commit_arg + version, checkout]
- if keepanno:
- cmd.append("--keepanno")
- subprocess.check_call(cmd)
+ path = os.path.join(r8_checkout, 'tools', 'update_prebuilds_in_android.py')
+ commit_arg = '--commit_hash=' if len(version) == 40 else '--version='
+ cmd = [path, '--targets=lib', '--maps', commit_arg + version, checkout]
+ if keepanno:
+ cmd.append("--keepanno")
+ subprocess.check_call(cmd)
-def release_studio_or_aosp(r8_checkout, path, options, git_message, keepanno=False):
- with utils.ChangedWorkingDirectory(path):
- if not options.use_existing_work_branch:
- subprocess.call(['repo', 'abandon', 'update-r8'])
- if not options.no_sync:
- subprocess.check_call(['repo', 'sync', '-cq', '-j', '16'])
+def release_studio_or_aosp(r8_checkout,
+ path,
+ options,
+ git_message,
+ keepanno=False):
+ with utils.ChangedWorkingDirectory(path):
+ if not options.use_existing_work_branch:
+ subprocess.call(['repo', 'abandon', 'update-r8'])
+ if not options.no_sync:
+ subprocess.check_call(['repo', 'sync', '-cq', '-j', '16'])
- prebuilts_r8 = os.path.join(path, 'prebuilts', 'r8')
+ prebuilts_r8 = os.path.join(path, 'prebuilts', 'r8')
- if not options.use_existing_work_branch:
- with utils.ChangedWorkingDirectory(prebuilts_r8):
- subprocess.check_call(['repo', 'start', 'update-r8'])
+ if not options.use_existing_work_branch:
+ with utils.ChangedWorkingDirectory(prebuilts_r8):
+ subprocess.check_call(['repo', 'start', 'update-r8'])
- update_prebuilds(r8_checkout, options.version, path, keepanno)
+ update_prebuilds(r8_checkout, options.version, path, keepanno)
- with utils.ChangedWorkingDirectory(prebuilts_r8):
- if not options.use_existing_work_branch:
- subprocess.check_call(['git', 'commit', '-a', '-m', git_message])
- else:
- print('Not committing when --use-existing-work-branch. '
- + 'Commit message should be:\n\n'
- + git_message
- + '\n')
- # Don't upload if requested not to, or if changes are not committed due
- # to --use-existing-work-branch
- if not options.no_upload and not options.use_existing_work_branch:
- process = subprocess.Popen(['repo', 'upload', '.', '--verify',
- '--current-branch'],
- stdin=subprocess.PIPE)
- return process.communicate(input=b'y\n')[0]
+ with utils.ChangedWorkingDirectory(prebuilts_r8):
+ if not options.use_existing_work_branch:
+ subprocess.check_call(
+ ['git', 'commit', '-a', '-m', git_message])
+ else:
+ print('Not committing when --use-existing-work-branch. ' +
+ 'Commit message should be:\n\n' + git_message + '\n')
+ # Don't upload if requested not to, or if changes are not committed due
+ # to --use-existing-work-branch
+ if not options.no_upload and not options.use_existing_work_branch:
+ process = subprocess.Popen(
+ ['repo', 'upload', '.', '--verify', '--current-branch'],
+ stdin=subprocess.PIPE)
+ return process.communicate(input=b'y\n')[0]
def prepare_aosp(args):
- assert args.version
- assert os.path.exists(args.aosp), "Could not find AOSP path %s" % args.aosp
+ assert args.version
+ assert os.path.exists(args.aosp), "Could not find AOSP path %s" % args.aosp
- def release_aosp(options):
- print("Releasing for AOSP")
- if options.dry_run:
- return 'DryRun: omitting AOSP release for %s' % options.version
+ def release_aosp(options):
+ print("Releasing for AOSP")
+ if options.dry_run:
+ return 'DryRun: omitting AOSP release for %s' % options.version
- git_message = ("""Update D8 and R8 to %s
+ git_message = ("""Update D8 and R8 to %s
Version: %s
This build IS NOT suitable for preview or public release.
Built here: go/r8-releases/raw/%s
-Test: TARGET_PRODUCT=aosp_arm64 m -j core-oj"""
- % (args.version, args.version, args.version))
- # Fixes to Android U branch is based of 8.2.2-dev where the keepanno library
- # is not built.
- keepanno = not args.version.startswith('8.2.2-udc')
- return release_studio_or_aosp(
- utils.REPO_ROOT, args.aosp, options, git_message, keepanno=keepanno)
+Test: TARGET_PRODUCT=aosp_arm64 m -j core-oj""" %
+ (args.version, args.version, args.version))
+ # Fixes to the Android U branch are based off 8.2.2-dev, where the
+ # keepanno library is not built.
+ keepanno = not args.version.startswith('8.2.2-udc')
+ return release_studio_or_aosp(utils.REPO_ROOT,
+ args.aosp,
+ options,
+ git_message,
+ keepanno=keepanno)
- return release_aosp
+ return release_aosp
def prepare_maven(args):
- assert args.version
+ assert args.version
- def release_maven(options):
- gfile = '/bigstore/r8-releases/raw/%s/r8lib.zip' % args.version
- release_id = gmaven_publisher_stage(options, [gfile])
+ def release_maven(options):
+ gfile = '/bigstore/r8-releases/raw/%s/r8lib.zip' % args.version
+ release_id = gmaven_publisher_stage(options, [gfile])
- print("Staged Release ID " + release_id + ".\n")
- gmaven_publisher_stage_redir_test_info(
- release_id, "com.android.tools:r8:%s" % args.version, "r8lib.jar")
+ print("Staged Release ID " + release_id + ".\n")
+ gmaven_publisher_stage_redir_test_info(
+ release_id, "com.android.tools:r8:%s" % args.version, "r8lib.jar")
- print
- answer = input("Continue with publishing [y/N]:")
+ print()
+ answer = input("Continue with publishing [y/N]:")
- if answer != 'y':
- print('Aborting release to Google maven')
- sys.exit(1)
+ if answer != 'y':
+ print('Aborting release to Google maven')
+ sys.exit(1)
- gmaven_publisher_publish(args, release_id)
+ gmaven_publisher_publish(args, release_id)
- print("")
- print("Published. Use the email workflow for approval.")
+ print("")
+ print("Published. Use the email workflow for approval.")
- return release_maven
+ return release_maven
+
# ------------------------------------------------------ column 70 --v
def git_message_dev(version, bugs):
- return """Update D8 R8 to %s
+ return """Update D8 R8 to %s
This is a development snapshot, it's fine to use for studio canary
build, but not for BETA or release, for those we would need a release
@@ -289,400 +299,422 @@
def git_message_release(version, bugs):
- return """D8 R8 version %s
+ return """D8 R8 version %s
Built here: go/r8-releases/raw/%s/
Test: ./gradlew check
Bug: %s""" % (version, version, '\nBug: '.join(map(bug_fmt, bugs)))
+
def bug_fmt(bug):
- return "b/%s" % bug
+ return "b/%s" % bug
+
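git_message_dev, git_message_release, and bug_fmt together produce the commit messages used downstream. A sketch, assuming r8_release is importable (version and bug number are illustrative):

    import r8_release

    print(r8_release.git_message_release('8.3.2', ['123456789']))
    # D8 R8 version 8.3.2
    # Built here: go/r8-releases/raw/8.3.2/
    # Test: ./gradlew check
    # Bug: b/123456789
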
def prepare_studio(args):
- assert args.version
- assert os.path.exists(args.studio), ("Could not find STUDIO path %s"
- % args.studio)
+ assert args.version
+ assert os.path.exists(args.studio), ("Could not find STUDIO path %s" %
+ args.studio)
- def release_studio(options):
- print("Releasing for STUDIO")
- if options.dry_run:
- return 'DryRun: omitting studio release for %s' % options.version
+ def release_studio(options):
+ print("Releasing for STUDIO")
+ if options.dry_run:
+ return 'DryRun: omitting studio release for %s' % options.version
- if 'dev' in options.version:
- git_message = git_message_dev(options.version, options.bug)
- r8_checkout = utils.REPO_ROOT
- return release_studio_or_aosp(
- r8_checkout, args.studio, options, git_message)
- else:
- with utils.TempDir() as temp:
- checkout_r8(temp, options.version[0:options.version.rindex('.')])
- git_message = git_message_release(options.version, options.bug)
- return release_studio_or_aosp(temp, args.studio, options, git_message)
+ if 'dev' in options.version:
+ git_message = git_message_dev(options.version, options.bug)
+ r8_checkout = utils.REPO_ROOT
+ return release_studio_or_aosp(r8_checkout, args.studio, options,
+ git_message)
+ else:
+ with utils.TempDir() as temp:
+ checkout_r8(temp,
+ options.version[0:options.version.rindex('.')])
+ git_message = git_message_release(options.version, options.bug)
+ return release_studio_or_aosp(temp, args.studio, options,
+ git_message)
- return release_studio
+ return release_studio
def g4_cp(old, new, file):
- subprocess.check_call('g4 cp {%s,%s}/%s' % (old, new, file), shell=True)
+ subprocess.check_call('g4 cp {%s,%s}/%s' % (old, new, file), shell=True)
def g4_open(file):
- if not os.access(file, os.W_OK):
- subprocess.check_call('g4 open %s' % file, shell=True)
+ if not os.access(file, os.W_OK):
+ subprocess.check_call('g4 open %s' % file, shell=True)
def g4_change(version):
- return subprocess.check_output(
- 'g4 change --desc "Update R8 to version %s\n"' % (version),
- shell=True).decode('utf-8')
+ return subprocess.check_output(
+ 'g4 change --desc "Update R8 to version %s\n"' % (version),
+ shell=True).decode('utf-8')
+
def get_cl_id(c4_change_output):
- startIndex = c4_change_output.find('Change ') + len('Change ')
- endIndex = c4_change_output.find(' ', startIndex)
- cl = c4_change_output[startIndex:endIndex]
- assert cl.isdigit()
- return cl
+ startIndex = c4_change_output.find('Change ') + len('Change ')
+ endIndex = c4_change_output.find(' ', startIndex)
+ cl = c4_change_output[startIndex:endIndex]
+ assert cl.isdigit()
+ return cl
+
def sed(pattern, replace, path):
- with open(path, "r") as sources:
- lines = sources.readlines()
- with open(path, "w") as sources:
- for line in lines:
- sources.write(re.sub(pattern, replace, line))
+ with open(path, "r") as sources:
+ lines = sources.readlines()
+ with open(path, "w") as sources:
+ for line in lines:
+ sources.write(re.sub(pattern, replace, line))
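sed() rewrites a file in place, applying re.sub line by line. A small runnable sketch (the temp path and version strings are illustrative):

    from r8_release import sed

    with open('/tmp/Version.java', 'w') as f:
        f.write('  public static final String LABEL = "8.3.1-dev";\n')
    # Bump the dev version; the pattern is a regex, so dots are escaped.
    sed(r'8\.3\.1-dev', '8.3.2-dev', '/tmp/Version.java')
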
def download_file(version, file, dst):
- dir = 'raw' if len(version) != 40 else 'raw/main'
- urllib.request.urlretrieve(
- ('https://storage.googleapis.com/r8-releases/%s/%s/%s' % (dir, version, file)),
- dst)
+ directory = 'raw' if len(version) != 40 else 'raw/main'
+ urllib.request.urlretrieve(
+ ('https://storage.googleapis.com/r8-releases/%s/%s/%s' %
+ (directory, version, file)), dst)
+
def download_gfile(gfile, dst):
- if not gfile.startswith('/bigstore/r8-releases'):
- print('Unexpected gfile prefix for %s' % gfile)
- sys.exit(1)
+ if not gfile.startswith('/bigstore/r8-releases'):
+ print('Unexpected gfile prefix for %s' % gfile)
+ sys.exit(1)
- urllib.request.urlretrieve(
- 'https://storage.googleapis.com/%s' % gfile[len('/bigstore/'):],
- dst)
+ urllib.request.urlretrieve(
+ 'https://storage.googleapis.com/%s' % gfile[len('/bigstore/'):], dst)
+
def blaze_run(target):
- return subprocess.check_output(
- 'blaze run %s' % target, shell=True, stderr=subprocess.STDOUT).decode('utf-8')
+ return subprocess.check_output('blaze run %s' % target,
+ shell=True,
+ stderr=subprocess.STDOUT).decode('utf-8')
def prepare_google3(args):
- assert args.version
- # Check if an existing client exists.
- if not args.use_existing_work_branch:
- check_no_google3_client(args, args.p4_client)
+ assert args.version
+ # Check if an existing client exists.
+ if not args.use_existing_work_branch:
+ check_no_google3_client(args, args.p4_client)
- def release_google3(options):
- print("Releasing for Google 3")
- if options.dry_run:
- return 'DryRun: omitting g3 release for %s' % options.version
+ def release_google3(options):
+ print("Releasing for Google 3")
+ if options.dry_run:
+ return 'DryRun: omitting g3 release for %s' % options.version
- google3_base = subprocess.check_output(
- ['p4', 'g4d', '-f', args.p4_client]).decode('utf-8').rstrip()
- third_party_r8 = os.path.join(google3_base, 'third_party', 'java', 'r8')
- today = datetime.date.today()
- with utils.ChangedWorkingDirectory(third_party_r8):
- # download files
- g4_open('full.jar')
- g4_open('src.jar')
- g4_open('lib.jar')
- g4_open('lib.jar.map')
- g4_open('retrace_full.jar')
- g4_open('retrace_lib.jar')
- g4_open('retrace_lib.jar.map')
- g4_open('desugar_jdk_libs_configuration.jar')
- download_file(options.version, 'r8-full-exclude-deps.jar', 'full.jar')
- download_file(options.version, 'r8-full-exclude-deps.jar', 'retrace_full.jar')
- download_file(options.version, 'r8-src.jar', 'src.jar')
- download_file(options.version, 'r8lib-exclude-deps.jar', 'lib.jar')
- download_file(
- options.version, 'r8lib-exclude-deps.jar.map', 'lib.jar.map')
- download_file(options.version, 'desugar_jdk_libs_configuration.jar',
- 'desugar_jdk_libs_configuration.jar')
- download_file(options.version, 'r8retrace-exclude-deps.jar', 'retrace_lib.jar')
- download_file(options.version, 'r8retrace-exclude-deps.jar.map', 'retrace_lib.jar.map')
- g4_open('METADATA')
- metadata_path = os.path.join(third_party_r8, 'METADATA')
- match_count = 0
- version_match_regexp = r'[1-9]\.[0-9]{1,2}\.[0-9]{1,3}-dev'
- for line in open(metadata_path, 'r'):
- result = re.search(version_match_regexp, line)
- if result:
- match_count = match_count + 1
- if match_count != 7:
- print((
- "Could not find the previous -dev release string to replace in "
- + "METADATA. Expected to find is mentioned 7 times, but "
- + "found %s occurrences. Please update %s manually and run "
- + "again with options --google3 "
- + "--use-existing-work-branch.")
- % (match_count, metadata_path))
- sys.exit(1)
- sed(version_match_regexp, options.version, metadata_path)
- sed(r'\{ year.*\}',
- ('{ year: %i month: %i day: %i }'
- % (today.year, today.month, today.day)),
- metadata_path)
- subprocess.check_output('chmod u+w *', shell=True)
+ google3_base = subprocess.check_output(
+ ['p4', 'g4d', '-f', args.p4_client]).decode('utf-8').rstrip()
+ third_party_r8 = os.path.join(google3_base, 'third_party', 'java', 'r8')
+ today = datetime.date.today()
+ with utils.ChangedWorkingDirectory(third_party_r8):
+ # download files
+ g4_open('full.jar')
+ g4_open('src.jar')
+ g4_open('lib.jar')
+ g4_open('lib.jar.map')
+ g4_open('retrace_full.jar')
+ g4_open('retrace_lib.jar')
+ g4_open('retrace_lib.jar.map')
+ g4_open('desugar_jdk_libs_configuration.jar')
+ download_file(options.version, 'r8-full-exclude-deps.jar',
+ 'full.jar')
+ download_file(options.version, 'r8-full-exclude-deps.jar',
+ 'retrace_full.jar')
+ download_file(options.version, 'r8-src.jar', 'src.jar')
+ download_file(options.version, 'r8lib-exclude-deps.jar', 'lib.jar')
+ download_file(options.version, 'r8lib-exclude-deps.jar.map',
+ 'lib.jar.map')
+ download_file(options.version, 'desugar_jdk_libs_configuration.jar',
+ 'desugar_jdk_libs_configuration.jar')
+ download_file(options.version, 'r8retrace-exclude-deps.jar',
+ 'retrace_lib.jar')
+ download_file(options.version, 'r8retrace-exclude-deps.jar.map',
+ 'retrace_lib.jar.map')
+ g4_open('METADATA')
+ metadata_path = os.path.join(third_party_r8, 'METADATA')
+ match_count = 0
+ version_match_regexp = r'[1-9]\.[0-9]{1,2}\.[0-9]{1,3}-dev'
+ for line in open(metadata_path, 'r'):
+ result = re.search(version_match_regexp, line)
+ if result:
+ match_count = match_count + 1
+ if match_count != 7:
+ print((
+ "Could not find the previous -dev release string to replace in "
+ + "METADATA. Expected to find is mentioned 7 times, but " +
+ "found %s occurrences. Please update %s manually and run " +
+ "again with options --google3 " +
+ "--use-existing-work-branch.") %
+ (match_count, metadata_path))
+ sys.exit(1)
+ sed(version_match_regexp, options.version, metadata_path)
+ sed(r'\{ year.*\}', ('{ year: %i month: %i day: %i }' %
+ (today.year, today.month, today.day)),
+ metadata_path)
+ subprocess.check_output('chmod u+w *', shell=True)
- with utils.ChangedWorkingDirectory(google3_base):
- blaze_result = blaze_run('//third_party/java/r8:d8 -- --version')
+ with utils.ChangedWorkingDirectory(google3_base):
+ blaze_result = blaze_run('//third_party/java/r8:d8 -- --version')
- assert options.version in blaze_result
+ assert options.version in blaze_result
- if not options.no_upload:
- change_result = g4_change(options.version)
- change_result += 'Run \'(g4d ' + args.p4_client \
- + ' && tap_presubmit -p all --train -c ' \
- + get_cl_id(change_result) + ')\' for running TAP global' \
- + ' presubmit using the train.\n' \
- + 'Run \'(g4d ' + args.p4_client \
- + ' && tap_presubmit -p all --notrain --detach --email' \
- + ' --skip_flaky_targets --skip_already_failing -c ' \
- + get_cl_id(change_result) + ')\' for running an isolated' \
- + ' TAP global presubmit.'
- return change_result
+ if not options.no_upload:
+ change_result = g4_change(options.version)
+ change_result += 'Run \'(g4d ' + args.p4_client \
+ + ' && tap_presubmit -p all --train -c ' \
+ + get_cl_id(change_result) + ')\' for running TAP global' \
+ + ' presubmit using the train.\n' \
+ + 'Run \'(g4d ' + args.p4_client \
+ + ' && tap_presubmit -p all --notrain --detach --email' \
+ + ' --skip_flaky_targets --skip_already_failing -c ' \
+ + get_cl_id(change_result) + ')\' for running an isolated' \
+ + ' TAP global presubmit.'
+ return change_result
- return release_google3
+ return release_google3
def prepare_google3_retrace(args):
- assert args.version
- # Check if an existing client exists.
- if not args.use_existing_work_branch:
- check_no_google3_client(args, args.p4_client)
+ assert args.version
+ # Check if an existing client exists.
+ if not args.use_existing_work_branch:
+ check_no_google3_client(args, args.p4_client)
- def release_google3_retrace(options):
- print("Releasing Retrace for Google 3")
- if options.dry_run:
- return 'DryRun: omitting g3 release for %s' % options.version
+ def release_google3_retrace(options):
+ print("Releasing Retrace for Google 3")
+ if options.dry_run:
+ return 'DryRun: omitting g3 release for %s' % options.version
- google3_base = subprocess.check_output(
- ['p4', 'g4d', '-f', args.p4_client]).decode('utf-8').rstrip()
- third_party_r8 = os.path.join(google3_base, 'third_party', 'java', 'r8')
- with utils.ChangedWorkingDirectory(third_party_r8):
- # download files
- g4_open('retrace_full.jar')
- g4_open('retrace_lib.jar')
- g4_open('retrace_lib.jar.map')
- download_file(options.version, 'r8-full-exclude-deps.jar', 'retrace_full.jar')
- download_file(options.version, 'r8retrace-exclude-deps.jar', 'retrace_lib.jar')
- download_file(
- options.version, 'r8lib-exclude-deps.jar.map', 'retrace_lib.jar.map')
- g4_open('METADATA')
- metadata_path = os.path.join(third_party_r8, 'METADATA')
- match_count = 0
- version_match_regexp = r'[1-9]\.[0-9]{1,2}\.[0-9]{1,3}-dev/r8retrace-exclude-deps.jar'
- for line in open(metadata_path, 'r'):
- result = re.search(version_match_regexp, line)
- if result:
- match_count = match_count + 1
- if match_count != 1:
- print(("Could not find the previous retrace release string to replace in " +
- "METADATA. Expected to find is mentioned 1 times. Please update %s " +
- "manually and run again with options --google3retrace " +
- "--use-existing-work-branch.") % metadata_path)
- sys.exit(1)
- sed(version_match_regexp, options.version + "/r8retrace-exclude-deps.jar", metadata_path)
- subprocess.check_output('chmod u+w *', shell=True)
+ google3_base = subprocess.check_output(
+ ['p4', 'g4d', '-f', args.p4_client]).decode('utf-8').rstrip()
+ third_party_r8 = os.path.join(google3_base, 'third_party', 'java', 'r8')
+ with utils.ChangedWorkingDirectory(third_party_r8):
+ # download files
+ g4_open('retrace_full.jar')
+ g4_open('retrace_lib.jar')
+ g4_open('retrace_lib.jar.map')
+ download_file(options.version, 'r8-full-exclude-deps.jar',
+ 'retrace_full.jar')
+ download_file(options.version, 'r8retrace-exclude-deps.jar',
+ 'retrace_lib.jar')
+ download_file(options.version, 'r8lib-exclude-deps.jar.map',
+ 'retrace_lib.jar.map')
+ g4_open('METADATA')
+ metadata_path = os.path.join(third_party_r8, 'METADATA')
+ match_count = 0
+ version_match_regexp = r'[1-9]\.[0-9]{1,2}\.[0-9]{1,3}-dev/r8retrace-exclude-deps.jar'
+ for line in open(metadata_path, 'r'):
+ result = re.search(version_match_regexp, line)
+ if result:
+ match_count = match_count + 1
+ if match_count != 1:
+ print((
+ "Could not find the previous retrace release string to replace in "
+ +
+ "METADATA. Expected to find is mentioned 1 times. Please update %s "
+ + "manually and run again with options --google3retrace " +
+ "--use-existing-work-branch.") % metadata_path)
+ sys.exit(1)
+ sed(version_match_regexp,
+ options.version + "/r8retrace-exclude-deps.jar", metadata_path)
+ subprocess.check_output('chmod u+w *', shell=True)
- with utils.ChangedWorkingDirectory(google3_base):
- blaze_result = blaze_run('//third_party/java/r8:retrace -- --version')
+ with utils.ChangedWorkingDirectory(google3_base):
+ blaze_result = blaze_run(
+ '//third_party/java/r8:retrace -- --version')
- print(blaze_result)
- assert options.version in blaze_result
+ print(blaze_result)
+ assert options.version in blaze_result
- if not options.no_upload:
- change_result = g4_change(options.version)
- change_result += 'Run \'(g4d ' + args.p4_client \
- + ' && tap_presubmit -p all --train -c ' \
- + get_cl_id(change_result) + ')\' for running TAP global' \
- + ' presubmit using the train.\n' \
- + 'Run \'(g4d ' + args.p4_client \
- + ' && tap_presubmit -p all --notrain --detach --email' \
- + ' --skip_flaky_targets --skip_already_failing -c ' \
- + get_cl_id(change_result) + ')\' for running an isolated' \
- + ' TAP global presubmit.'
- return change_result
+ if not options.no_upload:
+ change_result = g4_change(options.version)
+ change_result += 'Run \'(g4d ' + args.p4_client \
+ + ' && tap_presubmit -p all --train -c ' \
+ + get_cl_id(change_result) + ')\' for running TAP global' \
+ + ' presubmit using the train.\n' \
+ + 'Run \'(g4d ' + args.p4_client \
+ + ' && tap_presubmit -p all --notrain --detach --email' \
+ + ' --skip_flaky_targets --skip_already_failing -c ' \
+ + get_cl_id(change_result) + ')\' for running an isolated' \
+ + ' TAP global presubmit.'
+ return change_result
- return release_google3_retrace
+ return release_google3_retrace
+
def update_desugar_library_in_studio(args):
- assert os.path.exists(args.studio), ("Could not find STUDIO path %s"
- % args.studio)
+ assert os.path.exists(args.studio), ("Could not find STUDIO path %s" %
+ args.studio)
- def make_release(args):
- library_version = args.update_desugar_library_in_studio[0]
- configuration_version = args.update_desugar_library_in_studio[1]
- change_name = 'update-desugar-library-dependencies'
+ def make_release(args):
+ library_version = args.update_desugar_library_in_studio[0]
+ configuration_version = args.update_desugar_library_in_studio[1]
+ change_name = 'update-desugar-library-dependencies'
- with utils.ChangedWorkingDirectory(args.studio):
- if not args.use_existing_work_branch:
- subprocess.call(['repo', 'abandon', change_name])
- if not args.no_sync:
- subprocess.check_call(['repo', 'sync', '-cq', '-j', '16'])
+ with utils.ChangedWorkingDirectory(args.studio):
+ if not args.use_existing_work_branch:
+ subprocess.call(['repo', 'abandon', change_name])
+ if not args.no_sync:
+ subprocess.check_call(['repo', 'sync', '-cq', '-j', '16'])
- cmd = ['tools/base/bazel/bazel',
- 'run',
- '//tools/base/bazel:add_dependency',
- '--',
- '--repo=https://maven.google.com com.android.tools:desugar_jdk_libs:%s' % library_version]
- utils.PrintCmd(cmd)
- subprocess.check_call(" ".join(cmd), shell=True)
- cmd = ['tools/base/bazel/bazel', 'shutdown']
- utils.PrintCmd(cmd)
- subprocess.check_call(cmd)
+ cmd = [
+ 'tools/base/bazel/bazel', 'run',
+ '//tools/base/bazel:add_dependency', '--',
+ '--repo=https://maven.google.com com.android.tools:desugar_jdk_libs:%s'
+ % library_version
+ ]
+ utils.PrintCmd(cmd)
+ subprocess.check_call(" ".join(cmd), shell=True)
+ cmd = ['tools/base/bazel/bazel', 'shutdown']
+ utils.PrintCmd(cmd)
+ subprocess.check_call(cmd)
- prebuilts_tools = os.path.join(args.studio, 'prebuilts', 'tools')
- with utils.ChangedWorkingDirectory(prebuilts_tools):
- if not args.use_existing_work_branch:
+ prebuilts_tools = os.path.join(args.studio, 'prebuilts', 'tools')
with utils.ChangedWorkingDirectory(prebuilts_tools):
- subprocess.check_call(['repo', 'start', change_name])
- m2_dir = os.path.join(
- 'common', 'm2', 'repository', 'com', 'android', 'tools')
- subprocess.check_call(
- ['git',
- 'add',
- os.path.join(m2_dir, DESUGAR_JDK_LIBS, library_version)])
- subprocess.check_call(
- ['git',
- 'add',
- os.path.join(
- m2_dir, DESUGAR_JDK_LIBS_CONFIGURATION, configuration_version)])
+ if not args.use_existing_work_branch:
+ with utils.ChangedWorkingDirectory(prebuilts_tools):
+ subprocess.check_call(['repo', 'start', change_name])
+ m2_dir = os.path.join('common', 'm2', 'repository', 'com',
+ 'android', 'tools')
+ subprocess.check_call([
+ 'git', 'add',
+ os.path.join(m2_dir, DESUGAR_JDK_LIBS, library_version)
+ ])
+ subprocess.check_call([
+ 'git', 'add',
+ os.path.join(m2_dir, DESUGAR_JDK_LIBS_CONFIGURATION,
+ configuration_version)
+ ])
- git_message = ("""Update library desugaring dependencies
+ git_message = ("""Update library desugaring dependencies
com.android.tools:desugar_jdk_libs:%s
com.android.tools:desugar_jdk_libs_configuration:%s
Bug: %s
-Test: L8ToolTest, L8DexDesugarTest"""
- % (library_version,
- configuration_version,
- '\nBug: '.join(map(bug_fmt, args.bug))))
+Test: L8ToolTest, L8DexDesugarTest""" %
+ (library_version, configuration_version,
+ '\nBug: '.join(map(bug_fmt, args.bug))))
- if not args.use_existing_work_branch:
- subprocess.check_call(['git', 'commit', '-a', '-m', git_message])
- else:
- print('Not committing when --use-existing-work-branch. '
- + 'Commit message should be:\n\n'
- + git_message
- + '\n')
- # Don't upload if requested not to, or if changes are not committed due
- # to --use-existing-work-branch
- if not args.no_upload and not args.use_existing_work_branch:
- process = subprocess.Popen(['repo', 'upload', '.', '--verify'],
- stdin=subprocess.PIPE)
- return process.communicate(input='y\n')[0]
+ if not args.use_existing_work_branch:
+ subprocess.check_call(
+ ['git', 'commit', '-a', '-m', git_message])
+ else:
+ print('Not committing when --use-existing-work-branch. ' +
+ 'Commit message should be:\n\n' + git_message + '\n')
+ # Don't upload if requested not to, or if changes are not committed due
+ # to --use-existing-work-branch
+ if not args.no_upload and not args.use_existing_work_branch:
+ process = subprocess.Popen(['repo', 'upload', '.', '--verify'],
+ stdin=subprocess.PIPE)
+ return process.communicate(input='y\n')[0]
- return make_release
+ return make_release
def prepare_desugar_library(args):
- def make_release(args):
- library_version = args.desugar_library[0]
- configuration_version = args.desugar_library[1]
+ def make_release(args):
+ library_version = args.desugar_library[0]
+ configuration_version = args.desugar_library[1]
- # TODO(b/237636871): Cleanup and generalize.
- if (not (library_version.startswith('1.1')
- or library_version.startswith('1.2')
- or library_version.startswith('2.0'))):
- print("Release script does not support desugared library version %s"
- % library_version)
- sys.exit(1)
+ # TODO(b/237636871): Cleanup and generalize.
+ if (not (library_version.startswith('1.1') or
+ library_version.startswith('1.2') or
+ library_version.startswith('2.0'))):
+ print(
+ "Release script does not support desugared library version %s" %
+ library_version)
+ sys.exit(1)
- postfixes = ['']
- if library_version.startswith('1.2'):
- postfixes = ['_legacy']
- if library_version.startswith('2.0'):
- postfixes = ['_minimal', '', '_nio']
+ postfixes = ['']
+ if library_version.startswith('1.2'):
+ postfixes = ['_legacy']
+ if library_version.startswith('2.0'):
+ postfixes = ['_minimal', '', '_nio']
- with utils.TempDir() as temp:
- with utils.ChangedWorkingDirectory(temp):
- artifacts = []
- for postfix in postfixes:
- group_postfix = ('' if postfix == '_legacy' else postfix)
- archive_postfix = (postfix if library_version.startswith('1.1') else '_jdk11' + postfix)
- library_jar = DESUGAR_JDK_LIBS + postfix + '.jar'
- library_archive = DESUGAR_JDK_LIBS + archive_postfix + '.zip'
- configuration_archive = DESUGAR_JDK_LIBS_CONFIGURATION + archive_postfix + '.zip'
- library_gfile = ('/bigstore/r8-releases/raw/%s/%s/%s'
- % (DESUGAR_JDK_LIBS + group_postfix, library_version, library_archive))
- configuration_gfile = ('/bigstore/r8-releases/raw/main/%s/%s'
- % (configuration_version, configuration_archive))
+ with utils.TempDir() as temp:
+ with utils.ChangedWorkingDirectory(temp):
+ artifacts = []
+ for postfix in postfixes:
+ group_postfix = ('' if postfix == '_legacy' else postfix)
+ archive_postfix = (postfix
+ if library_version.startswith('1.1') else
+ '_jdk11' + postfix)
+ library_jar = DESUGAR_JDK_LIBS + postfix + '.jar'
+ library_archive = DESUGAR_JDK_LIBS + archive_postfix + '.zip'
+ configuration_archive = DESUGAR_JDK_LIBS_CONFIGURATION + archive_postfix + '.zip'
+ library_gfile = ('/bigstore/r8-releases/raw/%s/%s/%s' %
+ (DESUGAR_JDK_LIBS + group_postfix,
+ library_version, library_archive))
+ configuration_gfile = (
+ '/bigstore/r8-releases/raw/main/%s/%s' %
+ (configuration_version, configuration_archive))
- download_gfile(library_gfile, library_archive)
- download_gfile(configuration_gfile, configuration_archive)
- check_configuration(configuration_archive, group_postfix)
- artifacts.append(library_gfile)
- artifacts.append(configuration_gfile)
+ download_gfile(library_gfile, library_archive)
+ download_gfile(configuration_gfile, configuration_archive)
+ check_configuration(configuration_archive, group_postfix)
+ artifacts.append(library_gfile)
+ artifacts.append(configuration_gfile)
- release_id = gmaven_publisher_stage(args, artifacts)
+ release_id = gmaven_publisher_stage(args, artifacts)
- print("Staged Release ID " + release_id + ".\n")
- library_artifact_id = \
- '%s:%s:%s' % (ANDROID_TOOLS_PACKAGE, DESUGAR_JDK_LIBS, library_version)
- gmaven_publisher_stage_redir_test_info(
- release_id,
- library_artifact_id,
- library_jar)
+ print("Staged Release ID " + release_id + ".\n")
+ library_artifact_id = \
+ '%s:%s:%s' % (ANDROID_TOOLS_PACKAGE, DESUGAR_JDK_LIBS, library_version)
+ gmaven_publisher_stage_redir_test_info(release_id,
+ library_artifact_id,
+ library_jar)
- print("")
- answer = input("Continue with publishing [y/N]:")
+ print("")
+ answer = input("Continue with publishing [y/N]:")
- if answer != 'y':
- print('Aborting release to Google maven')
- sys.exit(1)
+ if answer != 'y':
+ print('Aborting release to Google maven')
+ sys.exit(1)
- gmaven_publisher_publish(args, release_id)
+ gmaven_publisher_publish(args, release_id)
- print("")
- print("Published. Use the email workflow for approval.")
+ print("")
+ print("Published. Use the email workflow for approval.")
- return make_release
+ return make_release
def check_configuration(configuration_archive, postfix):
- zip = zipfile.ZipFile(configuration_archive)
- zip.extractall()
- dirs = os.listdir(
- os.path.join('com', 'android', 'tools', DESUGAR_JDK_LIBS_CONFIGURATION + postfix))
- if len(dirs) != 1:
- print('Unexpected archive content, %s' + dirs)
- sys.exit(1)
+ zip_file = zipfile.ZipFile(configuration_archive)
+ zip_file.extractall()
+ dirs = os.listdir(
+ os.path.join('com', 'android', 'tools',
+ DESUGAR_JDK_LIBS_CONFIGURATION + postfix))
+ if len(dirs) != 1:
+ print('Unexpected archive content, %s' % dirs)
+ sys.exit(1)
- version = dirs[0]
- pom_file = os.path.join(
- 'com',
- 'android',
- 'tools',
- DESUGAR_JDK_LIBS_CONFIGURATION + postfix,
- version,
- '%s-%s.pom' % (DESUGAR_JDK_LIBS_CONFIGURATION + postfix, version))
- version_from_pom = extract_version_from_pom(pom_file)
- if version != version_from_pom:
- print('Version mismatch, %s != %s' % (version, version_from_pom))
- sys.exit(1)
+ version = dirs[0]
+ pom_file = os.path.join(
+ 'com', 'android', 'tools', DESUGAR_JDK_LIBS_CONFIGURATION + postfix,
+ version,
+ '%s-%s.pom' % (DESUGAR_JDK_LIBS_CONFIGURATION + postfix, version))
+ version_from_pom = extract_version_from_pom(pom_file)
+ if version != version_from_pom:
+ print('Version mismatch, %s != %s' % (version, version_from_pom))
+ sys.exit(1)
+
def check_no_google3_client(args, client_name):
- if not args.use_existing_work_branch:
- clients = subprocess.check_output('g4 myclients', shell=True).decode('utf-8')
- if ':%s:' % client_name in clients:
- if args.delete_work_branch:
- subprocess.check_call('g4 citc -d -f %s' % client_name, shell=True)
- else:
- print(("Remove the existing '%s' client before continuing " +
- "(force delete: 'g4 citc -d -f %s'), " +
- "or use either --use-existing-work-branch or " +
- "--delete-work-branch.") % (client_name, client_name))
- sys.exit(1)
+ if not args.use_existing_work_branch:
+ clients = subprocess.check_output('g4 myclients',
+ shell=True).decode('utf-8')
+ if ':%s:' % client_name in clients:
+ if args.delete_work_branch:
+ subprocess.check_call('g4 citc -d -f %s' % client_name,
+ shell=True)
+ else:
+ print(("Remove the existing '%s' client before continuing " +
+ "(force delete: 'g4 citc -d -f %s'), " +
+ "or use either --use-existing-work-branch or " +
+ "--delete-work-branch.") % (client_name, client_name))
+ sys.exit(1)
def extract_version_from_pom(pom_file):
@@ -693,53 +725,55 @@
return tree.getroot().find("{%s}version" % ns).text
-GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN = re.compile('Release ID = ([0-9a-f\-]+)')
+GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN = re.compile(
+ r'Release ID = ([0-9a-f\-]+)')
def gmaven_publisher_stage(args, gfiles):
- if args.dry_run:
- print('Dry-run, would have staged %s' % gfiles)
- return 'dry-run-release-id'
+ if args.dry_run:
+ print('Dry-run, would have staged %s' % gfiles)
+ return 'dry-run-release-id'
- print("Staging: %s" % ', '.join(gfiles))
- print("")
+ print("Staging: %s" % ', '.join(gfiles))
+ print("")
- cmd = [GMAVEN_PUBLISHER, 'stage', '--gfile', ','.join(gfiles)]
- output = subprocess.check_output(cmd)
+ cmd = [GMAVEN_PUBLISHER, 'stage', '--gfile', ','.join(gfiles)]
+ output = subprocess.check_output(cmd)
- # Expect output to contain:
- # [INFO] 06/19/2020 09:35:12 CEST: >>>>>>>>>> Staged
- # [INFO] 06/19/2020 09:35:12 CEST: Release ID = 9171d015-18f6-4a90-9984-1c362589dc1b
- # [INFO] 06/19/2020 09:35:12 CEST: Stage Path = /bigstore/studio_staging/maven2/sgjesse/9171d015-18f6-4a90-9984-1c362589dc1b
+ # Expect output to contain:
+ # [INFO] 06/19/2020 09:35:12 CEST: >>>>>>>>>> Staged
+ # [INFO] 06/19/2020 09:35:12 CEST: Release ID = 9171d015-18f6-4a90-9984-1c362589dc1b
+ # [INFO] 06/19/2020 09:35:12 CEST: Stage Path = /bigstore/studio_staging/maven2/sgjesse/9171d015-18f6-4a90-9984-1c362589dc1b
- matches = GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN.findall(output.decode("utf-8"))
- if matches == None or len(matches) > 1:
- print("Could not determine the release ID from the gmaven_publisher " +
- "output. Expected a line with 'Release ID = <release id>'.")
- print("Output was:")
+ matches = GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN.findall(
+ output.decode("utf-8"))
+ if len(matches) != 1:
+ print("Could not determine the release ID from the gmaven_publisher " +
+ "output. Expected a line with 'Release ID = <release id>'.")
+ print("Output was:")
+ print(output)
+ sys.exit(1)
+
print(output)
- sys.exit(1)
- print(output)
-
- release_id = matches[0]
- return release_id
+ release_id = matches[0]
+ return release_id
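For reference, a minimal standalone sketch of the release-ID extraction performed above, using the sample publisher output quoted in the comment:

    import re

    # Same pattern as GMAVEN_PUBLISH_STAGE_RELEASE_ID_PATTERN.
    pattern = re.compile(r'Release ID = ([0-9a-f\-]+)')
    sample = ('[INFO] 06/19/2020 09:35:12 CEST: '
              'Release ID = 9171d015-18f6-4a90-9984-1c362589dc1b')
    matches = pattern.findall(sample)
    assert len(matches) == 1
    print(matches[0])  # 9171d015-18f6-4a90-9984-1c362589dc1b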
def gmaven_publisher_stage_redir_test_info(release_id, artifact, dst):
- redir_command = ("/google/data/ro/teams/android-devtools-infra/tools/redir "
- + "--alsologtostderr "
- + "--gcs_bucket_path=/bigstore/gmaven-staging/${USER}/%s "
- + "--port=1480") % release_id
+ redir_command = ("/google/data/ro/teams/android-devtools-infra/tools/redir "
+ + "--alsologtostderr " +
+ "--gcs_bucket_path=/bigstore/gmaven-staging/${USER}/%s " +
+ "--port=1480") % release_id
- get_command = ("mvn org.apache.maven.plugins:maven-dependency-plugin:2.4:get "
- + "-Dmaven.repo.local=/tmp/maven_repo_local "
- + "-DremoteRepositories=http://localhost:1480 "
- + "-Dartifact=%s "
- + "-Ddest=%s") % (artifact, dst)
+ get_command = (
+ "mvn org.apache.maven.plugins:maven-dependency-plugin:2.4:get " +
+ "-Dmaven.repo.local=/tmp/maven_repo_local " +
+ "-DremoteRepositories=http://localhost:1480 " + "-Dartifact=%s " +
+ "-Ddest=%s") % (artifact, dst)
- print("""To test the staged content with 'redir' run:
+ print("""To test the staged content with 'redir' run:
%s
@@ -771,297 +805,319 @@
def gmaven_publisher_publish(args, release_id):
- if args.dry_run:
- print('Dry-run, would have published %s' % release_id)
- return
+ if args.dry_run:
+ print('Dry-run, would have published %s' % release_id)
+ return
- cmd = [GMAVEN_PUBLISHER, 'publish', release_id]
- output = subprocess.check_output(cmd)
+ cmd = [GMAVEN_PUBLISHER, 'publish', release_id]
+ output = subprocess.check_output(cmd)
+
def branch_change_diff(diff, old_version, new_version):
- invalid_line = None
- for line in str(diff).splitlines():
- if line.startswith('-R8') and \
- line != "-R8_DEV_BRANCH = '%s'" % old_version:
- print(line)
- invalid_line = line
- elif line.startswith('+R8') and \
- line != "+R8_DEV_BRANCH = '%s'" % new_version:
- print(line)
- invalid_line = line
- return invalid_line
+ invalid_line = None
+ for line in str(diff).splitlines():
+ if line.startswith('-R8') and \
+ line != "-R8_DEV_BRANCH = '%s'" % old_version:
+ print(line)
+ invalid_line = line
+ elif line.startswith('+R8') and \
+ line != "+R8_DEV_BRANCH = '%s'" % new_version:
+ print(line)
+ invalid_line = line
+ return invalid_line
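A minimal usage sketch of branch_change_diff with hypothetical version numbers: the only diff it accepts is the single R8_DEV_BRANCH rewrite, and any other changed -R8/+R8 line is returned as invalid:

    ok = "-R8_DEV_BRANCH = '8.2'\n+R8_DEV_BRANCH = '8.3'"
    assert branch_change_diff(ok, '8.2', '8.3') is None

    bad = "-R8_DEV_BRANCH = '8.2'\n+R8_DEV_BRANCH = '9.9'"
    assert branch_change_diff(bad, '8.2', '8.3') == "+R8_DEV_BRANCH = '9.9'"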
def validate_branch_change_diff(version_diff_output, old_version, new_version):
- invalid = branch_change_diff(version_diff_output, old_version, new_version)
- if invalid:
- print("")
- print("The diff for the branch change in tools/release.py is not as expected:")
- print("")
- print("=" * 80)
- print(version_diff_output)
- print("=" * 80)
- print("")
- print("Validate the uploaded CL before landing.")
- print("")
+ invalid = branch_change_diff(version_diff_output, old_version, new_version)
+ if invalid:
+ print("")
+ print(
+ "The diff for the branch change in tools/r8_release.py is not as expected:"
+ )
+ print("")
+ print("=" * 80)
+ print(version_diff_output)
+ print("=" * 80)
+ print("")
+ print("Validate the uploaded CL before landing.")
+ print("")
def prepare_branch(args):
- branch_version = args.new_dev_branch[0]
- commithash = args.new_dev_branch[1]
+ branch_version = args.new_dev_branch[0]
+ commithash = args.new_dev_branch[1]
- current_semver = utils.check_basic_semver_version(
- R8_DEV_BRANCH, ", current release branch version should be x.y", 2)
- semver = utils.check_basic_semver_version(
- branch_version, ", release branch version should be x.y", 2)
- if not semver.larger_than(current_semver):
- print('New branch version "'
- + branch_version
- + '" must be strictly larger than the current "'
- + R8_DEV_BRANCH
- + '"')
- sys.exit(1)
+ current_semver = utils.check_basic_semver_version(
+ R8_DEV_BRANCH, ", current release branch version should be x.y", 2)
+ semver = utils.check_basic_semver_version(
+ branch_version, ", release branch version should be x.y", 2)
+ if not semver.larger_than(current_semver):
+ print('New branch version "' + branch_version +
+ '" must be strictly larger than the current "' + R8_DEV_BRANCH +
+ '"')
+ sys.exit(1)
- def make_branch(options):
- with utils.TempDir() as temp:
- subprocess.check_call(['git', 'clone', utils.REPO_SOURCE, temp])
- with utils.ChangedWorkingDirectory(temp):
- subprocess.check_call(['git', 'branch', branch_version, commithash])
+ def make_branch(options):
+ with utils.TempDir() as temp:
+ subprocess.check_call(['git', 'clone', utils.REPO_SOURCE, temp])
+ with utils.ChangedWorkingDirectory(temp):
+ subprocess.check_call(
+ ['git', 'branch', branch_version, commithash])
- subprocess.check_call(['git', 'checkout', branch_version])
+ subprocess.check_call(['git', 'checkout', branch_version])
- # Rewrite the version, commit and validate.
- old_version = 'main'
- full_version = branch_version + '.0-dev'
- version_prefix = 'public static final String LABEL = "'
- sed(version_prefix + old_version,
- version_prefix + full_version,
- R8_VERSION_FILE)
+ # Rewrite the version, commit and validate.
+ old_version = 'main'
+ full_version = branch_version + '.0-dev'
+ version_prefix = 'public static final String LABEL = "'
+ sed(version_prefix + old_version, version_prefix + full_version,
+ R8_VERSION_FILE)
- subprocess.check_call([
- 'git', 'commit', '-a', '-m', 'Version %s' % full_version])
+ subprocess.check_call(
+ ['git', 'commit', '-a', '-m',
+ 'Version %s' % full_version])
- version_diff_output = subprocess.check_output([
- 'git', 'diff', '%s..HEAD' % commithash])
+ version_diff_output = subprocess.check_output(
+ ['git', 'diff', '%s..HEAD' % commithash])
- validate_version_change_diff(version_diff_output, old_version, full_version)
+ validate_version_change_diff(version_diff_output, old_version,
+ full_version)
- # Double check that we want to create a new release branch.
- if not options.dry_run:
- answer = input('Create new branch for %s [y/N]:' % branch_version)
- if answer != 'y':
- print('Aborting new branch for %s' % branch_version)
- sys.exit(1)
+ # Double check that we want to create a new release branch.
+ if not options.dry_run:
+ answer = input('Create new branch for %s [y/N]:' %
+ branch_version)
+ if answer != 'y':
+ print('Aborting new branch for %s' % branch_version)
+ sys.exit(1)
- maybe_check_call(options, [
- 'git', 'push', 'origin', 'HEAD:%s' % branch_version])
- maybe_tag(options, full_version)
+ maybe_check_call(
+ options,
+ ['git', 'push', 'origin',
+ 'HEAD:%s' % branch_version])
+ maybe_tag(options, full_version)
- print('Updating tools/r8_release.py to make new dev releases on %s'
- % branch_version)
+ print(
+ 'Updating tools/r8_release.py to make new dev releases on %s'
+ % branch_version)
- subprocess.check_call(['git', 'new-branch', 'update-release-script'])
+ subprocess.check_call(
+ ['git', 'new-branch', 'update-release-script'])
- # Check this file for the setting of the current dev branch.
- result = None
- for line in open(THIS_FILE_RELATIVE, 'r'):
- result = re.match(
- r"^R8_DEV_BRANCH = '(\d+).(\d+)'", line)
- if result:
- break
- if not result or not result.group(1):
- print('Failed to find version label in %s' % THIS_FILE_RELATIVE)
- sys.exit(1)
+ # Check this file for the setting of the current dev branch.
+ result = None
+ for line in open(THIS_FILE_RELATIVE, 'r'):
+ result = re.match(r"^R8_DEV_BRANCH = '(\d+).(\d+)'", line)
+ if result:
+ break
+ if not result or not result.group(1):
+ print('Failed to find version label in %s' %
+ THIS_FILE_RELATIVE)
+ sys.exit(1)
- # Update this file with the new dev branch.
- sed("R8_DEV_BRANCH = '%s.%s" % (result.group(1), result.group(2)),
- "R8_DEV_BRANCH = '%s.%s" % (str(semver.major), str(semver.minor)),
- THIS_FILE_RELATIVE)
+ # Update this file with the new dev branch.
+ sed(
+ "R8_DEV_BRANCH = '%s.%s" %
+ (result.group(1), result.group(2)),
+ "R8_DEV_BRANCH = '%s.%s" %
+ (str(semver.major), str(semver.minor)), THIS_FILE_RELATIVE)
- message = \
- 'Prepare %s for branch %s' % (THIS_FILE_RELATIVE, branch_version)
- subprocess.check_call(['git', 'commit', '-a', '-m', message])
+ message = \
+ 'Prepare %s for branch %s' % (THIS_FILE_RELATIVE, branch_version)
+ subprocess.check_call(['git', 'commit', '-a', '-m', message])
- branch_diff_output = subprocess.check_output(['git', 'diff', 'HEAD~'])
+ branch_diff_output = subprocess.check_output(
+ ['git', 'diff', 'HEAD~'])
- validate_branch_change_diff(
- branch_diff_output, R8_DEV_BRANCH, branch_version)
+ validate_branch_change_diff(branch_diff_output, R8_DEV_BRANCH,
+ branch_version)
- maybe_check_call(options, ['git', 'cl', 'upload', '-f', '-m', message])
+ maybe_check_call(options,
+ ['git', 'cl', 'upload', '-f', '-m', message])
- print('')
- print('Make sure to send out the branch change CL for review.')
- print('')
+ print('')
+ print('Make sure to send out the branch change CL for review.')
+ print('')
- return make_branch
+ return make_branch
def parse_options():
- result = argparse.ArgumentParser(description='Release r8')
- group = result.add_mutually_exclusive_group()
- group.add_argument('--dev-release',
- metavar=('<main hash>'),
- help='The hash to use for the new dev version of R8')
- group.add_argument('--version',
- metavar=('<version>'),
- help='The new version of R8 (e.g., 1.4.51) to release to selected channels')
- group.add_argument('--desugar-library',
- nargs=2,
- metavar=('<version>', '<configuration hash>'),
- help='The new version of com.android.tools:desugar_jdk_libs')
- group.add_argument('--update-desugar-library-in-studio',
- nargs=2,
- metavar=('<version>', '<configuration version>'),
- help='Update studio mirror of com.android.tools:desugar_jdk_libs')
- group.add_argument('--new-dev-branch',
- nargs=2,
- metavar=('<version>', '<main hash>'),
- help='Create a new branch starting a version line (e.g. 2.0)')
- result.add_argument('--dev-pre-cherry-pick',
- metavar=('<main hash(s)>'),
- default=[],
- action='append',
- help='List of commits to cherry pick before doing full '
- 'merge, mostly used for reverting cherry picks')
- result.add_argument('--no-sync', '--no_sync',
- default=False,
- action='store_true',
- help='Do not sync repos before uploading')
- result.add_argument('--bug',
- metavar=('<bug(s)>'),
- default=[],
- action='append',
- help='List of bugs for release version')
- result.add_argument('--no-bugs',
- default=False,
- action='store_true',
- help='Allow Studio release without specifying any bugs')
- result.add_argument('--studio',
- metavar=('<path>'),
- help='Release for studio by setting the path to a studio '
- 'checkout')
- result.add_argument('--aosp',
- metavar=('<path>'),
- help='Release for aosp by setting the path to the '
- 'checkout')
- result.add_argument('--maven',
- default=False,
- action='store_true',
- help='Release to Google Maven')
- result.add_argument('--google3',
- default=False,
- action='store_true',
- help='Release for google 3')
- result.add_argument('--google3retrace',
- default=False,
- action='store_true',
- help='Release retrace for google 3')
- result.add_argument('--p4-client',
- default='update-r8',
- metavar=('<client name>'),
- help='P4 client name for google 3')
- result.add_argument('--use-existing-work-branch', '--use_existing_work_branch',
- default=False,
- action='store_true',
- help='Use existing work branch/CL in aosp/studio/google3')
- result.add_argument('--delete-work-branch', '--delete_work_branch',
- default=False,
- action='store_true',
- help='Delete CL in google3')
- result.add_argument('--bypass-hooks', '--bypass_hooks',
- default=False,
- action='store_true',
- help="Bypass hooks when uploading")
- result.add_argument('--no-upload', '--no_upload',
- default=False,
- action='store_true',
- help="Don't upload for code review")
- result.add_argument('--dry-run',
- default=False,
- action='store_true',
- help='Only perform non-commiting tasks and print others.')
- result.add_argument('--dry-run-output', '--dry_run_output',
- default=os.getcwd(),
- metavar=('<path>'),
- help='Location for dry run output.')
- args = result.parse_args()
- if (len(args.bug) > 0 and args.no_bugs):
- print("Use of '--bug' and '--no-bugs' are mutually exclusive")
- sys.exit(1)
+ result = argparse.ArgumentParser(description='Release r8')
+ group = result.add_mutually_exclusive_group()
+ group.add_argument('--dev-release',
+ metavar=('<main hash>'),
+ help='The hash to use for the new dev version of R8')
+ group.add_argument(
+ '--version',
+ metavar=('<version>'),
+ help=
+ 'The new version of R8 (e.g., 1.4.51) to release to selected channels')
+ group.add_argument(
+ '--desugar-library',
+ nargs=2,
+ metavar=('<version>', '<configuration hash>'),
+ help='The new version of com.android.tools:desugar_jdk_libs')
+ group.add_argument(
+ '--update-desugar-library-in-studio',
+ nargs=2,
+ metavar=('<version>', '<configuration version>'),
+ help='Update studio mirror of com.android.tools:desugar_jdk_libs')
+ group.add_argument(
+ '--new-dev-branch',
+ nargs=2,
+ metavar=('<version>', '<main hash>'),
+ help='Create a new branch starting a version line (e.g. 2.0)')
+ result.add_argument('--dev-pre-cherry-pick',
+ metavar=('<main hash(s)>'),
+ default=[],
+ action='append',
+ help='List of commits to cherry pick before doing full '
+ 'merge, mostly used for reverting cherry picks')
+ result.add_argument('--no-sync',
+ '--no_sync',
+ default=False,
+ action='store_true',
+ help='Do not sync repos before uploading')
+ result.add_argument('--bug',
+ metavar=('<bug(s)>'),
+ default=[],
+ action='append',
+ help='List of bugs for release version')
+ result.add_argument('--no-bugs',
+ default=False,
+ action='store_true',
+ help='Allow Studio release without specifying any bugs')
+ result.add_argument(
+ '--studio',
+ metavar=('<path>'),
+ help='Release for studio by setting the path to a studio '
+ 'checkout')
+ result.add_argument('--aosp',
+ metavar=('<path>'),
+ help='Release for aosp by setting the path to the '
+ 'checkout')
+ result.add_argument('--maven',
+ default=False,
+ action='store_true',
+ help='Release to Google Maven')
+ result.add_argument('--google3',
+ default=False,
+ action='store_true',
+ help='Release for google 3')
+ result.add_argument('--google3retrace',
+ default=False,
+ action='store_true',
+ help='Release retrace for google 3')
+ result.add_argument('--p4-client',
+ default='update-r8',
+ metavar=('<client name>'),
+ help='P4 client name for google 3')
+ result.add_argument(
+ '--use-existing-work-branch',
+ '--use_existing_work_branch',
+ default=False,
+ action='store_true',
+ help='Use existing work branch/CL in aosp/studio/google3')
+ result.add_argument('--delete-work-branch',
+ '--delete_work_branch',
+ default=False,
+ action='store_true',
+ help='Delete CL in google3')
+ result.add_argument('--bypass-hooks',
+ '--bypass_hooks',
+ default=False,
+ action='store_true',
+ help="Bypass hooks when uploading")
+ result.add_argument('--no-upload',
+ '--no_upload',
+ default=False,
+ action='store_true',
+ help="Don't upload for code review")
+ result.add_argument(
+ '--dry-run',
+ default=False,
+ action='store_true',
+ help='Only perform non-committing tasks and print others.')
+ result.add_argument('--dry-run-output',
+ '--dry_run_output',
+ default=os.getcwd(),
+ metavar=('<path>'),
+ help='Location for dry run output.')
+ args = result.parse_args()
+ if (len(args.bug) > 0 and args.no_bugs):
+ print("Use of '--bug' and '--no-bugs' are mutually exclusive")
+ sys.exit(1)
- if (args.studio
- and args.version
- and not 'dev' in args.version
- and args.bug == []
- and not args.no_bugs):
- print("When releasing a release version to Android Studio add the "
- + "list of bugs by using '--bug'")
- sys.exit(1)
+ if (args.studio and args.version and not 'dev' in args.version and
+ args.bug == [] and not args.no_bugs):
+ print("When releasing a release version to Android Studio add the " +
+ "list of bugs by using '--bug'")
+ sys.exit(1)
- if args.version and not 'dev' in args.version and args.google3:
- print("WARNING: You should not roll a release version into google 3")
+ if args.version and not 'dev' in args.version and args.google3:
+ print("WARNING: You should not roll a release version into google 3")
- return args
+ return args
def main():
- args = parse_options()
- targets_to_run = []
+ args = parse_options()
+ targets_to_run = []
- if args.new_dev_branch:
- if args.google3 or args.studio or args.aosp:
- print('Cannot create a branch and roll at the same time.')
- sys.exit(1)
- targets_to_run.append(prepare_branch(args))
+ if args.new_dev_branch:
+ if args.google3 or args.studio or args.aosp:
+ print('Cannot create a branch and roll at the same time.')
+ sys.exit(1)
+ targets_to_run.append(prepare_branch(args))
- if args.dev_release:
- if args.google3 or args.studio or args.aosp:
- print('Cannot create a dev release and roll at the same time.')
- sys.exit(1)
- targets_to_run.append(prepare_release(args))
+ if args.dev_release:
+ if args.google3 or args.studio or args.aosp:
+ print('Cannot create a dev release and roll at the same time.')
+ sys.exit(1)
+ targets_to_run.append(prepare_release(args))
- if (args.google3
- or args.maven
- or (args.studio and not args.no_sync)
- or (args.desugar_library and not args.dry_run)):
- utils.check_gcert()
+ if (args.google3 or args.maven or (args.studio and not args.no_sync) or
+ (args.desugar_library and not args.dry_run)):
+ utils.check_gcert()
- if args.google3:
- targets_to_run.append(prepare_google3(args))
- if args.google3retrace:
- targets_to_run.append(prepare_google3_retrace(args))
- if args.studio and not args.update_desugar_library_in_studio:
- targets_to_run.append(prepare_studio(args))
- if args.aosp:
- targets_to_run.append(prepare_aosp(args))
- if args.maven:
- targets_to_run.append(prepare_maven(args))
+ if args.google3:
+ targets_to_run.append(prepare_google3(args))
+ if args.google3retrace:
+ targets_to_run.append(prepare_google3_retrace(args))
+ if args.studio and not args.update_desugar_library_in_studio:
+ targets_to_run.append(prepare_studio(args))
+ if args.aosp:
+ targets_to_run.append(prepare_aosp(args))
+ if args.maven:
+ targets_to_run.append(prepare_maven(args))
- if args.desugar_library:
- targets_to_run.append(prepare_desugar_library(args))
+ if args.desugar_library:
+ targets_to_run.append(prepare_desugar_library(args))
- if args.update_desugar_library_in_studio:
- if not args.studio:
- print("--studio required")
- sys.exit(1)
- if args.bug == []:
- print("Update studio mirror of com.android.tools:desugar_jdk_libs "
- + "requires at least one bug by using '--bug'")
- sys.exit(1)
- targets_to_run.append(update_desugar_library_in_studio(args))
+ if args.update_desugar_library_in_studio:
+ if not args.studio:
+ print("--studio required")
+ sys.exit(1)
+ if args.bug == []:
+ print(
+ "Updating the studio mirror of com.android.tools:desugar_jdk_libs " +
+ "requires at least one bug, passed with '--bug'")
+ sys.exit(1)
+ targets_to_run.append(update_desugar_library_in_studio(args))
- final_results = []
- for target_closure in targets_to_run:
- final_results.append(target_closure(args))
+ final_results = []
+ for target_closure in targets_to_run:
+ final_results.append(target_closure(args))
- print('\n\n**************************************************************')
- print('PRINTING SUMMARY')
- print('**************************************************************\n\n')
+ print('\n\n**************************************************************')
+ print('PRINTING SUMMARY')
+ print('**************************************************************\n\n')
- for result in final_results:
- if result is not None:
- print(result)
+ for result in final_results:
+ if result is not None:
+ print(result)
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
diff --git a/tools/r8bisect.py b/tools/r8bisect.py
index 1b35eb3..cc829c3 100755
--- a/tools/r8bisect.py
+++ b/tools/r8bisect.py
@@ -7,4 +7,4 @@
import toolhelper
if __name__ == '__main__':
- sys.exit(toolhelper.run('bisect', sys.argv[1:]))
+ sys.exit(toolhelper.run('bisect', sys.argv[1:]))
diff --git a/tools/release_smali.py b/tools/release_smali.py
index 902e517..51f261a 100755
--- a/tools/release_smali.py
+++ b/tools/release_smali.py
@@ -12,41 +12,45 @@
ARCHIVE_BUCKET = 'r8-releases'
REPO = 'https://github.com/google/smali'
+
def parse_options():
- result = argparse.ArgumentParser(description='Release Smali')
- result.add_argument('--version',
- required=True,
- metavar=('<version>'),
- help='The version of smali to release.')
- result.add_argument('--dry-run',
- default=False,
- action='store_true',
- help='Only perform non-commiting tasks and print others.')
- return result.parse_args()
+ result = argparse.ArgumentParser(description='Release Smali')
+ result.add_argument('--version',
+ required=True,
+ metavar=('<version>'),
+ help='The version of smali to release.')
+ result.add_argument(
+ '--dry-run',
+ default=False,
+ action='store_true',
+ help='Only perform non-committing tasks and print others.')
+ return result.parse_args()
def Main():
- options = parse_options()
- utils.check_gcert()
- gfile = ('/bigstore/r8-releases/smali/%s/smali-maven-release-%s.zip'
- % (options.version, options.version))
- release_id = gmaven.publisher_stage([gfile], options.dry_run)
+ options = parse_options()
+ utils.check_gcert()
+ gfile = ('/bigstore/r8-releases/smali/%s/smali-maven-release-%s.zip' %
+ (options.version, options.version))
+ release_id = gmaven.publisher_stage([gfile], options.dry_run)
- print('Staged Release ID %s.\n' % release_id)
- gmaven.publisher_stage_redir_test_info(
- release_id, 'com.android.tools.smali:smali:%s' % options.version, 'smali.jar')
+ print('Staged Release ID %s.\n' % release_id)
+ gmaven.publisher_stage_redir_test_info(
+ release_id, 'com.android.tools.smali:smali:%s' % options.version,
+ 'smali.jar')
- print()
- answer = input('Continue with publishing [y/N]:')
+ print()
+ answer = input('Continue with publishing [y/N]:')
- if answer != 'y':
- print('Aborting release to Google maven')
- sys.exit(1)
+ if answer != 'y':
+ print('Aborting release to Google maven')
+ sys.exit(1)
- gmaven.publisher_publish(release_id, options.dry_run)
+ gmaven.publisher_publish(release_id, options.dry_run)
- print()
- print('Published. Use the email workflow for approval.')
+ print()
+ print('Published. Use the email workflow for approval.')
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/retrace.py b/tools/retrace.py
index 1a9deb5..6642224 100755
--- a/tools/retrace.py
+++ b/tools/retrace.py
@@ -15,204 +15,201 @@
def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'R8lib wrapper for retrace tool.')
- parser.add_argument(
- '-c',
- '--commit-hash',
- '--commit_hash',
- help='Commit hash to download r8lib map file for.',
- default=None)
- parser.add_argument(
- '--version',
- help='Version to download r8lib map file for.',
- default=None)
- parser.add_argument(
- '--tag',
- help='Tag to download r8lib map file for.',
- default=None)
- parser.add_argument(
- '--exclude-deps', '--exclude_deps',
- default=None,
- action='store_true',
- help='Use the exclude-deps version of the mapping file.')
- parser.add_argument(
- '--map',
- help='Path to r8lib map.',
- default=None)
- parser.add_argument(
- '--r8jar',
- help='Path to r8 jar.',
- default=None)
- parser.add_argument(
- '--no-r8lib',
- '--no_r8lib',
- default=False,
- action='store_true',
- help='Use r8.jar and not r8lib.jar.')
- parser.add_argument(
- '--stacktrace',
- help='Path to stacktrace file (read from stdin if not passed).',
- default=None)
- parser.add_argument(
- '--quiet',
- default=None,
- action='store_true',
- help='Disables diagnostics printing to stdout.')
- parser.add_argument(
- '--debug-agent',
- '--debug_agent',
- default=None,
- action='store_true',
- help='Attach a debug-agent to the retracer java process.')
- parser.add_argument(
- '--regex',
- default=None,
- help='Sets a custom regular expression used for parsing'
- )
- parser.add_argument(
- '--verbose',
- default=None,
- action='store_true',
- help='Enables verbose retracing.')
- parser.add_argument(
- '--disable-map-validation',
- default=None,
- action='store_true',
- help='Disable validation of map hash.')
- return parser.parse_args()
+ parser = argparse.ArgumentParser(
+ description='R8lib wrapper for retrace tool.')
+ parser.add_argument('-c',
+ '--commit-hash',
+ '--commit_hash',
+ help='Commit hash to download r8lib map file for.',
+ default=None)
+ parser.add_argument('--version',
+ help='Version to download r8lib map file for.',
+ default=None)
+ parser.add_argument('--tag',
+ help='Tag to download r8lib map file for.',
+ default=None)
+ parser.add_argument(
+ '--exclude-deps',
+ '--exclude_deps',
+ default=None,
+ action='store_true',
+ help='Use the exclude-deps version of the mapping file.')
+ parser.add_argument('--map', help='Path to r8lib map.', default=None)
+ parser.add_argument('--r8jar', help='Path to r8 jar.', default=None)
+ parser.add_argument('--no-r8lib',
+ '--no_r8lib',
+ default=False,
+ action='store_true',
+ help='Use r8.jar and not r8lib.jar.')
+ parser.add_argument(
+ '--stacktrace',
+ help='Path to stacktrace file (read from stdin if not passed).',
+ default=None)
+ parser.add_argument('--quiet',
+ default=None,
+ action='store_true',
+ help='Disables diagnostics printing to stdout.')
+ parser.add_argument(
+ '--debug-agent',
+ '--debug_agent',
+ default=None,
+ action='store_true',
+ help='Attach a debug-agent to the retracer java process.')
+ parser.add_argument(
+ '--regex',
+ default=None,
+ help='Sets a custom regular expression used for parsing')
+ parser.add_argument('--verbose',
+ default=None,
+ action='store_true',
+ help='Enables verbose retracing.')
+ parser.add_argument('--disable-map-validation',
+ default=None,
+ action='store_true',
+ help='Disable validation of map hash.')
+ return parser.parse_args()
def get_map_file(args, temp):
- # default to using the specified map file.
- if args.map:
- return args.map
+ # default to using the specified map file.
+ if args.map:
+ return args.map
- # next try to extract it from the tag/version options.
- map_path = utils.find_cloud_storage_file_from_options('r8lib.jar.map', args)
- if map_path:
- return map_path
+ # next try to extract it from the tag/version options.
+ map_path = utils.find_cloud_storage_file_from_options('r8lib.jar.map', args)
+ if map_path:
+ return map_path
- # next try to extract it from the stack-trace source-file content.
- if not args.stacktrace:
- if not args.quiet:
- print('Waiting for stack-trace input...')
- args.stacktrace = os.path.join(temp, 'stacktrace.txt')
- open(args.stacktrace, 'w').writelines(sys.stdin.readlines())
+ # next try to extract it from the stack-trace source-file content.
+ if not args.stacktrace:
+ if not args.quiet:
+ print('Waiting for stack-trace input...')
+ args.stacktrace = os.path.join(temp, 'stacktrace.txt')
+ open(args.stacktrace, 'w').writelines(sys.stdin.readlines())
- r8_source_file = None
- for line in open(args.stacktrace, 'r'):
- start = line.rfind("(R8_")
- if start > 0:
- end = line.find(":", start)
- content = line[start + 1: end]
- if r8_source_file:
- if content != r8_source_file:
- print('WARNING: there are multiple distinct R8 source files:')
- print(' ' + r8_source_file)
- print(' ' + content)
- else:
- r8_source_file = content
+ r8_source_file = None
+ for line in open(args.stacktrace, 'r'):
+ start = line.rfind("(R8_")
+ if start > 0:
+ end = line.find(":", start)
+ content = line[start + 1:end]
+ if r8_source_file:
+ if content != r8_source_file:
+ print(
+ 'WARNING: there are multiple distinct R8 source files:')
+ print(' ' + r8_source_file)
+ print(' ' + content)
+ else:
+ r8_source_file = content
- if r8_source_file:
- (header, r8_version_or_hash, maphash) = r8_source_file.split('_')
- # If the command-line specified --exclude-deps then assume it is as previous
- # versions will not be marked as such in the source-file line.
- is_excldeps = args.exclude_deps
- excldeps_start = r8_version_or_hash.find('+excldeps')
- if (excldeps_start > 0):
- is_excldeps = True
- r8_version_or_hash = r8_version_or_hash[0:excldeps_start]
- if len(r8_version_or_hash) < 40:
- args.version = r8_version_or_hash
- else:
- args.commit_hash = r8_version_or_hash
- map_path = None
- if path.exists(utils.R8LIB_MAP) and get_hash_from_map_file(utils.R8LIB_MAP) == maphash:
- return utils.R8LIB_MAP
+ if r8_source_file:
+ (header, r8_version_or_hash, maphash) = r8_source_file.split('_')
+ # If the command-line specified --exclude-deps then assume it is as previous
+ # versions will not be marked as such in the source-file line.
+ is_excldeps = args.exclude_deps
+ excldeps_start = r8_version_or_hash.find('+excldeps')
+ if (excldeps_start > 0):
+ is_excldeps = True
+ r8_version_or_hash = r8_version_or_hash[0:excldeps_start]
+ if len(r8_version_or_hash) < 40:
+ args.version = r8_version_or_hash
+ else:
+ args.commit_hash = r8_version_or_hash
+ map_path = None
+ if path.exists(utils.R8LIB_MAP) and get_hash_from_map_file(
+ utils.R8LIB_MAP) == maphash:
+ return utils.R8LIB_MAP
- try:
- map_path = utils.find_cloud_storage_file_from_options(
- 'r8lib' + ('-exclude-deps' if is_excldeps else '') + '.jar.map', args)
- except Exception as e:
- print(e)
- print('WARNING: Falling back to using local mapping file.')
+ try:
+ map_path = utils.find_cloud_storage_file_from_options(
+ 'r8lib' + ('-exclude-deps' if is_excldeps else '') + '.jar.map',
+ args)
+ except Exception as e:
+ print(e)
+ print('WARNING: Falling back to using local mapping file.')
- if map_path and not args.disable_map_validation:
- check_maphash(map_path, maphash, args)
- return map_path
+ if map_path and not args.disable_map_validation:
+ check_maphash(map_path, maphash, args)
+ return map_path
- # If no other map file was found, use the local mapping file.
- if args.r8jar:
- return args.r8jar + ".map"
- return utils.R8LIB_MAP
+ # If no other map file was found, use the local mapping file.
+ if args.r8jar:
+ return args.r8jar + ".map"
+ return utils.R8LIB_MAP
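For reference, a minimal sketch of the source-file marker parsing done above, with a hypothetical marker value (the R8_<version-or-hash>[+excldeps]_<maphash> layout is taken from the code):

    marker = 'R8_8.1.56+excldeps_0123abc'  # hypothetical marker
    header, version_or_hash, maphash = marker.split('_')
    excldeps_start = version_or_hash.find('+excldeps')
    if excldeps_start > 0:
        version_or_hash = version_or_hash[:excldeps_start]
    # Short values are versions; 40-character values are commit hashes.
    is_version = len(version_or_hash) < 40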
def check_maphash(mapping_path, maphash, args):
- infile_maphash = get_hash_from_map_file(mapping_path)
- if infile_maphash != maphash:
- print('ERROR: The mapping file hash does not match the R8 line')
- print(' In mapping file: ' + infile_maphash)
- print(' In source file: ' + maphash)
- if (not args.exclude_deps):
- print('If this could be a version without internalized dependencies '
- + 'try passing --exclude-deps')
- sys.exit(1)
+ infile_maphash = get_hash_from_map_file(mapping_path)
+ if infile_maphash != maphash:
+ print('ERROR: The mapping file hash does not match the R8 line')
+ print(' In mapping file: ' + infile_maphash)
+ print(' In source file: ' + maphash)
+ if (not args.exclude_deps):
+ print(
+ 'If this could be a version without internalized dependencies '
+ + 'try passing --exclude-deps')
+ sys.exit(1)
+
def get_hash_from_map_file(mapping_path):
- map_hash_header = "# pg_map_hash: SHA-256 "
- for line in open(mapping_path, 'r'):
- if line.startswith(map_hash_header):
- return line[len(map_hash_header):].strip()
+ map_hash_header = "# pg_map_hash: SHA-256 "
+ for line in open(mapping_path, 'r'):
+ if line.startswith(map_hash_header):
+ return line[len(map_hash_header):].strip()
+
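A minimal sketch of the pg_map_hash header lookup, with a hypothetical mapping-file line:

    map_hash_header = '# pg_map_hash: SHA-256 '
    line = '# pg_map_hash: SHA-256 0123abc'  # hypothetical header line
    if line.startswith(map_hash_header):
        print(line[len(map_hash_header):].strip())  # 0123abc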
def main():
- args = parse_arguments()
- with utils.TempDir() as temp:
- map_path = get_map_file(args, temp)
- return run(
- map_path,
- args.stacktrace,
- args.r8jar,
- args.no_r8lib,
- quiet=args.quiet,
- debug=args.debug_agent,
- regex=args.regex,
- verbose=args.verbose)
+ args = parse_arguments()
+ with utils.TempDir() as temp:
+ map_path = get_map_file(args, temp)
+ return run(map_path,
+ args.stacktrace,
+ args.r8jar,
+ args.no_r8lib,
+ quiet=args.quiet,
+ debug=args.debug_agent,
+ regex=args.regex,
+ verbose=args.verbose)
-def run(map_path, stacktrace, r8jar, no_r8lib, quiet=False, debug=False, regex=None, verbose=False):
- retrace_args = [jdk.GetJavaExecutable()]
+def run(map_path,
+ stacktrace,
+ r8jar,
+ no_r8lib,
+ quiet=False,
+ debug=False,
+ regex=None,
+ verbose=False):
+ retrace_args = [jdk.GetJavaExecutable()]
- if debug:
- retrace_args.append(
- '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005')
+ if debug:
+ retrace_args.append(
+ '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005'
+ )
- if not r8jar:
- r8jar = utils.R8_JAR if no_r8lib else utils.R8RETRACE_JAR
+ if not r8jar:
+ r8jar = utils.R8_JAR if no_r8lib else utils.R8RETRACE_JAR
- retrace_args += [
- '-cp',
- r8jar,
- 'com.android.tools.r8.retrace.Retrace',
- map_path
- ]
+ retrace_args += [
+ '-cp', r8jar, 'com.android.tools.r8.retrace.Retrace', map_path
+ ]
- if regex:
- retrace_args.append('--regex')
- retrace_args.append(regex)
+ if regex:
+ retrace_args.append('--regex')
+ retrace_args.append(regex)
- if quiet:
- retrace_args.append('--quiet')
+ if quiet:
+ retrace_args.append('--quiet')
- if stacktrace:
- retrace_args.append(stacktrace)
+ if stacktrace:
+ retrace_args.append(stacktrace)
- if verbose:
- retrace_args.append('--verbose')
+ if verbose:
+ retrace_args.append('--verbose')
- utils.PrintCmd(retrace_args, quiet=quiet)
- return subprocess.call(retrace_args)
+ utils.PrintCmd(retrace_args, quiet=quiet)
+ return subprocess.call(retrace_args)
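A minimal usage sketch of run() with hypothetical local paths; passing no jar falls back to utils.R8RETRACE_JAR:

    # Retrace 'stacktrace.txt' against a local mapping file, suppressing
    # diagnostic output.
    run('build/r8lib.jar.map', 'stacktrace.txt', None, False, quiet=True)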
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
diff --git a/tools/run-jdwp-tests.py b/tools/run-jdwp-tests.py
index e241fb4..591a608 100755
--- a/tools/run-jdwp-tests.py
+++ b/tools/run-jdwp-tests.py
@@ -14,166 +14,171 @@
TEST_PACKAGE = 'org.apache.harmony.jpda.tests.jdwp'
VERSIONS = [
- 'default',
- '10.0.0',
- '9.0.0',
- '8.1.0',
- '7.0.0',
- '6.0.1',
- '5.1.1',
- '4.4.4'
+ 'default', '10.0.0', '9.0.0', '8.1.0', '7.0.0', '6.0.1', '5.1.1', '4.4.4'
]
-JUNIT_HOSTDEX = os.path.join(
- utils.REPO_ROOT,
- 'third_party', 'jdwp-tests', 'junit-hostdex.jar')
+JUNIT_HOSTDEX = os.path.join(utils.REPO_ROOT, 'third_party', 'jdwp-tests',
+ 'junit-hostdex.jar')
-JDWP_TESTS_HOSTDEX = os.path.join(
- utils.REPO_ROOT,
- 'third_party', 'jdwp-tests', 'apache-harmony-jdwp-tests-hostdex.jar')
+JDWP_TESTS_HOSTDEX = os.path.join(utils.REPO_ROOT, 'third_party', 'jdwp-tests',
+ 'apache-harmony-jdwp-tests-hostdex.jar')
-IMAGE='/system/non/existent/jdwp/image.art'
+IMAGE = '/system/non/existent/jdwp/image.art'
# Timeout in ms
-TIMEOUT=10000
+TIMEOUT = 10000
DEBUGGER_EXTRA_FLAGS = [
- '-Xjnigreflimit:2000',
- '-Duser.language=en',
- '-Duser.region=US',
- '-Djpda.settings.verbose=true',
- '-Djpda.settings.transportAddress=127.0.0.1:55107',
- '-Djpda.settings.timeout=%d' % TIMEOUT,
- '-Djpda.settings.waitingTime=%d' % TIMEOUT
+ '-Xjnigreflimit:2000', '-Duser.language=en', '-Duser.region=US',
+ '-Djpda.settings.verbose=true',
+ '-Djpda.settings.transportAddress=127.0.0.1:55107',
+ '-Djpda.settings.timeout=%d' % TIMEOUT,
+ '-Djpda.settings.waitingTime=%d' % TIMEOUT
]
-DEBUGGEE_EXTRA_FLAGS = [
-]
+DEBUGGEE_EXTRA_FLAGS = []
+
def get_art_dir(version):
- if version == '4.4.4':
- art_dir = 'dalvik'
- else:
- art_dir = version == 'default' and 'art' or 'art-%s' % version
- return os.path.join(utils.REPO_ROOT, 'tools', 'linux', art_dir)
+ if version == '4.4.4':
+ art_dir = 'dalvik'
+ else:
+ art_dir = version == 'default' and 'art' or 'art-%s' % version
+ return os.path.join(utils.REPO_ROOT, 'tools', 'linux', art_dir)
+
def get_lib_dir(version):
- return os.path.join(get_art_dir(version), 'lib')
+ return os.path.join(get_art_dir(version), 'lib')
+
def get_fw_dir(version):
- return os.path.join(get_art_dir(version), 'framework')
+ return os.path.join(get_art_dir(version), 'framework')
+
def get_vm(version):
- return os.path.join(get_art_dir(version), 'bin', 'dalvikvm')
+ return os.path.join(get_art_dir(version), 'bin', 'dalvikvm')
+
def setup_environment(version):
- art_dir = get_art_dir(version)
- lib_dir = get_lib_dir(version)
- android_data = os.path.join(utils.REPO_ROOT, 'build', 'tmp', version)
- if not os.path.isdir(android_data):
- os.makedirs(android_data)
- if version == '4.4.4':
- # Dalvik expects that the dalvik-cache dir already exists.
- dalvik_cache_dir = os.path.join(android_data, 'dalvik-cache')
- if not os.path.isdir(dalvik_cache_dir):
- os.makedirs(dalvik_cache_dir)
- os.environ['ANDROID_DATA'] = android_data
- os.environ['ANDROID_ROOT'] = art_dir
- os.environ['LD_LIBRARY_PATH'] = lib_dir
- os.environ['DYLD_LIBRARY_PATH'] = lib_dir
- os.environ['LD_USE_LOAD_BIAS'] = '1'
+ art_dir = get_art_dir(version)
+ lib_dir = get_lib_dir(version)
+ android_data = os.path.join(utils.REPO_ROOT, 'build', 'tmp', version)
+ if not os.path.isdir(android_data):
+ os.makedirs(android_data)
+ if version == '4.4.4':
+ # Dalvik expects that the dalvik-cache dir already exists.
+ dalvik_cache_dir = os.path.join(android_data, 'dalvik-cache')
+ if not os.path.isdir(dalvik_cache_dir):
+ os.makedirs(dalvik_cache_dir)
+ os.environ['ANDROID_DATA'] = android_data
+ os.environ['ANDROID_ROOT'] = art_dir
+ os.environ['LD_LIBRARY_PATH'] = lib_dir
+ os.environ['DYLD_LIBRARY_PATH'] = lib_dir
+ os.environ['LD_USE_LOAD_BIAS'] = '1'
+
def get_boot_libs(version):
- boot_libs = []
- if version == '4.4.4':
- # Dalvik
- boot_libs.extend(['core-hostdex.jar'])
- else:
- # ART
- boot_libs.extend(['core-libart-hostdex.jar'])
- if version != '5.1.1' and version != '6.0.1':
- boot_libs.extend(['core-oj-hostdex.jar'])
- boot_libs.extend(['apache-xml-hostdex.jar'])
- return [os.path.join(get_fw_dir(version), lib) for lib in boot_libs]
+ boot_libs = []
+ if version == '4.4.4':
+ # Dalvik
+ boot_libs.extend(['core-hostdex.jar'])
+ else:
+ # ART
+ boot_libs.extend(['core-libart-hostdex.jar'])
+ if version != '5.1.1' and version != '6.0.1':
+ boot_libs.extend(['core-oj-hostdex.jar'])
+ boot_libs.extend(['apache-xml-hostdex.jar'])
+ return [os.path.join(get_fw_dir(version), lib) for lib in boot_libs]
+
def get_common_flags(version):
- flags = []
- flags.extend(['-Xbootclasspath:%s' % ':'.join(get_boot_libs(version))])
- if version != '4.4.4':
- flags.extend(['-Ximage:%s' % IMAGE])
- if version != '5.1.1':
- flags.extend(['-Xcompiler-option', '--debuggable'])
- if version == '9.0.0' or version == '10.0.0':
- flags.extend(['-XjdwpProvider:internal'])
- return flags
+ flags = []
+ flags.extend(['-Xbootclasspath:%s' % ':'.join(get_boot_libs(version))])
+ if version != '4.4.4':
+ flags.extend(['-Ximage:%s' % IMAGE])
+ if version != '5.1.1':
+ flags.extend(['-Xcompiler-option', '--debuggable'])
+ if version == '9.0.0' or version == '10.0.0':
+ flags.extend(['-XjdwpProvider:internal'])
+ return flags
+
def get_debuggee_flags(version):
- return get_common_flags(version) + DEBUGGEE_EXTRA_FLAGS
+ return get_common_flags(version) + DEBUGGEE_EXTRA_FLAGS
+
def get_debugger_flags(version):
- return get_common_flags(version) + DEBUGGER_EXTRA_FLAGS
+ return get_common_flags(version) + DEBUGGER_EXTRA_FLAGS
+
def runDebuggee(version, args):
- art_dir = get_art_dir(version)
- lib_dir = get_lib_dir(version)
- fw_dir = get_fw_dir(version)
- cmd = [get_vm(version)]
- cmd.extend(get_debuggee_flags(version))
- cmd.extend(args)
- setup_environment(version)
- print("Running debuggee as: %s" % cmd)
- return subprocess.check_call(cmd)
+ art_dir = get_art_dir(version)
+ lib_dir = get_lib_dir(version)
+ fw_dir = get_fw_dir(version)
+ cmd = [get_vm(version)]
+ cmd.extend(get_debuggee_flags(version))
+ cmd.extend(args)
+ setup_environment(version)
+ print("Running debuggee as: %s" % cmd)
+ return subprocess.check_call(cmd)
+
def runDebugger(version, classpath, args):
- art_dir = get_art_dir(version)
- lib_dir = get_lib_dir(version)
- fw_dir = get_fw_dir(version)
- dalvikvm = os.path.join(art_dir, 'bin', 'dalvikvm')
- cmd = [dalvikvm]
- cmd.extend(['-classpath', '%s:%s' % (classpath, JUNIT_HOSTDEX)])
- cmd.extend(get_debugger_flags(version))
- cmd.append('-Djpda.settings.debuggeeJavaPath=%s %s' %\
- (dalvikvm, ' '.join(get_debuggee_flags(version))))
- cmd.extend(args)
- setup_environment(version)
- print("Running debugger as: " % cmd)
- return subprocess.check_call(cmd)
+ art_dir = get_art_dir(version)
+ lib_dir = get_lib_dir(version)
+ fw_dir = get_fw_dir(version)
+ dalvikvm = os.path.join(art_dir, 'bin', 'dalvikvm')
+ cmd = [dalvikvm]
+ cmd.extend(['-classpath', '%s:%s' % (classpath, JUNIT_HOSTDEX)])
+ cmd.extend(get_debugger_flags(version))
+ cmd.append('-Djpda.settings.debuggeeJavaPath=%s %s' %\
+ (dalvikvm, ' '.join(get_debuggee_flags(version))))
+ cmd.extend(args)
+ setup_environment(version)
+ print("Running debugger as: " % cmd)
+ return subprocess.check_call(cmd)
+
def usage():
- print("Usage: %s [--debuggee] [--version=<version>] [--classpath=<classpath>] <args>" % (sys.argv[0]))
- print("where <version> is one of:", ', '.join(VERSIONS))
- print(" and <classpath> is optional classpath (default: %s)" % JDWP_TESTS_HOSTDEX)
- print(" and <args> will be passed on as arguments to the art runtime.")
+ print(
+ "Usage: %s [--debuggee] [--version=<version>] [--classpath=<classpath>] <args>"
+ % (sys.argv[0]))
+ print("where <version> is one of:", ', '.join(VERSIONS))
+ print(" and <classpath> is optional classpath (default: %s)" %
+ JDWP_TESTS_HOSTDEX)
+ print(" and <args> will be passed on as arguments to the art runtime.")
+
def main():
- version = 'default'
- debuggee = False
- args = []
- classpath = JDWP_TESTS_HOSTDEX
- for arg in sys.argv[1:]:
- if arg == '--help':
- usage()
- return 0
- elif arg.startswith('--version='):
- version = arg[len('--version='):]
- elif arg.startswith('--classpath='):
- classpath = arg[len('--classpath='):]
+ version = 'default'
+ debuggee = False
+ args = []
+ classpath = JDWP_TESTS_HOSTDEX
+ for arg in sys.argv[1:]:
+ if arg == '--help':
+ usage()
+ return 0
+ elif arg.startswith('--version='):
+ version = arg[len('--version='):]
+ elif arg.startswith('--classpath='):
+ classpath = arg[len('--classpath='):]
+ elif arg == '--debuggee':
+ debuggee = True
+ else:
+ args.append(arg)
+ if version not in VERSIONS:
+ print("Invalid version %s" % version)
+ usage()
+ return 1
+ if not debuggee and len(args) == 0:
+ args.append(DEFAULT_TEST)
+ if debuggee:
+ return runDebuggee(version, args)
else:
- args.append(arg)
- if version not in VERSIONS:
- print("Invalid version %s" % version)
- usage()
- return 1
- if not debuggee and len(args) == 0:
- args.append(DEFAULT_TEST)
- if debuggee:
- return runDebuggee(version, args)
- else:
- if len(args) == 0:
- args.append(DEFAULT_TEST)
- elif len(args) == 1:
- args = [TEST_RUNNER, args[0]]
- return runDebugger(version, classpath, args)
+ if len(args) == 0:
+ args.append(DEFAULT_TEST)
+ elif len(args) == 1:
+ args = [TEST_RUNNER, args[0]]
+ return runDebugger(version, classpath, args)
+
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
diff --git a/tools/run-r8-on-gmscore.py b/tools/run-r8-on-gmscore.py
index 280941e..0f7d3b2 100755
--- a/tools/run-r8-on-gmscore.py
+++ b/tools/run-r8-on-gmscore.py
@@ -7,6 +7,7 @@
import run_on_app
if __name__ == '__main__':
- # Default compiler is R8.
- sys.exit(run_on_app.main(sys.argv[1:]
- + ['--app', 'gmscore', '--compiler', 'r8']))
+ # Default compiler is R8.
+ sys.exit(
+ run_on_app.main(sys.argv[1:] +
+ ['--app', 'gmscore', '--compiler', 'r8']))
diff --git a/tools/run_benchmark.py b/tools/run_benchmark.py
index 0d1e4fd..f98c8b7 100755
--- a/tools/run_benchmark.py
+++ b/tools/run_benchmark.py
@@ -19,117 +19,134 @@
]
GOLEM_BUILD_TARGETS = [utils.GRADLE_TASK_R8LIB] + GOLEM_BUILD_TARGETS_TESTS
+
def get_golem_resource_path(benchmark):
- return os.path.join('benchmarks', benchmark)
+ return os.path.join('benchmarks', benchmark)
+
def get_jdk_home(options, benchmark):
- if options.golem:
- return os.path.join(get_golem_resource_path(benchmark), 'linux')
- return None
+ if options.golem:
+ return os.path.join(get_golem_resource_path(benchmark), 'linux')
+ return None
+
def parse_options(argv):
- result = argparse.ArgumentParser(description = 'Run test-based benchmarks.')
- result.add_argument('--golem',
- help='Indicate this as a run on golem',
- default=False,
- action='store_true')
- result.add_argument('--benchmark',
- help='The test benchmark to run',
- required=True)
- result.add_argument('--target',
- help='The test target to run',
- required=True,
- # These should 1:1 with benchmarks/BenchmarkTarget.java
- choices=['d8', 'r8-full', 'r8-force', 'r8-compat'])
- result.add_argument('--nolib', '--no-lib', '--no-r8lib',
- help='Run the non-lib R8 build (default false)',
- default=False,
- action='store_true')
- result.add_argument('--no-build', '--no_build',
- help='Run without building first (default false)',
- default=False,
- action='store_true')
- result.add_argument('--enable-assertions', '--enable_assertions', '-ea',
- help='Enable assertions when running',
- default=False,
- action='store_true')
- result.add_argument('--print-times',
- help='Print timing information from r8',
- default=False,
- action='store_true')
- result.add_argument('--version', '-v',
- help='Use R8 version/hash for the run (default local build)',
- default=None)
- result.add_argument('--temp',
- help='A directory to use for temporaries and outputs.',
- default=None)
- return result.parse_known_args(argv)
+ result = argparse.ArgumentParser(description='Run test-based benchmarks.')
+ result.add_argument('--golem',
+ help='Indicate this as a run on golem',
+ default=False,
+ action='store_true')
+ result.add_argument('--benchmark',
+ help='The test benchmark to run',
+ required=True)
+ result.add_argument(
+ '--target',
+ help='The test target to run',
+ required=True,
+ # These should be 1:1 with benchmarks/BenchmarkTarget.java
+ choices=['d8', 'r8-full', 'r8-force', 'r8-compat'])
+ result.add_argument('--nolib',
+ '--no-lib',
+ '--no-r8lib',
+ help='Run the non-lib R8 build (default false)',
+ default=False,
+ action='store_true')
+ result.add_argument('--no-build',
+ '--no_build',
+ help='Run without building first (default false)',
+ default=False,
+ action='store_true')
+ result.add_argument('--enable-assertions',
+ '--enable_assertions',
+ '-ea',
+ help='Enable assertions when running',
+ default=False,
+ action='store_true')
+ result.add_argument('--print-times',
+ help='Print timing information from r8',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--version',
+ '-v',
+ help='Use R8 version/hash for the run (default local build)',
+ default=None)
+ result.add_argument('--temp',
+ help='A directory to use for temporaries and outputs.',
+ default=None)
+ return result.parse_known_args(argv)
+
def main(argv, temp):
- (options, args) = parse_options(argv)
+ (options, args) = parse_options(argv)
- if options.temp:
- temp = options.temp
+ if options.temp:
+ temp = options.temp
- if options.golem:
- options.no_build = True
+ if options.golem:
+ options.no_build = True
+ if options.nolib:
+ print("Error: golem should always run r8lib")
+ return 1
+
if options.nolib:
- print("Error: golem should always run r8lib")
- return 1
+ testBuildTargets = [
+ utils.GRADLE_TASK_TEST_JAR, utils.GRADLE_TASK_TEST_DEPS_JAR
+ ]
+ buildTargets = [utils.GRADLE_TASK_R8] + testBuildTargets
+ r8jar = utils.R8_JAR
+ testjars = [utils.R8_TESTS_JAR, utils.R8_TESTS_DEPS_JAR]
+ else:
+ testBuildTargets = GOLEM_BUILD_TARGETS_TESTS
+ buildTargets = GOLEM_BUILD_TARGETS
+ r8jar = utils.R8LIB_JAR
+ testjars = [
+ os.path.join(utils.R8LIB_TESTS_JAR),
+ os.path.join(utils.R8LIB_TESTS_DEPS_JAR)
+ ]
- if options.nolib:
- testBuildTargets = [utils.GRADLE_TASK_TEST_JAR, utils.GRADLE_TASK_TEST_DEPS_JAR]
- buildTargets = [utils.GRADLE_TASK_R8] + testBuildTargets
- r8jar = utils.R8_JAR
- testjars = [utils.R8_TESTS_JAR, utils.R8_TESTS_DEPS_JAR]
- else:
- testBuildTargets = GOLEM_BUILD_TARGETS_TESTS
- buildTargets = GOLEM_BUILD_TARGETS
- r8jar = utils.R8LIB_JAR
- testjars = [
- os.path.join(utils.R8LIB_TESTS_JAR),
- os.path.join(utils.R8LIB_TESTS_DEPS_JAR)
- ]
+ if options.version:
+ # r8 is downloaded so only test jar needs to be built.
+ buildTargets = testBuildTargets
+ r8jar = compiledump.download_distribution(options.version,
+ options.nolib, temp)
- if options.version:
- # r8 is downloaded so only test jar needs to be built.
- buildTargets = testBuildTargets
- r8jar = compiledump.download_distribution(options.version, options.nolib, temp)
+ if not options.no_build:
+ gradle.RunGradle(buildTargets + ['-Pno_internal'])
- if not options.no_build:
- gradle.RunGradle(buildTargets + ['-Pno_internal'])
+ if not options.golem:
+ # When running locally, change the working directory to be in 'temp'.
+ # This is hard to do properly within the JVM so we do it here.
+ with utils.ChangedWorkingDirectory(temp):
+ return run(options, r8jar, testjars)
+ else:
+ return run(options, r8jar, testjars)
- if not options.golem:
- # When running locally, change the working directory to be in 'temp'.
- # This is hard to do properly within the JVM so we do it here.
- with utils.ChangedWorkingDirectory(temp):
- return run(options, r8jar, testjars)
- else:
- return run(options, r8jar, testjars)
def run(options, r8jar, testjars):
- jdkhome = get_jdk_home(options, options.benchmark)
- cmd = [jdk.GetJavaExecutable(jdkhome)]
- if options.enable_assertions:
- cmd.append('-ea')
- if options.print_times:
- cmd.append('-Dcom.android.tools.r8.printtimes=1')
- if not options.golem:
+ jdkhome = get_jdk_home(options, options.benchmark)
+ cmd = [jdk.GetJavaExecutable(jdkhome)]
+ if options.enable_assertions:
+ cmd.append('-ea')
+ if options.print_times:
+ cmd.append('-Dcom.android.tools.r8.printtimes=1')
+ if not options.golem:
+ cmd.extend([
+ '-DUSE_NEW_GRADLE_SETUP=true',
+ f'-DTEST_DATA_LOCATION={utils.REPO_ROOT}/d8_r8/test_modules/tests_java_8/build/classes/java/test'
+ ])
+ cmd.extend(['-cp', ':'.join([r8jar] + testjars)])
cmd.extend([
- '-DUSE_NEW_GRADLE_SETUP=true',
- f'-DTEST_DATA_LOCATION={utils.REPO_ROOT}/d8_r8/test_modules/tests_java_8/build/classes/java/test'
- ])
- cmd.extend(['-cp', ':'.join([r8jar] + testjars)])
- cmd.extend([
- 'com.android.tools.r8.benchmarks.BenchmarkMainEntryRunner',
- options.benchmark,
- options.target,
- # When running locally the working directory is moved and we pass the
- # repository root as an argument. The runner can then setup dependencies.
- 'golem' if options.golem else utils.REPO_ROOT,
+ 'com.android.tools.r8.benchmarks.BenchmarkMainEntryRunner',
+ options.benchmark,
+ options.target,
+ # When running locally the working directory is moved and we pass the
+ # repository root as an argument. The runner can then setup dependencies.
+ 'golem' if options.golem else utils.REPO_ROOT,
])
- return subprocess.check_call(cmd)
+ return subprocess.check_call(cmd)
+
if __name__ == '__main__':
- with utils.TempDir() as temp:
- sys.exit(main(sys.argv[1:], temp))
+ with utils.TempDir() as temp:
+ sys.exit(main(sys.argv[1:], temp))
diff --git a/tools/run_kotlin_benchmarks.py b/tools/run_kotlin_benchmarks.py
index 86adbe2..f7befb3 100755
--- a/tools/run_kotlin_benchmarks.py
+++ b/tools/run_kotlin_benchmarks.py
@@ -12,7 +12,6 @@
import toolhelper
import utils
-
BENCHMARK_ROOT = os.path.join(utils.REPO_ROOT, 'third_party', 'benchmarks',
'kotlin-benches')
@@ -31,75 +30,87 @@
-allowaccessmodification
"""
-DEVICE_TEMP='/data/local/temp/bench'
+DEVICE_TEMP = '/data/local/temp/bench'
def parse_options():
- result = optparse.OptionParser()
- result.add_option('--api',
- help='Android api level',
- default='26',
- choices=['21', '22', '23', '24', '25', '26'])
- result.add_option('--benchmark',
- help='The benchmark to run',
- default='rgx',
- choices=['rgx', 'deltablue', 'sta', 'empty'])
- result.add_option('--use-device',
- help='Run the benchmark on an attaced device',
- default=False, action='store_true')
- return result.parse_args()
+ result = optparse.OptionParser()
+ result.add_option('--api',
+ help='Android api level',
+ default='26',
+ choices=['21', '22', '23', '24', '25', '26'])
+ result.add_option('--benchmark',
+ help='The benchmark to run',
+ default='rgx',
+ choices=['rgx', 'deltablue', 'sta', 'empty'])
+ result.add_option('--use-device',
+ help='Run the benchmark on an attached device',
+ default=False,
+ action='store_true')
+ return result.parse_args()
def get_jar_for_benchmark(benchmark):
- return os.path.join(BENCHMARK_ROOT,
- BENCHMARK_PATTERN.format(benchmark=benchmark))
+ return os.path.join(BENCHMARK_ROOT,
+ BENCHMARK_PATTERN.format(benchmark=benchmark))
+
def run_art(dex):
- command = ['bash', ART, '-cp', dex, BENCHMARK_MAIN_CLASS]
- utils.PrintCmd(command)
- benchmark_output = subprocess.check_output(command)
- return get_result(benchmark_output)
+ command = ['bash', ART, '-cp', dex, BENCHMARK_MAIN_CLASS]
+ utils.PrintCmd(command)
+ benchmark_output = subprocess.check_output(command)
+ return get_result(benchmark_output)
+
def adb(args):
- command = ['adb'] + args
- utils.PrintCmd(command)
- return subprocess.check_output(['adb'] + args)
+ command = ['adb'] + args
+ utils.PrintCmd(command)
+ return subprocess.check_output(['adb'] + args)
+
def get_result(output):
- # There is a lot of debug output, with the actual results being in the line with:
- # RESULTS,KtBench,KtBench,15719
- # structure.
- for result in [s for s in output.splitlines() if s.startswith('RESULTS')]:
- return s.split('RESULTS,KtBench,KtBench,')[1]
+ # There is a lot of debug output; the actual result is on a line of the form:
+ # RESULTS,KtBench,KtBench,15719
+ for result in [
+ s for s in output.decode('utf-8').splitlines()
+ if s.startswith('RESULTS')
+ ]:
+ return result.split('RESULTS,KtBench,KtBench,')[1]
+
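A minimal usage sketch of get_result, with hypothetical benchmark output around the RESULTS line documented in the comment above:

    output = b'...debug noise...\nRESULTS,KtBench,KtBench,15719\n'
    print(get_result(output))  # 15719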
def run_art_device(dex):
- adb(['wait-for-device', 'root'])
- device_dst = os.path.join(DEVICE_TEMP, os.path.basename(dex))
- adb(['push', dex, device_dst])
- benchmark_output = adb(['shell', 'dalvikvm', '-cp', device_dst, BENCHMARK_MAIN_CLASS])
- return get_result(benchmark_output)
+ adb(['wait-for-device', 'root'])
+ device_dst = os.path.join(DEVICE_TEMP, os.path.basename(dex))
+ adb(['push', dex, device_dst])
+ benchmark_output = adb(
+ ['shell', 'dalvikvm', '-cp', device_dst, BENCHMARK_MAIN_CLASS])
+ return get_result(benchmark_output)
+
def Main():
- (options, args) = parse_options()
- with utils.TempDir() as temp:
- dex_path = os.path.join(temp, "classes.jar")
- proguard_conf = os.path.join(temp, 'proguard.conf')
- with open(proguard_conf, 'w') as f:
- f.write(PROGUARD_CONF)
- benchmark_jar = get_jar_for_benchmark(options.benchmark)
- r8_args = [
- '--lib', utils.get_android_jar(26), # Only works with api 26
- '--output', dex_path,
- '--pg-conf', proguard_conf,
- '--min-api', str(options.api),
- benchmark_jar
- ]
- toolhelper.run('r8', r8_args, True)
- if options.use_device:
- result = run_art_device(dex_path)
- else:
- result = run_art(dex_path)
- print('Kotlin_{}(RunTimeRaw): {} ms'.format(options.benchmark, result))
+ (options, args) = parse_options()
+ with utils.TempDir() as temp:
+ dex_path = os.path.join(temp, "classes.jar")
+ proguard_conf = os.path.join(temp, 'proguard.conf')
+ with open(proguard_conf, 'w') as f:
+ f.write(PROGUARD_CONF)
+ benchmark_jar = get_jar_for_benchmark(options.benchmark)
+ r8_args = [
+ '--lib',
+ utils.get_android_jar(26), # Only works with api 26
+ '--output',
+ dex_path,
+ '--pg-conf',
+ proguard_conf,
+ '--min-api',
+ str(options.api),
+ benchmark_jar
+ ]
+ toolhelper.run('r8', r8_args, True)
+ if options.use_device:
+ result = run_art_device(dex_path)
+ else:
+ result = run_art(dex_path)
+ print('Kotlin_{}(RunTimeRaw): {} ms'.format(options.benchmark, result))
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
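Note that subprocess.check_output returns bytes on Python 3, so the startswith('RESULTS') test in get_result above raises a TypeError on undecoded output. A defensive sketch under that assumption (hypothetical helper, not part of this change):

    def get_result_py3(output):
        # check_output returns bytes on Python 3; accept either form.
        if isinstance(output, bytes):
            output = output.decode('UTF-8')
        for line in output.splitlines():
            if line.startswith('RESULTS'):
                return line.split('RESULTS,KtBench,KtBench,')[1]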
diff --git a/tools/run_on_app.py b/tools/run_on_app.py
index 0196e3f..8684f89 100755
--- a/tools/run_on_app.py
+++ b/tools/run_on_app.py
@@ -40,164 +40,167 @@
FIND_MIN_XMX_FILE = 'find_min_xmx_results'
FIND_MIN_XMX_DIR = 'find_min_xmx'
+
def ParseOptions(argv):
- result = argparse.ArgumentParser()
- result.add_argument('--compiler',
- help='The compiler to use',
- choices=COMPILERS)
- result.add_argument('--compiler-build',
- help='Compiler build to use',
- choices=COMPILER_BUILDS,
- default='lib')
- result.add_argument('--no-fail-fast',
- help='Whether run_on_app.py should report all failures '
- 'and not just the first one',
- default=False,
- action='store_true')
- result.add_argument('--hash',
- help='The version of D8/R8 to use')
- result.add_argument('--app',
- help='What app to run on',
- choices=APPS)
- result.add_argument('--run-all',
- help='Compile all possible combinations',
- default=False,
- action='store_true')
- result.add_argument('--expect-oom',
- help='Expect that compilation will fail with an OOM',
- default=False,
- action='store_true')
- result.add_argument('--type',
- help='Default for R8: deploy, for D8: proguarded',
- choices=TYPES)
- result.add_argument('--out',
- help='Where to place the output',
- default=utils.BUILD)
- result.add_argument('--no-build',
- help='Run without building first',
- default=False,
- action='store_true')
- result.add_argument('--max-memory',
- help='The maximum memory in MB to run with',
- type=int)
- result.add_argument('--find-min-xmx',
- help='Find the minimum amount of memory we can run in',
- default=False,
- action='store_true')
- result.add_argument('--find-min-xmx-min-memory',
- help='Setting the minimum memory baseline to run in',
- type=int)
- result.add_argument('--find-min-xmx-max-memory',
- help='Setting the maximum memory baseline to run in',
- type=int)
- result.add_argument('--find-min-xmx-range-size',
- help='Setting the size of the acceptable memory range',
- type=int,
- default=32)
- result.add_argument('--find-min-xmx-archive',
- help='Archive find-min-xmx results on GCS',
- default=False,
- action='store_true')
- result.add_argument('--no-extra-pgconf', '--no_extra_pgconf',
- help='Build without the following extra rules: ' +
- '-printconfiguration, -printmapping, -printseeds, ' +
- '-printusage',
- default=False,
- action='store_true')
- result.add_argument('--timeout',
- type=int,
- default=0,
- help='Set timeout instead of waiting for OOM.')
- result.add_argument('--ignore-java-version',
- help='Do not check java version',
- default=False,
- action='store_true')
- result.add_argument('--no-libraries',
- help='Do not pass in libraries, even if they exist in conf',
- default=False,
- action='store_true')
- result.add_argument('--disable-assertions', '--disable_assertions', '-da',
- help='Disable Java assertions when running the compiler '
- '(default enabled)',
- default=False,
- action='store_true')
- result.add_argument('--debug-agent',
- help='Run with debug agent.',
- default=False,
- action='store_true')
- result.add_argument('--version',
- help='The version of the app to run')
- result.add_argument('-k',
- help='Override the default ProGuard keep rules')
- result.add_argument('--compiler-flags',
- help='Additional option(s) for the compiler. ' +
- 'If passing several options use a quoted string.')
- result.add_argument('--r8-flags',
- help='Additional option(s) for the compiler. ' +
- 'Same as --compiler-flags, keeping it for backward'
- ' compatibility. ' +
- 'If passing several options use a quoted string.')
- result.add_argument('--track-memory-to-file',
- help='Track how much memory the jvm is using while ' +
- ' compiling. Output to the specified file.')
- result.add_argument('--profile',
- help='Profile R8 run.',
- default=False,
- action='store_true')
- result.add_argument('--dump-args-file',
- help='Dump a file with the arguments for the specified ' +
- 'configuration. For use as a @<file> argument to perform ' +
- 'the run.')
- result.add_argument('--print-runtimeraw',
- metavar='BENCHMARKNAME',
- help='Print the line \'<BENCHMARKNAME>(RunTimeRaw):' +
+ result = argparse.ArgumentParser()
+ result.add_argument('--compiler',
+ help='The compiler to use',
+ choices=COMPILERS)
+ result.add_argument('--compiler-build',
+ help='Compiler build to use',
+ choices=COMPILER_BUILDS,
+ default='lib')
+ result.add_argument('--no-fail-fast',
+ help='Whether run_on_app.py should report all failures '
+ 'and not just the first one',
+ default=False,
+ action='store_true')
+ result.add_argument('--hash', help='The version of D8/R8 to use')
+ result.add_argument('--app', help='What app to run on', choices=APPS)
+ result.add_argument('--run-all',
+ help='Compile all possible combinations',
+ default=False,
+ action='store_true')
+ result.add_argument('--expect-oom',
+ help='Expect that compilation will fail with an OOM',
+ default=False,
+ action='store_true')
+ result.add_argument('--type',
+ help='Default for R8: deploy, for D8: proguarded',
+ choices=TYPES)
+ result.add_argument('--out',
+ help='Where to place the output',
+ default=utils.BUILD)
+ result.add_argument('--no-build',
+ help='Run without building first',
+ default=False,
+ action='store_true')
+ result.add_argument('--max-memory',
+ help='The maximum memory in MB to run with',
+ type=int)
+ result.add_argument('--find-min-xmx',
+ help='Find the minimum amount of memory we can run in',
+ default=False,
+ action='store_true')
+ result.add_argument('--find-min-xmx-min-memory',
+ help='Setting the minimum memory baseline to run in',
+ type=int)
+ result.add_argument('--find-min-xmx-max-memory',
+ help='Setting the maximum memory baseline to run in',
+ type=int)
+ result.add_argument('--find-min-xmx-range-size',
+ help='Setting the size of the acceptable memory range',
+ type=int,
+ default=32)
+ result.add_argument('--find-min-xmx-archive',
+ help='Archive find-min-xmx results on GCS',
+ default=False,
+ action='store_true')
+ result.add_argument('--no-extra-pgconf',
+ '--no_extra_pgconf',
+ help='Build without the following extra rules: ' +
+ '-printconfiguration, -printmapping, -printseeds, ' +
+ '-printusage',
+ default=False,
+ action='store_true')
+ result.add_argument('--timeout',
+ type=int,
+ default=0,
+ help='Set timeout instead of waiting for OOM.')
+ result.add_argument('--ignore-java-version',
+ help='Do not check java version',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--no-libraries',
+ help='Do not pass in libraries, even if they exist in conf',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--disable-assertions',
+ '--disable_assertions',
+ '-da',
+ help='Disable Java assertions when running the compiler '
+ '(default enabled)',
+ default=False,
+ action='store_true')
+ result.add_argument('--debug-agent',
+ help='Run with debug agent.',
+ default=False,
+ action='store_true')
+ result.add_argument('--version', help='The version of the app to run')
+ result.add_argument('-k', help='Override the default ProGuard keep rules')
+ result.add_argument('--compiler-flags',
+ help='Additional option(s) for the compiler. ' +
+ 'If passing several options use a quoted string.')
+ result.add_argument('--r8-flags',
+ help='Additional option(s) for the compiler. ' +
+ 'Same as --compiler-flags, keeping it for backward'
+ ' compatibility. ' +
+ 'If passing several options use a quoted string.')
+ result.add_argument('--track-memory-to-file',
+ help='Track how much memory the jvm is using while ' +
+ ' compiling. Output to the specified file.')
+ result.add_argument('--profile',
+ help='Profile R8 run.',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--dump-args-file',
+ help='Dump a file with the arguments for the specified ' +
+ 'configuration. For use as a @<file> argument to perform ' + 'the run.')
+ result.add_argument('--print-runtimeraw',
+ metavar='BENCHMARKNAME',
+ help='Print the line \'<BENCHMARKNAME>(RunTimeRaw):' +
' <elapsed> ms\' at the end where <elapsed> is' +
' the elapsed time in milliseconds.')
- result.add_argument('--print-memoryuse',
- metavar='BENCHMARKNAME',
- help='Print the line \'<BENCHMARKNAME>(MemoryUse):' +
+ result.add_argument('--print-memoryuse',
+ metavar='BENCHMARKNAME',
+ help='Print the line \'<BENCHMARKNAME>(MemoryUse):' +
                     ' <mem>\' at the end where <mem> is the peak' +
                     ' resident set size (VmHWM) in bytes.')
- result.add_argument('--print-dexsegments',
- metavar='BENCHMARKNAME',
- help='Print the sizes of individual dex segments as ' +
+ result.add_argument('--print-dexsegments',
+ metavar='BENCHMARKNAME',
+ help='Print the sizes of individual dex segments as ' +
'\'<BENCHMARKNAME>-<segment>(CodeSize): <bytes>\'')
- result.add_argument('--track-time-in-memory',
- help='Plot the times taken from memory starting point to '
- 'end-point with defined memory increment',
- default=False,
- action='store_true')
- result.add_argument('--track-time-in-memory-max',
- help='Setting the maximum memory baseline to run in',
- type=int)
- result.add_argument('--track-time-in-memory-min',
- help='Setting the minimum memory baseline to run in',
- type=int)
- result.add_argument('--track-time-in-memory-increment',
- help='Setting the increment',
- type=int,
- default=32)
- result.add_argument('--print-times',
- help='Include timing',
- default=False,
- action='store_true')
- result.add_argument('--cpu-list',
- help='Run under \'taskset\' with these CPUs. See '
- 'the \'taskset\' -c option for the format')
- result.add_argument('--quiet',
- help='Disable compiler logging',
- default=False,
- action='store_true')
- result.add_argument('--workers',
- help='Number of workers to use',
- default=1,
- type=int)
- (options, args) = result.parse_known_args(argv)
- assert not options.hash or options.no_build, (
- 'Argument --no-build is required when using --hash')
- assert not options.hash or options.compiler_build == 'full', (
- 'Compiler build lib not yet supported with --hash')
- return (options, args)
+ result.add_argument(
+ '--track-time-in-memory',
+ help='Plot the times taken from memory starting point to '
+ 'end-point with defined memory increment',
+ default=False,
+ action='store_true')
+ result.add_argument('--track-time-in-memory-max',
+ help='Setting the maximum memory baseline to run in',
+ type=int)
+ result.add_argument('--track-time-in-memory-min',
+ help='Setting the minimum memory baseline to run in',
+ type=int)
+ result.add_argument('--track-time-in-memory-increment',
+ help='Setting the increment',
+ type=int,
+ default=32)
+ result.add_argument('--print-times',
+ help='Include timing',
+ default=False,
+ action='store_true')
+ result.add_argument('--cpu-list',
+ help='Run under \'taskset\' with these CPUs. See '
+ 'the \'taskset\' -c option for the format')
+ result.add_argument('--quiet',
+ help='Disable compiler logging',
+ default=False,
+ action='store_true')
+ result.add_argument('--workers',
+ help='Number of workers to use',
+ default=1,
+ type=int)
+ (options, args) = result.parse_known_args(argv)
+ assert not options.hash or options.no_build, (
+ 'Argument --no-build is required when using --hash')
+ assert not options.hash or options.compiler_build == 'full', (
+ 'Compiler build lib not yet supported with --hash')
+ return (options, args)
+
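For orientation, a typical invocation combining the flags parsed above might look like the following; the values, including the benchmark name, are illustrative only:

    import subprocess

    # Compile the gmail app with R8 and report the wall-clock time.
    subprocess.check_call([
        'tools/run_on_app.py', '--compiler', 'r8', '--app', 'gmail',
        '--version', '170604.16', '--print-runtimeraw', 'GmailBench'
    ])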
# Most apps have -printmapping, -printseeds, -printusage and
# -printconfiguration in the Proguard configuration. However we don't
@@ -205,570 +208,597 @@
# Instead generate an auxiliary Proguard configuration placing these
# output files together with the dex output.
def GenerateAdditionalProguardConfiguration(temp, outdir):
- name = "output.config"
- with open(os.path.join(temp, name), 'w') as f:
- f.write('-printmapping ' + os.path.join(outdir, 'proguard.map') + "\n")
- f.write('-printseeds ' + os.path.join(outdir, 'proguard.seeds') + "\n")
- f.write('-printusage ' + os.path.join(outdir, 'proguard.usage') + "\n")
- f.write('-printconfiguration ' + os.path.join(outdir, 'proguard.config') + "\n")
- return os.path.abspath(f.name)
+ name = "output.config"
+ with open(os.path.join(temp, name), 'w') as f:
+ f.write('-printmapping ' + os.path.join(outdir, 'proguard.map') + "\n")
+ f.write('-printseeds ' + os.path.join(outdir, 'proguard.seeds') + "\n")
+ f.write('-printusage ' + os.path.join(outdir, 'proguard.usage') + "\n")
+ f.write('-printconfiguration ' +
+ os.path.join(outdir, 'proguard.config') + "\n")
+ return os.path.abspath(f.name)
+
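With outdir set to, say, /tmp/out, the generated output.config reads:

    -printmapping /tmp/out/proguard.map
    -printseeds /tmp/out/proguard.seeds
    -printusage /tmp/out/proguard.usage
    -printconfiguration /tmp/out/proguard.config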
# Please add a bug number for disabled permutations, and explicitly write
# Bug: #BUG in the commit message when disabling, to ensure re-enabling.
DISABLED_PERMUTATIONS = [
- # (app, version, type), e.g., ('gmail', '180826.15', 'deploy')
+ # (app, version, type), e.g., ('gmail', '180826.15', 'deploy')
]
+
def get_permutations():
- data_providers = {
- 'nest': nest_data,
- 'youtube': youtube_data,
- 'chrome': chrome_data,
- 'gmail': gmail_data,
- }
- # Check to ensure that we add all variants here.
- assert len(APPS) == len(data_providers)
- for app, data in data_providers.items():
- for version in data.VERSIONS:
- for type in data.VERSIONS[version]:
- if (app, version, type) not in DISABLED_PERMUTATIONS:
- # Only run with R8 lib to reduce cycle times.
- for use_r8lib in [True]:
- yield app, version, type, use_r8lib
+ data_providers = {
+ 'nest': nest_data,
+ 'youtube': youtube_data,
+ 'chrome': chrome_data,
+ 'gmail': gmail_data,
+ }
+ # Check to ensure that we add all variants here.
+ assert len(APPS) == len(data_providers)
+ for app, data in data_providers.items():
+ for version in data.VERSIONS:
+ for type in data.VERSIONS[version]:
+ if (app, version, type) not in DISABLED_PERMUTATIONS:
+ # Only run with R8 lib to reduce cycle times.
+ for use_r8lib in [True]:
+ yield app, version, type, use_r8lib
+
def run_all(options, args):
- # Build first so that each job won't.
- if should_build(options):
- gradle.RunGradle([utils.GRADLE_TASK_R8LIB])
- options.no_build = True
- assert not should_build(options)
+ # Build first so that each job won't.
+ if should_build(options):
+ gradle.RunGradle([utils.GRADLE_TASK_R8LIB])
+ options.no_build = True
+ assert not should_build(options)
- # Args will be destroyed
- assert len(args) == 0
- jobs = []
- for name, version, type, use_r8lib in get_permutations():
- compiler = 'r8' if type == 'deploy' else 'd8'
- compiler_build = 'lib' if use_r8lib else 'full'
- fixed_options = copy.copy(options)
- fixed_options.app = name
- fixed_options.version = version
- fixed_options.compiler = compiler
- fixed_options.compiler_build = compiler_build
- fixed_options.type = type
- jobs.append(
- create_job(
- compiler, compiler_build, name, fixed_options, type, version))
- exit_code = thread_utils.run_in_parallel(
- jobs,
- number_of_workers=options.workers,
- stop_on_first_failure=not options.no_fail_fast)
- exit(exit_code)
+ # Args will be destroyed
+ assert len(args) == 0
+ jobs = []
+ for name, version, type, use_r8lib in get_permutations():
+ compiler = 'r8' if type == 'deploy' else 'd8'
+ compiler_build = 'lib' if use_r8lib else 'full'
+ fixed_options = copy.copy(options)
+ fixed_options.app = name
+ fixed_options.version = version
+ fixed_options.compiler = compiler
+ fixed_options.compiler_build = compiler_build
+ fixed_options.type = type
+ jobs.append(
+ create_job(compiler, compiler_build, name, fixed_options, type,
+ version))
+ exit_code = thread_utils.run_in_parallel(
+ jobs,
+ number_of_workers=options.workers,
+ stop_on_first_failure=not options.no_fail_fast)
+ exit(exit_code)
+
def create_job(compiler, compiler_build, name, options, type, version):
- return lambda worker_id: run_job(
- compiler, compiler_build, name, options, type, version, worker_id)
+ return lambda worker_id: run_job(compiler, compiler_build, name, options,
+ type, version, worker_id)
-def run_job(
- compiler, compiler_build, name, options, type, version, worker_id):
- print_thread(
- 'Executing %s/%s with %s %s %s'
- % (compiler, compiler_build, name, version, type),
- worker_id)
- if worker_id is not None:
- options.out = os.path.join(options.out, str(worker_id))
- os.makedirs(options.out, exist_ok=True)
- exit_code = run_with_options(options, [], worker_id=worker_id)
- if exit_code:
+
+def run_job(compiler, compiler_build, name, options, type, version, worker_id):
print_thread(
- 'Failed %s %s %s with %s/%s'
- % (name, version, type, compiler, compiler_build),
- worker_id)
- return exit_code
+ 'Executing %s/%s with %s %s %s' %
+ (compiler, compiler_build, name, version, type), worker_id)
+ if worker_id is not None:
+ options.out = os.path.join(options.out, str(worker_id))
+ os.makedirs(options.out, exist_ok=True)
+ exit_code = run_with_options(options, [], worker_id=worker_id)
+ if exit_code:
+ print_thread(
+ 'Failed %s %s %s with %s/%s' %
+ (name, version, type, compiler, compiler_build), worker_id)
+ return exit_code
+
def find_min_xmx(options, args):
- # Args will be destroyed
- assert len(args) == 0
- # If we can run in 128 MB then we are good (which we can for small examples
- # or D8 on medium sized examples)
- if options.find_min_xmx_min_memory:
- not_working = options.find_min_xmx_min_memory
- elif options.compiler == 'd8':
- not_working = 128
- else:
- not_working = 1024
- if options.find_min_xmx_max_memory:
- working = options.find_min_xmx_max_memory
- else:
- working = 1024 * 8
- exit_code = 0
- range = int(options.find_min_xmx_range_size)
- while working - not_working > range:
- next_candidate = int(working - ((working - not_working)/2))
- print('working: %s, non_working: %s, next_candidate: %s' %
- (working, not_working, next_candidate))
- extra_args = ['-Xmx%sM' % next_candidate]
- t0 = time.time()
- exit_code = run_with_options(options, [], extra_args)
- t1 = time.time()
- print('Running took: %s ms' % (1000.0 * (t1 - t0)))
- if exit_code != 0:
- if exit_code not in [OOM_EXIT_CODE, TIMEOUT_KILL_CODE]:
- print('Non OOM/Timeout error executing, exiting')
- return 2
- if exit_code == 0:
- working = next_candidate
- elif exit_code == TIMEOUT_KILL_CODE:
- print('Timeout. Continue to the next candidate.')
- not_working = next_candidate
+ # Args will be destroyed
+ assert len(args) == 0
+ # If we can run in 128 MB then we are good (which we can for small examples
+ # or D8 on medium sized examples)
+ if options.find_min_xmx_min_memory:
+ not_working = options.find_min_xmx_min_memory
+ elif options.compiler == 'd8':
+ not_working = 128
else:
- assert exit_code == OOM_EXIT_CODE
- not_working = next_candidate
+ not_working = 1024
+ if options.find_min_xmx_max_memory:
+ working = options.find_min_xmx_max_memory
+ else:
+ working = 1024 * 8
+ exit_code = 0
+ range = int(options.find_min_xmx_range_size)
+ while working - not_working > range:
+ next_candidate = int(working - ((working - not_working) / 2))
+        print('working: %s, not_working: %s, next_candidate: %s' %
+ (working, not_working, next_candidate))
+ extra_args = ['-Xmx%sM' % next_candidate]
+ t0 = time.time()
+ exit_code = run_with_options(options, [], extra_args)
+ t1 = time.time()
+ print('Running took: %s ms' % (1000.0 * (t1 - t0)))
+ if exit_code != 0:
+ if exit_code not in [OOM_EXIT_CODE, TIMEOUT_KILL_CODE]:
+ print('Non OOM/Timeout error executing, exiting')
+ return 2
+ if exit_code == 0:
+ working = next_candidate
+ elif exit_code == TIMEOUT_KILL_CODE:
+ print('Timeout. Continue to the next candidate.')
+ not_working = next_candidate
+ else:
+ assert exit_code == OOM_EXIT_CODE
+ not_working = next_candidate
- assert working - not_working <= range
- found_range = 'Found range: %s - %s' % (not_working, working)
- print(found_range)
+ assert working - not_working <= range
+ found_range = 'Found range: %s - %s' % (not_working, working)
+ print(found_range)
- if options.find_min_xmx_archive:
- sha = utils.get_HEAD_sha1()
- (version, _) = get_version_and_data(options)
- destination = os.path.join(
- utils.R8_TEST_RESULTS_BUCKET,
- FIND_MIN_XMX_DIR,
- sha,
- options.compiler,
- options.compiler_build,
- options.app,
- version,
- get_type(options))
- gs_destination = 'gs://%s' % destination
- utils.archive_value(FIND_MIN_XMX_FILE, gs_destination, found_range + '\n')
+ if options.find_min_xmx_archive:
+ sha = utils.get_HEAD_sha1()
+ (version, _) = get_version_and_data(options)
+ destination = os.path.join(utils.R8_TEST_RESULTS_BUCKET,
+ FIND_MIN_XMX_DIR, sha, options.compiler,
+ options.compiler_build, options.app, version,
+ get_type(options))
+ gs_destination = 'gs://%s' % destination
+ utils.archive_value(FIND_MIN_XMX_FILE, gs_destination,
+ found_range + '\n')
- return 0
+ return 0
+
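The loop above is a plain bisection on the Xmx value: each run halves the working/not_working interval, so with the R8 defaults (not_working = 1024, working = 8192, range = 32) it converges in about log2((8192 - 1024) / 32) = log2(224), i.e. roughly 8 compilations, assuming no timeouts widen the search.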
def print_min_xmx_ranges_for_hash(hash, compiler, compiler_build):
- app_directory = os.path.join(
- utils.R8_TEST_RESULTS_BUCKET,
- FIND_MIN_XMX_DIR,
- hash,
- compiler,
- compiler_build)
- gs_base = 'gs://%s' % app_directory
- for app in utils.ls_files_on_cloud_storage(gs_base).strip().split('\n'):
- for version in utils.ls_files_on_cloud_storage(app).strip().split('\n'):
- for type in utils.ls_files_on_cloud_storage(version).strip().split('\n'):
- gs_location = '%s%s' % (type, FIND_MIN_XMX_FILE)
- value = utils.cat_file_on_cloud_storage(gs_location, ignore_errors=True)
- print('%s\n' % value)
+ app_directory = os.path.join(utils.R8_TEST_RESULTS_BUCKET, FIND_MIN_XMX_DIR,
+ hash, compiler, compiler_build)
+ gs_base = 'gs://%s' % app_directory
+ for app in utils.ls_files_on_cloud_storage(gs_base).strip().split('\n'):
+ for version in utils.ls_files_on_cloud_storage(app).strip().split('\n'):
+ for type in utils.ls_files_on_cloud_storage(version).strip().split(
+ '\n'):
+ gs_location = '%s%s' % (type, FIND_MIN_XMX_FILE)
+ value = utils.cat_file_on_cloud_storage(gs_location,
+ ignore_errors=True)
+ print('%s\n' % value)
+
def track_time_in_memory(options, args):
- # Args will be destroyed
- assert len(args) == 0
- if not options.track_time_in_memory_min:
- raise Exception(
- 'You have to specify --track_time_in_memory_min when running with '
- '--track-time-in-memory')
- if not options.track_time_in_memory_max:
- raise Exception(
- 'You have to specify --track_time_in_memory_max when running with '
- '--track-time-in-memory')
- if not options.track_time_in_memory_increment:
- raise Exception(
- 'You have to specify --track_time_in_memory_increment when running '
- 'with --track-time-in-memory')
- current = options.track_time_in_memory_min
- print('Memory (KB)\tTime (ms)')
- with utils.TempDir() as temp:
- stdout = os.path.join(temp, 'stdout')
- stdout_fd = open(stdout, 'w')
- while current <= options.track_time_in_memory_max:
- extra_args = ['-Xmx%sM' % current]
- t0 = time.time()
- exit_code = run_with_options(options, [], extra_args, stdout_fd, quiet=True)
- t1 = time.time()
- total = (1000.0 * (t1 - t0)) if exit_code == 0 else -1
- print('%s\t%s' % (current, total))
- current += options.track_time_in_memory_increment
+ # Args will be destroyed
+ assert len(args) == 0
+ if not options.track_time_in_memory_min:
+ raise Exception(
+            'You have to specify --track-time-in-memory-min when running with '
+ '--track-time-in-memory')
+ if not options.track_time_in_memory_max:
+ raise Exception(
+            'You have to specify --track-time-in-memory-max when running with '
+ '--track-time-in-memory')
+ if not options.track_time_in_memory_increment:
+ raise Exception(
+            'You have to specify --track-time-in-memory-increment when running '
+ 'with --track-time-in-memory')
+ current = options.track_time_in_memory_min
+ print('Memory (KB)\tTime (ms)')
+ with utils.TempDir() as temp:
+ stdout = os.path.join(temp, 'stdout')
+ stdout_fd = open(stdout, 'w')
+ while current <= options.track_time_in_memory_max:
+ extra_args = ['-Xmx%sM' % current]
+ t0 = time.time()
+ exit_code = run_with_options(options, [],
+ extra_args,
+ stdout_fd,
+ quiet=True)
+ t1 = time.time()
+ total = (1000.0 * (t1 - t0)) if exit_code == 0 else -1
+ print('%s\t%s' % (current, total))
+ current += options.track_time_in_memory_increment
- return 0
+ return 0
+
def main(argv):
- (options, args) = ParseOptions(argv)
- if options.expect_oom and not options.max_memory:
- raise Exception(
- 'You should only use --expect-oom if also specifying --max-memory')
- if options.expect_oom and options.timeout:
- raise Exception(
- 'You should not use --timeout when also specifying --expect-oom')
- if options.find_min_xmx and options.track_time_in_memory:
- raise Exception(
- 'You cannot both find the min xmx and track time at the same time')
- if options.run_all:
- return run_all(options, args)
- if options.find_min_xmx:
- return find_min_xmx(options, args)
- if options.track_time_in_memory:
- return track_time_in_memory(options, args)
- exit_code = run_with_options(options, args, quiet=options.quiet)
- if options.expect_oom:
- exit_code = 0 if exit_code == OOM_EXIT_CODE else 1
- return exit_code
+ (options, args) = ParseOptions(argv)
+ if options.expect_oom and not options.max_memory:
+ raise Exception(
+ 'You should only use --expect-oom if also specifying --max-memory')
+ if options.expect_oom and options.timeout:
+ raise Exception(
+ 'You should not use --timeout when also specifying --expect-oom')
+ if options.find_min_xmx and options.track_time_in_memory:
+ raise Exception(
+ 'You cannot both find the min xmx and track time at the same time')
+ if options.run_all:
+ return run_all(options, args)
+ if options.find_min_xmx:
+ return find_min_xmx(options, args)
+ if options.track_time_in_memory:
+ return track_time_in_memory(options, args)
+ exit_code = run_with_options(options, args, quiet=options.quiet)
+ if options.expect_oom:
+ exit_code = 0 if exit_code == OOM_EXIT_CODE else 1
+ return exit_code
+
def get_version_and_data(options):
- if options.app == 'nest':
- version = options.version or '20180926'
- data = nest_data
- elif options.app == 'youtube':
- version = options.version or youtube_data.LATEST_VERSION
- data = youtube_data
- elif options.app == 'chrome':
- version = options.version or '180917'
- data = chrome_data
- elif options.app == 'gmail':
- version = options.version or '170604.16'
- data = gmail_data
- else:
- raise Exception("You need to specify '--app={}'".format('|'.join(APPS)))
- return version, data
+ if options.app == 'nest':
+ version = options.version or '20180926'
+ data = nest_data
+ elif options.app == 'youtube':
+ version = options.version or youtube_data.LATEST_VERSION
+ data = youtube_data
+ elif options.app == 'chrome':
+ version = options.version or '180917'
+ data = chrome_data
+ elif options.app == 'gmail':
+ version = options.version or '170604.16'
+ data = gmail_data
+ else:
+ raise Exception("You need to specify '--app={}'".format('|'.join(APPS)))
+ return version, data
+
def get_type(options):
- if not options.type:
- return 'deploy' if options.compiler == 'r8' else 'proguarded'
- return options.type
+ if not options.type:
+ return 'deploy' if options.compiler == 'r8' else 'proguarded'
+ return options.type
+
def has_injars_and_libraryjars(pgconfs):
- # Check if there are -injars and -libraryjars in the configuration.
- has_injars = False
- has_libraryjars = False
- for pgconf in pgconfs:
- pgconf_dirname = os.path.abspath(os.path.dirname(pgconf))
- with open(pgconf) as pgconf_file:
- for line in pgconf_file:
- trimmed = line.strip()
- if trimmed.startswith('-injars'):
- has_injars = True
- elif trimmed.startswith('-libraryjars'):
- has_libraryjars = True
- if has_injars and has_libraryjars:
- return True
- return False
+ # Check if there are -injars and -libraryjars in the configuration.
+ has_injars = False
+ has_libraryjars = False
+ for pgconf in pgconfs:
+ pgconf_dirname = os.path.abspath(os.path.dirname(pgconf))
+ with open(pgconf) as pgconf_file:
+ for line in pgconf_file:
+ trimmed = line.strip()
+ if trimmed.startswith('-injars'):
+ has_injars = True
+ elif trimmed.startswith('-libraryjars'):
+ has_libraryjars = True
+ if has_injars and has_libraryjars:
+ return True
+ return False
+
def check_no_injars_and_no_libraryjars(pgconfs):
- # Ensure that there are no -injars or -libraryjars in the configuration.
- for pgconf in pgconfs:
- pgconf_dirname = os.path.abspath(os.path.dirname(pgconf))
- with open(pgconf) as pgconf_file:
- for line in pgconf_file:
- trimmed = line.strip()
- if trimmed.startswith('-injars'):
- raise Exception("Unexpected -injars found in " + pgconf)
- elif trimmed.startswith('-libraryjars'):
- raise Exception("Unexpected -libraryjars found in " + pgconf)
+ # Ensure that there are no -injars or -libraryjars in the configuration.
+ for pgconf in pgconfs:
+ pgconf_dirname = os.path.abspath(os.path.dirname(pgconf))
+ with open(pgconf) as pgconf_file:
+ for line in pgconf_file:
+ trimmed = line.strip()
+ if trimmed.startswith('-injars'):
+ raise Exception("Unexpected -injars found in " + pgconf)
+ elif trimmed.startswith('-libraryjars'):
+ raise Exception("Unexpected -libraryjars found in " +
+ pgconf)
+
def should_build(options):
- return not options.no_build
+ return not options.no_build
-def build_desugared_library_dex(
- options,
- quiet,
- temp,
- android_java8_libs,
- desugared_lib_pg_conf,
- inputs,
- outdir):
- if not inputs:
- raise Exception(
- "If 'android_java8_libs' is specified the inputs must be explicit"
- + "(not defined using '-injars' in Proguard configuration files)")
- if outdir.endswith('.zip') or outdir.endswith('.jar'):
- raise Exception(
- "If 'android_java8_libs' is specified the output must be a directory")
- jar = None
- main = None
- if options.hash:
- jar = os.path.join(utils.LIBS, 'r8-' + options.hash + '.jar')
- main = 'com.android.tools.r8.R8'
+def build_desugared_library_dex(options, quiet, temp, android_java8_libs,
+ desugared_lib_pg_conf, inputs, outdir):
+ if not inputs:
+ raise Exception(
+ "If 'android_java8_libs' is specified the inputs must be explicit" +
+ "(not defined using '-injars' in Proguard configuration files)")
+ if outdir.endswith('.zip') or outdir.endswith('.jar'):
+ raise Exception(
+ "If 'android_java8_libs' is specified the output must be a directory"
+ )
- # Determine the l8 tool.
- assert(options.compiler_build in ['full', 'lib'])
- lib_prefix = 'r8lib-' if options.compiler_build == 'lib' else ''
- tool = lib_prefix + 'l8'
+ jar = None
+ main = None
+ if options.hash:
+ jar = os.path.join(utils.LIBS, 'r8-' + options.hash + '.jar')
+ main = 'com.android.tools.r8.R8'
- # Prepare out directory.
- android_java8_libs_output = os.path.join(temp, 'android_java8_libs')
- os.makedirs(android_java8_libs_output)
+ # Determine the l8 tool.
+ assert (options.compiler_build in ['full', 'lib'])
+ lib_prefix = 'r8lib-' if options.compiler_build == 'lib' else ''
+ tool = lib_prefix + 'l8'
- # Prepare arguments for L8.
- args = [
- '--desugared-lib', android_java8_libs['config'],
- '--lib', android_java8_libs['library'],
- '--output', android_java8_libs_output,
- '--pg-conf', desugared_lib_pg_conf,
- '--release',
- ]
- if 'pgconf' in android_java8_libs:
- for pgconf in android_java8_libs['pgconf']:
- args.extend(['--pg-conf', pgconf])
- args.extend(android_java8_libs['program'])
+ # Prepare out directory.
+ android_java8_libs_output = os.path.join(temp, 'android_java8_libs')
+ os.makedirs(android_java8_libs_output)
- # Run L8.
- exit_code = toolhelper.run(
- tool, args,
- build=should_build(options),
- debug=not options.disable_assertions,
- quiet=quiet,
- jar=jar,
- main=main)
+ # Prepare arguments for L8.
+ args = [
+ '--desugared-lib',
+ android_java8_libs['config'],
+ '--lib',
+ android_java8_libs['library'],
+ '--output',
+ android_java8_libs_output,
+ '--pg-conf',
+ desugared_lib_pg_conf,
+ '--release',
+ ]
+ if 'pgconf' in android_java8_libs:
+ for pgconf in android_java8_libs['pgconf']:
+ args.extend(['--pg-conf', pgconf])
+ args.extend(android_java8_libs['program'])
- # Copy the desugared library DEX to the output.
- dex_file_name = (
- 'classes' + str(len(glob(os.path.join(outdir, '*.dex'))) + 1) + '.dex')
- shutil.copyfile(
- os.path.join(android_java8_libs_output, 'classes.dex'),
- os.path.join(outdir, dex_file_name))
+ # Run L8.
+ exit_code = toolhelper.run(tool,
+ args,
+ build=should_build(options),
+ debug=not options.disable_assertions,
+ quiet=quiet,
+ jar=jar,
+ main=main)
-def run_with_options(
- options, args, extra_args=None, stdout=None, quiet=False, worker_id=None):
- if extra_args is None:
- extra_args = []
- app_provided_pg_conf = False;
- # todo(121018500): remove when memory is under control
- if not any('-Xmx' in arg for arg in extra_args):
- if options.max_memory:
- extra_args.append('-Xmx%sM' % options.max_memory)
+ # Copy the desugared library DEX to the output.
+ dex_file_name = ('classes' +
+ str(len(glob(os.path.join(outdir, '*.dex'))) + 1) + '.dex')
+ shutil.copyfile(os.path.join(android_java8_libs_output, 'classes.dex'),
+ os.path.join(outdir, dex_file_name))
+
+
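A small detail worth noting: dex_file_name counts the *.dex files already in outdir, so when the main compilation produced classes.dex, the desugared library lands in classes2.dex. A sketch of the same numbering, with an assumed output directory:

    import os
    from glob import glob

    outdir = '/tmp/out'  # assumed; any directory already holding classes.dex
    next_index = len(glob(os.path.join(outdir, '*.dex'))) + 1
    dex_file_name = 'classes%d.dex' % next_index  # e.g. classes2.dex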
+def run_with_options(options,
+ args,
+ extra_args=None,
+ stdout=None,
+ quiet=False,
+ worker_id=None):
+ if extra_args is None:
+ extra_args = []
+ app_provided_pg_conf = False
+ # todo(121018500): remove when memory is under control
+ if not any('-Xmx' in arg for arg in extra_args):
+ if options.max_memory:
+ extra_args.append('-Xmx%sM' % options.max_memory)
+ else:
+ extra_args.append('-Xmx8G')
+ if not options.ignore_java_version:
+ utils.check_java_version()
+
+ if options.print_times:
+ extra_args.append('-Dcom.android.tools.r8.printtimes=1')
+
+ if not options.disable_assertions:
+ extra_args.append('-Dcom.android.tools.r8.enableTestAssertions=1')
+
+ outdir = options.out
+ (version_id, data) = get_version_and_data(options)
+
+ if options.compiler not in COMPILERS:
+ raise Exception("You need to specify '--compiler={}'".format(
+ '|'.join(COMPILERS)))
+
+ if options.compiler_build not in COMPILER_BUILDS:
+ raise Exception("You need to specify '--compiler-build={}'".format(
+ '|'.join(COMPILER_BUILDS)))
+
+ if not version_id in data.VERSIONS.keys():
+ print('No version {} for application {}'.format(version_id,
+ options.app))
+ print('Valid versions are {}'.format(data.VERSIONS.keys()))
+ return 1
+
+ version = data.VERSIONS[version_id]
+
+ type = get_type(options)
+
+ if type not in version:
+ print('No type {} for version {}'.format(type, version))
+ print('Valid types are {}'.format(version.keys()))
+ return 1
+ values = version[type]
+
+ args.extend(['--output', outdir])
+ if 'min-api' in values:
+ args.extend(['--min-api', values['min-api']])
+
+ if 'main-dex-list' in values:
+ args.extend(['--main-dex-list', values['main-dex-list']])
+
+ inputs = values['inputs']
+ libraries = values['libraries'] if 'libraries' in values else []
+
+ if options.compiler == 'r8':
+ if 'pgconf' in values and not options.k:
+ sanitized_lib_path = os.path.join(os.path.abspath(outdir),
+ 'sanitized_lib.jar')
+ if has_injars_and_libraryjars(values['pgconf']):
+ sanitized_pgconf_path = os.path.join(os.path.abspath(outdir),
+ 'sanitized.config')
+ SanitizeLibrariesInPgconf(sanitized_lib_path,
+ sanitized_pgconf_path,
+ values['pgconf'])
+ libraries = [sanitized_lib_path]
+ args.extend(['--pg-conf', sanitized_pgconf_path])
+ inputs = []
+ else:
+ # -injars without -libraryjars or vice versa is not supported.
+ check_no_injars_and_no_libraryjars(values['pgconf'])
+ for pgconf in values['pgconf']:
+ args.extend(['--pg-conf', pgconf])
+ if 'sanitize_libraries' in values and values[
+ 'sanitize_libraries']:
+ SanitizeLibraries(sanitized_lib_path, values['libraries'],
+ values['inputs'])
+ libraries = [sanitized_lib_path]
+ app_provided_pg_conf = True
+ if 'pgconf_extra' in values:
+ extra_conf = os.path.join(os.path.abspath(outdir),
+ 'pgconf_extra')
+ with open(extra_conf, 'w') as extra_f:
+ extra_f.write(values['pgconf_extra'])
+ args.extend(['--pg-conf', extra_conf])
+ if options.k:
+ args.extend(['--pg-conf', options.k])
+ if 'maindexrules' in values:
+ for rules in values['maindexrules']:
+ args.extend(['--main-dex-rules', rules])
+ if 'allow-type-errors' in values:
+ extra_args.append('-Dcom.android.tools.r8.allowTypeErrors=1')
+ extra_args.append(
+ '-Dcom.android.tools.r8.disallowClassInlinerGracefulExit=1')
+ if 'system-properties' in values:
+ for system_property in values['system-properties']:
+ extra_args.append(system_property)
+
+ if options.debug_agent:
+ if not options.compiler_build == 'full':
+ print(
+ 'WARNING: Running debugging agent on r8lib is questionable...')
+ extra_args.append(
+ '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
+ )
+
+ if not options.no_libraries:
+ for lib in libraries:
+ args.extend(['--lib', lib])
+
+ if not outdir.endswith('.zip') and not outdir.endswith('.jar') \
+ and not os.path.exists(outdir):
+ os.makedirs(outdir)
+
+ if options.hash:
+ # Download r8-<hash>.jar from
+ # https://storage.googleapis.com/r8-releases/raw/<hash>/.
+ download_path = archive.GetUploadDestination(options.hash, 'r8.jar',
+ True)
+ assert utils.file_exists_on_cloud_storage(download_path), (
+ 'Could not find r8.jar file from provided hash: %s' % options.hash)
+ destination = os.path.join(utils.LIBS, 'r8-' + options.hash + '.jar')
+ utils.download_file_from_cloud_storage(download_path,
+ destination,
+ quiet=quiet)
+
+ # Additional flags for the compiler from the configuration file.
+ if 'flags' in values:
+ args.extend(values['flags'].split(' '))
+ if options.compiler == 'r8':
+ if 'r8-flags' in values:
+ args.extend(values['r8-flags'].split(' '))
+
+ # Additional flags for the compiler from the command line.
+ if options.compiler_flags:
+ args.extend(options.compiler_flags.split(' '))
+ if options.r8_flags:
+ args.extend(options.r8_flags.split(' '))
+
+ # Feature jars.
+ features = values['features'] if 'features' in values else []
+ for i, feature in enumerate(features, start=1):
+ feature_out = os.path.join(outdir, 'feature-%d.zip' % i)
+ for feature_jar in feature['inputs']:
+ args.extend(['--feature', feature_jar, feature_out])
+
+ args.extend(inputs)
+
+ t0 = None
+ if options.dump_args_file:
+ with open(options.dump_args_file, 'w') as args_file:
+ args_file.writelines([arg + os.linesep for arg in args])
else:
- extra_args.append('-Xmx8G')
- if not options.ignore_java_version:
- utils.check_java_version()
+ with utils.TempDir() as temp:
+ if options.print_memoryuse and not options.track_memory_to_file:
+ options.track_memory_to_file = os.path.join(
+ temp, utils.MEMORY_USE_TMP_FILE)
+ if options.compiler == 'r8' and app_provided_pg_conf:
+ # Ensure that output of -printmapping and -printseeds go to the output
+ # location and not where the app Proguard configuration places them.
+ if outdir.endswith('.zip') or outdir.endswith('.jar'):
+ pg_outdir = os.path.dirname(outdir)
+ else:
+ pg_outdir = outdir
+ if not options.no_extra_pgconf:
+ additional_pg_conf = GenerateAdditionalProguardConfiguration(
+ temp, os.path.abspath(pg_outdir))
+ args.extend(['--pg-conf', additional_pg_conf])
- if options.print_times:
- extra_args.append('-Dcom.android.tools.r8.printtimes=1')
+ android_java8_libs = values.get('android_java8_libs')
+ if android_java8_libs:
+ desugared_lib_pg_conf = os.path.join(
+ temp, 'desugared-lib-pg-conf.txt')
+ args.extend(['--desugared-lib', android_java8_libs['config']])
+ args.extend(
+ ['--desugared-lib-pg-conf-output', desugared_lib_pg_conf])
- if not options.disable_assertions:
- extra_args.append('-Dcom.android.tools.r8.enableTestAssertions=1')
+ stderr_path = os.path.join(temp, 'stderr')
+ with open(stderr_path, 'w') as stderr:
+ jar = None
+ main = None
+ if options.compiler_build == 'full':
+ tool = options.compiler
+ else:
+ assert (options.compiler_build == 'lib')
+ tool = 'r8lib-' + options.compiler
+ if options.hash:
+ jar = os.path.join(utils.LIBS,
+ 'r8-' + options.hash + '.jar')
+ main = 'com.android.tools.r8.' + options.compiler.upper()
+ if should_build(options):
+ gradle.RunGradle([
+ utils.GRADLE_TASK_R8LIB
+                        if tool.startswith('r8lib') else utils.GRADLE_TASK_R8
+ ])
+ t0 = time.time()
+ exit_code = toolhelper.run(
+ tool,
+ args,
+ build=False,
+ debug=not options.disable_assertions,
+ profile=options.profile,
+ track_memory_file=options.track_memory_to_file,
+ extra_args=extra_args,
+ stdout=stdout,
+ stderr=stderr,
+ timeout=options.timeout,
+ quiet=quiet,
+ cmd_prefix=['taskset', '-c', options.cpu_list]
+ if options.cpu_list else [],
+ jar=jar,
+ main=main,
+ worker_id=worker_id)
+ if exit_code != 0:
+ with open(stderr_path) as stderr:
+ stderr_text = stderr.read()
+ if not quiet:
+ print(stderr_text)
+ if 'java.lang.OutOfMemoryError' in stderr_text:
+ if not quiet:
+ print('Failure was OOM')
+ return OOM_EXIT_CODE
+ return exit_code
- outdir = options.out
- (version_id, data) = get_version_and_data(options)
+ if options.print_memoryuse:
+ print('{}(MemoryUse): {}'.format(
+ options.print_memoryuse,
+ utils.grep_memoryuse(options.track_memory_to_file)))
- if options.compiler not in COMPILERS:
- raise Exception("You need to specify '--compiler={}'"
- .format('|'.join(COMPILERS)))
+ if android_java8_libs:
+ build_desugared_library_dex(options, quiet, temp,
+ android_java8_libs,
+ desugared_lib_pg_conf, inputs,
+ outdir)
- if options.compiler_build not in COMPILER_BUILDS:
- raise Exception("You need to specify '--compiler-build={}'"
- .format('|'.join(COMPILER_BUILDS)))
+ if options.print_runtimeraw:
+ print('{}(RunTimeRaw): {} ms'.format(options.print_runtimeraw,
+ 1000.0 * (time.time() - t0)))
- if not version_id in data.VERSIONS.keys():
- print('No version {} for application {}'
- .format(version_id, options.app))
- print('Valid versions are {}'.format(data.VERSIONS.keys()))
- return 1
-
- version = data.VERSIONS[version_id]
-
- type = get_type(options)
-
- if type not in version:
- print('No type {} for version {}'.format(type, version))
- print('Valid types are {}'.format(version.keys()))
- return 1
- values = version[type]
-
- args.extend(['--output', outdir])
- if 'min-api' in values:
- args.extend(['--min-api', values['min-api']])
-
- if 'main-dex-list' in values:
- args.extend(['--main-dex-list', values['main-dex-list']])
-
- inputs = values['inputs']
- libraries = values['libraries'] if 'libraries' in values else []
-
- if options.compiler == 'r8':
- if 'pgconf' in values and not options.k:
- sanitized_lib_path = os.path.join(
- os.path.abspath(outdir), 'sanitized_lib.jar')
- if has_injars_and_libraryjars(values['pgconf']):
- sanitized_pgconf_path = os.path.join(
- os.path.abspath(outdir), 'sanitized.config')
- SanitizeLibrariesInPgconf(
- sanitized_lib_path, sanitized_pgconf_path, values['pgconf'])
- libraries = [sanitized_lib_path]
- args.extend(['--pg-conf', sanitized_pgconf_path])
- inputs = []
- else:
- # -injars without -libraryjars or vice versa is not supported.
- check_no_injars_and_no_libraryjars(values['pgconf'])
- for pgconf in values['pgconf']:
- args.extend(['--pg-conf', pgconf])
- if 'sanitize_libraries' in values and values['sanitize_libraries']:
- SanitizeLibraries(
- sanitized_lib_path, values['libraries'], values['inputs'])
- libraries = [sanitized_lib_path]
- app_provided_pg_conf = True
- if 'pgconf_extra' in values:
- extra_conf = os.path.join(os.path.abspath(outdir), 'pgconf_extra')
- with open(extra_conf, 'w') as extra_f:
- extra_f.write(values['pgconf_extra'])
- args.extend(['--pg-conf', extra_conf])
- if options.k:
- args.extend(['--pg-conf', options.k])
- if 'maindexrules' in values:
- for rules in values['maindexrules']:
- args.extend(['--main-dex-rules', rules])
- if 'allow-type-errors' in values:
- extra_args.append('-Dcom.android.tools.r8.allowTypeErrors=1')
- extra_args.append(
- '-Dcom.android.tools.r8.disallowClassInlinerGracefulExit=1')
- if 'system-properties' in values:
- for system_property in values['system-properties']:
- extra_args.append(system_property)
-
- if options.debug_agent:
- if not options.compiler_build == 'full':
- print('WARNING: Running debugging agent on r8lib is questionable...')
- extra_args.append(
- '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005')
-
- if not options.no_libraries:
- for lib in libraries:
- args.extend(['--lib', lib])
-
- if not outdir.endswith('.zip') and not outdir.endswith('.jar') \
- and not os.path.exists(outdir):
- os.makedirs(outdir)
-
- if options.hash:
- # Download r8-<hash>.jar from
- # https://storage.googleapis.com/r8-releases/raw/<hash>/.
- download_path = archive.GetUploadDestination(options.hash, 'r8.jar', True)
- assert utils.file_exists_on_cloud_storage(download_path), (
- 'Could not find r8.jar file from provided hash: %s' % options.hash)
- destination = os.path.join(utils.LIBS, 'r8-' + options.hash + '.jar')
- utils.download_file_from_cloud_storage(
- download_path, destination, quiet=quiet)
-
- # Additional flags for the compiler from the configuration file.
- if 'flags' in values:
- args.extend(values['flags'].split(' '))
- if options.compiler == 'r8':
- if 'r8-flags' in values:
- args.extend(values['r8-flags'].split(' '))
-
- # Additional flags for the compiler from the command line.
- if options.compiler_flags:
- args.extend(options.compiler_flags.split(' '))
- if options.r8_flags:
- args.extend(options.r8_flags.split(' '))
-
- # Feature jars.
- features = values['features'] if 'features' in values else []
- for i, feature in enumerate(features, start=1):
- feature_out = os.path.join(outdir, 'feature-%d.zip' % i)
- for feature_jar in feature['inputs']:
- args.extend(['--feature', feature_jar, feature_out])
-
- args.extend(inputs)
-
- t0 = None
- if options.dump_args_file:
- with open(options.dump_args_file, 'w') as args_file:
- args_file.writelines([arg + os.linesep for arg in args])
- else:
- with utils.TempDir() as temp:
- if options.print_memoryuse and not options.track_memory_to_file:
- options.track_memory_to_file = os.path.join(temp,
- utils.MEMORY_USE_TMP_FILE)
- if options.compiler == 'r8' and app_provided_pg_conf:
- # Ensure that output of -printmapping and -printseeds go to the output
- # location and not where the app Proguard configuration places them.
- if outdir.endswith('.zip') or outdir.endswith('.jar'):
- pg_outdir = os.path.dirname(outdir)
- else:
- pg_outdir = outdir
- if not options.no_extra_pgconf:
- additional_pg_conf = GenerateAdditionalProguardConfiguration(
- temp, os.path.abspath(pg_outdir))
- args.extend(['--pg-conf', additional_pg_conf])
-
- android_java8_libs = values.get('android_java8_libs')
- if android_java8_libs:
- desugared_lib_pg_conf = os.path.join(
- temp, 'desugared-lib-pg-conf.txt')
- args.extend(['--desugared-lib', android_java8_libs['config']])
- args.extend(
- ['--desugared-lib-pg-conf-output', desugared_lib_pg_conf])
-
- stderr_path = os.path.join(temp, 'stderr')
- with open(stderr_path, 'w') as stderr:
- jar = None
- main = None
- if options.compiler_build == 'full':
- tool = options.compiler
- else:
- assert(options.compiler_build == 'lib')
- tool = 'r8lib-' + options.compiler
- if options.hash:
- jar = os.path.join(utils.LIBS, 'r8-' + options.hash + '.jar')
- main = 'com.android.tools.r8.' + options.compiler.upper()
- if should_build(options):
- gradle.RunGradle([
- utils.GRADLE_TASK_R8LIB if tool.startswith('r8lib')
- else UTILS.GRADLE_TASK_R8])
- t0 = time.time()
- exit_code = toolhelper.run(tool, args,
- build=False,
- debug=not options.disable_assertions,
- profile=options.profile,
- track_memory_file=options.track_memory_to_file,
- extra_args=extra_args,
- stdout=stdout,
- stderr=stderr,
- timeout=options.timeout,
- quiet=quiet,
- cmd_prefix=[
- 'taskset', '-c', options.cpu_list] if options.cpu_list else [],
- jar=jar,
- main=main,
- worker_id=worker_id)
- if exit_code != 0:
- with open(stderr_path) as stderr:
- stderr_text = stderr.read()
- if not quiet:
- print(stderr_text)
- if 'java.lang.OutOfMemoryError' in stderr_text:
- if not quiet:
- print('Failure was OOM')
- return OOM_EXIT_CODE
- return exit_code
-
- if options.print_memoryuse:
- print('{}(MemoryUse): {}'
- .format(options.print_memoryuse,
- utils.grep_memoryuse(options.track_memory_to_file)))
-
- if android_java8_libs:
- build_desugared_library_dex(
- options, quiet, temp, android_java8_libs,
- desugared_lib_pg_conf, inputs, outdir)
-
-
- if options.print_runtimeraw:
- print('{}(RunTimeRaw): {} ms'
- .format(options.print_runtimeraw, 1000.0 * (time.time() - t0)))
-
- if options.print_dexsegments:
- dex_files = glob(os.path.join(outdir, '*.dex'))
- utils.print_dexsegments(options.print_dexsegments, dex_files)
- print('{}-Total(CodeSize): {}'.format(
+ if options.print_dexsegments:
+ dex_files = glob(os.path.join(outdir, '*.dex'))
+ utils.print_dexsegments(options.print_dexsegments, dex_files)
+ print('{}-Total(CodeSize): {}'.format(
options.print_dexsegments, compute_size_of_dex_files(dex_files)))
- return 0
+ return 0
+
def compute_size_of_dex_files(dex_files):
- dex_size = 0
- for dex_file in dex_files:
- dex_size += os.path.getsize(dex_file)
- return dex_size
+ dex_size = 0
+ for dex_file in dex_files:
+ dex_size += os.path.getsize(dex_file)
+ return dex_size
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/run_on_app_dump.py b/tools/run_on_app_dump.py
index 172f8b7..902c675 100755
--- a/tools/run_on_app_dump.py
+++ b/tools/run_on_app_dump.py
@@ -23,13 +23,15 @@
import update_prebuilds_in_android
import utils
-GOLEM_BUILD_TARGETS = [utils.GRADLE_TASK_R8LIB,
- utils.GRADLE_TASK_RETRACE]
+GOLEM_BUILD_TARGETS = [utils.GRADLE_TASK_R8LIB, utils.GRADLE_TASK_RETRACE]
SHRINKERS = ['r8', 'r8-full', 'r8-nolib', 'r8-nolib-full']
+
class AttrDict(dict):
- def __getattr__(self, name):
- return self.get(name, None)
+
+ def __getattr__(self, name):
+ return self.get(name, None)
+
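AttrDict simply turns missing attribute lookups into None instead of raising AttributeError; a usage sketch:

    opts = AttrDict({'app': 'chanu'})
    assert opts.app == 'chanu'
    assert opts.missing is None  # absent keys read as None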
# To generate the files for a new app, navigate to the app source folder and
# run:
@@ -40,1230 +42,1314 @@
# ./gradlew assembleAndroidTest -Dcom.android.tools.r8.dumpinputtodirectory=<path>
# will also generate dumps and apk for tests.
+
class App(object):
- def __init__(self, fields):
- defaults = {
- 'id': None,
- 'name': None,
- 'collections': [],
- 'dump_app': None,
- 'apk_app': None,
- 'dump_test': None,
- 'apk_test': None,
- 'skip': False,
- 'url': None, # url is not used but nice to have for updating apps
- 'revision': None,
- 'folder': None,
- 'skip_recompilation': False,
- 'compiler_properties': [],
- 'internal': False,
- 'golem_duration': None,
- }
- # This below does not work in python3
- defaults.update(fields.items())
- self.__dict__ = defaults
+
+ def __init__(self, fields):
+ defaults = {
+ 'id': None,
+ 'name': None,
+ 'collections': [],
+ 'dump_app': None,
+ 'apk_app': None,
+ 'dump_test': None,
+ 'apk_test': None,
+ 'skip': False,
+ 'url': None, # url is not used but nice to have for updating apps
+ 'revision': None,
+ 'folder': None,
+ 'skip_recompilation': False,
+ 'compiler_properties': [],
+ 'internal': False,
+ 'golem_duration': None,
+ }
+        # The following does not work in Python 3.
+ defaults.update(fields.items())
+ self.__dict__ = defaults
class AppCollection(object):
- def __init__(self, fields):
- defaults = {
- 'name': None
- }
- # This below does not work in python3
- defaults.update(fields.items())
- self.__dict__ = defaults
+
+ def __init__(self, fields):
+ defaults = {'name': None}
+        # The following does not work in Python 3.
+ defaults.update(fields.items())
+ self.__dict__ = defaults
APPS = [
- App({
- 'id': 'com.numix.calculator',
- 'name': 'Calculator',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- # Compiling tests fail: Library class android.content.res.XmlResourceParser
- # implements program class org.xmlpull.v1.XmlPullParser. Nothing to really
- # do about that.
- 'id_test': 'com.numix.calculator.test',
- 'dump_test': 'dump_test.zip',
- 'apk_test': 'app-release-androidTest.apk',
- 'url': 'https://github.com/numixproject/android-suite/tree/master/Calculator',
- 'revision': 'f58e1b53f7278c9b675d5855842c6d8a44cccb1f',
- 'folder': 'android-suite-calculator',
- }),
- App({
- 'id': 'dev.dworks.apps.anexplorer.pro',
- 'name': 'AnExplorer',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'AnExplorer-googleMobileProRelease-4.0.3.apk',
- 'url': 'https://github.com/christofferqa/AnExplorer',
- 'revision': '365927477b8eab4052a1882d5e358057ae3dee4d',
- 'folder': 'anexplorer',
- }),
- App({
- 'id': 'de.danoeh.antennapod',
- 'name': 'AntennaPod',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-free-release.apk',
- # TODO(b/172452102): Tests and monkey do not work
- 'id_test': 'de.danoeh.antennapod.test',
- 'dump_test': 'dump_test.zip',
- 'apk_test': 'app-free-release-androidTest.apk',
- 'url': 'https://github.com/christofferqa/AntennaPod.git',
- 'revision': '77e94f4783a16abe9cc5b78dc2d2b2b1867d8c06',
- 'folder': 'antennapod',
- }),
- App({
- 'id': 'com.example.applymapping',
- 'name': 'applymapping',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- 'id_test': 'com.example.applymapping.test',
- 'dump_test': 'dump_test.zip',
- 'apk_test': 'app-release-androidTest.apk',
- 'url': 'https://github.com/mkj-gram/applymapping',
- 'revision': 'e3ae14b8c16fa4718e5dea8f7ad00937701b3c48',
- 'folder': 'applymapping',
- }),
- App({
- 'id': 'com.chanapps.four.activity',
- 'name': 'chanu',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- 'url': 'https://github.com/mkj-gram/chanu.git',
- 'revision': '6e53458f167b6d78398da60c20fd0da01a232617',
- 'folder': 'chanu',
- # The app depends on a class file that has access flags interface but
- # not abstract
- 'compiler_properties': ['-Dcom.android.tools.r8.allowInvalidCfAccessFlags=true']
- }),
- App({
- 'id': 'com.example.myapplication',
- 'name': 'empty-activity',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- 'url': 'https://github.com/christofferqa/empty_android_activity.git',
- 'revision': '2d297ec3373dadb03cbae916b9feba4792563156',
- 'folder': 'empty-activity',
- }),
- App({
- 'id': 'com.example.emptycomposeactivity',
- 'name': 'empty-compose-activity',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- 'url': 'https://github.com/christofferqa/empty_android_compose_activity.git',
- 'revision': '3c8111b8b7d6e9184049a07e2b96702d7b33d03e',
- 'folder': 'empty-compose-activity',
- }),
- # TODO(b/172539375): Monkey runner fails on recompilation.
- App({
- 'id': 'com.google.firebase.example.fireeats',
- 'name': 'FriendlyEats',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/firebase/friendlyeats-android',
- 'revision': '7c6dd016fc31ea5ecb948d5166b8479efc3775cc',
- 'folder': 'friendlyeats',
- }),
- App({
- 'id': 'com.google.samples.apps.sunflower',
- 'name': 'Sunflower',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-debug.apk',
- # TODO(b/172549283): Compiling tests fails
- 'id_test': 'com.google.samples.apps.sunflower.test',
- 'dump_test': 'dump_test.zip',
- 'apk_test': 'app-debug-androidTest.apk',
- 'url': 'https://github.com/android/sunflower',
- 'revision': '0c4c88fdad2a74791199dffd1a6559559b1dbd4a',
- 'folder': 'sunflower',
- }),
- # TODO(b/172565385): Monkey runner fails on recompilation
- App({
- 'id': 'com.google.samples.apps.iosched',
- 'name': 'iosched',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'mobile-release.apk',
- 'url': 'https://github.com/christofferqa/iosched.git',
- 'revision': '581cbbe2253711775dbccb753cdb53e7e506cb02',
- 'folder': 'iosched',
- }),
- App({
- 'id': 'fr.neamar.kiss',
- 'name': 'KISS',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- # TODO(b/172569220): Running tests fails due to missing keep rules
- 'id_test': 'fr.neamar.kiss.test',
- 'dump_test': 'dump_test.zip',
- 'apk_test': 'app-release-androidTest.apk',
- 'url': 'https://github.com/Neamar/KISS',
- 'revision': '8ccffaadaf0d0b8fc4418ed2b4281a0935d3d971',
- 'folder': 'kiss',
- }),
- # TODO(b/172577344): Monkey runner not working.
- App({
- 'id': 'io.github.hidroh.materialistic',
- 'name': 'materialistic',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- 'url': 'https://github.com/christofferqa/materialistic.git',
- 'revision': '2b2b2ee25ce9e672d5aab1dc90a354af1522b1d9',
- 'folder': 'materialistic',
- }),
- App({
- 'id': 'com.avjindersinghsekhon.minimaltodo',
- 'name': 'MinimalTodo',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- 'url': 'https://github.com/christofferqa/Minimal-Todo',
- 'revision': '9d8c73746762cd376b718858ec1e8783ca07ba7c',
- 'folder': 'minimal-todo',
- }),
- App({
- 'id': 'net.nurik.roman.muzei',
- 'name': 'muzei',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'muzei-release.apk',
- 'url': 'https://github.com/romannurik/muzei',
- 'revision': '9eac6e98aebeaf0ae40bdcd85f16dd2886551138',
- 'folder': 'muzei',
- }),
- # TODO(b/172806281): Monkey runner does not work.
- App({
- 'id': 'org.schabi.newpipe',
- 'name': 'NewPipe',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/TeamNewPipe/NewPipe',
- 'revision': 'f4435f90313281beece70c544032f784418d85fa',
- 'folder': 'newpipe',
- }),
- # TODO(b/172806808): Monkey runner does not work.
- App({
- 'id': 'io.rover.app.debug',
- 'name': 'Rover',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'example-app-release-unsigned.apk',
- 'url': 'https://github.com/RoverPlatform/rover-android',
- 'revision': '94342117097770ea3ca2c6df6ab496a1a55c3ce7',
- 'folder': 'rover-android',
- }),
- # TODO(b/172808159): Monkey runner does not work
- App({
- 'id': 'com.google.android.apps.santatracker',
- 'name': 'SantaTracker',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'santa-tracker-release.apk',
- 'url': 'https://github.com/christofferqa/santa-tracker-android',
- 'revision': '8dee74be7d9ee33c69465a07088c53087d24a6dd',
- 'folder': 'santa-tracker',
- }),
- App({
- 'id': 'org.thoughtcrime.securesms',
- 'name': 'Signal',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'Signal-Android-play-prod-universal-release-4.76.2.apk',
- # TODO(b/172812839): Instrumentation test fails.
- 'id_test': 'org.thoughtcrime.securesms.test',
- 'dump_test': 'dump_test.zip',
- 'apk_test': 'Signal-Android-play-prod-release-androidTest.apk',
- 'url': 'https://github.com/signalapp/Signal-Android',
- 'revision': '91ca19f294362ccee2c2b43c247eba228e2b30a1',
- 'folder': 'signal-android',
- }),
- # TODO(b/172815827): Monkey runner does not work
- App({
- 'id': 'com.simplemobiletools.calendar.pro',
- 'name': 'Simple-Calendar',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'calendar-release.apk',
- 'url': 'https://github.com/SimpleMobileTools/Simple-Calendar',
- 'revision': '906209874d0a091c7fce5a57972472f272d6b068',
- 'folder': 'simple-calendar',
- }),
- # TODO(b/172815534): Monkey runner does not work
- App({
- 'id': 'com.simplemobiletools.camera.pro',
- 'name': 'Simple-Camera',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'camera-release.apk',
- 'url': 'https://github.com/SimpleMobileTools/Simple-Camera',
- 'revision': 'ebf9820c51e960912b3238287e30a131244fdee6',
- 'folder': 'simple-camera',
- }),
- App({
- 'id': 'com.simplemobiletools.filemanager.pro',
- 'name': 'Simple-File-Manager',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'file-manager-release.apk',
- 'url': 'https://github.com/SimpleMobileTools/Simple-File-Manager',
- 'revision': '2b7fa68ea251222cc40cf6d62ad1de260a6f54d9',
- 'folder': 'simple-file-manager',
- }),
- App({
- 'id': 'com.simplemobiletools.gallery.pro',
- 'name': 'Simple-Gallery',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'gallery-326-foss-release.apk',
- 'url': 'https://github.com/SimpleMobileTools/Simple-Gallery',
- 'revision': '564e56b20d33b28d0018c8087ec705beeb60785e',
- 'folder': 'simple-gallery',
- }),
- App({
- 'id': 'com.example.sqldelight.hockey',
- 'name': 'SQLDelight',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'android-release.apk',
- 'url': 'https://github.com/christofferqa/sqldelight',
- 'revision': '2e67a1126b6df05e4119d1e3a432fde51d76cdc8',
- 'folder': 'sqldelight',
- }),
- # TODO(b/172824096): Monkey runner does not work.
- App({
- 'id': 'eu.kanade.tachiyomi',
- 'name': 'Tachiyomi',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-dev-release.apk',
- 'url': 'https://github.com/inorichi/tachiyomi',
- 'revision': '8aa6486bf76ab9a61a5494bee284b1a5e9180bf3',
- 'folder': 'tachiyomi',
- }),
- # TODO(b/172862042): Monkey runner does not work.
- App({
- 'id': 'app.tivi',
- 'name': 'Tivi',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release.apk',
- 'url': 'https://github.com/chrisbanes/tivi',
- 'revision': '5c6d9ed338885c59b1fc64050d92d056417bb4de',
- 'folder': 'tivi',
- 'golem_duration': 300
- }),
- App({
- 'id': 'com.keylesspalace.tusky',
- 'name': 'Tusky',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-blue-release.apk',
- 'url': 'https://github.com/tuskyapp/Tusky',
- 'revision': '814a9b8f9bacf8d26f712b06a0313a3534a2be95',
- 'folder': 'tusky',
- }),
- App({
- 'id': 'org.wikipedia',
- 'name': 'Wikipedia',
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-prod-release.apk',
- 'url': 'https://github.com/wikimedia/apps-android-wikipedia',
- 'revision': '0fa7cad843c66313be8e25790ef084cf1a1fa67e',
- 'folder': 'wikipedia',
- }),
- # TODO(b/173167253): Check if monkey testing works.
- App({
- 'id': 'androidx.compose.samples.crane',
- 'name': 'compose-crane',
- 'collections': ['compose-samples'],
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/android/compose-samples',
- 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
- 'folder': 'android/compose-samples/crane',
- 'golem_duration': 240
- }),
- # TODO(b/173167253): Check if monkey testing works.
- App({
- 'id': 'com.example.jetcaster',
- 'name': 'compose-jetcaster',
- 'collections': ['compose-samples'],
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/android/compose-samples',
- 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
- 'folder': 'android/compose-samples/jetcaster',
- }),
- # TODO(b/173167253): Check if monkey testing works.
- App({
- 'id': 'com.example.compose.jetchat',
- 'name': 'compose-jetchat',
- 'collections': ['compose-samples'],
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/android/compose-samples',
- 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
- 'folder': 'android/compose-samples/jetchat',
- }),
- # TODO(b/173167253): Check if monkey testing works.
- App({
- 'id': 'com.example.jetnews',
- 'name': 'compose-jetnews',
- 'collections': ['compose-samples'],
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/android/compose-samples',
- 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
- 'folder': 'android/compose-samples/jetnews',
- }),
- # TODO(b/173167253): Check if monkey testing works.
- App({
- 'id': 'com.example.jetsnack',
- 'name': 'compose-jetsnack',
- 'collections': ['compose-samples'],
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/android/compose-samples',
- 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
- 'folder': 'android/compose-samples/jetsnack',
- }),
- # TODO(b/173167253): Check if monkey testing works.
- App({
- 'id': 'com.example.compose.jetsurvey',
- 'name': 'compose-jetsurvey',
- 'collections': ['compose-samples'],
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/android/compose-samples',
- 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
- 'folder': 'android/compose-samples/jetsurvey',
- }),
- # TODO(b/173167253): Check if monkey testing works.
- App({
- 'id': 'com.example.owl',
- 'name': 'compose-owl',
- 'collections': ['compose-samples'],
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/android/compose-samples',
- 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
- 'folder': 'android/compose-samples/owl',
- }),
- # TODO(b/173167253): Check if monkey testing works.
- App({
- 'id': 'com.example.compose.rally',
- 'name': 'compose-rally',
- 'collections': ['compose-samples'],
- 'dump_app': 'dump_app.zip',
- 'apk_app': 'app-release-unsigned.apk',
- 'url': 'https://github.com/android/compose-samples',
- 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
- 'folder': 'android/compose-samples/rally',
- }),
+ App({
+ 'id':
+ 'com.numix.calculator',
+ 'name':
+ 'Calculator',
+ 'dump_app':
+ 'dump_app.zip',
+ 'apk_app':
+ 'app-release.apk',
+        # Compiling tests fails: Library class
+        # android.content.res.XmlResourceParser implements program class
+        # org.xmlpull.v1.XmlPullParser. Nothing can really be done about that.
+ 'id_test':
+ 'com.numix.calculator.test',
+ 'dump_test':
+ 'dump_test.zip',
+ 'apk_test':
+ 'app-release-androidTest.apk',
+ 'url':
+ 'https://github.com/numixproject/android-suite/tree/master/Calculator',
+ 'revision':
+ 'f58e1b53f7278c9b675d5855842c6d8a44cccb1f',
+ 'folder':
+ 'android-suite-calculator',
+ }),
+ App({
+ 'id': 'dev.dworks.apps.anexplorer.pro',
+ 'name': 'AnExplorer',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'AnExplorer-googleMobileProRelease-4.0.3.apk',
+ 'url': 'https://github.com/christofferqa/AnExplorer',
+ 'revision': '365927477b8eab4052a1882d5e358057ae3dee4d',
+ 'folder': 'anexplorer',
+ }),
+ App({
+ 'id': 'de.danoeh.antennapod',
+ 'name': 'AntennaPod',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-free-release.apk',
+ # TODO(b/172452102): Tests and monkey do not work
+ 'id_test': 'de.danoeh.antennapod.test',
+ 'dump_test': 'dump_test.zip',
+ 'apk_test': 'app-free-release-androidTest.apk',
+ 'url': 'https://github.com/christofferqa/AntennaPod.git',
+ 'revision': '77e94f4783a16abe9cc5b78dc2d2b2b1867d8c06',
+ 'folder': 'antennapod',
+ }),
+ App({
+ 'id': 'com.example.applymapping',
+ 'name': 'applymapping',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release.apk',
+ 'id_test': 'com.example.applymapping.test',
+ 'dump_test': 'dump_test.zip',
+ 'apk_test': 'app-release-androidTest.apk',
+ 'url': 'https://github.com/mkj-gram/applymapping',
+ 'revision': 'e3ae14b8c16fa4718e5dea8f7ad00937701b3c48',
+ 'folder': 'applymapping',
+ }),
+ App({
+ 'id':
+ 'com.chanapps.four.activity',
+ 'name':
+ 'chanu',
+ 'dump_app':
+ 'dump_app.zip',
+ 'apk_app':
+ 'app-release.apk',
+ 'url':
+ 'https://github.com/mkj-gram/chanu.git',
+ 'revision':
+ '6e53458f167b6d78398da60c20fd0da01a232617',
+ 'folder':
+ 'chanu',
+        # The app depends on a class file that has the access flag interface
+        # set but not abstract.
+ 'compiler_properties': [
+ '-Dcom.android.tools.r8.allowInvalidCfAccessFlags=true'
+ ]
+ }),
+ App({
+ 'id': 'com.example.myapplication',
+ 'name': 'empty-activity',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release.apk',
+ 'url': 'https://github.com/christofferqa/empty_android_activity.git',
+ 'revision': '2d297ec3373dadb03cbae916b9feba4792563156',
+ 'folder': 'empty-activity',
+ }),
+ App({
+ 'id':
+ 'com.example.emptycomposeactivity',
+ 'name':
+ 'empty-compose-activity',
+ 'dump_app':
+ 'dump_app.zip',
+ 'apk_app':
+ 'app-release.apk',
+ 'url':
+ 'https://github.com/christofferqa/empty_android_compose_activity.git',
+ 'revision':
+ '3c8111b8b7d6e9184049a07e2b96702d7b33d03e',
+ 'folder':
+ 'empty-compose-activity',
+ }),
+ # TODO(b/172539375): Monkey runner fails on recompilation.
+ App({
+ 'id': 'com.google.firebase.example.fireeats',
+ 'name': 'FriendlyEats',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/firebase/friendlyeats-android',
+ 'revision': '7c6dd016fc31ea5ecb948d5166b8479efc3775cc',
+ 'folder': 'friendlyeats',
+ }),
+ App({
+ 'id': 'com.google.samples.apps.sunflower',
+ 'name': 'Sunflower',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-debug.apk',
+ # TODO(b/172549283): Compiling tests fails
+ 'id_test': 'com.google.samples.apps.sunflower.test',
+ 'dump_test': 'dump_test.zip',
+ 'apk_test': 'app-debug-androidTest.apk',
+ 'url': 'https://github.com/android/sunflower',
+ 'revision': '0c4c88fdad2a74791199dffd1a6559559b1dbd4a',
+ 'folder': 'sunflower',
+ }),
+ # TODO(b/172565385): Monkey runner fails on recompilation
+ App({
+ 'id': 'com.google.samples.apps.iosched',
+ 'name': 'iosched',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'mobile-release.apk',
+ 'url': 'https://github.com/christofferqa/iosched.git',
+ 'revision': '581cbbe2253711775dbccb753cdb53e7e506cb02',
+ 'folder': 'iosched',
+ }),
+ App({
+ 'id': 'fr.neamar.kiss',
+ 'name': 'KISS',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release.apk',
+ # TODO(b/172569220): Running tests fails due to missing keep rules
+ 'id_test': 'fr.neamar.kiss.test',
+ 'dump_test': 'dump_test.zip',
+ 'apk_test': 'app-release-androidTest.apk',
+ 'url': 'https://github.com/Neamar/KISS',
+ 'revision': '8ccffaadaf0d0b8fc4418ed2b4281a0935d3d971',
+ 'folder': 'kiss',
+ }),
+ # TODO(b/172577344): Monkey runner not working.
+ App({
+ 'id': 'io.github.hidroh.materialistic',
+ 'name': 'materialistic',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release.apk',
+ 'url': 'https://github.com/christofferqa/materialistic.git',
+ 'revision': '2b2b2ee25ce9e672d5aab1dc90a354af1522b1d9',
+ 'folder': 'materialistic',
+ }),
+ App({
+ 'id': 'com.avjindersinghsekhon.minimaltodo',
+ 'name': 'MinimalTodo',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release.apk',
+ 'url': 'https://github.com/christofferqa/Minimal-Todo',
+ 'revision': '9d8c73746762cd376b718858ec1e8783ca07ba7c',
+ 'folder': 'minimal-todo',
+ }),
+ App({
+ 'id': 'net.nurik.roman.muzei',
+ 'name': 'muzei',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'muzei-release.apk',
+ 'url': 'https://github.com/romannurik/muzei',
+ 'revision': '9eac6e98aebeaf0ae40bdcd85f16dd2886551138',
+ 'folder': 'muzei',
+ }),
+ # TODO(b/172806281): Monkey runner does not work.
+ App({
+ 'id': 'org.schabi.newpipe',
+ 'name': 'NewPipe',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/TeamNewPipe/NewPipe',
+ 'revision': 'f4435f90313281beece70c544032f784418d85fa',
+ 'folder': 'newpipe',
+ }),
+ # TODO(b/172806808): Monkey runner does not work.
+ App({
+ 'id': 'io.rover.app.debug',
+ 'name': 'Rover',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'example-app-release-unsigned.apk',
+ 'url': 'https://github.com/RoverPlatform/rover-android',
+ 'revision': '94342117097770ea3ca2c6df6ab496a1a55c3ce7',
+ 'folder': 'rover-android',
+ }),
+ # TODO(b/172808159): Monkey runner does not work
+ App({
+ 'id': 'com.google.android.apps.santatracker',
+ 'name': 'SantaTracker',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'santa-tracker-release.apk',
+ 'url': 'https://github.com/christofferqa/santa-tracker-android',
+ 'revision': '8dee74be7d9ee33c69465a07088c53087d24a6dd',
+ 'folder': 'santa-tracker',
+ }),
+ App({
+ 'id': 'org.thoughtcrime.securesms',
+ 'name': 'Signal',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'Signal-Android-play-prod-universal-release-4.76.2.apk',
+ # TODO(b/172812839): Instrumentation test fails.
+ 'id_test': 'org.thoughtcrime.securesms.test',
+ 'dump_test': 'dump_test.zip',
+ 'apk_test': 'Signal-Android-play-prod-release-androidTest.apk',
+ 'url': 'https://github.com/signalapp/Signal-Android',
+ 'revision': '91ca19f294362ccee2c2b43c247eba228e2b30a1',
+ 'folder': 'signal-android',
+ }),
+ # TODO(b/172815827): Monkey runner does not work
+ App({
+ 'id': 'com.simplemobiletools.calendar.pro',
+ 'name': 'Simple-Calendar',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'calendar-release.apk',
+ 'url': 'https://github.com/SimpleMobileTools/Simple-Calendar',
+ 'revision': '906209874d0a091c7fce5a57972472f272d6b068',
+ 'folder': 'simple-calendar',
+ }),
+ # TODO(b/172815534): Monkey runner does not work
+ App({
+ 'id': 'com.simplemobiletools.camera.pro',
+ 'name': 'Simple-Camera',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'camera-release.apk',
+ 'url': 'https://github.com/SimpleMobileTools/Simple-Camera',
+ 'revision': 'ebf9820c51e960912b3238287e30a131244fdee6',
+ 'folder': 'simple-camera',
+ }),
+ App({
+ 'id': 'com.simplemobiletools.filemanager.pro',
+ 'name': 'Simple-File-Manager',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'file-manager-release.apk',
+ 'url': 'https://github.com/SimpleMobileTools/Simple-File-Manager',
+ 'revision': '2b7fa68ea251222cc40cf6d62ad1de260a6f54d9',
+ 'folder': 'simple-file-manager',
+ }),
+ App({
+ 'id': 'com.simplemobiletools.gallery.pro',
+ 'name': 'Simple-Gallery',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'gallery-326-foss-release.apk',
+ 'url': 'https://github.com/SimpleMobileTools/Simple-Gallery',
+ 'revision': '564e56b20d33b28d0018c8087ec705beeb60785e',
+ 'folder': 'simple-gallery',
+ }),
+ App({
+ 'id': 'com.example.sqldelight.hockey',
+ 'name': 'SQLDelight',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'android-release.apk',
+ 'url': 'https://github.com/christofferqa/sqldelight',
+ 'revision': '2e67a1126b6df05e4119d1e3a432fde51d76cdc8',
+ 'folder': 'sqldelight',
+ }),
+ # TODO(b/172824096): Monkey runner does not work.
+ App({
+ 'id': 'eu.kanade.tachiyomi',
+ 'name': 'Tachiyomi',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-dev-release.apk',
+ 'url': 'https://github.com/inorichi/tachiyomi',
+ 'revision': '8aa6486bf76ab9a61a5494bee284b1a5e9180bf3',
+ 'folder': 'tachiyomi',
+ }),
+ # TODO(b/172862042): Monkey runner does not work.
+ App({
+ 'id': 'app.tivi',
+ 'name': 'Tivi',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release.apk',
+ 'url': 'https://github.com/chrisbanes/tivi',
+ 'revision': '5c6d9ed338885c59b1fc64050d92d056417bb4de',
+ 'folder': 'tivi',
+ 'golem_duration': 300
+ }),
+ App({
+ 'id': 'com.keylesspalace.tusky',
+ 'name': 'Tusky',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-blue-release.apk',
+ 'url': 'https://github.com/tuskyapp/Tusky',
+ 'revision': '814a9b8f9bacf8d26f712b06a0313a3534a2be95',
+ 'folder': 'tusky',
+ }),
+ App({
+ 'id': 'org.wikipedia',
+ 'name': 'Wikipedia',
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-prod-release.apk',
+ 'url': 'https://github.com/wikimedia/apps-android-wikipedia',
+ 'revision': '0fa7cad843c66313be8e25790ef084cf1a1fa67e',
+ 'folder': 'wikipedia',
+ }),
+ # TODO(b/173167253): Check if monkey testing works.
+ App({
+ 'id': 'androidx.compose.samples.crane',
+ 'name': 'compose-crane',
+ 'collections': ['compose-samples'],
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/android/compose-samples',
+ 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
+ 'folder': 'android/compose-samples/crane',
+ 'golem_duration': 240
+ }),
+ # TODO(b/173167253): Check if monkey testing works.
+ App({
+ 'id': 'com.example.jetcaster',
+ 'name': 'compose-jetcaster',
+ 'collections': ['compose-samples'],
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/android/compose-samples',
+ 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
+ 'folder': 'android/compose-samples/jetcaster',
+ }),
+ # TODO(b/173167253): Check if monkey testing works.
+ App({
+ 'id': 'com.example.compose.jetchat',
+ 'name': 'compose-jetchat',
+ 'collections': ['compose-samples'],
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/android/compose-samples',
+ 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
+ 'folder': 'android/compose-samples/jetchat',
+ }),
+ # TODO(b/173167253): Check if monkey testing works.
+ App({
+ 'id': 'com.example.jetnews',
+ 'name': 'compose-jetnews',
+ 'collections': ['compose-samples'],
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/android/compose-samples',
+ 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
+ 'folder': 'android/compose-samples/jetnews',
+ }),
+ # TODO(b/173167253): Check if monkey testing works.
+ App({
+ 'id': 'com.example.jetsnack',
+ 'name': 'compose-jetsnack',
+ 'collections': ['compose-samples'],
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/android/compose-samples',
+ 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
+ 'folder': 'android/compose-samples/jetsnack',
+ }),
+ # TODO(b/173167253): Check if monkey testing works.
+ App({
+ 'id': 'com.example.compose.jetsurvey',
+ 'name': 'compose-jetsurvey',
+ 'collections': ['compose-samples'],
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/android/compose-samples',
+ 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
+ 'folder': 'android/compose-samples/jetsurvey',
+ }),
+ # TODO(b/173167253): Check if monkey testing works.
+ App({
+ 'id': 'com.example.owl',
+ 'name': 'compose-owl',
+ 'collections': ['compose-samples'],
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/android/compose-samples',
+ 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
+ 'folder': 'android/compose-samples/owl',
+ }),
+ # TODO(b/173167253): Check if monkey testing works.
+ App({
+ 'id': 'com.example.compose.rally',
+ 'name': 'compose-rally',
+ 'collections': ['compose-samples'],
+ 'dump_app': 'dump_app.zip',
+ 'apk_app': 'app-release-unsigned.apk',
+ 'url': 'https://github.com/android/compose-samples',
+ 'revision': '779cf9e187b8ee2c6b620b2abb4524719b3f10f8',
+ 'folder': 'android/compose-samples/rally',
+ }),
]
-
-APP_COLLECTIONS = [
- AppCollection({
+APP_COLLECTIONS = [AppCollection({
'name': 'compose-samples',
- })
-]
+})]
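+# Rewrites a Proguard configuration file in place, dropping every
+# '-printconfiguration' directive so recompilations do not echo the recorded
+# configuration again. A minimal usage sketch (the path is illustrative):
+#   remove_print_lines('/tmp/dump/proguard.config')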
def remove_print_lines(file):
- with open(file) as f:
- lines = f.readlines()
- with open(file, 'w') as f:
- for line in lines:
- if '-printconfiguration' not in line:
- f.write(line)
+ with open(file) as f:
+ lines = f.readlines()
+ with open(file, 'w') as f:
+ for line in lines:
+ if '-printconfiguration' not in line:
+ f.write(line)
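+# Downloads a dump archive given its .sha1 reference: from x20 for internal
+# apps, from Google Cloud Storage otherwise. Illustrative call (the file name
+# is hypothetical):
+#   download_sha('anexplorer.tar.gz.sha1', internal=False)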
def download_sha(app_sha, internal, quiet=False):
- if internal:
- utils.DownloadFromX20(app_sha)
- else:
- utils.DownloadFromGoogleCloudStorage(app_sha, quiet=quiet)
+ if internal:
+ utils.DownloadFromX20(app_sha)
+ else:
+ utils.DownloadFromGoogleCloudStorage(app_sha, quiet=quiet)
def is_logging_enabled_for(app, options):
- if options.no_logging:
- return False
- if options.app_logging_filter and app.name not in options.app_logging_filter:
- return False
- return True
+ if options.no_logging:
+ return False
+ if options.app_logging_filter and app.name not in options.app_logging_filter:
+ return False
+ return True
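+# Shrinker names encode the R8 variant: '-nolib' selects the non-minified
+# r8.jar and '-full' selects full mode. For example, is_minified_r8('r8-nolib')
+# is False and is_full_r8('r8-full') is True.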
def is_minified_r8(shrinker):
- return '-nolib' not in shrinker
+ return '-nolib' not in shrinker
def is_full_r8(shrinker):
- return '-full' in shrinker
+ return '-full' in shrinker
def version_is_built_jar(version):
- return version != 'main' and version != 'source'
+ return version != 'main' and version != 'source'
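+# Sums the uncompressed sizes of all .dex entries in an APK or jar. A hedged
+# usage sketch (path and size are illustrative):
+#   compute_size_of_dex_files_in_package('out/app_dex_out.jar')  # e.g. 3141592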
def compute_size_of_dex_files_in_package(path):
- dex_size = 0
- z = zipfile.ZipFile(path, 'r')
- for filename in z.namelist():
- if filename.endswith('.dex'):
- dex_size += z.getinfo(filename).file_size
- return dex_size
+ dex_size = 0
+ z = zipfile.ZipFile(path, 'r')
+ for filename in z.namelist():
+ if filename.endswith('.dex'):
+ dex_size += z.getinfo(filename).file_size
+ return dex_size
def dump_for_app(app_dir, app):
- return os.path.join(app_dir, app.dump_app)
+ return os.path.join(app_dir, app.dump_app)
def dump_test_for_app(app_dir, app):
- return os.path.join(app_dir, app.dump_test)
+ return os.path.join(app_dir, app.dump_test)
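+# Resolves the R8 jar next to the temp dir: r8lib.jar for minified shrinkers,
+# r8.jar otherwise, and None when compiling from source. Illustrative call,
+# assuming options.version != 'source':
+#   get_r8_jar(options, '/tmp/run', 'r8')  # -> '/tmp/r8lib.jar'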
def get_r8_jar(options, temp_dir, shrinker):
- if (options.version == 'source'):
- return None
- jar = os.path.abspath(
- os.path.join(
- temp_dir,
- '..',
- 'r8lib.jar' if is_minified_r8(shrinker) else 'r8.jar'))
- return jar
+ if (options.version == 'source'):
+ return None
+ jar = os.path.abspath(
+ os.path.join(temp_dir, '..',
+ 'r8lib.jar' if is_minified_r8(shrinker) else 'r8.jar'))
+ return jar
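+# Builds one app with every requested shrinker and collects the per-shrinker
+# result lists into a single dict. Illustrative return value shape:
+#   {'status': 'success', 'r8': [...], 'r8-full': [...]}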
def get_results_for_app(app, options, temp_dir, worker_id):
- app_folder = app.folder if app.folder else app.name + "_" + app.revision
- # Golem extraction will extract to the basename under the benchmarks dir.
- app_location = os.path.basename(app_folder) if options.golem else app_folder
- opensource_basedir = (os.path.join('benchmarks', app.name) if options.golem
- else utils.OPENSOURCE_DUMPS_DIR)
- app_dir = (os.path.join(utils.INTERNAL_DUMPS_DIR, app_location) if app.internal
- else os.path.join(opensource_basedir, app_location))
- if not os.path.exists(app_dir) and not options.golem:
- # Download the app from google storage.
- download_sha(app_dir + ".tar.gz.sha1", app.internal)
+ app_folder = app.folder if app.folder else app.name + "_" + app.revision
+ # Golem extraction will extract to the basename under the benchmarks dir.
+ app_location = os.path.basename(app_folder) if options.golem else app_folder
+ opensource_basedir = (os.path.join('benchmarks', app.name)
+ if options.golem else utils.OPENSOURCE_DUMPS_DIR)
+ app_dir = (os.path.join(utils.INTERNAL_DUMPS_DIR, app_location) if
+ app.internal else os.path.join(opensource_basedir, app_location))
+ if not os.path.exists(app_dir) and not options.golem:
+        # Download the app from Google storage.
+ download_sha(app_dir + ".tar.gz.sha1", app.internal)
- # Ensure that the dumps are in place
- assert os.path.isfile(dump_for_app(app_dir, app)), "Could not find dump " \
- "for app " + app.name
+ # Ensure that the dumps are in place
+ assert os.path.isfile(dump_for_app(app_dir, app)), "Could not find dump " \
+ "for app " + app.name
- result = {}
- result['status'] = 'success'
- result_per_shrinker = build_app_with_shrinkers(
- app, options, temp_dir, app_dir, worker_id=worker_id)
- for shrinker, shrinker_result in result_per_shrinker.items():
- result[shrinker] = shrinker_result
- return result
+ result = {}
+ result['status'] = 'success'
+ result_per_shrinker = build_app_with_shrinkers(app,
+ options,
+ temp_dir,
+ app_dir,
+ worker_id=worker_id)
+ for shrinker, shrinker_result in result_per_shrinker.items():
+ result[shrinker] = shrinker_result
+ return result
def build_app_with_shrinkers(app, options, temp_dir, app_dir, worker_id):
- result_per_shrinker = {}
- for shrinker in options.shrinker:
- results = []
- build_app_and_run_with_shrinker(
- app, options, temp_dir, app_dir, shrinker, results, worker_id=worker_id)
- result_per_shrinker[shrinker] = results
- if len(options.apps) > 1:
- print_thread('', worker_id)
- log_results_for_app(app, result_per_shrinker, options, worker_id=worker_id)
- print_thread('', worker_id)
+ result_per_shrinker = {}
+ for shrinker in options.shrinker:
+ results = []
+ build_app_and_run_with_shrinker(app,
+ options,
+ temp_dir,
+ app_dir,
+ shrinker,
+ results,
+ worker_id=worker_id)
+ result_per_shrinker[shrinker] = results
+ if len(options.apps) > 1:
+ print_thread('', worker_id)
+ log_results_for_app(app,
+ result_per_shrinker,
+ options,
+ worker_id=worker_id)
+ print_thread('', worker_id)
- return result_per_shrinker
+ return result_per_shrinker
def is_last_build(index, compilation_steps):
- return index == compilation_steps - 1
+ return index == compilation_steps - 1
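+# Drives the full per-shrinker pipeline: compile the dump for each step,
+# optionally repackage the APK and monkey-test it, and optionally run the
+# instrumentation tests. Every step appends a result dict to |results|; an
+# entry with illustrative values:
+#   {'build_status': 'success', 'dex_size': 3141592, 'duration': 45000,
+#    'monkey_status': 'success'}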
def build_app_and_run_with_shrinker(app, options, temp_dir, app_dir, shrinker,
results, worker_id):
- print_thread(
- '[{}] Building {} with {}'.format(
- datetime.now().strftime("%H:%M:%S"),
- app.name,
- shrinker),
- worker_id)
- print_thread(
- 'To compile locally: '
- 'tools/run_on_app_dump.py --shrinker {} --r8-compilation-steps {} '
- '--app {} --minify {} --optimize {} --shrink {}'.format(
- shrinker,
- options.r8_compilation_steps,
- app.name,
- options.minify,
- options.optimize,
- options.shrink),
- worker_id)
- print_thread(
- 'HINT: use --shrinker r8-nolib --no-build if you have a local R8.jar',
- worker_id)
- recomp_jar = None
- status = 'success'
- if options.r8_compilation_steps < 1:
- return
- compilation_steps = 1 if app.skip_recompilation else options.r8_compilation_steps
- for compilation_step in range(0, compilation_steps):
- if status != 'success':
- break
print_thread(
- 'Compiling {} of {}'.format(compilation_step + 1, compilation_steps),
+ '[{}] Building {} with {}'.format(datetime.now().strftime("%H:%M:%S"),
+ app.name, shrinker), worker_id)
+ print_thread(
+ 'To compile locally: '
+ 'tools/run_on_app_dump.py --shrinker {} --r8-compilation-steps {} '
+ '--app {} --minify {} --optimize {} --shrink {}'.format(
+ shrinker, options.r8_compilation_steps, app.name, options.minify,
+ options.optimize, options.shrink), worker_id)
+ print_thread(
+ 'HINT: use --shrinker r8-nolib --no-build if you have a local R8.jar',
worker_id)
- result = {}
- try:
- start = time.time()
- (app_jar, mapping, new_recomp_jar) = \
- build_app_with_shrinker(
- app, options, temp_dir, app_dir, shrinker, compilation_step,
- compilation_steps, recomp_jar, worker_id=worker_id)
- end = time.time()
- dex_size = compute_size_of_dex_files_in_package(app_jar)
- result['build_status'] = 'success'
- result['recompilation_status'] = 'success'
- result['output_jar'] = app_jar
- result['output_mapping'] = mapping
- result['dex_size'] = dex_size
- result['duration'] = int((end - start) * 1000) # Wall time
- if (new_recomp_jar is None
- and not is_last_build(compilation_step, compilation_steps)):
- result['recompilation_status'] = 'failed'
- warn('Failed to build {} with {}'.format(app.name, shrinker))
- recomp_jar = new_recomp_jar
- except Exception as e:
- warn('Failed to build {} with {}'.format(app.name, shrinker))
- if e:
- print_thread('Error: ' + str(e), worker_id)
- result['build_status'] = 'failed'
- status = 'failed'
+ recomp_jar = None
+ status = 'success'
+ if options.r8_compilation_steps < 1:
+ return
+ compilation_steps = 1 if app.skip_recompilation else options.r8_compilation_steps
+ for compilation_step in range(0, compilation_steps):
+ if status != 'success':
+ break
+ print_thread(
+ 'Compiling {} of {}'.format(compilation_step + 1,
+ compilation_steps), worker_id)
+ result = {}
+ try:
+ start = time.time()
+ (app_jar, mapping, new_recomp_jar) = \
+ build_app_with_shrinker(
+ app, options, temp_dir, app_dir, shrinker, compilation_step,
+ compilation_steps, recomp_jar, worker_id=worker_id)
+ end = time.time()
+ dex_size = compute_size_of_dex_files_in_package(app_jar)
+ result['build_status'] = 'success'
+ result['recompilation_status'] = 'success'
+ result['output_jar'] = app_jar
+ result['output_mapping'] = mapping
+ result['dex_size'] = dex_size
+ result['duration'] = int((end - start) * 1000) # Wall time
+ if (new_recomp_jar is None and
+ not is_last_build(compilation_step, compilation_steps)):
+ result['recompilation_status'] = 'failed'
+ warn('Failed to build {} with {}'.format(app.name, shrinker))
+ recomp_jar = new_recomp_jar
+ except Exception as e:
+ warn('Failed to build {} with {}'.format(app.name, shrinker))
+ if e:
+ print_thread('Error: ' + str(e), worker_id)
+ result['build_status'] = 'failed'
+ status = 'failed'
- original_app_apk = os.path.join(app_dir, app.apk_app)
- app_apk_destination = os.path.join(
- temp_dir,"{}_{}.apk".format(app.id, compilation_step))
+ original_app_apk = os.path.join(app_dir, app.apk_app)
+ app_apk_destination = os.path.join(
+ temp_dir, "{}_{}.apk".format(app.id, compilation_step))
- if result.get('build_status') == 'success' and options.monkey:
- # Make a copy of the given APK, move the newly generated dex files into the
- # copied APK, and then sign the APK.
- apk_masseur.masseur(
- original_app_apk, dex=app_jar, resources='META-INF/services/*',
- out=app_apk_destination,
- quiet=options.quiet, logging=is_logging_enabled_for(app, options),
- keystore=options.keystore)
+ if result.get('build_status') == 'success' and options.monkey:
+ # Make a copy of the given APK, move the newly generated dex files into the
+ # copied APK, and then sign the APK.
+ apk_masseur.masseur(original_app_apk,
+ dex=app_jar,
+ resources='META-INF/services/*',
+ out=app_apk_destination,
+ quiet=options.quiet,
+ logging=is_logging_enabled_for(app, options),
+ keystore=options.keystore)
- result['monkey_status'] = 'success' if adb.run_monkey(
- app.id, options.emulator_id, app_apk_destination, options.monkey_events,
- options.quiet, is_logging_enabled_for(app, options)) else 'failed'
+ result['monkey_status'] = 'success' if adb.run_monkey(
+ app.id, options.emulator_id, app_apk_destination,
+ options.monkey_events, options.quiet,
+ is_logging_enabled_for(app, options)) else 'failed'
- if (result.get('build_status') == 'success'
- and options.run_tests and app.dump_test):
- if not os.path.isfile(app_apk_destination):
- apk_masseur.masseur(
- original_app_apk, dex=app_jar, resources='META-INF/services/*',
- out=app_apk_destination,
- quiet=options.quiet, logging=is_logging_enabled_for(app, options),
- keystore=options.keystore)
+ if (result.get('build_status') == 'success' and options.run_tests and
+ app.dump_test):
+ if not os.path.isfile(app_apk_destination):
+ apk_masseur.masseur(original_app_apk,
+ dex=app_jar,
+ resources='META-INF/services/*',
+ out=app_apk_destination,
+ quiet=options.quiet,
+ logging=is_logging_enabled_for(
+ app, options),
+ keystore=options.keystore)
- # Compile the tests with the mapping file.
- test_jar = build_test_with_shrinker(
- app, options, temp_dir, app_dir,shrinker, compilation_step,
- result['output_mapping'])
- if not test_jar:
- result['instrumentation_test_status'] = 'compilation_failed'
- else:
- original_test_apk = os.path.join(app_dir, app.apk_test)
- test_apk_destination = os.path.join(
- temp_dir,"{}_{}.test.apk".format(app.id_test, compilation_step))
- apk_masseur.masseur(
- original_test_apk, dex=test_jar, resources='META-INF/services/*',
- out=test_apk_destination,
- quiet=options.quiet, logging=is_logging_enabled_for(app, options),
- keystore=options.keystore)
- result['instrumentation_test_status'] = 'success' if adb.run_instrumented(
- app.id, app.id_test, options.emulator_id, app_apk_destination,
- test_apk_destination, options.quiet,
- is_logging_enabled_for(app, options)) else 'failed'
+ # Compile the tests with the mapping file.
+ test_jar = build_test_with_shrinker(app, options, temp_dir, app_dir,
+ shrinker, compilation_step,
+ result['output_mapping'])
+ if not test_jar:
+ result['instrumentation_test_status'] = 'compilation_failed'
+ else:
+ original_test_apk = os.path.join(app_dir, app.apk_test)
+ test_apk_destination = os.path.join(
+ temp_dir, "{}_{}.test.apk".format(app.id_test,
+ compilation_step))
+ apk_masseur.masseur(original_test_apk,
+ dex=test_jar,
+ resources='META-INF/services/*',
+ out=test_apk_destination,
+ quiet=options.quiet,
+ logging=is_logging_enabled_for(
+ app, options),
+ keystore=options.keystore)
+ result[
+ 'instrumentation_test_status'] = 'success' if adb.run_instrumented(
+ app.id, app.id_test, options.emulator_id,
+ app_apk_destination,
+ test_apk_destination, options.quiet,
+ is_logging_enabled_for(app, options)) else 'failed'
- results.append(result)
- if result.get('recompilation_status') != 'success':
- break
+ results.append(result)
+ if result.get('recompilation_status') != 'success':
+ break
+
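+# On golem the JDK is bundled with the benchmark under benchmarks/<app>/linux;
+# returning None presumably leaves JDK selection to compiledump's default.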
def get_jdk_home(options, app):
- if options.golem:
- return os.path.join('benchmarks', app.name, 'linux')
- return None
+ if options.golem:
+ return os.path.join('benchmarks', app.name, 'linux')
+ return None
+
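+# Compiles one step of the app dump to DEX. For every step but the last, the
+# dump is additionally recompiled to class files (min-api 10000, desugared
+# library disabled) so the next step can consume that jar as its program
+# input.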
def build_app_with_shrinker(app, options, temp_dir, app_dir, shrinker,
compilation_step_index, compilation_steps,
prev_recomp_jar, worker_id):
- def config_files_consumer(files):
- for file in files:
- compiledump.clean_config(file, options)
- remove_print_lines(file)
- args = AttrDict({
- 'dump': dump_for_app(app_dir, app),
- 'r8_jar': get_r8_jar(options, temp_dir, shrinker),
- 'r8_flags': options.r8_flags,
- 'disable_assertions': options.disable_assertions,
- 'version': options.version,
- 'compiler': 'r8full' if is_full_r8(shrinker) else 'r8',
- 'debug_agent': options.debug_agent,
- 'program_jar': prev_recomp_jar,
- 'nolib': not is_minified_r8(shrinker),
- 'config_files_consumer': config_files_consumer,
- 'properties': app.compiler_properties,
- 'disable_desugared_lib': False,
- 'print_times': options.print_times,
- })
- app_jar = os.path.join(
- temp_dir, '{}_{}_{}_dex_out.jar'.format(
- app.name, shrinker, compilation_step_index))
- app_mapping = os.path.join(
- temp_dir, '{}_{}_{}_dex_out.jar.map'.format(
- app.name, shrinker, compilation_step_index))
- recomp_jar = None
- jdkhome = get_jdk_home(options, app)
- with utils.TempDir() as compile_temp_dir:
- compile_result = compiledump.run1(
- compile_temp_dir, args, [], jdkhome, worker_id=worker_id)
- out_jar = os.path.join(compile_temp_dir, "out.jar")
- out_mapping = os.path.join(compile_temp_dir, "out.jar.map")
+ def config_files_consumer(files):
+ for file in files:
+ compiledump.clean_config(file, options)
+ remove_print_lines(file)
- if compile_result != 0 or not os.path.isfile(out_jar):
- assert False, 'Compilation of {} failed'.format(dump_for_app(app_dir, app))
- shutil.move(out_jar, app_jar)
- shutil.move(out_mapping, app_mapping)
+ args = AttrDict({
+ 'dump': dump_for_app(app_dir, app),
+ 'r8_jar': get_r8_jar(options, temp_dir, shrinker),
+ 'r8_flags': options.r8_flags,
+ 'disable_assertions': options.disable_assertions,
+ 'version': options.version,
+ 'compiler': 'r8full' if is_full_r8(shrinker) else 'r8',
+ 'debug_agent': options.debug_agent,
+ 'program_jar': prev_recomp_jar,
+ 'nolib': not is_minified_r8(shrinker),
+ 'config_files_consumer': config_files_consumer,
+ 'properties': app.compiler_properties,
+ 'disable_desugared_lib': False,
+ 'print_times': options.print_times,
+ })
- if compilation_step_index < compilation_steps - 1:
- args['classfile'] = True
- args['min_api'] = "10000"
- args['disable_desugared_lib'] = True
- compile_result = compiledump.run1(compile_temp_dir, args, [], jdkhome)
- if compile_result == 0:
- recomp_jar = os.path.join(
- temp_dir, '{}_{}_{}_cf_out.jar'.format(
- app.name, shrinker, compilation_step_index))
- shutil.move(out_jar, recomp_jar)
+ app_jar = os.path.join(
+ temp_dir, '{}_{}_{}_dex_out.jar'.format(app.name, shrinker,
+ compilation_step_index))
+ app_mapping = os.path.join(
+ temp_dir, '{}_{}_{}_dex_out.jar.map'.format(app.name, shrinker,
+ compilation_step_index))
+ recomp_jar = None
+ jdkhome = get_jdk_home(options, app)
+ with utils.TempDir() as compile_temp_dir:
+ compile_result = compiledump.run1(compile_temp_dir,
+ args, [],
+ jdkhome,
+ worker_id=worker_id)
+ out_jar = os.path.join(compile_temp_dir, "out.jar")
+ out_mapping = os.path.join(compile_temp_dir, "out.jar.map")
- return (app_jar, app_mapping, recomp_jar)
+ if compile_result != 0 or not os.path.isfile(out_jar):
+ assert False, 'Compilation of {} failed'.format(
+ dump_for_app(app_dir, app))
+ shutil.move(out_jar, app_jar)
+ shutil.move(out_mapping, app_mapping)
+
+ if compilation_step_index < compilation_steps - 1:
+ args['classfile'] = True
+ args['min_api'] = "10000"
+ args['disable_desugared_lib'] = True
+ compile_result = compiledump.run1(compile_temp_dir, args, [],
+ jdkhome)
+ if compile_result == 0:
+ recomp_jar = os.path.join(
+ temp_dir,
+ '{}_{}_{}_cf_out.jar'.format(app.name, shrinker,
+ compilation_step_index))
+ shutil.move(out_jar, recomp_jar)
+
+ return (app_jar, app_mapping, recomp_jar)
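+# Compiles the instrumentation-test dump against the app's freshly built
+# mapping: any existing '-applymapping' line in the test's Proguard config is
+# replaced with one pointing at |mapping|. Returns None if compilation fails.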
def build_test_with_shrinker(app, options, temp_dir, app_dir, shrinker,
compilation_step_index, mapping):
- def rewrite_files(files):
- add_applymapping = True
- for file in files:
- compiledump.clean_config(file, options)
- remove_print_lines(file)
- with open(file) as f:
- lines = f.readlines()
- with open(file, 'w') as f:
- for line in lines:
- if '-applymapping' not in line:
- f.write(line + '\n')
- if add_applymapping:
- f.write("-applymapping " + mapping + '\n')
- add_applymapping = False
+ def rewrite_files(files):
+ add_applymapping = True
+ for file in files:
+ compiledump.clean_config(file, options)
+ remove_print_lines(file)
+ with open(file) as f:
+ lines = f.readlines()
+ with open(file, 'w') as f:
+ for line in lines:
+ if '-applymapping' not in line:
+ f.write(line + '\n')
+ if add_applymapping:
+ f.write("-applymapping " + mapping + '\n')
+ add_applymapping = False
- args = AttrDict({
- 'dump': dump_test_for_app(app_dir, app),
- 'r8_jar': get_r8_jar(options, temp_dir, shrinker),
- 'disable_assertions': options.disable_assertions,
- 'version': options.version,
- 'compiler': 'r8full' if is_full_r8(shrinker) else 'r8',
- 'debug_agent': options.debug_agent,
- 'nolib': not is_minified_r8(shrinker),
- # The config file will have an -applymapping reference to an old map.
- # Update it to point to mapping file build in the compilation of the app.
- 'config_files_consumer': rewrite_files,
- })
+ args = AttrDict({
+ 'dump': dump_test_for_app(app_dir, app),
+ 'r8_jar': get_r8_jar(options, temp_dir, shrinker),
+ 'disable_assertions': options.disable_assertions,
+ 'version': options.version,
+ 'compiler': 'r8full' if is_full_r8(shrinker) else 'r8',
+ 'debug_agent': options.debug_agent,
+ 'nolib': not is_minified_r8(shrinker),
+        # The config file will have an -applymapping reference to an old map.
+        # Update it to point to the mapping file built in the compilation of
+        # the app.
+ 'config_files_consumer': rewrite_files,
+ })
- test_jar = os.path.join(
- temp_dir, '{}_{}_{}_test_out.jar'.format(
- app.name, shrinker, compilation_step_index))
+ test_jar = os.path.join(
+ temp_dir, '{}_{}_{}_test_out.jar'.format(app.name, shrinker,
+ compilation_step_index))
- with utils.TempDir() as compile_temp_dir:
- jdkhome = get_jdk_home(options, app)
- compile_result = compiledump.run1(compile_temp_dir, args, [], jdkhome)
- out_jar = os.path.join(compile_temp_dir, "out.jar")
- if compile_result != 0 or not os.path.isfile(out_jar):
- return None
- shutil.move(out_jar, test_jar)
+ with utils.TempDir() as compile_temp_dir:
+ jdkhome = get_jdk_home(options, app)
+ compile_result = compiledump.run1(compile_temp_dir, args, [], jdkhome)
+ out_jar = os.path.join(compile_temp_dir, "out.jar")
+ if compile_result != 0 or not os.path.isfile(out_jar):
+ return None
+ shutil.move(out_jar, test_jar)
- return test_jar
+ return test_jar
def log_results_for_apps(result_per_shrinker_per_app, options):
- print('')
- app_errors = 0
- for (app, result_per_shrinker) in result_per_shrinker_per_app:
- app_errors += (1 if log_results_for_app(app, result_per_shrinker, options)
- else 0)
- return app_errors
+ print('')
+ app_errors = 0
+ for (app, result_per_shrinker) in result_per_shrinker_per_app:
+ app_errors += (1 if log_results_for_app(app, result_per_shrinker,
+ options) else 0)
+ return app_errors
def log_results_for_app(app, result_per_shrinker, options, worker_id=None):
- if options.print_dexsegments:
- log_segments_for_app(app, result_per_shrinker, options, worker_id=worker_id)
- return False
- else:
- return log_comparison_results_for_app(app, result_per_shrinker, options, worker_id=worker_id)
+ if options.print_dexsegments:
+ log_segments_for_app(app,
+ result_per_shrinker,
+ options,
+ worker_id=worker_id)
+ return False
+ else:
+ return log_comparison_results_for_app(app,
+ result_per_shrinker,
+ options,
+ worker_id=worker_id)
def log_segments_for_app(app, result_per_shrinker, options, worker_id):
- for shrinker in SHRINKERS:
- if shrinker not in result_per_shrinker:
- continue
- for result in result_per_shrinker.get(shrinker):
- benchmark_name = '{}-{}'.format(options.print_dexsegments, app.name)
- utils.print_dexsegments(
- benchmark_name, [result.get('output_jar')], worker_id=worker_id)
- duration = result.get('duration')
- print_thread(
- '%s-Total(RunTimeRaw): %s ms' % (benchmark_name, duration),
- worker_id)
- print_thread(
- '%s-Total(CodeSize): %s' % (benchmark_name, result.get('dex_size')),
- worker_id)
+ for shrinker in SHRINKERS:
+ if shrinker not in result_per_shrinker:
+ continue
+ for result in result_per_shrinker.get(shrinker):
+ benchmark_name = '{}-{}'.format(options.print_dexsegments, app.name)
+ utils.print_dexsegments(benchmark_name, [result.get('output_jar')],
+ worker_id=worker_id)
+ duration = result.get('duration')
+ print_thread(
+ '%s-Total(RunTimeRaw): %s ms' % (benchmark_name, duration),
+ worker_id)
+ print_thread(
+ '%s-Total(CodeSize): %s' %
+ (benchmark_name, result.get('dex_size')), worker_id)
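+# Formats a size delta as a signed percentage. Worked examples:
+#   percentage_diff_as_string(200, 150)  # -> '-25%'
+#   percentage_diff_as_string(100, 125)  # -> '+25%'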
def percentage_diff_as_string(before, after):
- if after < before:
- return '-' + str(round((1.0 - after / before) * 100)) + '%'
- else:
- return '+' + str(round((after - before) / before * 100)) + '%'
+ if after < before:
+ return '-' + str(round((1.0 - after / before) * 100)) + '%'
+ else:
+ return '+' + str(round((after - before) / before * 100)) + '%'
-def log_comparison_results_for_app(app, result_per_shrinker, options, worker_id):
- print_thread(app.name + ':', worker_id)
- app_error = False
- if result_per_shrinker.get('status', 'success') != 'success':
- error_message = result_per_shrinker.get('error_message')
- print_thread(' skipped ({})'.format(error_message), worker_id)
- return
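+# Prints per-shrinker results for one app, comparing each dex size against
+# the ProGuard ('pg') baseline when one is present. Illustrative output line:
+#     dex size: 3141592 (-65536, -2%)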
+def log_comparison_results_for_app(app, result_per_shrinker, options,
+ worker_id):
+ print_thread(app.name + ':', worker_id)
+ app_error = False
+ if result_per_shrinker.get('status', 'success') != 'success':
+ error_message = result_per_shrinker.get('error_message')
+ print_thread(' skipped ({})'.format(error_message), worker_id)
+ return
- proguard_result = result_per_shrinker.get('pg', {})
- proguard_dex_size = float(proguard_result.get('dex_size', -1))
+ proguard_result = result_per_shrinker.get('pg', {})
+ proguard_dex_size = float(proguard_result.get('dex_size', -1))
- for shrinker in SHRINKERS:
- if shrinker not in result_per_shrinker:
- continue
- compilation_index = 1
- for result in result_per_shrinker.get(shrinker):
- build_status = result.get('build_status')
- if build_status != 'success' and build_status is not None:
- app_error = True
- warn(' {}-#{}: {}'.format(shrinker, compilation_index, build_status))
- continue
+ for shrinker in SHRINKERS:
+ if shrinker not in result_per_shrinker:
+ continue
+ compilation_index = 1
+ for result in result_per_shrinker.get(shrinker):
+ build_status = result.get('build_status')
+ if build_status != 'success' and build_status is not None:
+ app_error = True
+ warn(' {}-#{}: {}'.format(shrinker, compilation_index,
+ build_status))
+ continue
- if options.golem:
- print_thread(
- '%s(RunTimeRaw): %s ms' % (app.name, result.get('duration')),
- worker_id)
- print_thread(
- '%s(CodeSize): %s' % (app.name, result.get('dex_size')), worker_id)
- continue
+ if options.golem:
+ print_thread(
+ '%s(RunTimeRaw): %s ms' %
+ (app.name, result.get('duration')), worker_id)
+ print_thread(
+ '%s(CodeSize): %s' % (app.name, result.get('dex_size')),
+ worker_id)
+ continue
- print_thread(' {}-#{}:'.format(shrinker, compilation_index), worker_id)
- dex_size = result.get('dex_size')
- msg = ' dex size: {}'.format(dex_size)
- if options.print_runtimeraw:
- print_thread(
- ' run time raw: {} ms'.format(result.get('duration')), worker_id)
- if dex_size != proguard_dex_size and proguard_dex_size >= 0:
- msg = '{} ({}, {})'.format(
- msg, dex_size - proguard_dex_size,
- percentage_diff_as_string(proguard_dex_size, dex_size))
- success(msg) if dex_size < proguard_dex_size else warn(msg)
- else:
- print_thread(msg, worker_id)
+ print_thread(' {}-#{}:'.format(shrinker, compilation_index),
+ worker_id)
+ dex_size = result.get('dex_size')
+ msg = ' dex size: {}'.format(dex_size)
+ if options.print_runtimeraw:
+ print_thread(
+ ' run time raw: {} ms'.format(result.get('duration')),
+ worker_id)
+ if dex_size != proguard_dex_size and proguard_dex_size >= 0:
+ msg = '{} ({}, {})'.format(
+ msg, dex_size - proguard_dex_size,
+ percentage_diff_as_string(proguard_dex_size, dex_size))
+ success(msg) if dex_size < proguard_dex_size else warn(msg)
+ else:
+ print_thread(msg, worker_id)
- if options.monkey:
- monkey_status = result.get('monkey_status')
- if monkey_status != 'success':
- app_error = True
- warn(' monkey: {}'.format(monkey_status))
- else:
- success(' monkey: {}'.format(monkey_status))
+ if options.monkey:
+ monkey_status = result.get('monkey_status')
+ if monkey_status != 'success':
+ app_error = True
+ warn(' monkey: {}'.format(monkey_status))
+ else:
+ success(' monkey: {}'.format(monkey_status))
- if options.run_tests and 'instrumentation_test_status' in result:
- test_status = result.get('instrumentation_test_status')
- if test_status != 'success':
- warn(' instrumentation_tests: {}'.format(test_status))
- else:
- success(' instrumentation_tests: {}'.format(test_status))
+ if options.run_tests and 'instrumentation_test_status' in result:
+ test_status = result.get('instrumentation_test_status')
+ if test_status != 'success':
+ warn(' instrumentation_tests: {}'.format(test_status))
+ else:
+ success(' instrumentation_tests: {}'.format(test_status))
- recompilation_status = result.get('recompilation_status', '')
- if recompilation_status == 'failed':
- app_error = True
- warn(' recompilation {}-#{}: failed'.format(shrinker,
- compilation_index))
- continue
+ recompilation_status = result.get('recompilation_status', '')
+ if recompilation_status == 'failed':
+ app_error = True
+ warn(' recompilation {}-#{}: failed'.format(
+ shrinker, compilation_index))
+ continue
- compilation_index += 1
+ compilation_index += 1
- return app_error
+ return app_error
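+# Parses the command line. A typical invocation (app name and flags are
+# illustrative):
+#   tools/run_on_app_dump.py --app Tusky --monkey --monkey-events 250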
def parse_options(argv):
- result = argparse.ArgumentParser(description = 'Run/compile dump artifacts.')
- result.add_argument('--app',
- help='What app to run on',
- choices=[app.name for app in APPS],
- action='append')
- result.add_argument('--app-collection', '--app_collection',
- help='What app collection to run',
- choices=[collection.name for collection in
- APP_COLLECTIONS],
- action='append')
- result.add_argument('--app-logging-filter', '--app_logging_filter',
- help='The apps for which to turn on logging',
- action='append')
- result.add_argument('--bot',
- help='Running on bot, use third_party dependency.',
- default=False,
- action='store_true')
- result.add_argument('--generate-golem-config', '--generate_golem_config',
- help='Generate a new config for golem.',
- default=False,
- action='store_true')
- result.add_argument('--debug-agent',
- help='Enable Java debug agent and suspend compilation '
- '(default disabled)',
- default=False,
- action='store_true')
- result.add_argument('--disable-assertions', '--disable_assertions', '-da',
- help='Disable Java assertions when running the compiler '
- '(default enabled)',
- default=False,
- action='store_true')
- result.add_argument('--emulator-id', '--emulator_id',
- help='Id of the emulator to use',
- default='emulator-5554')
- result.add_argument('--golem',
- help='Running on golem, do not download',
- default=False,
- action='store_true')
- result.add_argument('--hash',
- help='The commit of R8 to use')
- result.add_argument('--internal',
- help='Run internal apps if set, otherwise run opensource',
- default=False,
- action='store_true')
- result.add_argument('--keystore',
- help='Path to app.keystore',
- default=os.path.join(utils.TOOLS_DIR, 'debug.keystore'))
- result.add_argument('--keystore-password', '--keystore_password',
- help='Password for app.keystore',
- default='android')
- result.add_argument('--minify',
- help='Force enable/disable minification' +
- ' (defaults to app proguard config)',
- choices=['default', 'force-enable', 'force-disable'],
- default='default')
- result.add_argument('--monkey',
- help='Whether to install and run app(s) with monkey',
- default=False,
- action='store_true')
- result.add_argument('--monkey-events', '--monkey_events',
- help='Number of events that the monkey should trigger',
- default=250,
- type=int)
- result.add_argument('--no-build', '--no_build',
- help='Run without building first (only when using ToT)',
- default=False,
- action='store_true')
- result.add_argument('--no-logging', '--no_logging',
- help='Disable logging except for errors',
- default=False,
- action='store_true')
- result.add_argument('--optimize',
- help='Force enable/disable optimizations' +
- ' (defaults to app proguard config)',
- choices=['default', 'force-enable', 'force-disable'],
- default='default')
- result.add_argument('--print-times',
- help='Print timing information from r8',
- default=False,
- action='store_true')
- result.add_argument('--print-dexsegments',
- metavar='BENCHMARKNAME',
- help='Print the sizes of individual dex segments as ' +
- '\'<BENCHMARKNAME>-<APP>-<segment>(CodeSize): '
- '<bytes>\'')
- result.add_argument('--print-runtimeraw',
- metavar='BENCHMARKNAME',
- help='Print the line \'<BENCHMARKNAME>(RunTimeRaw):' +
- ' <elapsed> ms\' at the end where <elapsed> is' +
- ' the elapsed time in milliseconds.')
- result.add_argument('--quiet',
- help='Disable verbose logging',
- default=False,
- action='store_true')
- result.add_argument('--r8-compilation-steps', '--r8_compilation_steps',
- help='Number of times R8 should be run on each app',
- default=2,
- type=int)
- result.add_argument('--r8-flags', '--r8_flags',
- help='Additional option(s) for the compiler.')
- result.add_argument('--run-tests', '--run_tests',
- help='Whether to run instrumentation tests',
- default=False,
- action='store_true')
- result.add_argument('--shrink',
- help='Force enable/disable shrinking' +
- ' (defaults to app proguard config)',
- choices=['default', 'force-enable', 'force-disable'],
- default='default')
- result.add_argument('--sign-apks', '--sign_apks',
- help='Whether the APKs should be signed',
- default=False,
- action='store_true')
- result.add_argument('--shrinker',
- help='The shrinkers to use (by default, all are run)',
- action='append')
- result.add_argument('--temp',
- help='A directory to use for temporaries and outputs.',
- default=None)
- result.add_argument('--version',
- default='main',
- help='The version of R8 to use (e.g., 1.4.51)')
- result.add_argument('--workers',
- help='Number of workers to use',
- default=1,
- type=int)
- (options, args) = result.parse_known_args(argv)
+ result = argparse.ArgumentParser(description='Run/compile dump artifacts.')
+ result.add_argument('--app',
+ help='What app to run on',
+ choices=[app.name for app in APPS],
+ action='append')
+ result.add_argument(
+ '--app-collection',
+ '--app_collection',
+ help='What app collection to run',
+ choices=[collection.name for collection in APP_COLLECTIONS],
+ action='append')
+ result.add_argument('--app-logging-filter',
+ '--app_logging_filter',
+ help='The apps for which to turn on logging',
+ action='append')
+ result.add_argument('--bot',
+ help='Running on bot, use third_party dependency.',
+ default=False,
+ action='store_true')
+ result.add_argument('--generate-golem-config',
+ '--generate_golem_config',
+ help='Generate a new config for golem.',
+ default=False,
+ action='store_true')
+ result.add_argument('--debug-agent',
+ help='Enable Java debug agent and suspend compilation '
+ '(default disabled)',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--disable-assertions',
+ '--disable_assertions',
+ '-da',
+ help='Disable Java assertions when running the compiler '
+ '(default enabled)',
+ default=False,
+ action='store_true')
+ result.add_argument('--emulator-id',
+ '--emulator_id',
+ help='Id of the emulator to use',
+ default='emulator-5554')
+ result.add_argument('--golem',
+ help='Running on golem, do not download',
+ default=False,
+ action='store_true')
+ result.add_argument('--hash', help='The commit of R8 to use')
+ result.add_argument(
+ '--internal',
+ help='Run internal apps if set, otherwise run opensource',
+ default=False,
+ action='store_true')
+ result.add_argument('--keystore',
+ help='Path to app.keystore',
+ default=os.path.join(utils.TOOLS_DIR, 'debug.keystore'))
+ result.add_argument('--keystore-password',
+ '--keystore_password',
+ help='Password for app.keystore',
+ default='android')
+ result.add_argument('--minify',
+ help='Force enable/disable minification' +
+ ' (defaults to app proguard config)',
+ choices=['default', 'force-enable', 'force-disable'],
+ default='default')
+ result.add_argument('--monkey',
+ help='Whether to install and run app(s) with monkey',
+ default=False,
+ action='store_true')
+ result.add_argument('--monkey-events',
+ '--monkey_events',
+ help='Number of events that the monkey should trigger',
+ default=250,
+ type=int)
+ result.add_argument('--no-build',
+ '--no_build',
+ help='Run without building first (only when using ToT)',
+ default=False,
+ action='store_true')
+ result.add_argument('--no-logging',
+ '--no_logging',
+ help='Disable logging except for errors',
+ default=False,
+ action='store_true')
+ result.add_argument('--optimize',
+ help='Force enable/disable optimizations' +
+ ' (defaults to app proguard config)',
+ choices=['default', 'force-enable', 'force-disable'],
+ default='default')
+ result.add_argument('--print-times',
+ help='Print timing information from r8',
+ default=False,
+ action='store_true')
+ result.add_argument('--print-dexsegments',
+ metavar='BENCHMARKNAME',
+ help='Print the sizes of individual dex segments as ' +
+ '\'<BENCHMARKNAME>-<APP>-<segment>(CodeSize): '
+ '<bytes>\'')
+ result.add_argument('--print-runtimeraw',
+ metavar='BENCHMARKNAME',
+ help='Print the line \'<BENCHMARKNAME>(RunTimeRaw):' +
+ ' <elapsed> ms\' at the end where <elapsed> is' +
+ ' the elapsed time in milliseconds.')
+ result.add_argument('--quiet',
+ help='Disable verbose logging',
+ default=False,
+ action='store_true')
+ result.add_argument('--r8-compilation-steps',
+ '--r8_compilation_steps',
+ help='Number of times R8 should be run on each app',
+ default=2,
+ type=int)
+ result.add_argument('--r8-flags',
+ '--r8_flags',
+ help='Additional option(s) for the compiler.')
+ result.add_argument('--run-tests',
+ '--run_tests',
+ help='Whether to run instrumentation tests',
+ default=False,
+ action='store_true')
+ result.add_argument('--shrink',
+ help='Force enable/disable shrinking' +
+ ' (defaults to app proguard config)',
+ choices=['default', 'force-enable', 'force-disable'],
+ default='default')
+ result.add_argument('--sign-apks',
+ '--sign_apks',
+ help='Whether the APKs should be signed',
+ default=False,
+ action='store_true')
+ result.add_argument('--shrinker',
+ help='The shrinkers to use (by default, all are run)',
+ action='append')
+ result.add_argument('--temp',
+ help='A directory to use for temporaries and outputs.',
+ default=None)
+ result.add_argument('--version',
+ default='main',
+ help='The version of R8 to use (e.g., 1.4.51)')
+ result.add_argument('--workers',
+ help='Number of workers to use',
+ default=1,
+ type=int)
+ (options, args) = result.parse_known_args(argv)
- if options.app or options.app_collection:
- if not options.app:
- options.app = []
- if not options.app_collection:
- options.app_collection = []
- options.apps = [
- app
- for app in APPS
- if app.name in options.app
- or any(collection in options.app_collection
- for collection in app.collections)]
- del options.app
- del options.app_collection
- else:
- options.apps = [app for app in APPS if app.internal == options.internal]
+ if options.app or options.app_collection:
+ if not options.app:
+ options.app = []
+ if not options.app_collection:
+ options.app_collection = []
+ options.apps = [
+ app for app in APPS if app.name in options.app or any(
+ collection in options.app_collection
+ for collection in app.collections)
+ ]
+ del options.app
+ del options.app_collection
+ else:
+ options.apps = [app for app in APPS if app.internal == options.internal]
- if options.app_logging_filter:
- for app_name in options.app_logging_filter:
- assert any(app.name == app_name for app in options.apps)
- if options.shrinker:
- for shrinker in options.shrinker:
- assert shrinker in SHRINKERS, (
- 'Shrinker must be one of %s' % ', '.join(SHRINKERS))
- else:
- options.shrinker = [shrinker for shrinker in SHRINKERS]
+ if options.app_logging_filter:
+ for app_name in options.app_logging_filter:
+ assert any(app.name == app_name for app in options.apps)
+ if options.shrinker:
+ for shrinker in options.shrinker:
+ assert shrinker in SHRINKERS, ('Shrinker must be one of %s' %
+ ', '.join(SHRINKERS))
+ else:
+ options.shrinker = [shrinker for shrinker in SHRINKERS]
- if options.hash or version_is_built_jar(options.version):
- # No need to build R8 if a specific version should be used.
- options.no_build = True
- if 'r8-nolib' in options.shrinker:
- warn('Skipping shrinker r8-nolib because a specific version '
- + 'of r8 was specified')
- options.shrinker.remove('r8-nolib')
- if 'r8-nolib-full' in options.shrinker:
- warn('Skipping shrinker r8-nolib-full because a specific version '
- + 'of r8 was specified')
- options.shrinker.remove('r8-nolib-full')
- return (options, args)
+ if options.hash or version_is_built_jar(options.version):
+ # No need to build R8 if a specific version should be used.
+ options.no_build = True
+ if 'r8-nolib' in options.shrinker:
+ warn('Skipping shrinker r8-nolib because a specific version ' +
+ 'of r8 was specified')
+ options.shrinker.remove('r8-nolib')
+ if 'r8-nolib-full' in options.shrinker:
+ warn('Skipping shrinker r8-nolib-full because a specific version ' +
+ 'of r8 was specified')
+ options.shrinker.remove('r8-nolib-full')
+ return (options, args)
def print_indented(s, indent):
- print(' ' * indent + s)
+ print(' ' * indent + s)
def get_sha256(gz_file):
- with open(gz_file, 'rb') as f:
- bytes = f.read() # read entire file as bytes
- return hashlib.sha256(bytes).hexdigest();
+ with open(gz_file, 'rb') as f:
+ bytes = f.read() # read entire file as bytes
+ return hashlib.sha256(bytes).hexdigest()
def get_sha_from_file(sha_file):
- with open(sha_file, 'r') as f:
- return f.readlines()[0]
+ with open(sha_file, 'r') as f:
+ return f.readlines()[0]
def print_golem_config(options):
- print('// AUTOGENERATED FILE from tools/run_on_app_dump.py in R8 repo')
- print('part of r8_config;')
- print('')
- print('final Suite dumpsSuite = Suite("OpenSourceAppDumps");')
- print('')
- print('createOpenSourceAppBenchmarks() {')
- print_indented('final cpus = ["Lenovo M90"];', 2)
- print_indented('final targetsCompat = ["R8"];', 2)
- print_indented('final targetsFull = ["R8-full-minify-optimize-shrink"];', 2)
- # Avoid calculating this for every app
- jdk_gz = jdk.GetJdkHome() + '.tar.gz'
- add_golem_resource(2, jdk_gz, 'openjdk')
- for app in options.apps:
- if app.folder and not app.internal:
- indentation = 2;
- print_indented('{', indentation)
- indentation = 4
- print_indented('final name = "%s";' % app.name, indentation)
- print_indented('final benchmark =', indentation)
- print_indented(
- 'StandardBenchmark(name, [Metric.RunTimeRaw, Metric.CodeSize]);',
- indentation + 4)
- if app.golem_duration != None:
- print_indented(
- 'final timeout = const Duration(seconds: %s);' % app.golem_duration,
- indentation)
- print_indented(
- 'ExecutionManagement.addTimeoutConstraint'
- '(timeout, benchmark: benchmark);', indentation)
- app_gz = os.path.join(utils.OPENSOURCE_DUMPS_DIR, app.folder + '.tar.gz')
- name = 'appResource'
- add_golem_resource(indentation, app_gz, name)
- print_golem_config_target('Compat', 'r8', app, indentation)
- print_golem_config_target(
- 'Full',
- 'r8-full',
- app,
- indentation,
- minify='force-enable',
- optimize='force-enable',
- shrink='force-enable')
- print_indented('dumpsSuite.addBenchmark(name);', indentation)
- indentation = 2
- print_indented('}', indentation)
- print('}')
+ print('// AUTOGENERATED FILE from tools/run_on_app_dump.py in R8 repo')
+ print('part of r8_config;')
+ print('')
+ print('final Suite dumpsSuite = Suite("OpenSourceAppDumps");')
+ print('')
+ print('createOpenSourceAppBenchmarks() {')
+ print_indented('final cpus = ["Lenovo M90"];', 2)
+ print_indented('final targetsCompat = ["R8"];', 2)
+ print_indented('final targetsFull = ["R8-full-minify-optimize-shrink"];', 2)
+ # Avoid calculating this for every app
+ jdk_gz = jdk.GetJdkHome() + '.tar.gz'
+ add_golem_resource(2, jdk_gz, 'openjdk')
+ for app in options.apps:
+ if app.folder and not app.internal:
+ indentation = 2
+ print_indented('{', indentation)
+ indentation = 4
+ print_indented('final name = "%s";' % app.name, indentation)
+ print_indented('final benchmark =', indentation)
+ print_indented(
+ 'StandardBenchmark(name, [Metric.RunTimeRaw, Metric.CodeSize]);',
+ indentation + 4)
+ if app.golem_duration != None:
+ print_indented(
+ 'final timeout = const Duration(seconds: %s);' %
+ app.golem_duration, indentation)
+ print_indented(
+ 'ExecutionManagement.addTimeoutConstraint'
+ '(timeout, benchmark: benchmark);', indentation)
+ app_gz = os.path.join(utils.OPENSOURCE_DUMPS_DIR,
+ app.folder + '.tar.gz')
+ name = 'appResource'
+ add_golem_resource(indentation, app_gz, name)
+ print_golem_config_target('Compat', 'r8', app, indentation)
+ print_golem_config_target('Full',
+ 'r8-full',
+ app,
+ indentation,
+ minify='force-enable',
+ optimize='force-enable',
+ shrink='force-enable')
+ print_indented('dumpsSuite.addBenchmark(name);', indentation)
+ indentation = 2
+ print_indented('}', indentation)
+ print('}')
-def print_golem_config_target(
- target, shrinker, app, indentation,
- minify='default', optimize='default', shrink='default'):
- options="options" + target
- print_indented(
- 'final %s = benchmark.addTargets(noImplementation, targets%s);'
- % (options, target),
- indentation)
- print_indented('%s.cpus = cpus;' % options, indentation)
- print_indented('%s.isScript = true;' % options, indentation)
- print_indented('%s.fromRevision = 9700;' % options, indentation);
- print_indented('%s.mainFile = "tools/run_on_app_dump.py "' % options,
- indentation)
- print_indented('"--golem --disable-assertions --quiet --shrinker %s --app %s "'
- % (shrinker, app.name),
- indentation + 4)
- print_indented('"--minify %s --optimize %s --shrink %s";'
- % (minify, optimize, shrink),
- indentation + 4)
- print_indented('%s.resources.add(appResource);' % options, indentation)
- print_indented('%s.resources.add(openjdk);' % options, indentation)
+
+def print_golem_config_target(target,
+ shrinker,
+ app,
+ indentation,
+ minify='default',
+ optimize='default',
+ shrink='default'):
+ options = "options" + target
+ print_indented(
+ 'final %s = benchmark.addTargets(noImplementation, targets%s);' %
+ (options, target), indentation)
+ print_indented('%s.cpus = cpus;' % options, indentation)
+ print_indented('%s.isScript = true;' % options, indentation)
+ print_indented('%s.fromRevision = 9700;' % options, indentation)
+ print_indented('%s.mainFile = "tools/run_on_app_dump.py "' % options,
+ indentation)
+ print_indented(
+ '"--golem --disable-assertions --quiet --shrinker %s --app %s "' %
+ (shrinker, app.name), indentation + 4)
+ print_indented(
+ '"--minify %s --optimize %s --shrink %s";' % (minify, optimize, shrink),
+ indentation + 4)
+ print_indented('%s.resources.add(appResource);' % options, indentation)
+ print_indented('%s.resources.add(openjdk);' % options, indentation)
+
def add_golem_resource(indentation, gz, name, sha256=None):
- sha = gz + '.sha1'
- if not sha256:
- # Golem uses a sha256 of the file in the cache, and you need to specify that.
- download_sha(sha, False, quiet=True)
- sha256 = get_sha256(gz)
- sha = get_sha_from_file(sha)
- print_indented('final %s = BenchmarkResource("",' % name, indentation)
- print_indented('type: BenchmarkResourceType.storage,', indentation + 4)
- print_indented('uri: "gs://r8-deps/%s",' % sha, indentation + 4)
- # Make dart formatter happy.
- if indentation > 2:
- print_indented('hash:', indentation + 4)
- print_indented('"%s",' % sha256, indentation + 8)
- else:
- print_indented('hash: "%s",' % sha256, indentation + 4)
- print_indented('extract: "gz");', indentation + 4);
+ sha = gz + '.sha1'
+ if not sha256:
+        # Golem identifies the file in its cache by sha256, so it must be specified.
+ download_sha(sha, False, quiet=True)
+ sha256 = get_sha256(gz)
+ sha = get_sha_from_file(sha)
+ print_indented('final %s = BenchmarkResource("",' % name, indentation)
+ print_indented('type: BenchmarkResourceType.storage,', indentation + 4)
+ print_indented('uri: "gs://r8-deps/%s",' % sha, indentation + 4)
+ # Make dart formatter happy.
+ if indentation > 2:
+ print_indented('hash:', indentation + 4)
+ print_indented('"%s",' % sha256, indentation + 8)
+ else:
+ print_indented('hash: "%s",' % sha256, indentation + 4)
+ print_indented('extract: "gz");', indentation + 4)
+
def main(argv):
- (options, args) = parse_options(argv)
+ (options, args) = parse_options(argv)
- if options.bot:
- options.no_logging = True
- options.shrinker = ['r8', 'r8-full']
- print(options.shrinker)
+ if options.bot:
+ options.no_logging = True
+ options.shrinker = ['r8', 'r8-full']
+ print(options.shrinker)
- if options.golem:
- options.disable_assertions = True
- options.no_build = True
- options.r8_compilation_steps = 1
- options.quiet = True
- options.no_logging = True
+ if options.golem:
+ options.disable_assertions = True
+ options.no_build = True
+ options.r8_compilation_steps = 1
+ options.quiet = True
+ options.no_logging = True
- if options.generate_golem_config:
- print_golem_config(options)
- return 0
+ if options.generate_golem_config:
+ print_golem_config(options)
+ return 0
- with utils.TempDir() as temp_dir:
- if options.temp:
- temp_dir = options.temp
- os.makedirs(temp_dir, exist_ok=True)
- if options.hash:
- # Download r8-<hash>.jar from
- # https://storage.googleapis.com/r8-releases/raw/.
- target = 'r8-{}.jar'.format(options.hash)
- update_prebuilds_in_android.download_hash(
- temp_dir, 'com/android/tools/r8/' + options.hash, target)
- as_utils.MoveFile(
- os.path.join(temp_dir, target), os.path.join(temp_dir, 'r8lib.jar'),
- quiet=options.quiet)
- elif version_is_built_jar(options.version):
- # Download r8-<version>.jar from
- # https://storage.googleapis.com/r8-releases/raw/.
- target = 'r8-{}.jar'.format(options.version)
- update_prebuilds_in_android.download_version(
- temp_dir, 'com/android/tools/r8/' + options.version, target)
- as_utils.MoveFile(
- os.path.join(temp_dir, target), os.path.join(temp_dir, 'r8lib.jar'),
- quiet=options.quiet)
- elif options.version == 'main':
- if not options.no_build:
- gradle.RunGradle([utils.GRADLE_TASK_RETRACE, utils.GRADLE_TASK_R8,
- '-Pno_internal'])
- build_r8lib = False
- for shrinker in options.shrinker:
- if is_minified_r8(shrinker):
- build_r8lib = True
- if build_r8lib:
- gradle.RunGradle([utils.GRADLE_TASK_R8LIB, '-Pno_internal'])
- # Make a copy of r8.jar and r8lib.jar such that they stay the same for
- # the entire execution of this script.
- if 'r8-nolib' in options.shrinker or 'r8-nolib-full' in options.shrinker:
- assert os.path.isfile(utils.R8_JAR), 'Cannot build without r8.jar'
- shutil.copyfile(utils.R8_JAR, os.path.join(temp_dir, 'r8.jar'))
- if 'r8' in options.shrinker or 'r8-full' in options.shrinker:
- assert os.path.isfile(utils.R8LIB_JAR), 'Cannot build without r8lib.jar'
- shutil.copyfile(utils.R8LIB_JAR, os.path.join(temp_dir, 'r8lib.jar'))
+ with utils.TempDir() as temp_dir:
+ if options.temp:
+ temp_dir = options.temp
+ os.makedirs(temp_dir, exist_ok=True)
+ if options.hash:
+ # Download r8-<hash>.jar from
+ # https://storage.googleapis.com/r8-releases/raw/.
+ target = 'r8-{}.jar'.format(options.hash)
+ update_prebuilds_in_android.download_hash(
+ temp_dir, 'com/android/tools/r8/' + options.hash, target)
+ as_utils.MoveFile(os.path.join(temp_dir, target),
+ os.path.join(temp_dir, 'r8lib.jar'),
+ quiet=options.quiet)
+ elif version_is_built_jar(options.version):
+ # Download r8-<version>.jar from
+ # https://storage.googleapis.com/r8-releases/raw/.
+ target = 'r8-{}.jar'.format(options.version)
+ update_prebuilds_in_android.download_version(
+ temp_dir, 'com/android/tools/r8/' + options.version, target)
+ as_utils.MoveFile(os.path.join(temp_dir, target),
+ os.path.join(temp_dir, 'r8lib.jar'),
+ quiet=options.quiet)
+ elif options.version == 'main':
+ if not options.no_build:
+ gradle.RunGradle([
+ utils.GRADLE_TASK_RETRACE, utils.GRADLE_TASK_R8,
+ '-Pno_internal'
+ ])
+ build_r8lib = False
+ for shrinker in options.shrinker:
+ if is_minified_r8(shrinker):
+ build_r8lib = True
+ if build_r8lib:
+ gradle.RunGradle([utils.GRADLE_TASK_R8LIB, '-Pno_internal'])
+ # Make a copy of r8.jar and r8lib.jar such that they stay the same for
+ # the entire execution of this script.
+ if 'r8-nolib' in options.shrinker or 'r8-nolib-full' in options.shrinker:
+ assert os.path.isfile(
+ utils.R8_JAR), 'Cannot build without r8.jar'
+ shutil.copyfile(utils.R8_JAR, os.path.join(temp_dir, 'r8.jar'))
+ if 'r8' in options.shrinker or 'r8-full' in options.shrinker:
+ assert os.path.isfile(
+ utils.R8LIB_JAR), 'Cannot build without r8lib.jar'
+ shutil.copyfile(utils.R8LIB_JAR,
+ os.path.join(temp_dir, 'r8lib.jar'))
- jobs = []
- result_per_shrinker_per_app = []
- for app in options.apps:
- if app.skip:
- continue
- result = {}
- result_per_shrinker_per_app.append((app, result))
- jobs.append(create_job(app, options, result, temp_dir))
- thread_utils.run_in_parallel(
- jobs,
- number_of_workers=options.workers,
- stop_on_first_failure=False)
- errors = log_results_for_apps(result_per_shrinker_per_app, options)
- if errors > 0:
- dest = 'gs://r8-test-results/r8-libs/' + str(int(time.time()))
- utils.upload_file_to_cloud_storage(os.path.join(temp_dir, 'r8lib.jar'), dest)
- print('R8lib saved to %s' % dest)
- return errors
+ jobs = []
+ result_per_shrinker_per_app = []
+ for app in options.apps:
+ if app.skip:
+ continue
+ result = {}
+ result_per_shrinker_per_app.append((app, result))
+ jobs.append(create_job(app, options, result, temp_dir))
+ thread_utils.run_in_parallel(jobs,
+ number_of_workers=options.workers,
+ stop_on_first_failure=False)
+ errors = log_results_for_apps(result_per_shrinker_per_app, options)
+ if errors > 0:
+ dest = 'gs://r8-test-results/r8-libs/' + str(int(time.time()))
+ utils.upload_file_to_cloud_storage(
+ os.path.join(temp_dir, 'r8lib.jar'), dest)
+ print('R8lib saved to %s' % dest)
+ return errors
+
def create_job(app, options, result, temp_dir):
- return lambda worker_id: run_job(
- app, options, result, temp_dir, worker_id)
+ return lambda worker_id: run_job(app, options, result, temp_dir, worker_id)
+
def run_job(app, options, result, temp_dir, worker_id):
- job_temp_dir = os.path.join(temp_dir, str(worker_id or 0))
- os.makedirs(job_temp_dir, exist_ok=True)
- result.update(get_results_for_app(app, options, job_temp_dir, worker_id))
- return 0
+ job_temp_dir = os.path.join(temp_dir, str(worker_id or 0))
+ os.makedirs(job_temp_dir, exist_ok=True)
+ result.update(get_results_for_app(app, options, job_temp_dir, worker_id))
+ return 0
+
def success(message):
- CGREEN = '\033[32m'
- CEND = '\033[0m'
- print(CGREEN + message + CEND)
+ CGREEN = '\033[32m'
+ CEND = '\033[0m'
+ print(CGREEN + message + CEND)
def warn(message):
- CRED = '\033[91m'
- CEND = '\033[0m'
- print(CRED + message + CEND)
+ CRED = '\033[91m'
+ CEND = '\033[0m'
+ print(CRED + message + CEND)
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
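
Note on the argparse pattern above: parse_options registers both a dashed and
an underscored spelling for most flags. A minimal standalone sketch of how
argparse handles such aliases (the option name below is illustrative, not tied
to the script):

import argparse

parser = argparse.ArgumentParser(description='Option alias sketch.')
# argparse derives the destination from the first long option string, so both
# spellings below store into options.app_collection.
parser.add_argument('--app-collection', '--app_collection', action='append')
options = parser.parse_args(['--app_collection', 'x', '--app-collection', 'y'])
assert options.app_collection == ['x', 'y']
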
diff --git a/tools/sanitize_libraries.py b/tools/sanitize_libraries.py
index bc61d50..671d3a9 100755
--- a/tools/sanitize_libraries.py
+++ b/tools/sanitize_libraries.py
@@ -7,102 +7,103 @@
import sys
import zipfile
+
# Proguard looks up program classes before library classes. R8 does not do
# this (it used to): R8 checks library classes before program classes. Some
# apps have duplicate classes in both the library and the program. To make
# these apps work with R8, simulate program-before-library lookup by creating
# a new library jar that contains all of the provided library classes that
# are not also program classes.
-def SanitizeLibrariesInPgconf(
- sanitized_lib_path,
- sanitized_pgconf_path,
- pgconfs,
- injars = None,
- libraryjars = None):
- with open(sanitized_pgconf_path, 'w') as sanitized_pgconf:
- injars = [] if injars is None else injars
- libraryjars = [] if libraryjars is None else libraryjars
- for pgconf in pgconfs:
- pgconf_dirname = os.path.abspath(os.path.dirname(pgconf))
- first_library_jar = True
- with open(pgconf) as pgconf_file:
- for line in pgconf_file:
- trimmed = line.strip()
- if trimmed.startswith('-injars'):
- # Collect -injars and leave them in the configuration.
- injar = os.path.join(
- pgconf_dirname, trimmed[len('-injars'):].strip())
- injars.append(injar)
- sanitized_pgconf.write('-injars {}\n'.format(injar))
- elif trimmed.startswith('-libraryjars'):
- # Collect -libraryjars and replace them with the sanitized library.
- libraryjar = os.path.join(
- pgconf_dirname, trimmed[len('-libraryjars'):].strip())
- libraryjars.append(libraryjar)
- if first_library_jar:
- sanitized_pgconf.write(
- '-libraryjars {}\n'.format(sanitized_lib_path))
- first_library_jar = False
- sanitized_pgconf.write('# {}'.format(line))
- else:
- sanitized_pgconf.write(line)
+def SanitizeLibrariesInPgconf(sanitized_lib_path,
+ sanitized_pgconf_path,
+ pgconfs,
+ injars=None,
+ libraryjars=None):
+ with open(sanitized_pgconf_path, 'w') as sanitized_pgconf:
+ injars = [] if injars is None else injars
+ libraryjars = [] if libraryjars is None else libraryjars
+ for pgconf in pgconfs:
+ pgconf_dirname = os.path.abspath(os.path.dirname(pgconf))
+ first_library_jar = True
+ with open(pgconf) as pgconf_file:
+ for line in pgconf_file:
+ trimmed = line.strip()
+ if trimmed.startswith('-injars'):
+ # Collect -injars and leave them in the configuration.
+ injar = os.path.join(pgconf_dirname,
+ trimmed[len('-injars'):].strip())
+ injars.append(injar)
+ sanitized_pgconf.write('-injars {}\n'.format(injar))
+ elif trimmed.startswith('-libraryjars'):
+ # Collect -libraryjars and replace them with the sanitized library.
+ libraryjar = os.path.join(
+ pgconf_dirname,
+ trimmed[len('-libraryjars'):].strip())
+ libraryjars.append(libraryjar)
+ if first_library_jar:
+ sanitized_pgconf.write(
+ '-libraryjars {}\n'.format(sanitized_lib_path))
+ first_library_jar = False
+ sanitized_pgconf.write('# {}'.format(line))
+ else:
+ sanitized_pgconf.write(line)
- SanitizeLibraries(sanitized_lib_path, libraryjars, injars)
+ SanitizeLibraries(sanitized_lib_path, libraryjars, injars)
def SanitizeLibraries(sanitized_lib_path, libraryjars, injars):
- program_entries = set()
- library_entries = set()
+ program_entries = set()
+ library_entries = set()
- for injar in injars:
- with zipfile.ZipFile(injar, 'r') as injar_zf:
- for zipinfo in injar_zf.infolist():
- program_entries.add(zipinfo.filename)
+ for injar in injars:
+ with zipfile.ZipFile(injar, 'r') as injar_zf:
+ for zipinfo in injar_zf.infolist():
+ program_entries.add(zipinfo.filename)
- with zipfile.ZipFile(sanitized_lib_path, 'w') as output_zf:
- for libraryjar in libraryjars:
- with zipfile.ZipFile(libraryjar, 'r') as input_zf:
- for zipinfo in input_zf.infolist():
- if (not zipinfo.filename in program_entries
- and not zipinfo.filename in library_entries):
- library_entries.add(zipinfo.filename)
- output_zf.writestr(zipinfo, input_zf.read(zipinfo))
+ with zipfile.ZipFile(sanitized_lib_path, 'w') as output_zf:
+ for libraryjar in libraryjars:
+ with zipfile.ZipFile(libraryjar, 'r') as input_zf:
+ for zipinfo in input_zf.infolist():
+ if (not zipinfo.filename in program_entries and
+ not zipinfo.filename in library_entries):
+ library_entries.add(zipinfo.filename)
+ output_zf.writestr(zipinfo, input_zf.read(zipinfo))
def usage(argv, error):
- print(error)
- print("Usage: sanitize_libraries.py <sanitized_lib> <sanitized_pgconf> ("
- + "--injar <existing_injar>"
- + "|--libraryjar <existing_library_jar>"
- + "|--pgconf <existing_pgconf>)+")
- return 1
+ print(error)
+ print("Usage: sanitize_libraries.py <sanitized_lib> <sanitized_pgconf> (" +
+ "--injar <existing_injar>" + "|--libraryjar <existing_library_jar>" +
+ "|--pgconf <existing_pgconf>)+")
+ return 1
def main(argv):
- if (len(argv) < 4):
- return usage(argv, "Wrong number of arguments!")
- pgconfs = []
- injars = []
- libraryjars = []
- i = 2
- while i < len(argv):
- directive = argv[i]
- if directive not in ['--pgconf', '--injar', '--libraryjar']:
- return usage(
- argv,
- 'Unexpected argument, expected one of --pgconf, --injar, and '
- + '--libraryjar.')
- if i + 1 >= len(argv):
- return usage(argv, 'Expected argument after ' + directive + '.')
- file = argv[i + 1]
- if directive == '--pgconf':
- pgconfs.append(file)
- elif directive == '--injar':
- injars.append(file)
- elif directive == '--libraryjar':
- libraryjars.append(file)
- i = i + 2
- SanitizeLibrariesInPgconf(argv[0], argv[1], pgconfs, injars, libraryjars)
+ if (len(argv) < 4):
+ return usage(argv, "Wrong number of arguments!")
+ pgconfs = []
+ injars = []
+ libraryjars = []
+ i = 2
+ while i < len(argv):
+ directive = argv[i]
+ if directive not in ['--pgconf', '--injar', '--libraryjar']:
+ return usage(
+ argv,
+ 'Unexpected argument, expected one of --pgconf, --injar, and ' +
+ '--libraryjar.')
+ if i + 1 >= len(argv):
+ return usage(argv, 'Expected argument after ' + directive + '.')
+ file = argv[i + 1]
+ if directive == '--pgconf':
+ pgconfs.append(file)
+ elif directive == '--injar':
+ injars.append(file)
+ elif directive == '--libraryjar':
+ libraryjars.append(file)
+ i = i + 2
+ SanitizeLibrariesInPgconf(argv[0], argv[1], pgconfs, injars, libraryjars)
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
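
The reformatted sanitizer keeps its original contract: it reads -injars and
-libraryjars from the given Proguard configurations, writes a rewritten
configuration, and emits a library jar stripped of classes that also occur in
the program. A hypothetical library-style invocation, with placeholder paths
(none of these files are part of this change):

from sanitize_libraries import SanitizeLibrariesInPgconf

# Writes sanitized.pgconf with every -libraryjars replaced by a single
# reference to sanitized_lib.jar, then builds that jar from the library
# classes that are not also program classes.
SanitizeLibrariesInPgconf('sanitized_lib.jar', 'sanitized.pgconf',
                          ['app.pgconf'])
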
diff --git a/tools/startup/adb_utils.py b/tools/startup/adb_utils.py
index d4f70a1..6ce19ab 100755
--- a/tools/startup/adb_utils.py
+++ b/tools/startup/adb_utils.py
@@ -17,458 +17,511 @@
import profile_utils
import utils
-DEVNULL=subprocess.DEVNULL
+DEVNULL = subprocess.DEVNULL
+
class ProcessReader(threading.Thread):
- def __init__(self, process):
- threading.Thread.__init__(self)
- self.lines = []
- self.process = process
+ def __init__(self, process):
+ threading.Thread.__init__(self)
+ self.lines = []
+ self.process = process
- def run(self):
- for line in self.process.stdout:
- line = line.decode('utf-8').strip()
- self.lines.append(line)
+ def run(self):
+ for line in self.process.stdout:
+ line = line.decode('utf-8').strip()
+ self.lines.append(line)
- def stop(self):
- self.process.kill()
+ def stop(self):
+ self.process.kill()
+
class ScreenState(Enum):
- OFF_LOCKED = 1,
- OFF_UNLOCKED = 2
- ON_LOCKED = 3
- ON_UNLOCKED = 4
+    OFF_LOCKED = 1
+ OFF_UNLOCKED = 2
+ ON_LOCKED = 3
+ ON_UNLOCKED = 4
- def is_off(self):
- return self == ScreenState.OFF_LOCKED or self == ScreenState.OFF_UNLOCKED
+ def is_off(self):
+ return self == ScreenState.OFF_LOCKED or self == ScreenState.OFF_UNLOCKED
- def is_on(self):
- return self == ScreenState.ON_LOCKED or self == ScreenState.ON_UNLOCKED
+ def is_on(self):
+ return self == ScreenState.ON_LOCKED or self == ScreenState.ON_UNLOCKED
- def is_on_and_locked(self):
- return self == ScreenState.ON_LOCKED
+ def is_on_and_locked(self):
+ return self == ScreenState.ON_LOCKED
- def is_on_and_unlocked(self):
- return self == ScreenState.ON_UNLOCKED
+ def is_on_and_unlocked(self):
+ return self == ScreenState.ON_UNLOCKED
+
def broadcast(action, component, device_id=None):
- print('Sending broadcast %s' % action)
- cmd = create_adb_cmd('shell am broadcast -a %s %s' % (action, component), device_id)
- return subprocess.check_output(cmd).decode('utf-8').strip().splitlines()
+ print('Sending broadcast %s' % action)
+ cmd = create_adb_cmd('shell am broadcast -a %s %s' % (action, component),
+ device_id)
+ return subprocess.check_output(cmd).decode('utf-8').strip().splitlines()
+
def build_apks_from_bundle(bundle, output, overwrite=False):
- print('Building %s' % bundle)
- cmd = [
- 'java', '-jar', utils.BUNDLETOOL_JAR,
- 'build-apks',
- '--bundle=%s' % bundle,
- '--output=%s' % output]
- if overwrite:
- cmd.append('--overwrite')
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+ print('Building %s' % bundle)
+ cmd = [
+ 'java', '-jar', utils.BUNDLETOOL_JAR, 'build-apks',
+ '--bundle=%s' % bundle,
+ '--output=%s' % output
+ ]
+ if overwrite:
+ cmd.append('--overwrite')
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
def capture_screen(target, device_id=None):
- print('Taking screenshot to %s' % target)
- tmp = '/sdcard/screencap.png'
- cmd = create_adb_cmd('shell screencap -p %s' % tmp, device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
- pull(tmp, target, device_id)
+ print('Taking screenshot to %s' % target)
+ tmp = '/sdcard/screencap.png'
+ cmd = create_adb_cmd('shell screencap -p %s' % tmp, device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+ pull(tmp, target, device_id)
+
def create_adb_cmd(arguments, device_id=None):
- assert isinstance(arguments, list) or isinstance(arguments, str)
- cmd = ['adb']
- if device_id is not None:
- cmd.append('-s')
- cmd.append(device_id)
- cmd.extend(arguments if isinstance(arguments, list) else arguments.split(' '))
- return cmd
+ assert isinstance(arguments, list) or isinstance(arguments, str)
+ cmd = ['adb']
+ if device_id is not None:
+ cmd.append('-s')
+ cmd.append(device_id)
+ cmd.extend(
+ arguments if isinstance(arguments, list) else arguments.split(' '))
+ return cmd
+
def capture_app_profile_data(app_id, device_id=None):
- ps_cmd = create_adb_cmd('shell ps -o NAME', device_id)
- stdout = subprocess.check_output(ps_cmd).decode('utf-8').strip()
- killed_any = False
- for process_name in stdout.splitlines():
- if process_name.startswith(app_id):
- print('Flushing profile for process %s' % process_name)
- killall_cmd = create_adb_cmd(
- 'shell killall -s SIGUSR1 %s' % process_name, device_id)
- killall_result = subprocess.run(killall_cmd, capture_output=True)
- stdout = killall_result.stdout.decode('utf-8')
- stderr = killall_result.stderr.decode('utf-8')
- if killall_result.returncode == 0:
- killed_any = True
- else:
- print('Error: stdout: %s, stderr: %s' % (stdout, stderr))
- time.sleep(5)
- assert killed_any, 'Expected to find at least one process'
+ ps_cmd = create_adb_cmd('shell ps -o NAME', device_id)
+ stdout = subprocess.check_output(ps_cmd).decode('utf-8').strip()
+ killed_any = False
+ for process_name in stdout.splitlines():
+ if process_name.startswith(app_id):
+ print('Flushing profile for process %s' % process_name)
+ killall_cmd = create_adb_cmd(
+ 'shell killall -s SIGUSR1 %s' % process_name, device_id)
+ killall_result = subprocess.run(killall_cmd, capture_output=True)
+ stdout = killall_result.stdout.decode('utf-8')
+ stderr = killall_result.stderr.decode('utf-8')
+ if killall_result.returncode == 0:
+ killed_any = True
+ else:
+ print('Error: stdout: %s, stderr: %s' % (stdout, stderr))
+ time.sleep(5)
+ assert killed_any, 'Expected to find at least one process'
+
def check_app_has_profile_data(app_id, device_id=None):
- profile_path = get_profile_path(app_id)
- cmd = create_adb_cmd(
- 'shell du /data/misc/profiles/cur/0/%s/primary.prof' % app_id,
- device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- size_str = stdout[:stdout.index('\t')]
- assert size_str.isdigit()
- size = int(size_str)
- if size == 4:
- raise ValueError('Expected size of profile at %s to be > 4K' % profile_path)
+ profile_path = get_profile_path(app_id)
+ cmd = create_adb_cmd(
+ 'shell du /data/misc/profiles/cur/0/%s/primary.prof' % app_id,
+ device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ size_str = stdout[:stdout.index('\t')]
+ assert size_str.isdigit()
+ size = int(size_str)
+ if size == 4:
+ raise ValueError('Expected size of profile at %s to be > 4K' %
+ profile_path)
+
def clear_logcat(device_id=None):
- cmd = create_adb_cmd('logcat -c', device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+ cmd = create_adb_cmd('logcat -c', device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
def clear_profile_data(app_id, device_id=None):
- cmd = create_adb_cmd(
- 'shell cmd package compile --reset %s' % app_id, device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+ cmd = create_adb_cmd('shell cmd package compile --reset %s' % app_id,
+ device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
def drop_caches(device_id=None):
- cmd = create_adb_cmd(
- ['shell', 'echo 3 > /proc/sys/vm/drop_caches'], device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+ cmd = create_adb_cmd(['shell', 'echo 3 > /proc/sys/vm/drop_caches'],
+ device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
def ensure_screen_on(device_id=None):
- if get_screen_state(device_id).is_off():
- toggle_screen(device_id)
- assert get_screen_state(device_id).is_on()
+ if get_screen_state(device_id).is_off():
+ toggle_screen(device_id)
+ assert get_screen_state(device_id).is_on()
+
def ensure_screen_off(device_id=None):
- if get_screen_state(device_id).is_on():
- toggle_screen(device_id)
- assert get_screen_state(device_id).is_off()
+ if get_screen_state(device_id).is_on():
+ toggle_screen(device_id)
+ assert get_screen_state(device_id).is_off()
+
def force_compilation(app_id, device_id=None):
- print('Applying AOT (full)')
- cmd = create_adb_cmd(
- 'shell cmd package compile -m speed -f %s' % app_id, device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+ print('Applying AOT (full)')
+ cmd = create_adb_cmd('shell cmd package compile -m speed -f %s' % app_id,
+ device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
def force_profile_compilation(app_id, device_id=None):
- print('Applying AOT (profile)')
- cmd = create_adb_cmd(
- 'shell cmd package compile -m speed-profile -f %s' % app_id, device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+ print('Applying AOT (profile)')
+ cmd = create_adb_cmd(
+ 'shell cmd package compile -m speed-profile -f %s' % app_id, device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
def get_apk_path(app_id, device_id=None):
- cmd = create_adb_cmd('shell pm path %s' % app_id, device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- if not stdout.startswith('package:'):
- raise ValueError(
- 'Expected stdout to start with "package:", was: %s' % stdout)
- apk_path = stdout[len('package:'):]
- if not apk_path.endswith('.apk'):
- raise ValueError(
- 'Expected stdout to end with ".apk", was: %s' % stdout)
- return apk_path
+ cmd = create_adb_cmd('shell pm path %s' % app_id, device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ if not stdout.startswith('package:'):
+ raise ValueError('Expected stdout to start with "package:", was: %s' %
+ stdout)
+ apk_path = stdout[len('package:'):]
+ if not apk_path.endswith('.apk'):
+ raise ValueError('Expected stdout to end with ".apk", was: %s' % stdout)
+ return apk_path
+
def get_component_name(app_id, activity):
- if activity.startswith(app_id):
- return '%s/.%s' % (app_id, activity[len(app_id)+1:])
- else:
- return '%s/%s' % (app_id, activity)
+ if activity.startswith(app_id):
+ return '%s/.%s' % (app_id, activity[len(app_id) + 1:])
+ else:
+ return '%s/%s' % (app_id, activity)
+
def get_meminfo(app_id, device_id=None):
- cmd = create_adb_cmd('shell dumpsys meminfo -s %s' % app_id, device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- for line in stdout.splitlines():
- if 'TOTAL PSS: ' in line:
- elements = [s for s in line.replace('TOTAL ', 'TOTAL_').split()]
- assert elements[0] == 'TOTAL_PSS:', elements[0]
- assert elements[1].isdigit()
- assert elements[2] == 'TOTAL_RSS:'
- assert elements[3].isdigit()
- return { 'total_pss': int(elements[1]), 'total_rss': int(elements[3]) }
- raise ValueError('Unexpected stdout: %s' % stdout)
+ cmd = create_adb_cmd('shell dumpsys meminfo -s %s' % app_id, device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ for line in stdout.splitlines():
+ if 'TOTAL PSS: ' in line:
+ elements = [s for s in line.replace('TOTAL ', 'TOTAL_').split()]
+ assert elements[0] == 'TOTAL_PSS:', elements[0]
+ assert elements[1].isdigit()
+ assert elements[2] == 'TOTAL_RSS:'
+ assert elements[3].isdigit()
+ return {
+ 'total_pss': int(elements[1]),
+ 'total_rss': int(elements[3])
+ }
+ raise ValueError('Unexpected stdout: %s' % stdout)
+
def get_profile_data(app_id, device_id=None):
- with utils.TempDir() as temp:
- source = get_profile_path(app_id)
- target = os.path.join(temp, 'primary.prof')
- pull(source, target, device_id)
- with open(target, 'rb') as f:
- return f.read()
+ with utils.TempDir() as temp:
+ source = get_profile_path(app_id)
+ target = os.path.join(temp, 'primary.prof')
+ pull(source, target, device_id)
+ with open(target, 'rb') as f:
+ return f.read()
+
def get_profile_path(app_id):
- return '/data/misc/profiles/cur/0/%s/primary.prof' % app_id
+ return '/data/misc/profiles/cur/0/%s/primary.prof' % app_id
+
def get_minor_major_page_faults(app_id, device_id=None):
- pid = get_pid(app_id, device_id)
- cmd = create_adb_cmd('shell ps -p %i -o MINFL,MAJFL' % pid, device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8')
- lines_it = iter(stdout.splitlines())
- first_line = next(lines_it)
- assert first_line == ' MINFL MAJFL'
- second_line = next(lines_it)
- minfl, majfl = second_line.split()
- assert minfl.isdigit()
- assert majfl.isdigit()
- return (int(minfl), int(majfl))
+ pid = get_pid(app_id, device_id)
+ cmd = create_adb_cmd('shell ps -p %i -o MINFL,MAJFL' % pid, device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8')
+ lines_it = iter(stdout.splitlines())
+ first_line = next(lines_it)
+ assert first_line == ' MINFL MAJFL'
+ second_line = next(lines_it)
+ minfl, majfl = second_line.split()
+ assert minfl.isdigit()
+ assert majfl.isdigit()
+ return (int(minfl), int(majfl))
+
def get_pid(app_id, device_id=None):
- cmd = create_adb_cmd('shell pidof %s' % app_id, device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- assert stdout.isdigit()
- pid = int(stdout)
- return pid
+ cmd = create_adb_cmd('shell pidof %s' % app_id, device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ assert stdout.isdigit()
+ pid = int(stdout)
+ return pid
+
def get_screen_state(device_id=None):
- cmd = create_adb_cmd('shell dumpsys nfc', device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- screen_state_value = None
- for line in stdout.splitlines():
- if line.startswith('mScreenState='):
- value_start_index = len('mScreenState=')
- screen_state_value=line[value_start_index:]
- if screen_state_value is None:
- raise ValueError('Expected to find mScreenState in: adb shell dumpsys nfc')
- if not hasattr(ScreenState, screen_state_value):
- raise ValueError(
- 'Expected mScreenState to be a value of ScreenState, was: %s'
- % screen_state_value)
- return ScreenState[screen_state_value]
+ cmd = create_adb_cmd('shell dumpsys nfc', device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ screen_state_value = None
+ for line in stdout.splitlines():
+ if line.startswith('mScreenState='):
+ value_start_index = len('mScreenState=')
+ screen_state_value = line[value_start_index:]
+ if screen_state_value is None:
+ raise ValueError(
+ 'Expected to find mScreenState in: adb shell dumpsys nfc')
+ if not hasattr(ScreenState, screen_state_value):
+ raise ValueError(
+ 'Expected mScreenState to be a value of ScreenState, was: %s' %
+ screen_state_value)
+ return ScreenState[screen_state_value]
+
def get_classes_and_methods_from_app_profile(app_id, device_id=None):
- apk_path = get_apk_path(app_id, device_id)
- profile_path = get_profile_path(app_id)
- cmd = create_adb_cmd(
- 'shell profman --dump-classes-and-methods'
- ' --profile-file=%s --apk=%s --dex-location=%s'
- % (profile_path, apk_path, apk_path), device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- lines = stdout.splitlines()
- return profile_utils.parse_art_profile(lines)
+ apk_path = get_apk_path(app_id, device_id)
+ profile_path = get_profile_path(app_id)
+ cmd = create_adb_cmd(
+ 'shell profman --dump-classes-and-methods'
+ ' --profile-file=%s --apk=%s --dex-location=%s' %
+ (profile_path, apk_path, apk_path), device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ lines = stdout.splitlines()
+ return profile_utils.parse_art_profile(lines)
+
def get_screen_off_timeout(device_id=None):
- cmd = create_adb_cmd(
- 'shell settings get system screen_off_timeout', device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- assert stdout.isdigit()
- screen_off_timeout = int(stdout)
- return screen_off_timeout
+ cmd = create_adb_cmd('shell settings get system screen_off_timeout',
+ device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ assert stdout.isdigit()
+ screen_off_timeout = int(stdout)
+ return screen_off_timeout
+
def grant(app_id, permission, device_id=None):
- cmd = create_adb_cmd('shell pm grant %s %s' % (app_id, permission), device_id)
- subprocess.check_call(cmd)
+ cmd = create_adb_cmd('shell pm grant %s %s' % (app_id, permission),
+ device_id)
+ subprocess.check_call(cmd)
+
def install(apk, device_id=None):
- print('Installing %s' % apk)
- cmd = create_adb_cmd('install %s' % apk, device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8')
- assert 'Success' in stdout
+ print('Installing %s' % apk)
+ cmd = create_adb_cmd('install %s' % apk, device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8')
+ assert 'Success' in stdout
+
def install_apks(apks, device_id=None, max_attempts=3):
- print('Installing %s' % apks)
- cmd = [
- 'java', '-jar', utils.BUNDLETOOL_JAR,
- 'install-apks',
- '--apks=%s' % apks]
- if device_id is not None:
- cmd.append('--device-id=%s' % device_id)
- for i in range(max_attempts):
+ print('Installing %s' % apks)
+ cmd = [
+ 'java', '-jar', utils.BUNDLETOOL_JAR, 'install-apks',
+ '--apks=%s' % apks
+ ]
+ if device_id is not None:
+ cmd.append('--device-id=%s' % device_id)
+ for i in range(max_attempts):
+ process_result = subprocess.run(cmd, capture_output=True)
+ stdout = process_result.stdout.decode('utf-8')
+ stderr = process_result.stderr.decode('utf-8')
+ if process_result.returncode == 0:
+ return
+ print('Failed to install %s' % apks)
+ print('Stdout: %s' % stdout)
+ print('Stderr: %s' % stderr)
+ print('Retrying...')
+ raise Exception('Unable to install apks in %s attempts' % max_attempts)
+
+
+def install_bundle(bundle, device_id=None):
+ print('Installing %s' % bundle)
+ with utils.TempDir() as temp:
+ apks = os.path.join(temp, 'Bundle.apks')
+ build_apks_from_bundle(bundle, apks)
+ install_apks(apks, device_id)
+
+
+def install_profile_using_adb(app_id, host_profile_path, device_id=None):
+ device_profile_path = get_profile_path(app_id)
+ cmd = create_adb_cmd('push %s %s' %
+ (host_profile_path, device_profile_path))
+ subprocess.check_call(cmd)
+ stop_app(app_id, device_id)
+ force_profile_compilation(app_id, device_id)
+
+
+def install_profile_using_profileinstaller(app_id, device_id=None):
+ # This assumes that the profileinstaller library has been added to the app,
+ # https://developer.android.com/jetpack/androidx/releases/profileinstaller.
+ action = 'androidx.profileinstaller.action.INSTALL_PROFILE'
+ component = '%s/androidx.profileinstaller.ProfileInstallReceiver' % app_id
+ stdout = broadcast(action, component, device_id)
+ assert len(stdout) == 2
+ assert stdout[0] == ('Broadcasting: Intent { act=%s flg=0x400000 cmp=%s }' %
+ (action, component))
+ assert stdout[1] == 'Broadcast completed: result=1', stdout[1]
+ stop_app(app_id, device_id)
+ force_profile_compilation(app_id, device_id)
+
+
+def issue_key_event(key_event, device_id=None, sleep_in_seconds=1):
+ cmd = create_adb_cmd('shell input keyevent %s' % key_event, device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ assert len(stdout) == 0
+ time.sleep(sleep_in_seconds)
+
+
+def launch_activity(app_id,
+ activity,
+ device_id=None,
+ intent_data_uri=None,
+ wait_for_activity_to_launch=False):
+ args = ['shell', 'am', 'start', '-n', '%s/%s' % (app_id, activity)]
+ if intent_data_uri:
+ args.extend(['-d', intent_data_uri])
+ if wait_for_activity_to_launch:
+ args.append('-W')
+ cmd = create_adb_cmd(args, device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ assert stdout.startswith('Starting: Intent {')
+ expected_component = 'cmp=%s' % get_component_name(app_id, activity)
+ assert expected_component in stdout, \
+ 'was %s, expected %s' % (stdout, expected_component)
+ lines = stdout.splitlines()
+ result = {}
+ for line in lines:
+ if line.startswith('TotalTime: '):
+ total_time_str = line.removeprefix('TotalTime: ')
+ assert total_time_str.isdigit()
+ result['total_time'] = int(total_time_str)
+ assert not wait_for_activity_to_launch or 'total_time' in result, lines
+ return result
+
+
+def navigate_to_home_screen(device_id=None):
+ cmd = create_adb_cmd('shell input keyevent KEYCODE_HOME', device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
+
+def prepare_for_interaction_with_device(device_id=None, device_pin=None):
+    # Increase screen off timeout to keep the device screen from turning off.
+ twenty_four_hours_in_millis = 24 * 60 * 60 * 1000
+ previous_screen_off_timeout = get_screen_off_timeout(device_id)
+ set_screen_off_timeout(twenty_four_hours_in_millis, device_id)
+
+ # Unlock device.
+ unlock(device_id, device_pin)
+
+ teardown_options = {
+ 'previous_screen_off_timeout': previous_screen_off_timeout
+ }
+ return teardown_options
+
+
+def pull(source, target, device_id=None):
+ cmd = create_adb_cmd('pull %s %s' % (source, target), device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
+
+def root(device_id=None):
+ cmd = create_adb_cmd('root', device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
+
+def set_screen_off_timeout(screen_off_timeout_in_millis, device_id=None):
+ cmd = create_adb_cmd(
+ 'shell settings put system screen_off_timeout %i' %
+ screen_off_timeout_in_millis, device_id)
+ stdout = subprocess.check_output(cmd).decode('utf-8').strip()
+ assert len(stdout) == 0
+
+
+def start_logcat(device_id=None, format=None, filter=None, silent=False):
+ args = ['logcat']
+ if format:
+ args.extend(['--format', format])
+ if silent:
+ args.append('-s')
+ if filter:
+ args.append(filter)
+ cmd = create_adb_cmd(args, device_id)
+ logcat_process = subprocess.Popen(cmd,
+ bufsize=1024 * 1024,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ reader = ProcessReader(logcat_process)
+ reader.start()
+ return reader
+
+
+def stop_logcat(logcat_reader):
+ logcat_reader.stop()
+ logcat_reader.join()
+ return logcat_reader.lines
+
+
+def stop_app(app_id, device_id=None):
+ print('Shutting down %s' % app_id)
+ cmd = create_adb_cmd('shell am force-stop %s' % app_id, device_id)
+ subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
+
+
+def teardown_after_interaction_with_device(teardown_options, device_id=None):
+ # Reset screen off timeout.
+ set_screen_off_timeout(teardown_options['previous_screen_off_timeout'],
+ device_id)
+
+
+def toggle_screen(device_id=None):
+ issue_key_event('KEYCODE_POWER', device_id)
+
+
+def uninstall(app_id, device_id=None):
+ print('Uninstalling %s' % app_id)
+ cmd = create_adb_cmd('uninstall %s' % app_id, device_id)
process_result = subprocess.run(cmd, capture_output=True)
stdout = process_result.stdout.decode('utf-8')
stderr = process_result.stderr.decode('utf-8')
if process_result.returncode == 0:
- return
- print('Failed to install %s' % apks)
- print('Stdout: %s' % stdout)
- print('Stderr: %s' % stderr)
- print('Retrying...')
- raise Exception('Unable to install apks in %s attempts' % max_attempts)
+ assert 'Success' in stdout
+ elif stdout.startswith('cmd: Failure calling service package: Broken pipe'):
+ assert app_id == 'com.google.android.youtube'
+ print('Waiting after broken pipe')
+ time.sleep(15)
+ else:
+ expected_error = (
+ 'java.lang.IllegalArgumentException: Unknown package: %s' % app_id)
+ assert 'Failure [DELETE_FAILED_INTERNAL_ERROR]' in stdout \
+ or expected_error in stderr, \
+ 'stdout: %s, stderr: %s' % (stdout, stderr)
-def install_bundle(bundle, device_id=None):
- print('Installing %s' % bundle)
- with utils.TempDir() as temp:
- apks = os.path.join(temp, 'Bundle.apks')
- build_apks_from_bundle(bundle, apks)
- install_apks(apks, device_id)
-
-def install_profile_using_adb(app_id, host_profile_path, device_id=None):
- device_profile_path = get_profile_path(app_id)
- cmd = create_adb_cmd('push %s %s' % (host_profile_path, device_profile_path))
- subprocess.check_call(cmd)
- stop_app(app_id, device_id)
- force_profile_compilation(app_id, device_id)
-
-def install_profile_using_profileinstaller(app_id, device_id=None):
- # This assumes that the profileinstaller library has been added to the app,
- # https://developer.android.com/jetpack/androidx/releases/profileinstaller.
- action = 'androidx.profileinstaller.action.INSTALL_PROFILE'
- component = '%s/androidx.profileinstaller.ProfileInstallReceiver' % app_id
- stdout = broadcast(action, component, device_id)
- assert len(stdout) == 2
- assert stdout[0] == ('Broadcasting: Intent { act=%s flg=0x400000 cmp=%s }' % (action, component))
- assert stdout[1] == 'Broadcast completed: result=1', stdout[1]
- stop_app(app_id, device_id)
- force_profile_compilation(app_id, device_id)
-
-def issue_key_event(key_event, device_id=None, sleep_in_seconds=1):
- cmd = create_adb_cmd('shell input keyevent %s' % key_event, device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- assert len(stdout) == 0
- time.sleep(sleep_in_seconds)
-
-def launch_activity(
- app_id,
- activity,
- device_id=None,
- intent_data_uri=None,
- wait_for_activity_to_launch=False):
- args = ['shell', 'am', 'start', '-n', '%s/%s' % (app_id, activity)]
- if intent_data_uri:
- args.extend(['-d', intent_data_uri])
- if wait_for_activity_to_launch:
- args.append('-W')
- cmd = create_adb_cmd(args, device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- assert stdout.startswith('Starting: Intent {')
- expected_component = 'cmp=%s' % get_component_name(app_id, activity)
- assert expected_component in stdout, \
- 'was %s, expected %s' % (stdout, expected_component)
- lines = stdout.splitlines()
- result = {}
- for line in lines:
- if line.startswith('TotalTime: '):
- total_time_str = line.removeprefix('TotalTime: ')
- assert total_time_str.isdigit()
- result['total_time'] = int(total_time_str)
- assert not wait_for_activity_to_launch or 'total_time' in result, lines
- return result
-
-def navigate_to_home_screen(device_id=None):
- cmd = create_adb_cmd('shell input keyevent KEYCODE_HOME', device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-
-def prepare_for_interaction_with_device(device_id=None, device_pin=None):
- # Increase screen off timeout to avoid device screen turns off.
- twenty_four_hours_in_millis = 24 * 60 * 60 * 1000
- previous_screen_off_timeout = get_screen_off_timeout(device_id)
- set_screen_off_timeout(twenty_four_hours_in_millis, device_id)
-
- # Unlock device.
- unlock(device_id, device_pin)
-
- teardown_options = {
- 'previous_screen_off_timeout': previous_screen_off_timeout
- }
- return teardown_options
-
-def pull(source, target, device_id=None):
- cmd = create_adb_cmd('pull %s %s' % (source, target), device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-
-def root(device_id=None):
- cmd = create_adb_cmd('root', device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-
-def set_screen_off_timeout(screen_off_timeout_in_millis, device_id=None):
- cmd = create_adb_cmd(
- 'shell settings put system screen_off_timeout %i'
- % screen_off_timeout_in_millis,
- device_id)
- stdout = subprocess.check_output(cmd).decode('utf-8').strip()
- assert len(stdout) == 0
-
-def start_logcat(device_id=None, format=None, filter=None, silent=False):
- args = ['logcat']
- if format:
- args.extend(['--format', format])
- if silent:
- args.append('-s')
- if filter:
- args.append(filter)
- cmd = create_adb_cmd(args, device_id)
- logcat_process = subprocess.Popen(
- cmd, bufsize=1024*1024, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- reader = ProcessReader(logcat_process)
- reader.start()
- return reader
-
-def stop_logcat(logcat_reader):
- logcat_reader.stop()
- logcat_reader.join()
- return logcat_reader.lines
-
-def stop_app(app_id, device_id=None):
- print('Shutting down %s' % app_id)
- cmd = create_adb_cmd('shell am force-stop %s' % app_id, device_id)
- subprocess.check_call(cmd, stdout=DEVNULL, stderr=DEVNULL)
-
-def teardown_after_interaction_with_device(teardown_options, device_id=None):
- # Reset screen off timeout.
- set_screen_off_timeout(
- teardown_options['previous_screen_off_timeout'],
- device_id)
-
-def toggle_screen(device_id=None):
- issue_key_event('KEYCODE_POWER', device_id)
-
-def uninstall(app_id, device_id=None):
- print('Uninstalling %s' % app_id)
- cmd = create_adb_cmd('uninstall %s' % app_id, device_id)
- process_result = subprocess.run(cmd, capture_output=True)
- stdout = process_result.stdout.decode('utf-8')
- stderr = process_result.stderr.decode('utf-8')
- if process_result.returncode == 0:
- assert 'Success' in stdout
- elif stdout.startswith('cmd: Failure calling service package: Broken pipe'):
- assert app_id == 'com.google.android.youtube'
- print('Waiting after broken pipe')
- time.sleep(15)
- else:
- expected_error = (
- 'java.lang.IllegalArgumentException: Unknown package: %s' % app_id)
- assert 'Failure [DELETE_FAILED_INTERNAL_ERROR]' in stdout \
- or expected_error in stderr, \
- 'stdout: %s, stderr: %s' % (stdout, stderr)
def unlock(device_id=None, device_pin=None):
- ensure_screen_on(device_id)
- screen_state = get_screen_state(device_id)
- assert screen_state.is_on(), 'was %s' % screen_state
- if screen_state.is_on_and_locked():
- if device_pin is not None:
- raise NotImplementedError('Device unlocking with pin not implemented')
- issue_key_event('KEYCODE_MENU', device_id)
+ ensure_screen_on(device_id)
screen_state = get_screen_state(device_id)
- assert screen_state.is_on_and_unlocked(), 'was %s' % screen_state
+ assert screen_state.is_on(), 'was %s' % screen_state
+ if screen_state.is_on_and_locked():
+ if device_pin is not None:
+ raise NotImplementedError(
+ 'Device unlocking with pin not implemented')
+ issue_key_event('KEYCODE_MENU', device_id)
+ screen_state = get_screen_state(device_id)
+ assert screen_state.is_on_and_unlocked(), 'was %s' % screen_state
+
def parse_options(argv):
- result = argparse.ArgumentParser(description='Run adb utils.')
- result.add_argument('--capture-screen',
- help='Capture screen to given file')
- result.add_argument('--device-id',
- help='Device id (e.g., emulator-5554).')
- result.add_argument('--device-pin',
- help='Device pin code (e.g., 1234)')
- result.add_argument('--ensure-screen-off',
- help='Ensure screen off',
- action='store_true',
- default=False)
- result.add_argument('--get-screen-state',
- help='Get screen state',
- action='store_true',
- default=False)
- result.add_argument('--unlock',
- help='Unlock device',
- action='store_true',
- default=False)
- options, args = result.parse_known_args(argv)
- return options, args
+ result = argparse.ArgumentParser(description='Run adb utils.')
+ result.add_argument('--capture-screen', help='Capture screen to given file')
+ result.add_argument('--device-id', help='Device id (e.g., emulator-5554).')
+ result.add_argument('--device-pin', help='Device pin code (e.g., 1234)')
+ result.add_argument('--ensure-screen-off',
+ help='Ensure screen off',
+ action='store_true',
+ default=False)
+ result.add_argument('--get-screen-state',
+ help='Get screen state',
+ action='store_true',
+ default=False)
+ result.add_argument('--unlock',
+ help='Unlock device',
+ action='store_true',
+ default=False)
+ options, args = result.parse_known_args(argv)
+ return options, args
+
def main(argv):
- (options, args) = parse_options(argv)
- if options.capture_screen:
- capture_screen(options.capture_screen, options.device_id)
- if options.ensure_screen_off:
- ensure_screen_off(options.device_id)
- elif options.get_screen_state:
- print(get_screen_state(options.device_id))
- elif options.unlock:
- unlock(options.device_id, options.device_pin)
+ (options, args) = parse_options(argv)
+ if options.capture_screen:
+ capture_screen(options.capture_screen, options.device_id)
+ if options.ensure_screen_off:
+ ensure_screen_off(options.device_id)
+ elif options.get_screen_state:
+ print(get_screen_state(options.device_id))
+ elif options.unlock:
+ unlock(options.device_id, options.device_pin)
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
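
The logcat helpers above are meant to be used as a start/stop pair around some
device interaction, with ProcessReader buffering the output on a background
thread. A short usage sketch; the device id, app id, activity, and filter are
examples only:

import adb_utils

# Capture only R8-tagged lines while the activity starts, then join the
# reader thread and collect the buffered output.
reader = adb_utils.start_logcat('emulator-5554',
                                format='tag',
                                filter='R8:I *:S')
adb_utils.launch_activity('com.example.app', '.MainActivity', 'emulator-5554')
lines = adb_utils.stop_logcat(reader)
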
diff --git a/tools/startup/generate_startup_descriptors.py b/tools/startup/generate_startup_descriptors.py
index adae239..211be89 100755
--- a/tools/startup/generate_startup_descriptors.py
+++ b/tools/startup/generate_startup_descriptors.py
@@ -11,445 +11,474 @@
import sys
import time
+
class Device:
- def __init__(self, device_id, device_pin):
- self.device_id = device_id
- self.device_pin = device_pin
+ def __init__(self, device_id, device_pin):
+ self.device_id = device_id
+ self.device_pin = device_pin
+
def extend_startup_descriptors(startup_descriptors, iteration, device, options):
- (logcat, profile, profile_classes_and_methods) = \
- generate_startup_profile(device, options)
- if options.logcat:
- write_tmp_logcat(logcat, iteration, options)
- current_startup_descriptors = get_r8_startup_descriptors_from_logcat(
- logcat, options)
- else:
- write_tmp_profile(profile, iteration, options)
- write_tmp_profile_classes_and_methods(
- profile_classes_and_methods, iteration, options)
- current_startup_descriptors = \
- profile_utils.transform_art_profile_to_r8_startup_list(
- profile_classes_and_methods, options.generalize_synthetics)
- write_tmp_startup_descriptors(current_startup_descriptors, iteration, options)
- new_startup_descriptors = add_r8_startup_descriptors(
- startup_descriptors, current_startup_descriptors)
- number_of_new_startup_descriptors = \
- len(new_startup_descriptors) - len(startup_descriptors)
- if options.out is not None:
- print(
- 'Found %i new startup descriptors in iteration %i'
- % (number_of_new_startup_descriptors, iteration + 1))
- return new_startup_descriptors
+ (logcat, profile, profile_classes_and_methods) = \
+ generate_startup_profile(device, options)
+ if options.logcat:
+ write_tmp_logcat(logcat, iteration, options)
+ current_startup_descriptors = get_r8_startup_descriptors_from_logcat(
+ logcat, options)
+ else:
+ write_tmp_profile(profile, iteration, options)
+ write_tmp_profile_classes_and_methods(profile_classes_and_methods,
+ iteration, options)
+ current_startup_descriptors = \
+ profile_utils.transform_art_profile_to_r8_startup_list(
+ profile_classes_and_methods, options.generalize_synthetics)
+ write_tmp_startup_descriptors(current_startup_descriptors, iteration,
+ options)
+ new_startup_descriptors = add_r8_startup_descriptors(
+ startup_descriptors, current_startup_descriptors)
+ number_of_new_startup_descriptors = \
+ len(new_startup_descriptors) - len(startup_descriptors)
+ if options.out is not None:
+ print('Found %i new startup descriptors in iteration %i' %
+ (number_of_new_startup_descriptors, iteration + 1))
+ return new_startup_descriptors
+
def generate_startup_profile(device, options):
- logcat = None
- profile = None
- profile_classes_and_methods = None
- if options.use_existing_profile:
- # Verify presence of profile.
- adb_utils.check_app_has_profile_data(options.app_id, device.device_id)
- profile = adb_utils.get_profile_data(options.app_id, device.device_id)
- profile_classes_and_methods = \
- adb_utils.get_classes_and_methods_from_app_profile(
- options.app_id, device.device_id)
- else:
- # Unlock device.
- tear_down_options = adb_utils.prepare_for_interaction_with_device(
- device.device_id, device.device_pin)
-
- logcat_process = None
- if options.logcat:
- # Clear logcat and start capturing logcat.
- adb_utils.clear_logcat(device.device_id)
- logcat_process = adb_utils.start_logcat(
- device.device_id, format='tag', filter='R8:I ActivityTaskManager:I *:S')
+ logcat = None
+ profile = None
+ profile_classes_and_methods = None
+ if options.use_existing_profile:
+ # Verify presence of profile.
+ adb_utils.check_app_has_profile_data(options.app_id, device.device_id)
+ profile = adb_utils.get_profile_data(options.app_id, device.device_id)
+ profile_classes_and_methods = \
+ adb_utils.get_classes_and_methods_from_app_profile(
+ options.app_id, device.device_id)
else:
- # Clear existing profile data.
- adb_utils.clear_profile_data(options.app_id, device.device_id)
+ # Unlock device.
+ tear_down_options = adb_utils.prepare_for_interaction_with_device(
+ device.device_id, device.device_pin)
- # Launch activity to generate startup profile on device.
- adb_utils.launch_activity(
- options.app_id, options.main_activity, device.device_id)
+ logcat_process = None
+ if options.logcat:
+ # Clear logcat and start capturing logcat.
+ adb_utils.clear_logcat(device.device_id)
+ logcat_process = adb_utils.start_logcat(
+ device.device_id,
+ format='tag',
+ filter='R8:I ActivityTaskManager:I *:S')
+ else:
+ # Clear existing profile data.
+ adb_utils.clear_profile_data(options.app_id, device.device_id)
- # Wait for activity startup.
- time.sleep(options.startup_duration)
+ # Launch activity to generate startup profile on device.
+ adb_utils.launch_activity(options.app_id, options.main_activity,
+ device.device_id)
- if options.logcat:
- # Get startup descriptors from logcat.
- logcat = adb_utils.stop_logcat(logcat_process)
- else:
- # Capture startup profile.
- adb_utils.capture_app_profile_data(options.app_id, device.device_id)
- profile = adb_utils.get_profile_data(options.app_id, device.device_id)
- profile_classes_and_methods = \
- adb_utils.get_classes_and_methods_from_app_profile(
- options.app_id, device.device_id)
+ # Wait for activity startup.
+ time.sleep(options.startup_duration)
- # Shutdown app.
- adb_utils.stop_app(options.app_id, device.device_id)
- adb_utils.teardown_after_interaction_with_device(
- tear_down_options, device.device_id)
+ if options.logcat:
+ # Get startup descriptors from logcat.
+ logcat = adb_utils.stop_logcat(logcat_process)
+ else:
+ # Capture startup profile.
+ adb_utils.capture_app_profile_data(options.app_id, device.device_id)
+ profile = adb_utils.get_profile_data(options.app_id,
+ device.device_id)
+ profile_classes_and_methods = \
+ adb_utils.get_classes_and_methods_from_app_profile(
+ options.app_id, device.device_id)
- return (logcat, profile, profile_classes_and_methods)
+ # Shutdown app.
+ adb_utils.stop_app(options.app_id, device.device_id)
+ adb_utils.teardown_after_interaction_with_device(
+ tear_down_options, device.device_id)
+
+ return (logcat, profile, profile_classes_and_methods)
+
def get_r8_startup_descriptors_from_logcat(logcat, options):
- post_startup = False
- startup_descriptors = {}
- for line in logcat:
- line_elements = parse_logcat_line(line)
- if line_elements is None:
- continue
- (priority, tag, message) = line_elements
- if tag == 'ActivityTaskManager':
- if message.startswith('START') \
- or message.startswith('Activity pause timeout for') \
- or message.startswith('Activity top resumed state loss timeout for') \
- or message.startswith('Force removing') \
- or message.startswith(
- 'Launch timeout has expired, giving up wake lock!'):
- continue
- elif message.startswith('Displayed %s/' % options.app_id):
- print('Entering post startup: %s' % message)
- post_startup = True
- continue
- elif tag == 'R8':
- if is_startup_descriptor(message):
- startup_descriptors[message] = {
- 'conditional_startup': False,
- 'hot': False,
- 'post_startup': post_startup,
- 'startup': True
- }
- continue
- # Reaching here means we didn't expect this line.
- report_unrecognized_logcat_line(line)
- return startup_descriptors
+ post_startup = False
+ startup_descriptors = {}
+ for line in logcat:
+ line_elements = parse_logcat_line(line)
+ if line_elements is None:
+ continue
+ (priority, tag, message) = line_elements
+ if tag == 'ActivityTaskManager':
+ if message.startswith('START') \
+ or message.startswith('Activity pause timeout for') \
+ or message.startswith('Activity top resumed state loss timeout for') \
+ or message.startswith('Force removing') \
+ or message.startswith(
+ 'Launch timeout has expired, giving up wake lock!'):
+ continue
+ elif message.startswith('Displayed %s/' % options.app_id):
+ print('Entering post startup: %s' % message)
+ post_startup = True
+ continue
+ elif tag == 'R8':
+ if is_startup_descriptor(message):
+ startup_descriptors[message] = {
+ 'conditional_startup': False,
+ 'hot': False,
+ 'post_startup': post_startup,
+ 'startup': True
+ }
+ continue
+ # Reaching here means we didn't expect this line.
+ report_unrecognized_logcat_line(line)
+ return startup_descriptors
+
def is_startup_descriptor(string):
- # The descriptor should start with the holder (possibly prefixed with 'S').
- if not any(string.startswith('%sL' % flags) for flags in ['', 'S']):
- return False
- # The descriptor should end with ';', a primitive type, or void.
- if not string.endswith(';') \
- and not any(string.endswith(c) for c in get_primitive_descriptors()) \
- and not string.endswith('V'):
- return False
- return True
+ # The descriptor should start with the holder (possibly prefixed with 'S').
+ if not any(string.startswith('%sL' % flags) for flags in ['', 'S']):
+ return False
+ # The descriptor should end with ';', a primitive type, or void.
+ if not string.endswith(';') \
+ and not any(string.endswith(c) for c in get_primitive_descriptors()) \
+ and not string.endswith('V'):
+ return False
+ return True
+
def get_primitive_descriptors():
- return ['Z', 'B', 'S', 'C', 'I', 'F', 'J', 'D']
+ return ['Z', 'B', 'S', 'C', 'I', 'F', 'J', 'D']
+
def parse_logcat_line(line):
- if line == '--------- beginning of kernel':
- return None
- if line == '--------- beginning of main':
- return None
- if line == '--------- beginning of system':
- return None
+ if line == '--------- beginning of kernel':
+ return None
+ if line == '--------- beginning of main':
+ return None
+ if line == '--------- beginning of system':
+ return None
- priority = None
- tag = None
+ priority = None
+ tag = None
- try:
- priority_end = line.index('/')
- priority = line[0:priority_end]
- line = line[priority_end + 1:]
- except ValueError:
- return report_unrecognized_logcat_line(line)
+ try:
+ priority_end = line.index('/')
+ priority = line[0:priority_end]
+ line = line[priority_end + 1:]
+ except ValueError:
+ return report_unrecognized_logcat_line(line)
- try:
- tag_end = line.index(':')
- tag = line[0:tag_end].strip()
- line = line[tag_end + 1 :]
- except ValueError:
- return report_unrecognized_logcat_line(line)
+ try:
+ tag_end = line.index(':')
+ tag = line[0:tag_end].strip()
+ line = line[tag_end + 1:]
+ except ValueError:
+ return report_unrecognized_logcat_line(line)
- message = line.strip()
- return (priority, tag, message)
+ message = line.strip()
+ return (priority, tag, message)
+
def report_unrecognized_logcat_line(line):
- print('Unrecognized line in logcat: %s' % line)
+ print('Unrecognized line in logcat: %s' % line)
-def add_r8_startup_descriptors(old_startup_descriptors, startup_descriptors_to_add):
- new_startup_descriptors = {}
- if len(old_startup_descriptors) == 0:
- for startup_descriptor, flags in startup_descriptors_to_add.items():
- new_startup_descriptors[startup_descriptor] = flags.copy()
- else:
- # Merge the new startup descriptors with the old descriptors in a way so
- # that new startup descriptors are added next to the startup descriptors
- # they are close to in the newly generated list of startup descriptors.
- startup_descriptors_to_add_after_key = {}
- startup_descriptors_to_add_in_the_end = {}
- closest_seen_startup_descriptor = None
- for startup_descriptor, flags in startup_descriptors_to_add.items():
- if startup_descriptor in old_startup_descriptors:
- closest_seen_startup_descriptor = startup_descriptor
- else:
- if closest_seen_startup_descriptor is None:
- # Insert this new startup descriptor in the end of the result.
- startup_descriptors_to_add_in_the_end[startup_descriptor] = flags
- else:
- # Record that this should be inserted after
- # closest_seen_startup_descriptor.
- pending_startup_descriptors = \
- startup_descriptors_to_add_after_key.setdefault(
- closest_seen_startup_descriptor, {})
- pending_startup_descriptors[startup_descriptor] = flags
- for startup_descriptor, flags in old_startup_descriptors.items():
- # Merge flags if this also exists in startup_descriptors_to_add.
- if startup_descriptor in startup_descriptors_to_add:
- merged_flags = flags.copy()
- other_flags = startup_descriptors_to_add[startup_descriptor]
- assert not other_flags['conditional_startup']
- merged_flags['hot'] = \
- merged_flags['hot'] or other_flags['hot']
- merged_flags['startup'] = \
- merged_flags['startup'] or other_flags['startup']
- merged_flags['post_startup'] = \
- merged_flags['post_startup'] or other_flags['post_startup']
- new_startup_descriptors[startup_descriptor] = merged_flags
- else:
- new_startup_descriptors[startup_descriptor] = flags.copy()
- # Flush startup descriptors that followed this item in the new trace.
- if startup_descriptor in startup_descriptors_to_add_after_key:
- pending_startup_descriptors = \
- startup_descriptors_to_add_after_key[startup_descriptor]
- for pending_startup_descriptor, pending_flags \
- in pending_startup_descriptors.items():
- new_startup_descriptors[pending_startup_descriptor] = \
- pending_flags.copy()
- # Insert remaining new startup descriptors in the end.
- for startup_descriptor, flags \
- in startup_descriptors_to_add_in_the_end.items():
- assert startup_descriptor not in new_startup_descriptors
- new_startup_descriptors[startup_descriptor] = flags.copy()
- return new_startup_descriptors
+
+def add_r8_startup_descriptors(old_startup_descriptors,
+ startup_descriptors_to_add):
+ new_startup_descriptors = {}
+ if len(old_startup_descriptors) == 0:
+ for startup_descriptor, flags in startup_descriptors_to_add.items():
+ new_startup_descriptors[startup_descriptor] = flags.copy()
+ else:
+        # Merge the new startup descriptors with the old descriptors such that
+        # new startup descriptors are added next to the startup descriptors
+        # they are close to in the newly generated list of startup descriptors.
+ startup_descriptors_to_add_after_key = {}
+ startup_descriptors_to_add_in_the_end = {}
+ closest_seen_startup_descriptor = None
+ for startup_descriptor, flags in startup_descriptors_to_add.items():
+ if startup_descriptor in old_startup_descriptors:
+ closest_seen_startup_descriptor = startup_descriptor
+ else:
+ if closest_seen_startup_descriptor is None:
+                    # Insert this new startup descriptor at the end of the result.
+ startup_descriptors_to_add_in_the_end[
+ startup_descriptor] = flags
+ else:
+ # Record that this should be inserted after
+ # closest_seen_startup_descriptor.
+ pending_startup_descriptors = \
+ startup_descriptors_to_add_after_key.setdefault(
+ closest_seen_startup_descriptor, {})
+ pending_startup_descriptors[startup_descriptor] = flags
+ for startup_descriptor, flags in old_startup_descriptors.items():
+ # Merge flags if this also exists in startup_descriptors_to_add.
+ if startup_descriptor in startup_descriptors_to_add:
+ merged_flags = flags.copy()
+ other_flags = startup_descriptors_to_add[startup_descriptor]
+ assert not other_flags['conditional_startup']
+ merged_flags['hot'] = \
+ merged_flags['hot'] or other_flags['hot']
+ merged_flags['startup'] = \
+ merged_flags['startup'] or other_flags['startup']
+ merged_flags['post_startup'] = \
+ merged_flags['post_startup'] or other_flags['post_startup']
+ new_startup_descriptors[startup_descriptor] = merged_flags
+ else:
+ new_startup_descriptors[startup_descriptor] = flags.copy()
+ # Flush startup descriptors that followed this item in the new trace.
+ if startup_descriptor in startup_descriptors_to_add_after_key:
+ pending_startup_descriptors = \
+ startup_descriptors_to_add_after_key[startup_descriptor]
+ for pending_startup_descriptor, pending_flags \
+ in pending_startup_descriptors.items():
+ new_startup_descriptors[pending_startup_descriptor] = \
+ pending_flags.copy()
+        # Insert remaining new startup descriptors at the end.
+ for startup_descriptor, flags \
+ in startup_descriptors_to_add_in_the_end.items():
+ assert startup_descriptor not in new_startup_descriptors
+ new_startup_descriptors[startup_descriptor] = flags.copy()
+ return new_startup_descriptors
+
def write_tmp_binary_artifact(artifact, iteration, options, name):
- if not options.tmp_dir:
- return
- out_dir = os.path.join(options.tmp_dir, str(iteration))
- os.makedirs(out_dir, exist_ok=True)
- path = os.path.join(out_dir, name)
- with open(path, 'wb') as f:
- f.write(artifact)
+ if not options.tmp_dir:
+ return
+ out_dir = os.path.join(options.tmp_dir, str(iteration))
+ os.makedirs(out_dir, exist_ok=True)
+ path = os.path.join(out_dir, name)
+ with open(path, 'wb') as f:
+ f.write(artifact)
-def write_tmp_textual_artifact(artifact, iteration, options, name, item_to_string=None):
- if not options.tmp_dir:
- return
- out_dir = os.path.join(options.tmp_dir, str(iteration))
- os.makedirs(out_dir, exist_ok=True)
- path = os.path.join(out_dir, name)
- with open(path, 'w') as f:
- for item in artifact:
- f.write(item if item_to_string is None else item_to_string(item))
- f.write('\n')
+
+def write_tmp_textual_artifact(artifact,
+ iteration,
+ options,
+ name,
+ item_to_string=None):
+ if not options.tmp_dir:
+ return
+ out_dir = os.path.join(options.tmp_dir, str(iteration))
+ os.makedirs(out_dir, exist_ok=True)
+ path = os.path.join(out_dir, name)
+ with open(path, 'w') as f:
+ for item in artifact:
+ f.write(item if item_to_string is None else item_to_string(item))
+ f.write('\n')
+
def write_tmp_logcat(logcat, iteration, options):
- write_tmp_textual_artifact(logcat, iteration, options, 'logcat.txt')
+ write_tmp_textual_artifact(logcat, iteration, options, 'logcat.txt')
+
def write_tmp_profile(profile, iteration, options):
- write_tmp_binary_artifact(profile, iteration, options, 'primary.prof')
+ write_tmp_binary_artifact(profile, iteration, options, 'primary.prof')
-def write_tmp_profile_classes_and_methods(
- profile_classes_and_methods, iteration, options):
- def item_to_string(item):
- (descriptor, flags) = item
- return '%s%s%s%s' % (
- 'H' if flags.get('hot') else '',
- 'S' if flags.get('startup') else '',
- 'P' if flags.get('post_startup') else '',
- descriptor)
- write_tmp_textual_artifact(
- profile_classes_and_methods.items(),
- iteration,
- options,
- 'profile.txt',
- item_to_string)
+
+def write_tmp_profile_classes_and_methods(profile_classes_and_methods,
+ iteration, options):
+
+ def item_to_string(item):
+ (descriptor, flags) = item
+ return '%s%s%s%s' % ('H' if flags.get('hot') else '',
+ 'S' if flags.get('startup') else '', 'P'
+ if flags.get('post_startup') else '', descriptor)
+
+ write_tmp_textual_artifact(profile_classes_and_methods.items(), iteration,
+ options, 'profile.txt', item_to_string)
+
def write_tmp_startup_descriptors(startup_descriptors, iteration, options):
- lines = [
- startup_descriptor_to_string(startup_descriptor, flags)
- for startup_descriptor, flags in startup_descriptors.items()]
- write_tmp_textual_artifact(
- lines, iteration, options, 'startup-descriptors.txt')
+ lines = [
+ startup_descriptor_to_string(startup_descriptor, flags)
+ for startup_descriptor, flags in startup_descriptors.items()
+ ]
+ write_tmp_textual_artifact(lines, iteration, options,
+ 'startup-descriptors.txt')
+
def startup_descriptor_to_string(startup_descriptor, flags):
- result = ''
- if flags['hot']:
- result += 'H'
- if flags['startup']:
- result += 'S'
- if flags['post_startup']:
- result += 'P'
- result += startup_descriptor
- return result
+ result = ''
+ if flags['hot']:
+ result += 'H'
+ if flags['startup']:
+ result += 'S'
+ if flags['post_startup']:
+ result += 'P'
+ result += startup_descriptor
+ return result
+
def should_include_startup_descriptor(descriptor, flags, options):
- if flags.get('conditional_startup') \
- and not options.include_conditional_startup:
- return False
- if flags.get('post_startup') \
- and not flags.get('startup') \
- and not options.include_post_startup:
- return False
- return True
+ if flags.get('conditional_startup') \
+ and not options.include_conditional_startup:
+ return False
+ if flags.get('post_startup') \
+ and not flags.get('startup') \
+ and not options.include_post_startup:
+ return False
+ return True
+
def parse_options(argv):
- result = argparse.ArgumentParser(
- description='Generate a perfetto trace file.')
- result.add_argument('--apk',
- help='Path to the .apk')
- result.add_argument('--apks',
- help='Path to the .apks')
- result.add_argument('--app-id',
- help='The application ID of interest',
- required=True)
- result.add_argument('--bundle',
- help='Path to the .aab')
- result.add_argument('--device-id',
- help='Device id (e.g., emulator-5554).',
- action='append')
- result.add_argument('--device-pin',
- help='Device pin code (e.g., 1234)',
- action='append')
- result.add_argument('--generalize-synthetics',
- help='Whether synthetics should be abstracted into their '
- 'synthetic contexts',
- action='store_true',
- default=False)
- result.add_argument('--grant-post-notification-permission',
- help='Grants the android.permission.POST_NOTIFICATIONS '
- 'permission before launching the app',
- default=False,
- action='store_true')
- result.add_argument('--logcat',
- action='store_true',
- default=False)
- result.add_argument('--include-conditional-startup',
- help='Include conditional startup classes and methods in '
- 'the R8 startup descriptors',
- action='store_true',
- default=False)
- result.add_argument('--include-post-startup',
- help='Include post startup classes and methods in the R8 '
- 'startup descriptors',
- action='store_true',
- default=False)
- result.add_argument('--iterations',
- help='Number of profiles to generate',
- default=1,
- type=int)
- result.add_argument('--main-activity',
- help='Main activity class name')
- result.add_argument('--out',
- help='File where to store startup descriptors (defaults '
- 'to stdout)')
- result.add_argument('--startup-duration',
- help='Duration in seconds before shutting down app',
- default=15,
- type=int)
- result.add_argument('--tmp-dir',
- help='Directory where to store intermediate artifacts'
- ' (by default these are not emitted)')
- result.add_argument('--until-stable',
- help='Repeat profile generation until no new startup '
- 'descriptors are found',
- action='store_true',
- default=False)
- result.add_argument('--until-stable-iterations',
- help='Number of times that profile generation must must '
- 'not find new startup descriptors before exiting',
- default=1,
- type=int)
- result.add_argument('--use-existing-profile',
- help='Do not launch app to generate startup profile',
- action='store_true',
- default=False)
- options, args = result.parse_known_args(argv)
+    result = argparse.ArgumentParser(
+        description='Generate startup descriptors for an app.')
+ result.add_argument('--apk', help='Path to the .apk')
+ result.add_argument('--apks', help='Path to the .apks')
+ result.add_argument('--app-id',
+ help='The application ID of interest',
+ required=True)
+ result.add_argument('--bundle', help='Path to the .aab')
+ result.add_argument('--device-id',
+ help='Device id (e.g., emulator-5554).',
+ action='append')
+ result.add_argument('--device-pin',
+ help='Device pin code (e.g., 1234)',
+ action='append')
+ result.add_argument(
+ '--generalize-synthetics',
+ help='Whether synthetics should be abstracted into their '
+ 'synthetic contexts',
+ action='store_true',
+ default=False)
+ result.add_argument('--grant-post-notification-permission',
+ help='Grants the android.permission.POST_NOTIFICATIONS '
+ 'permission before launching the app',
+ default=False,
+ action='store_true')
+ result.add_argument('--logcat', action='store_true', default=False)
+ result.add_argument(
+ '--include-conditional-startup',
+ help='Include conditional startup classes and methods in '
+ 'the R8 startup descriptors',
+ action='store_true',
+ default=False)
+ result.add_argument(
+ '--include-post-startup',
+ help='Include post startup classes and methods in the R8 '
+ 'startup descriptors',
+ action='store_true',
+ default=False)
+ result.add_argument('--iterations',
+ help='Number of profiles to generate',
+ default=1,
+ type=int)
+ result.add_argument('--main-activity', help='Main activity class name')
+ result.add_argument(
+ '--out',
+        help='File in which to store startup descriptors (defaults '
+ 'to stdout)')
+ result.add_argument('--startup-duration',
+ help='Duration in seconds before shutting down app',
+ default=15,
+ type=int)
+ result.add_argument('--tmp-dir',
+                        help='Directory in which to store intermediate artifacts'
+ ' (by default these are not emitted)')
+ result.add_argument('--until-stable',
+ help='Repeat profile generation until no new startup '
+ 'descriptors are found',
+ action='store_true',
+ default=False)
+ result.add_argument(
+ '--until-stable-iterations',
+        help='Number of times that profile generation must '
+ 'not find new startup descriptors before exiting',
+ default=1,
+ type=int)
+ result.add_argument('--use-existing-profile',
+ help='Do not launch app to generate startup profile',
+ action='store_true',
+ default=False)
+ options, args = result.parse_known_args(argv)
- # Read the device pins.
- device_pins = options.device_pin or []
- del options.device_pin
+ # Read the device pins.
+ device_pins = options.device_pin or []
+ del options.device_pin
- # Convert the device ids and pins into a list of devices.
- options.devices = []
- if options.device_id is None:
- # Assume a single device is attached.
- options.devices.append(
- Device(None, device_pins[0] if len(device_pins) > 0 else None))
- else:
- for i in range(len(options.device_id)):
- device_id = options.device_id[i]
- device_pin = device_pins[i] if i < len(device_pins) else None
- options.devices.append(Device(device_id, device_pin))
- del options.device_id
+ # Convert the device ids and pins into a list of devices.
+ options.devices = []
+ if options.device_id is None:
+ # Assume a single device is attached.
+ options.devices.append(
+ Device(None, device_pins[0] if len(device_pins) > 0 else None))
+ else:
+ for i in range(len(options.device_id)):
+ device_id = options.device_id[i]
+ device_pin = device_pins[i] if i < len(device_pins) else None
+ options.devices.append(Device(device_id, device_pin))
+ del options.device_id
- paths = [
- path for path in [options.apk, options.apks, options.bundle]
- if path is not None]
- assert len(paths) <= 1, 'Expected at most one .apk, .apks, or .aab file.'
- assert options.main_activity is not None or options.use_existing_profile, \
- 'Argument --main-activity is required except when running with ' \
- '--use-existing-profile.'
+ paths = [
+ path for path in [options.apk, options.apks, options.bundle]
+ if path is not None
+ ]
+ assert len(paths) <= 1, 'Expected at most one .apk, .apks, or .aab file.'
+ assert options.main_activity is not None or options.use_existing_profile, \
+ 'Argument --main-activity is required except when running with ' \
+ '--use-existing-profile.'
- return options, args
+ return options, args
+
def run_on_device(device, options, startup_descriptors):
- adb_utils.root(device.device_id)
- if options.apk:
- adb_utils.uninstall(options.app_id, device.device_id)
- adb_utils.install(options.apk, device.device_id)
- elif options.apks:
- adb_utils.uninstall(options.app_id, device.device_id)
- adb_utils.install_apks(options.apks, device.device_id)
- elif options.bundle:
- adb_utils.uninstall(options.app_id, device.device_id)
- adb_utils.install_bundle(options.bundle, device.device_id)
- # Grant notifications.
- if options.grant_post_notification_permission:
- adb_utils.grant(
- options.app_id,
- 'android.permission.POST_NOTIFICATIONS',
- device.device_id)
- if options.until_stable:
- iteration = 0
- stable_iterations = 0
- while True:
- old_startup_descriptors = startup_descriptors
- startup_descriptors = extend_startup_descriptors(
- old_startup_descriptors, iteration, device, options)
- diff = len(startup_descriptors) - len(old_startup_descriptors)
- if diff == 0:
- stable_iterations = stable_iterations + 1
- if stable_iterations == options.until_stable_iterations:
- break
- else:
+ adb_utils.root(device.device_id)
+ if options.apk:
+ adb_utils.uninstall(options.app_id, device.device_id)
+ adb_utils.install(options.apk, device.device_id)
+ elif options.apks:
+ adb_utils.uninstall(options.app_id, device.device_id)
+ adb_utils.install_apks(options.apks, device.device_id)
+ elif options.bundle:
+ adb_utils.uninstall(options.app_id, device.device_id)
+ adb_utils.install_bundle(options.bundle, device.device_id)
+ # Grant notifications.
+ if options.grant_post_notification_permission:
+ adb_utils.grant(options.app_id, 'android.permission.POST_NOTIFICATIONS',
+ device.device_id)
+ if options.until_stable:
+ iteration = 0
stable_iterations = 0
- iteration = iteration + 1
- else:
- for iteration in range(options.iterations):
- startup_descriptors = extend_startup_descriptors(
- startup_descriptors, iteration, device, options)
- return startup_descriptors
+ while True:
+ old_startup_descriptors = startup_descriptors
+ startup_descriptors = extend_startup_descriptors(
+ old_startup_descriptors, iteration, device, options)
+ diff = len(startup_descriptors) - len(old_startup_descriptors)
+ if diff == 0:
+ stable_iterations = stable_iterations + 1
+ if stable_iterations == options.until_stable_iterations:
+ break
+ else:
+ stable_iterations = 0
+ iteration = iteration + 1
+ else:
+ for iteration in range(options.iterations):
+ startup_descriptors = extend_startup_descriptors(
+ startup_descriptors, iteration, device, options)
+ return startup_descriptors
+
def main(argv):
- (options, args) = parse_options(argv)
- startup_descriptors = {}
- for device in options.devices:
- startup_descriptors = run_on_device(device, options, startup_descriptors)
- if options.out is not None:
- with open(options.out, 'w') as f:
- for startup_descriptor, flags in startup_descriptors.items():
- if should_include_startup_descriptor(startup_descriptor, flags, options):
- f.write(startup_descriptor_to_string(startup_descriptor, flags))
- f.write('\n')
- else:
- for startup_descriptor, flags in startup_descriptors.items():
- if should_include_startup_descriptor(startup_descriptor, flags, options):
- print(startup_descriptor_to_string(startup_descriptor, flags))
+ (options, args) = parse_options(argv)
+ startup_descriptors = {}
+ for device in options.devices:
+ startup_descriptors = run_on_device(device, options,
+ startup_descriptors)
+ if options.out is not None:
+ with open(options.out, 'w') as f:
+ for startup_descriptor, flags in startup_descriptors.items():
+ if should_include_startup_descriptor(startup_descriptor, flags,
+ options):
+ f.write(
+ startup_descriptor_to_string(startup_descriptor, flags))
+ f.write('\n')
+ else:
+ for startup_descriptor, flags in startup_descriptors.items():
+ if should_include_startup_descriptor(startup_descriptor, flags,
+ options):
+ print(startup_descriptor_to_string(startup_descriptor, flags))
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
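Aside: add_r8_startup_descriptors above merges order-sensitively. Descriptors seen in both runs get their hot/startup/post_startup flags OR'ed, while genuinely new descriptors are inserted right after the last already-known descriptor that preceded them in the new trace. A small illustration with hypothetical descriptors, run against the function above (the flag dicts use the same keys as in the script):

    old = {
        'Lapp/A;': {'conditional_startup': False, 'hot': False,
                    'post_startup': False, 'startup': True}
    }
    new = {
        'Lapp/A;': {'conditional_startup': False, 'hot': True,
                    'post_startup': False, 'startup': True},
        'Lapp/B;': {'conditional_startup': False, 'hot': False,
                    'post_startup': True, 'startup': True}
    }
    merged = add_r8_startup_descriptors(old, new)
    # Keeps 'Lapp/A;' first with hot=True after the flag merge, and inserts
    # 'Lapp/B;' immediately after it because B followed A in the new trace.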
diff --git a/tools/startup/instrument.py b/tools/startup/instrument.py
index ecefe55..9752670 100755
--- a/tools/startup/instrument.py
+++ b/tools/startup/instrument.py
@@ -18,117 +18,126 @@
import utils
import zip_utils
+
def parse_options(argv):
- result = argparse.ArgumentParser(
- description='Instrument the dex files of a given apk to print what is '
- 'executed.')
- result.add_argument('--apk',
- help='Path to the .apk',
- required=True)
- result.add_argument('--dex-files',
- action='append',
- help='Name of dex files to instrument')
- result.add_argument('--discard',
- action='append',
- help='Name of dex files to discard')
- result.add_argument('--out',
- help='Destination of resulting apk',
- required=True)
- options, args = result.parse_known_args(argv)
- return options, args
+ result = argparse.ArgumentParser(
+ description='Instrument the dex files of a given apk to print what is '
+ 'executed.')
+ result.add_argument('--apk', help='Path to the .apk', required=True)
+ result.add_argument('--dex-files',
+ action='append',
+ help='Name of dex files to instrument')
+ result.add_argument('--discard',
+ action='append',
+ help='Name of dex files to discard')
+ result.add_argument('--out',
+ help='Destination of resulting apk',
+ required=True)
+ options, args = result.parse_known_args(argv)
+ return options, args
+
def add_instrumented_dex(dex_file, instrumented_dex_index, instrumented_dir):
- dex_name = get_dex_name(instrumented_dex_index)
- destination = os.path.join(instrumented_dir, dex_name)
- shutil.move(dex_file, destination)
+ dex_name = get_dex_name(instrumented_dex_index)
+ destination = os.path.join(instrumented_dir, dex_name)
+ shutil.move(dex_file, destination)
+
def get_dex_name(dex_index):
- assert dex_index > 0
- return 'classes.dex' if dex_index == 1 else ('classes%s.dex' % dex_index)
+ assert dex_index > 0
+ return 'classes.dex' if dex_index == 1 else ('classes%s.dex' % dex_index)
-def instrument_dex_file(dex_file, include_instrumentation_server, options, tmp_dir):
- d8_cmd = [
- 'java',
- '-cp', utils.R8_JAR,
- '-Dcom.android.tools.r8.startup.instrumentation.instrument=1',
- '-Dcom.android.tools.r8.startup.instrumentation.instrumentationtag=R8']
- if not include_instrumentation_server:
- # We avoid injecting the InstrumentationServer by specifying it should only
- # be added if foo.bar.Baz is in the program.
- d8_cmd.append(
- '-Dcom.android.tools.r8.startup.instrumentation.instrumentationserversyntheticcontext=foo.bar.Baz')
- d8_cmd.extend([
- 'com.android.tools.r8.D8',
- '--min-api', str(apk_utils.get_min_api(options.apk)),
- '--output', tmp_dir,
- '--release',
- dex_file])
- subprocess.check_call(d8_cmd)
- instrumented_dex_files = []
- instrumented_dex_index = 1
- while True:
- instrumented_dex_name = get_dex_name(instrumented_dex_index)
- instrumented_dex_file = os.path.join(tmp_dir, instrumented_dex_name)
- if not os.path.exists(instrumented_dex_file):
- break
- instrumented_dex_files.append(instrumented_dex_file)
- instrumented_dex_index = instrumented_dex_index + 1
- assert len(instrumented_dex_files) > 0
- return instrumented_dex_files
+
+def instrument_dex_file(dex_file, include_instrumentation_server, options,
+ tmp_dir):
+ d8_cmd = [
+ 'java', '-cp', utils.R8_JAR,
+ '-Dcom.android.tools.r8.startup.instrumentation.instrument=1',
+ '-Dcom.android.tools.r8.startup.instrumentation.instrumentationtag=R8'
+ ]
+ if not include_instrumentation_server:
+        # We avoid injecting the InstrumentationServer by specifying that it
+        # should only be added if foo.bar.Baz is in the program.
+ d8_cmd.append(
+ '-Dcom.android.tools.r8.startup.instrumentation.instrumentationserversyntheticcontext=foo.bar.Baz'
+ )
+ d8_cmd.extend([
+ 'com.android.tools.r8.D8', '--min-api',
+ str(apk_utils.get_min_api(options.apk)), '--output', tmp_dir,
+ '--release', dex_file
+ ])
+ subprocess.check_call(d8_cmd)
+ instrumented_dex_files = []
+ instrumented_dex_index = 1
+ while True:
+ instrumented_dex_name = get_dex_name(instrumented_dex_index)
+ instrumented_dex_file = os.path.join(tmp_dir, instrumented_dex_name)
+ if not os.path.exists(instrumented_dex_file):
+ break
+ instrumented_dex_files.append(instrumented_dex_file)
+ instrumented_dex_index = instrumented_dex_index + 1
+ assert len(instrumented_dex_files) > 0
+ return instrumented_dex_files
+
def should_discard_dex_file(dex_name, options):
- return options.discard is not None and dex_name in options.discard
+ return options.discard is not None and dex_name in options.discard
+
def should_instrument_dex_file(dex_name, options):
- return options.dex_files is not None and dex_name in options.dex_files
+ return options.dex_files is not None and dex_name in options.dex_files
+
def main(argv):
- options, args = parse_options(argv)
- with utils.TempDir() as tmp_dir:
- # Extract the dex files of the apk.
- uninstrumented_dir = os.path.join(tmp_dir, 'uninstrumented')
- os.mkdir(uninstrumented_dir)
+ options, args = parse_options(argv)
+ with utils.TempDir() as tmp_dir:
+ # Extract the dex files of the apk.
+ uninstrumented_dir = os.path.join(tmp_dir, 'uninstrumented')
+ os.mkdir(uninstrumented_dir)
- dex_predicate = \
- lambda name : name.startswith('classes') and name.endswith('.dex')
- zip_utils.extract_all_that_matches(
- options.apk, uninstrumented_dir, dex_predicate)
+ dex_predicate = \
+ lambda name : name.startswith('classes') and name.endswith('.dex')
+ zip_utils.extract_all_that_matches(options.apk, uninstrumented_dir,
+ dex_predicate)
- # Instrument each dex one by one.
- instrumented_dir = os.path.join(tmp_dir, 'instrumented')
- os.mkdir(instrumented_dir)
+ # Instrument each dex one by one.
+ instrumented_dir = os.path.join(tmp_dir, 'instrumented')
+ os.mkdir(instrumented_dir)
- include_instrumentation_server = True
- instrumented_dex_index = 1
- uninstrumented_dex_index = 1
- while True:
- dex_name = get_dex_name(uninstrumented_dex_index)
- dex_file = os.path.join(uninstrumented_dir, dex_name)
- if not os.path.exists(dex_file):
- break
- if not should_discard_dex_file(dex_name, options):
- if should_instrument_dex_file(dex_name, options):
- with utils.TempDir() as tmp_instrumentation_dir:
- instrumented_dex_files = \
- instrument_dex_file(
- dex_file,
- include_instrumentation_server,
- options,
- tmp_instrumentation_dir)
- for instrumented_dex_file in instrumented_dex_files:
- add_instrumented_dex(
- instrumented_dex_file, instrumented_dex_index, instrumented_dir)
- instrumented_dex_index = instrumented_dex_index + 1
- include_instrumentation_server = False
- else:
- add_instrumented_dex(dex_file, instrumented_dex_index, instrumented_dir)
- instrumented_dex_index = instrumented_dex_index + 1
- uninstrumented_dex_index = uninstrumented_dex_index + 1
+ include_instrumentation_server = True
+ instrumented_dex_index = 1
+ uninstrumented_dex_index = 1
+ while True:
+ dex_name = get_dex_name(uninstrumented_dex_index)
+ dex_file = os.path.join(uninstrumented_dir, dex_name)
+ if not os.path.exists(dex_file):
+ break
+ if not should_discard_dex_file(dex_name, options):
+ if should_instrument_dex_file(dex_name, options):
+ with utils.TempDir() as tmp_instrumentation_dir:
+ instrumented_dex_files = \
+ instrument_dex_file(
+ dex_file,
+ include_instrumentation_server,
+ options,
+ tmp_instrumentation_dir)
+ for instrumented_dex_file in instrumented_dex_files:
+ add_instrumented_dex(instrumented_dex_file,
+ instrumented_dex_index,
+ instrumented_dir)
+ instrumented_dex_index = instrumented_dex_index + 1
+ include_instrumentation_server = False
+ else:
+ add_instrumented_dex(dex_file, instrumented_dex_index,
+ instrumented_dir)
+ instrumented_dex_index = instrumented_dex_index + 1
+ uninstrumented_dex_index = uninstrumented_dex_index + 1
- assert instrumented_dex_index > 1
+ assert instrumented_dex_index > 1
- # Masseur APK.
- apk_masseur.masseur(options.apk, dex=instrumented_dir, out=options.out)
+        # Repackage the APK with the instrumented dex files.
+ apk_masseur.masseur(options.apk, dex=instrumented_dir, out=options.out)
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
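Aside: get_dex_name above encodes the standard Android multidex naming scheme, which the instrumentation loop relies on to enumerate dex files in order until one is missing:

    assert get_dex_name(1) == 'classes.dex'
    assert get_dex_name(2) == 'classes2.dex'
    assert get_dex_name(7) == 'classes7.dex'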
diff --git a/tools/startup/measure_startup.py b/tools/startup/measure_startup.py
index 57f6cb4..7e97735 100755
--- a/tools/startup/measure_startup.py
+++ b/tools/startup/measure_startup.py
@@ -18,446 +18,461 @@
import perfetto_utils
import utils
+
def setup(options):
- # Increase screen off timeout to avoid device screen turns off.
- twenty_four_hours_in_millis = 24 * 60 * 60 * 1000
- previous_screen_off_timeout = adb_utils.get_screen_off_timeout(
- options.device_id)
- adb_utils.set_screen_off_timeout(
- twenty_four_hours_in_millis, options.device_id)
+    # Increase the screen off timeout to keep the device screen from turning off.
+ twenty_four_hours_in_millis = 24 * 60 * 60 * 1000
+ previous_screen_off_timeout = adb_utils.get_screen_off_timeout(
+ options.device_id)
+ adb_utils.set_screen_off_timeout(twenty_four_hours_in_millis,
+ options.device_id)
- # Unlock device.
- adb_utils.unlock(options.device_id, options.device_pin)
+ # Unlock device.
+ adb_utils.unlock(options.device_id, options.device_pin)
- teardown_options = {
- 'previous_screen_off_timeout': previous_screen_off_timeout
- }
- return teardown_options
+ teardown_options = {
+ 'previous_screen_off_timeout': previous_screen_off_timeout
+ }
+ return teardown_options
+
def teardown(options, teardown_options):
- # Reset screen off timeout.
- adb_utils.set_screen_off_timeout(
- teardown_options['previous_screen_off_timeout'],
- options.device_id)
+ # Reset screen off timeout.
+ adb_utils.set_screen_off_timeout(
+ teardown_options['previous_screen_off_timeout'], options.device_id)
+
def run_all(apk_or_apks, options, tmp_dir):
- # Launch app while collecting information.
- data_total = {}
- for iteration in range(1, options.iterations + 1):
- print('Starting iteration %i' % iteration)
- out_dir = os.path.join(options.out_dir, str(iteration))
- teardown_options = setup_for_run(apk_or_apks, out_dir, options)
- data = run(out_dir, options, tmp_dir)
- teardown_for_run(out_dir, options, teardown_options)
- add_data(data_total, data)
- print('Result:')
- print(data)
- print(compute_data_summary(data_total))
- print('Done')
- print('Average result:')
- data_summary = compute_data_summary(data_total)
- print(data_summary)
- write_data_to_dir(options.out_dir, data_summary)
- if options.out:
- write_data_to_file(options.out, data_summary)
+ # Launch app while collecting information.
+ data_total = {}
+ for iteration in range(1, options.iterations + 1):
+ print('Starting iteration %i' % iteration)
+ out_dir = os.path.join(options.out_dir, str(iteration))
+ teardown_options = setup_for_run(apk_or_apks, out_dir, options)
+ data = run(out_dir, options, tmp_dir)
+ teardown_for_run(out_dir, options, teardown_options)
+ add_data(data_total, data)
+ print('Result:')
+ print(data)
+ print(compute_data_summary(data_total))
+ print('Done')
+ print('Average result:')
+ data_summary = compute_data_summary(data_total)
+ print(data_summary)
+ write_data_to_dir(options.out_dir, data_summary)
+ if options.out:
+ write_data_to_file(options.out, data_summary)
+
def compute_data_summary(data_total):
- data_summary = {}
- for key, value in data_total.items():
- if not isinstance(value, list):
- data_summary[key] = value
- continue
- data_summary['%s_avg' % key] = round(statistics.mean(value), 1)
- data_summary['%s_med' % key] = statistics.median(value)
- data_summary['%s_min' % key] = min(value)
- data_summary['%s_max' % key] = max(value)
- return data_summary
+ data_summary = {}
+ for key, value in data_total.items():
+ if not isinstance(value, list):
+ data_summary[key] = value
+ continue
+ data_summary['%s_avg' % key] = round(statistics.mean(value), 1)
+ data_summary['%s_med' % key] = statistics.median(value)
+ data_summary['%s_min' % key] = min(value)
+ data_summary['%s_max' % key] = max(value)
+ return data_summary
+
def setup_for_run(apk_or_apks, out_dir, options):
- adb_utils.root(options.device_id)
+ adb_utils.root(options.device_id)
- print('Installing')
- adb_utils.uninstall(options.app_id, options.device_id)
- if apk_or_apks['apk']:
- adb_utils.install(apk_or_apks['apk'], options.device_id)
- else:
- assert apk_or_apks['apks']
- adb_utils.install_apks(apk_or_apks['apks'], options.device_id)
-
- os.makedirs(out_dir, exist_ok=True)
-
- # Grant notifications.
- if options.grant_post_notification_permission:
- adb_utils.grant(
- options.app_id,
- 'android.permission.POST_NOTIFICATIONS',
- options.device_id)
-
- # AOT compile.
- if options.aot:
- print('AOT compiling')
- if options.baseline_profile:
- adb_utils.clear_profile_data(options.app_id, options.device_id)
- if options.baseline_profile_install == 'adb':
- adb_utils.install_profile_using_adb(
- options.app_id, options.baseline_profile, options.device_id)
- else:
- assert options.baseline_profile_install == 'profileinstaller'
- adb_utils.install_profile_using_profileinstaller(
- options.app_id, options.device_id)
+ print('Installing')
+ adb_utils.uninstall(options.app_id, options.device_id)
+ if apk_or_apks['apk']:
+ adb_utils.install(apk_or_apks['apk'], options.device_id)
else:
- adb_utils.force_compilation(options.app_id, options.device_id)
+ assert apk_or_apks['apks']
+ adb_utils.install_apks(apk_or_apks['apks'], options.device_id)
- # Cooldown and then unlock device.
- if options.cooldown > 0:
- print('Cooling down for %i seconds' % options.cooldown)
- assert adb_utils.get_screen_state(options.device_id).is_off()
- time.sleep(options.cooldown)
- teardown_options = adb_utils.prepare_for_interaction_with_device(
- options.device_id, options.device_pin)
- else:
- teardown_options = None
+ os.makedirs(out_dir, exist_ok=True)
- # Prelaunch for hot startup.
- if options.hot_startup:
- print('Prelaunching')
- adb_utils.launch_activity(
+ # Grant notifications.
+ if options.grant_post_notification_permission:
+ adb_utils.grant(options.app_id, 'android.permission.POST_NOTIFICATIONS',
+ options.device_id)
+
+ # AOT compile.
+ if options.aot:
+ print('AOT compiling')
+ if options.baseline_profile:
+ adb_utils.clear_profile_data(options.app_id, options.device_id)
+ if options.baseline_profile_install == 'adb':
+ adb_utils.install_profile_using_adb(options.app_id,
+ options.baseline_profile,
+ options.device_id)
+ else:
+ assert options.baseline_profile_install == 'profileinstaller'
+ adb_utils.install_profile_using_profileinstaller(
+ options.app_id, options.device_id)
+ else:
+ adb_utils.force_compilation(options.app_id, options.device_id)
+
+ # Cooldown and then unlock device.
+ if options.cooldown > 0:
+ print('Cooling down for %i seconds' % options.cooldown)
+ assert adb_utils.get_screen_state(options.device_id).is_off()
+ time.sleep(options.cooldown)
+ teardown_options = adb_utils.prepare_for_interaction_with_device(
+ options.device_id, options.device_pin)
+ else:
+ teardown_options = None
+
+ # Prelaunch for hot startup.
+ if options.hot_startup:
+ print('Prelaunching')
+ adb_utils.launch_activity(options.app_id,
+ options.main_activity,
+ options.device_id,
+ wait_for_activity_to_launch=False)
+ time.sleep(options.startup_duration)
+ adb_utils.navigate_to_home_screen(options.device_id)
+ time.sleep(1)
+
+ # Drop caches before run.
+ adb_utils.drop_caches(options.device_id)
+ return teardown_options
+
+
+def teardown_for_run(out_dir, options, teardown_options):
+ assert adb_utils.get_screen_state(options.device_id).is_on_and_unlocked()
+
+ if options.capture_screen:
+ target = os.path.join(out_dir, 'screen.png')
+ adb_utils.capture_screen(target, options.device_id)
+
+ if options.cooldown > 0:
+ adb_utils.teardown_after_interaction_with_device(
+ teardown_options, options.device_id)
+ adb_utils.ensure_screen_off(options.device_id)
+ else:
+ assert teardown_options is None
+
+
+def run(out_dir, options, tmp_dir):
+ assert adb_utils.get_screen_state(options.device_id).is_on_and_unlocked()
+
+ # Start logcat for time to fully drawn.
+ logcat_process = None
+ if options.fully_drawn_logcat_message:
+ adb_utils.clear_logcat(options.device_id)
+ logcat_process = adb_utils.start_logcat(
+ options.device_id,
+ format='time',
+ filter='%s ActivityTaskManager:I' %
+ options.fully_drawn_logcat_filter,
+ silent=True)
+
+ # Start perfetto trace collector.
+ perfetto_process = None
+ perfetto_trace_path = None
+ if options.perfetto:
+ perfetto_process, perfetto_trace_path = perfetto_utils.record_android_trace(
+ out_dir, tmp_dir, options.device_id)
+
+ # Launch main activity.
+ launch_activity_result = adb_utils.launch_activity(
options.app_id,
options.main_activity,
options.device_id,
- wait_for_activity_to_launch=False)
- time.sleep(options.startup_duration)
- adb_utils.navigate_to_home_screen(options.device_id)
- time.sleep(1)
+ intent_data_uri=options.intent_data_uri,
+ wait_for_activity_to_launch=True)
- # Drop caches before run.
- adb_utils.drop_caches(options.device_id)
- return teardown_options
+ # Wait for app to be fully drawn.
+ logcat = None
+ if logcat_process is not None:
+ wait_until_fully_drawn(logcat_process, options)
+ logcat = adb_utils.stop_logcat(logcat_process)
-def teardown_for_run(out_dir, options, teardown_options):
- assert adb_utils.get_screen_state(options.device_id).is_on_and_unlocked()
+ # Wait for perfetto trace collector to stop.
+ if options.perfetto:
+ perfetto_utils.stop_record_android_trace(perfetto_process, out_dir)
- if options.capture_screen:
- target = os.path.join(out_dir, 'screen.png')
- adb_utils.capture_screen(target, options.device_id)
+ # Get minor and major page faults from app process.
+ data = compute_data(launch_activity_result, logcat, perfetto_trace_path,
+ options)
+ write_data_to_dir(out_dir, data)
+ return data
- if options.cooldown > 0:
- adb_utils.teardown_after_interaction_with_device(
- teardown_options, options.device_id)
- adb_utils.ensure_screen_off(options.device_id)
- else:
- assert teardown_options is None
-
-def run(out_dir, options, tmp_dir):
- assert adb_utils.get_screen_state(options.device_id).is_on_and_unlocked()
-
- # Start logcat for time to fully drawn.
- logcat_process = None
- if options.fully_drawn_logcat_message:
- adb_utils.clear_logcat(options.device_id)
- logcat_process = adb_utils.start_logcat(
- options.device_id,
- format='time',
- filter='%s ActivityTaskManager:I' % options.fully_drawn_logcat_filter,
- silent=True)
-
- # Start perfetto trace collector.
- perfetto_process = None
- perfetto_trace_path = None
- if options.perfetto:
- perfetto_process, perfetto_trace_path = perfetto_utils.record_android_trace(
- out_dir, tmp_dir, options.device_id)
-
- # Launch main activity.
- launch_activity_result = adb_utils.launch_activity(
- options.app_id,
- options.main_activity,
- options.device_id,
- intent_data_uri=options.intent_data_uri,
- wait_for_activity_to_launch=True)
-
- # Wait for app to be fully drawn.
- logcat = None
- if logcat_process is not None:
- wait_until_fully_drawn(logcat_process, options)
- logcat = adb_utils.stop_logcat(logcat_process)
-
- # Wait for perfetto trace collector to stop.
- if options.perfetto:
- perfetto_utils.stop_record_android_trace(perfetto_process, out_dir)
-
- # Get minor and major page faults from app process.
- data = compute_data(
- launch_activity_result, logcat, perfetto_trace_path, options)
- write_data_to_dir(out_dir, data)
- return data
def wait_until_fully_drawn(logcat_process, options):
- print('Waiting until app is fully drawn')
- while True:
- is_fully_drawn = any(
- is_app_fully_drawn_logcat_message(line, options) \
- for line in logcat_process.lines)
- if is_fully_drawn:
- break
- time.sleep(1)
- print('Done')
+ print('Waiting until app is fully drawn')
+ while True:
+ is_fully_drawn = any(
+ is_app_fully_drawn_logcat_message(line, options) \
+ for line in logcat_process.lines)
+ if is_fully_drawn:
+ break
+ time.sleep(1)
+ print('Done')
+
def compute_time_to_fully_drawn_from_time_to_first_frame(logcat, options):
- displayed_time = None
- fully_drawn_time = None
- for line in logcat:
- if is_app_displayed_logcat_message(line, options):
- displayed_time = get_timestamp_from_logcat_message(line)
- elif is_app_fully_drawn_logcat_message(line, options):
- fully_drawn_time = get_timestamp_from_logcat_message(line)
- assert displayed_time is not None
- assert fully_drawn_time is not None
- assert fully_drawn_time >= displayed_time
- return fully_drawn_time - displayed_time
+ displayed_time = None
+ fully_drawn_time = None
+ for line in logcat:
+ if is_app_displayed_logcat_message(line, options):
+ displayed_time = get_timestamp_from_logcat_message(line)
+ elif is_app_fully_drawn_logcat_message(line, options):
+ fully_drawn_time = get_timestamp_from_logcat_message(line)
+ assert displayed_time is not None
+ assert fully_drawn_time is not None
+ assert fully_drawn_time >= displayed_time
+ return fully_drawn_time - displayed_time
+
def get_timestamp_from_logcat_message(line):
- time_end_index = len('00-00 00:00:00.000')
- time_format = '%m-%d %H:%M:%S.%f'
- time_str = line[0:time_end_index] + '000'
- time_seconds = datetime.datetime.strptime(time_str, time_format).timestamp()
- return int(time_seconds * 1000)
+ time_end_index = len('00-00 00:00:00.000')
+ time_format = '%m-%d %H:%M:%S.%f'
+ time_str = line[0:time_end_index] + '000'
+ time_seconds = datetime.datetime.strptime(time_str, time_format).timestamp()
+ return int(time_seconds * 1000)
+
def is_app_displayed_logcat_message(line, options):
- substring = 'Displayed %s' % adb_utils.get_component_name(
- options.app_id, options.main_activity)
- return substring in line
+ substring = 'Displayed %s' % adb_utils.get_component_name(
+ options.app_id, options.main_activity)
+ return substring in line
+
def is_app_fully_drawn_logcat_message(line, options):
- return re.search(options.fully_drawn_logcat_message, line)
+ return re.search(options.fully_drawn_logcat_message, line)
+
def add_data(data_total, data):
- for key, value in data.items():
- if key == 'app_id':
- assert data_total.get(key, value) == value
- data_total[key] = value
- if key == 'time':
- continue
- if key in data_total:
- if key == 'app_id':
- assert data_total[key] == value
- else:
- existing_value = data_total[key]
- assert isinstance(value, int)
- assert isinstance(existing_value, list)
- existing_value.append(value)
- else:
- assert isinstance(value, int), key
- data_total[key] = [value]
+ for key, value in data.items():
+ if key == 'app_id':
+ assert data_total.get(key, value) == value
+ data_total[key] = value
+ if key == 'time':
+ continue
+ if key in data_total:
+ if key == 'app_id':
+ assert data_total[key] == value
+ else:
+ existing_value = data_total[key]
+ assert isinstance(value, int)
+ assert isinstance(existing_value, list)
+ existing_value.append(value)
+ else:
+ assert isinstance(value, int), key
+ data_total[key] = [value]
+
def compute_data(launch_activity_result, logcat, perfetto_trace_path, options):
- minfl, majfl = adb_utils.get_minor_major_page_faults(
- options.app_id, options.device_id)
- meminfo = adb_utils.get_meminfo(options.app_id, options.device_id)
- data = {
- 'app_id': options.app_id,
- 'time': time.ctime(time.time()),
- 'minfl': minfl,
- 'majfl': majfl
- }
- data.update(meminfo)
- startup_data = compute_startup_data(
- launch_activity_result, logcat, perfetto_trace_path, options)
- return data | startup_data
+ minfl, majfl = adb_utils.get_minor_major_page_faults(
+ options.app_id, options.device_id)
+ meminfo = adb_utils.get_meminfo(options.app_id, options.device_id)
+ data = {
+ 'app_id': options.app_id,
+ 'time': time.ctime(time.time()),
+ 'minfl': minfl,
+ 'majfl': majfl
+ }
+ data.update(meminfo)
+ startup_data = compute_startup_data(launch_activity_result, logcat,
+ perfetto_trace_path, options)
+ return data | startup_data
-def compute_startup_data(
- launch_activity_result, logcat, perfetto_trace_path, options):
- time_to_first_frame = launch_activity_result.get('total_time')
- startup_data = {
- 'adb_startup': time_to_first_frame
- }
- # Time to fully drawn.
- if options.fully_drawn_logcat_message:
- startup_data['time_to_fully_drawn'] = \
- compute_time_to_fully_drawn_from_time_to_first_frame(logcat, options) \
- + time_to_first_frame
+def compute_startup_data(launch_activity_result, logcat, perfetto_trace_path,
+ options):
+ time_to_first_frame = launch_activity_result.get('total_time')
+ startup_data = {'adb_startup': time_to_first_frame}
- # Perfetto stats.
- perfetto_startup_data = {}
- if options.perfetto:
- TraceProcessor = perfetto_utils.get_trace_processor()
- trace_processor = TraceProcessor(file_path=perfetto_trace_path)
+ # Time to fully drawn.
+ if options.fully_drawn_logcat_message:
+ startup_data['time_to_fully_drawn'] = \
+ compute_time_to_fully_drawn_from_time_to_first_frame(logcat, options) \
+ + time_to_first_frame
- # Compute time to first frame according to the builtin android_startup
- # metric.
- startup_metric = trace_processor.metric(['android_startup'])
- time_to_first_frame_ms = \
- startup_metric.android_startup.startup[0].to_first_frame.dur_ms
- perfetto_startup_data['perfetto_startup'] = round(time_to_first_frame_ms)
+ # Perfetto stats.
+ perfetto_startup_data = {}
+ if options.perfetto:
+ TraceProcessor = perfetto_utils.get_trace_processor()
+ trace_processor = TraceProcessor(file_path=perfetto_trace_path)
- if not options.hot_startup:
- # Compute time to first and last doFrame event.
- bind_application_slice = perfetto_utils.find_unique_slice_by_name(
- 'bindApplication', options, trace_processor)
- activity_start_slice = perfetto_utils.find_unique_slice_by_name(
- 'activityStart', options, trace_processor)
- do_frame_slices = perfetto_utils.find_slices_by_name(
- 'Choreographer#doFrame', options, trace_processor)
- first_do_frame_slice = next(do_frame_slices)
- *_, last_do_frame_slice = do_frame_slices
+ # Compute time to first frame according to the builtin android_startup
+ # metric.
+ startup_metric = trace_processor.metric(['android_startup'])
+ time_to_first_frame_ms = \
+ startup_metric.android_startup.startup[0].to_first_frame.dur_ms
+ perfetto_startup_data['perfetto_startup'] = round(
+ time_to_first_frame_ms)
- perfetto_startup_data.update({
- 'time_to_first_choreographer_do_frame_ms':
- round(perfetto_utils.get_slice_end_since_start(
- first_do_frame_slice, bind_application_slice)),
- 'time_to_last_choreographer_do_frame_ms':
- round(perfetto_utils.get_slice_end_since_start(
- last_do_frame_slice, bind_application_slice))
- })
+ if not options.hot_startup:
+ # Compute time to first and last doFrame event.
+ bind_application_slice = perfetto_utils.find_unique_slice_by_name(
+ 'bindApplication', options, trace_processor)
+ activity_start_slice = perfetto_utils.find_unique_slice_by_name(
+ 'activityStart', options, trace_processor)
+ do_frame_slices = perfetto_utils.find_slices_by_name(
+ 'Choreographer#doFrame', options, trace_processor)
+ first_do_frame_slice = next(do_frame_slices)
+ *_, last_do_frame_slice = do_frame_slices
- # Return combined startup data.
- return startup_data | perfetto_startup_data
+ perfetto_startup_data.update({
+ 'time_to_first_choreographer_do_frame_ms':
+ round(
+ perfetto_utils.get_slice_end_since_start(
+ first_do_frame_slice, bind_application_slice)),
+ 'time_to_last_choreographer_do_frame_ms':
+ round(
+ perfetto_utils.get_slice_end_since_start(
+ last_do_frame_slice, bind_application_slice))
+ })
+
+ # Return combined startup data.
+ return startup_data | perfetto_startup_data
+
def write_data_to_dir(out_dir, data):
- data_path = os.path.join(out_dir, 'data.txt')
- write_data_to_file(data_path, data)
+ data_path = os.path.join(out_dir, 'data.txt')
+ write_data_to_file(data_path, data)
+
def write_data_to_file(out_file, data):
- with open(out_file, 'w') as f:
- for key, value in data.items():
- f.write('%s=%s\n' % (key, str(value)))
+ with open(out_file, 'w') as f:
+ for key, value in data.items():
+ f.write('%s=%s\n' % (key, str(value)))
+
def parse_options(argv):
- result = argparse.ArgumentParser(
- description='Generate a perfetto trace file.')
- result.add_argument('--app-id',
- help='The application ID of interest',
- required=True)
- result.add_argument('--aot',
- help='Enable force compilation',
- default=False,
- action='store_true')
- result.add_argument('--apk',
- help='Path to the .apk')
- result.add_argument('--apks',
- help='Path to the .apks')
- result.add_argument('--bundle',
- help='Path to the .aab')
- result.add_argument('--capture-screen',
- help='Take a screenshot after each test',
- default=False,
- action='store_true')
- result.add_argument('--cooldown',
- help='Seconds to wait before running each iteration',
- default=0,
- type=int)
- result.add_argument('--device-id',
- help='Device id (e.g., emulator-5554).')
- result.add_argument('--device-pin',
- help='Device pin code (e.g., 1234)')
- result.add_argument('--fully-drawn-logcat-filter',
- help='Logcat filter for the fully drawn message '
- '(e.g., "tag:I")')
- result.add_argument('--fully-drawn-logcat-message',
- help='Logcat message that indicates that the app is '
- 'fully drawn (regexp)')
- result.add_argument('--grant-post-notification-permission',
- help='Grants the android.permission.POST_NOTIFICATIONS '
- 'permission before launching the app',
- default=False,
- action='store_true')
- result.add_argument('--hot-startup',
- help='Measure hot startup instead of cold startup',
- default=False,
- action='store_true')
- result.add_argument('--intent-data-uri',
- help='Value to use for the -d argument to the intent '
- 'that is used to launch the app')
- result.add_argument('--iterations',
- help='Number of traces to generate',
- default=1,
- type=int)
- result.add_argument('--main-activity',
- help='Main activity class name',
- required=True)
- result.add_argument('--no-perfetto',
- help='Disables perfetto trace generation',
- action='store_true',
- default=False)
- result.add_argument('--out',
- help='File to store result in')
- result.add_argument('--out-dir',
- help='Directory to store trace files in',
- required=True)
- result.add_argument('--baseline-profile',
- help='Baseline profile (.prof) in binary format')
- result.add_argument('--baseline-profile-metadata',
- help='Baseline profile metadata (.profm) in binary '
- 'format')
- result.add_argument('--baseline-profile-install',
- help='Whether to install profile using adb or '
- 'profileinstaller',
- choices=['adb', 'profileinstaller'],
- default='profileinstaller')
- result.add_argument('--startup-duration',
- help='Duration in seconds before shutting down app',
- default=15,
- type=int)
- options, args = result.parse_known_args(argv)
- setattr(options, 'perfetto', not options.no_perfetto)
+ result = argparse.ArgumentParser(
+ description='Generate a perfetto trace file.')
+ result.add_argument('--app-id',
+ help='The application ID of interest',
+ required=True)
+ result.add_argument('--aot',
+ help='Enable force compilation',
+ default=False,
+ action='store_true')
+ result.add_argument('--apk', help='Path to the .apk')
+ result.add_argument('--apks', help='Path to the .apks')
+ result.add_argument('--bundle', help='Path to the .aab')
+ result.add_argument('--capture-screen',
+ help='Take a screenshot after each test',
+ default=False,
+ action='store_true')
+ result.add_argument('--cooldown',
+ help='Seconds to wait before running each iteration',
+ default=0,
+ type=int)
+ result.add_argument('--device-id', help='Device id (e.g., emulator-5554).')
+ result.add_argument('--device-pin', help='Device pin code (e.g., 1234)')
+ result.add_argument('--fully-drawn-logcat-filter',
+ help='Logcat filter for the fully drawn message '
+ '(e.g., "tag:I")')
+ result.add_argument('--fully-drawn-logcat-message',
+ help='Logcat message that indicates that the app is '
+ 'fully drawn (regexp)')
+ result.add_argument('--grant-post-notification-permission',
+ help='Grants the android.permission.POST_NOTIFICATIONS '
+ 'permission before launching the app',
+ default=False,
+ action='store_true')
+ result.add_argument('--hot-startup',
+ help='Measure hot startup instead of cold startup',
+ default=False,
+ action='store_true')
+ result.add_argument('--intent-data-uri',
+ help='Value to use for the -d argument to the intent '
+ 'that is used to launch the app')
+ result.add_argument('--iterations',
+ help='Number of traces to generate',
+ default=1,
+ type=int)
+ result.add_argument('--main-activity',
+ help='Main activity class name',
+ required=True)
+ result.add_argument('--no-perfetto',
+ help='Disables perfetto trace generation',
+ action='store_true',
+ default=False)
+ result.add_argument('--out', help='File to store result in')
+ result.add_argument('--out-dir',
+ help='Directory to store trace files in',
+ required=True)
+ result.add_argument('--baseline-profile',
+ help='Baseline profile (.prof) in binary format')
+ result.add_argument('--baseline-profile-metadata',
+ help='Baseline profile metadata (.profm) in binary '
+ 'format')
+ result.add_argument('--baseline-profile-install',
+ help='Whether to install profile using adb or '
+ 'profileinstaller',
+ choices=['adb', 'profileinstaller'],
+ default='profileinstaller')
+ result.add_argument('--startup-duration',
+ help='Duration in seconds before shutting down app',
+ default=15,
+ type=int)
+ options, args = result.parse_known_args(argv)
+ setattr(options, 'perfetto', not options.no_perfetto)
- paths = [
- path for path in [options.apk, options.apks, options.bundle]
- if path is not None]
- assert len(paths) == 1, 'Expected exactly one .apk, .apks, or .aab file.'
+ paths = [
+ path for path in [options.apk, options.apks, options.bundle]
+ if path is not None
+ ]
+ assert len(paths) == 1, 'Expected exactly one .apk, .apks, or .aab file.'
- # Build .apks file up front to avoid building the bundle upon each install.
- if options.bundle:
- os.makedirs(options.out_dir, exist_ok=True)
- options.apks = os.path.join(options.out_dir, 'Bundle.apks')
- adb_utils.build_apks_from_bundle(
- options.bundle, options.apks, overwrite=True)
- del options.bundle
+ # Build .apks file up front to avoid building the bundle upon each install.
+ if options.bundle:
+ os.makedirs(options.out_dir, exist_ok=True)
+ options.apks = os.path.join(options.out_dir, 'Bundle.apks')
+ adb_utils.build_apks_from_bundle(options.bundle,
+ options.apks,
+ overwrite=True)
+ del options.bundle
- # Profile is only used with --aot.
- assert options.aot or not options.baseline_profile
+ # Profile is only used with --aot.
+ assert options.aot or not options.baseline_profile
- # Fully drawn logcat filter and message is absent or both present.
- assert (options.fully_drawn_logcat_filter is None) == \
- (options.fully_drawn_logcat_message is None)
+ # Fully drawn logcat filter and message are either both absent or both present.
+ assert (options.fully_drawn_logcat_filter is None) == \
+ (options.fully_drawn_logcat_message is None)
- return options, args
+ return options, args
+
def global_setup(options):
- # If there is no cooldown then unlock the screen once. Otherwise we turn off
- # the screen during the cooldown and unlock the screen before each iteration.
- teardown_options = None
- if options.cooldown == 0:
- teardown_options = adb_utils.prepare_for_interaction_with_device(
- options.device_id, options.device_pin)
- assert adb_utils.get_screen_state(options.device_id).is_on()
- else:
- adb_utils.ensure_screen_off(options.device_id)
- return teardown_options
+ # If there is no cooldown then unlock the screen once. Otherwise we turn off
+ # the screen during the cooldown and unlock the screen before each iteration.
+ teardown_options = None
+ if options.cooldown == 0:
+ teardown_options = adb_utils.prepare_for_interaction_with_device(
+ options.device_id, options.device_pin)
+ assert adb_utils.get_screen_state(options.device_id).is_on()
+ else:
+ adb_utils.ensure_screen_off(options.device_id)
+ return teardown_options
+
def global_teardown(options, teardown_options):
- if options.cooldown == 0:
- adb_utils.teardown_after_interaction_with_device(
- teardown_options, options.device_id)
- else:
- assert teardown_options is None
+ if options.cooldown == 0:
+ adb_utils.teardown_after_interaction_with_device(
+ teardown_options, options.device_id)
+ else:
+ assert teardown_options is None
+
def main(argv):
- (options, args) = parse_options(argv)
- with utils.TempDir() as tmp_dir:
- apk_or_apks = { 'apk': options.apk, 'apks': options.apks }
- if options.baseline_profile \
- and options.baseline_profile_install == 'profileinstaller':
- assert not options.apks, 'Unimplemented'
- apk_or_apks['apk'] = apk_utils.add_baseline_profile_to_apk(
- options.apk,
- options.baseline_profile,
- options.baseline_profile_metadata,
- tmp_dir)
- teardown_options = global_setup(options)
- run_all(apk_or_apks, options, tmp_dir)
- global_teardown(options, teardown_options)
+ (options, args) = parse_options(argv)
+ with utils.TempDir() as tmp_dir:
+ apk_or_apks = {'apk': options.apk, 'apks': options.apks}
+ if options.baseline_profile \
+ and options.baseline_profile_install == 'profileinstaller':
+ assert not options.apks, 'Unimplemented'
+ apk_or_apks['apk'] = apk_utils.add_baseline_profile_to_apk(
+ options.apk, options.baseline_profile,
+ options.baseline_profile_metadata, tmp_dir)
+ teardown_options = global_setup(options)
+ run_all(apk_or_apks, options, tmp_dir)
+ global_teardown(options, teardown_options)
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
\ No newline at end of file
+ sys.exit(main(sys.argv[1:]))
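For reference, parse_options above requires --app-id, --main-activity and --out-dir, plus exactly one of --apk, --apks or --bundle (the assertion on paths enforces this). A minimal invocation sketch, with a hypothetical application id and paths; the actual script path is the one named in this file's diff header earlier in the patch:

    python3 <startup_script> \
        --app-id com.example.app \
        --main-activity com.example.app.MainActivity \
        --apk app-release.apk \
        --out-dir /tmp/startup-traces \
        --iterations 3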
diff --git a/tools/startup/perfetto_utils.py b/tools/startup/perfetto_utils.py
index eb32722..4cf8613 100644
--- a/tools/startup/perfetto_utils.py
+++ b/tools/startup/perfetto_utils.py
@@ -7,85 +7,85 @@
import subprocess
import sys
+
def get_trace_processor():
- try:
- from perfetto.trace_processor import TraceProcessor
- except ImportError:
- sys.exit(
- 'Unable to analyze perfetto trace without the perfetto library. '
- 'Install instructions:\n'
- ' sudo apt install python3-pip\n'
- ' pip3 install perfetto')
- return TraceProcessor
+ try:
+ from perfetto.trace_processor import TraceProcessor
+ except ImportError:
+ sys.exit(
+ 'Unable to analyze perfetto trace without the perfetto library. '
+ 'Install instructions:\n'
+ ' sudo apt install python3-pip\n'
+ ' pip3 install perfetto')
+ return TraceProcessor
+
def ensure_record_android_trace(tmp_dir):
- record_android_trace_path = os.path.join(tmp_dir, 'record_android_trace')
- if not os.path.exists(record_android_trace_path):
- cmd = [
- 'curl',
- '--output',
- record_android_trace_path,
- '--silent',
- 'https://raw.githubusercontent.com/google/perfetto/master/tools/'
+ record_android_trace_path = os.path.join(tmp_dir, 'record_android_trace')
+ if not os.path.exists(record_android_trace_path):
+ cmd = [
+ 'curl', '--output', record_android_trace_path, '--silent',
+ 'https://raw.githubusercontent.com/google/perfetto/master/tools/'
'record_android_trace'
- ]
- subprocess.check_call(cmd)
- assert os.path.exists(record_android_trace_path)
- return record_android_trace_path
+ ]
+ subprocess.check_call(cmd)
+ assert os.path.exists(record_android_trace_path)
+ return record_android_trace_path
+
def record_android_trace(out_dir, tmp_dir, device_id=None):
- record_android_trace_path = ensure_record_android_trace(tmp_dir)
- config_path = os.path.join(os.path.dirname(__file__), 'config.pbtx')
- perfetto_trace_path = os.path.join(out_dir, 'trace.perfetto-trace')
- cmd = [
- sys.executable,
- record_android_trace_path,
- '--config',
- config_path,
- '--out',
- perfetto_trace_path,
- '--no-open']
- if device_id is not None:
- cmd.extend(['--serial', device_id])
- perfetto_process = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- lines = []
- for line in perfetto_process.stdout:
- line = line.decode('utf-8')
- lines.append(line)
- if 'enabled ftrace' in line.strip():
- return perfetto_process, perfetto_trace_path
- raise ValueError(
- 'Expected to find line containing: enabled ftrace, got: %s' % lines)
+ record_android_trace_path = ensure_record_android_trace(tmp_dir)
+ config_path = os.path.join(os.path.dirname(__file__), 'config.pbtx')
+ perfetto_trace_path = os.path.join(out_dir, 'trace.perfetto-trace')
+ cmd = [
+ sys.executable, record_android_trace_path, '--config', config_path,
+ '--out', perfetto_trace_path, '--no-open'
+ ]
+ if device_id is not None:
+ cmd.extend(['--serial', device_id])
+ perfetto_process = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ lines = []
+ for line in perfetto_process.stdout:
+ line = line.decode('utf-8')
+ lines.append(line)
+ if 'enabled ftrace' in line.strip():
+ return perfetto_process, perfetto_trace_path
+ raise ValueError(
+ 'Expected to find line containing: enabled ftrace, got: %s' % lines)
+
def stop_record_android_trace(perfetto_process, out_dir):
- if perfetto_process.poll() is not None:
- raise ValueError('Expected perfetto process to be running')
- # perfetto should terminate in at most 15 seconds,
- perfetto_config_duration=15
- stdout, stderr = perfetto_process.communicate(
- timeout=perfetto_config_duration*2)
- stdout = stdout.decode('utf-8')
- stderr = stderr.decode('utf-8')
- assert perfetto_process.returncode == 0
- assert os.path.exists(os.path.join(out_dir, 'trace.perfetto-trace'))
+ if perfetto_process.poll() is not None:
+ raise ValueError('Expected perfetto process to be running')
+ # perfetto should terminate in at most 15 seconds.
+ perfetto_config_duration = 15
+ stdout, stderr = perfetto_process.communicate(
+ timeout=perfetto_config_duration * 2)
+ stdout = stdout.decode('utf-8')
+ stderr = stderr.decode('utf-8')
+ assert perfetto_process.returncode == 0
+ assert os.path.exists(os.path.join(out_dir, 'trace.perfetto-trace'))
+
# https://perfetto.dev/docs/analysis/sql-tables
def find_slices_by_name(slice_name, options, trace_processor):
- return trace_processor.query(
- 'SELECT slice.dur, slice.ts FROM slice'
- ' INNER JOIN thread_track ON (slice.track_id = thread_track.id)'
- ' INNER JOIN thread using (utid)'
- ' INNER JOIN process using (upid)'
- ' WHERE slice.name = "%s"'
- ' AND process.name = "%s"'
- ' ORDER BY slice.ts ASC'
- % (slice_name, options.app_id))
+ return trace_processor.query(
+ 'SELECT slice.dur, slice.ts FROM slice'
+ ' INNER JOIN thread_track ON (slice.track_id = thread_track.id)'
+ ' INNER JOIN thread using (utid)'
+ ' INNER JOIN process using (upid)'
+ ' WHERE slice.name = "%s"'
+ ' AND process.name = "%s"'
+ ' ORDER BY slice.ts ASC' % (slice_name, options.app_id))
+
def find_unique_slice_by_name(slice_name, options, trace_processor):
- query_it = find_slices_by_name(slice_name, options, trace_processor)
- assert len(query_it) == 1
- return next(query_it)
+ query_it = find_slices_by_name(slice_name, options, trace_processor)
+ assert len(query_it) == 1
+ return next(query_it)
+
def get_slice_end_since_start(slice, initial_slice):
- return (slice.ts + slice.dur - initial_slice.ts) / 1000000
+ return (slice.ts + slice.dur - initial_slice.ts) / 1000000
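The helpers above compose the way compute_startup_data uses them earlier in this patch. A minimal sketch, assuming a perfetto trace has already been recorded to perfetto_trace_path and that options carries app_id:

    # Milliseconds from the start of bindApplication to the end of the
    # first Choreographer#doFrame slice (slice timestamps are nanoseconds;
    # get_slice_end_since_start converts to ms).
    TraceProcessor = get_trace_processor()
    trace_processor = TraceProcessor(file_path=perfetto_trace_path)
    bind_application = find_unique_slice_by_name('bindApplication', options,
                                                 trace_processor)
    do_frames = find_slices_by_name('Choreographer#doFrame', options,
                                    trace_processor)
    first_do_frame = next(do_frames)
    first_frame_ms = get_slice_end_since_start(first_do_frame, bind_application)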
diff --git a/tools/startup/profile_utils.py b/tools/startup/profile_utils.py
index 5c74a5c..7a8de4a 100755
--- a/tools/startup/profile_utils.py
+++ b/tools/startup/profile_utils.py
@@ -10,6 +10,7 @@
EXTERNAL_SYNTHETIC_SUFFIX = '$$ExternalSynthetic'
SYNTHETIC_PREFIX = 'S'
+
# Parses a list of class and method descriptors, prefixed with one or more flags
# 'H' (hot), 'S' (startup), 'P' (post startup).
#
@@ -24,86 +25,93 @@
#
# See also https://developer.android.com/studio/profile/baselineprofiles.
def parse_art_profile(lines):
- art_profile = {}
- flags_to_name = { 'H': 'hot', 'S': 'startup', 'P': 'post_startup' }
- for line in lines:
- line = line.strip()
- if not line:
- continue
- flags = { 'hot': False, 'startup': False, 'post_startup': False }
- while line[0] in flags_to_name:
- flag_abbreviation = line[0]
- flag_name = flags_to_name.get(flag_abbreviation)
- flags[flag_name] = True
- line = line[1:]
- while line.startswith('['):
- line = line[1:]
- assert line.startswith('L'), line
- descriptor = line
- art_profile[descriptor] = flags
- return art_profile
+ art_profile = {}
+ flags_to_name = {'H': 'hot', 'S': 'startup', 'P': 'post_startup'}
+ for line in lines:
+ line = line.strip()
+ if not line:
+ continue
+ flags = {'hot': False, 'startup': False, 'post_startup': False}
+ while line[0] in flags_to_name:
+ flag_abbreviation = line[0]
+ flag_name = flags_to_name.get(flag_abbreviation)
+ flags[flag_name] = True
+ line = line[1:]
+ while line.startswith('['):
+ line = line[1:]
+ assert line.startswith('L'), line
+ descriptor = line
+ art_profile[descriptor] = flags
+ return art_profile
-def transform_art_profile_to_r8_startup_list(
- art_profile, generalize_synthetics=False):
- r8_startup_list = {}
- for startup_descriptor, flags in art_profile.items():
- transformed_startup_descriptor = transform_synthetic_descriptor(
- startup_descriptor) if generalize_synthetics else startup_descriptor
- r8_startup_list[transformed_startup_descriptor] = {
- 'conditional_startup': False,
- 'hot': flags['hot'],
- 'startup': flags['startup'],
- 'post_startup': flags['post_startup']
- }
- return r8_startup_list
+
+def transform_art_profile_to_r8_startup_list(art_profile,
+ generalize_synthetics=False):
+ r8_startup_list = {}
+ for startup_descriptor, flags in art_profile.items():
+ transformed_startup_descriptor = transform_synthetic_descriptor(
+ startup_descriptor) if generalize_synthetics else startup_descriptor
+ r8_startup_list[transformed_startup_descriptor] = {
+ 'conditional_startup': False,
+ 'hot': flags['hot'],
+ 'startup': flags['startup'],
+ 'post_startup': flags['post_startup']
+ }
+ return r8_startup_list
+
def transform_synthetic_descriptor(descriptor):
- companion_class_index = descriptor.find(COMPANION_CLASS_SUFFIX)
- if companion_class_index >= 0:
- return SYNTHETIC_PREFIX + descriptor[0:companion_class_index] + ';'
- external_synthetic_index = descriptor.find(EXTERNAL_SYNTHETIC_SUFFIX)
- if external_synthetic_index >= 0:
- return SYNTHETIC_PREFIX + descriptor[0:external_synthetic_index] + ';'
- return descriptor
+ companion_class_index = descriptor.find(COMPANION_CLASS_SUFFIX)
+ if companion_class_index >= 0:
+ return SYNTHETIC_PREFIX + descriptor[0:companion_class_index] + ';'
+ external_synthetic_index = descriptor.find(EXTERNAL_SYNTHETIC_SUFFIX)
+ if external_synthetic_index >= 0:
+ return SYNTHETIC_PREFIX + descriptor[0:external_synthetic_index] + ';'
+ return descriptor
+
def filter_r8_startup_list(r8_startup_list, options):
- filtered_r8_startup_list = {}
- for startup_descriptor, flags in r8_startup_list.items():
- if not options.include_post_startup \
- and flags.get('post_startup') \
- and not flags.get('startup'):
- continue
- filtered_r8_startup_list[startup_descriptor] = flags
- return filtered_r8_startup_list
+ filtered_r8_startup_list = {}
+ for startup_descriptor, flags in r8_startup_list.items():
+ if not options.include_post_startup \
+ and flags.get('post_startup') \
+ and not flags.get('startup'):
+ continue
+ filtered_r8_startup_list[startup_descriptor] = flags
+ return filtered_r8_startup_list
+
def parse_options(argv):
- result = argparse.ArgumentParser(
- description='Utilities for converting an ART profile into an R8 startup '
- 'list.')
- result.add_argument('--art-profile', help='Path to the ART profile')
- result.add_argument('--include-post-startup',
- help='Include post startup classes and methods in the R8 '
- 'startup list',
- action='store_true',
- default=False)
- result.add_argument('--out', help='Where to store the R8 startup list')
- options, args = result.parse_known_args(argv)
- return options, args
+ result = argparse.ArgumentParser(
+ description='Utilities for converting an ART profile into an R8 startup '
+ 'list.')
+ result.add_argument('--art-profile', help='Path to the ART profile')
+ result.add_argument(
+ '--include-post-startup',
+ help='Include post startup classes and methods in the R8 '
+ 'startup list',
+ action='store_true',
+ default=False)
+ result.add_argument('--out', help='Where to store the R8 startup list')
+ options, args = result.parse_known_args(argv)
+ return options, args
+
def main(argv):
- (options, args) = parse_options(argv)
- with open(options.art_profile, 'r') as f:
- art_profile = parse_art_profile(f.read().splitlines())
- r8_startup_list = transform_art_profile_to_r8_startup_list(art_profile)
- filtered_r8_startup_list = filter_r8_startup_list(r8_startup_list, options)
- if options.out is not None:
- with open(options.out, 'w') as f:
- for startup_descriptor, flags in filtered_r8_startup_list.items():
- f.write(startup_descriptor)
- f.write('\n')
- else:
- for startup_descriptor, flags in filtered_r8_startup_list.items():
- print(startup_descriptor)
+ (options, args) = parse_options(argv)
+ with open(options.art_profile, 'r') as f:
+ art_profile = parse_art_profile(f.read().splitlines())
+ r8_startup_list = transform_art_profile_to_r8_startup_list(art_profile)
+ filtered_r8_startup_list = filter_r8_startup_list(r8_startup_list, options)
+ if options.out is not None:
+ with open(options.out, 'w') as f:
+ for startup_descriptor, flags in filtered_r8_startup_list.items():
+ f.write(startup_descriptor)
+ f.write('\n')
+ else:
+ for startup_descriptor, flags in filtered_r8_startup_list.items():
+ print(startup_descriptor)
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
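To make the flag handling concrete, a small hypothetical profile (descriptors invented for illustration) and what the functions above produce for it:

    lines = [
        'HSPLcom/example/MainActivity;-><init>()V',  # hot + startup + post startup
        'SLcom/example/MainActivity;',               # startup class rule
        'PLcom/example/Worker;->run()V',             # post-startup method
    ]
    art_profile = parse_art_profile(lines)
    # art_profile['Lcom/example/Worker;->run()V'] ==
    #     {'hot': False, 'startup': False, 'post_startup': True}
    r8_startup_list = transform_art_profile_to_r8_startup_list(art_profile)
    # Each entry additionally carries 'conditional_startup': False.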
diff --git a/tools/startup/relayout.py b/tools/startup/relayout.py
index b0f8aac..3d0cd94 100755
--- a/tools/startup/relayout.py
+++ b/tools/startup/relayout.py
@@ -17,79 +17,85 @@
import utils
import zip_utils
-LOWEST_SUPPORTED_MIN_API = 21 # Android L (native multi dex)
+LOWEST_SUPPORTED_MIN_API = 21 # Android L (native multi dex)
+
def parse_options(argv):
- result = argparse.ArgumentParser(
- description='Relayout a given APK using a startup profile.')
- result.add_argument('--apk',
- help='Path to the .apk',
- required=True)
- result.add_argument('--desugared-library',
- choices=['auto', 'true', 'false'],
- default='auto',
- help='Whether the last dex file of the app is desugared '
- 'library')
- result.add_argument('--no-build',
- action='store_true',
- default=False,
- help='To disable building using gradle')
- result.add_argument('--out',
- help='Destination of resulting apk',
- required=True)
- result.add_argument('--profile',
- help='Path to the startup profile')
- options, args = result.parse_known_args(argv)
- return options, args
+ result = argparse.ArgumentParser(
+ description='Relayout a given APK using a startup profile.')
+ result.add_argument('--apk', help='Path to the .apk', required=True)
+ result.add_argument(
+ '--desugared-library',
+ choices=['auto', 'true', 'false'],
+ default='auto',
+ help='Whether the last dex file of the app is desugared '
+ 'library')
+ result.add_argument('--no-build',
+ action='store_true',
+ default=False,
+ help='To disable building using gradle')
+ result.add_argument('--out',
+ help='Destination of resulting apk',
+ required=True)
+ result.add_argument('--profile', help='Path to the startup profile')
+ options, args = result.parse_known_args(argv)
+ return options, args
+
def get_dex_to_relayout(options, temp):
- marker = extractmarker.extractmarker(options.apk, build=not options.no_build)
- if '~~L8' not in marker:
- return [options.apk], None
- dex_dir = os.path.join(temp, 'dex')
- dex_predicate = \
- lambda name : name.startswith('classes') and name.endswith('.dex')
- extracted_dex_files = \
- zip_utils.extract_all_that_matches(options.apk, dex_dir, dex_predicate)
- desugared_library_dex = 'classes%s.dex' % len(extracted_dex_files)
- assert desugared_library_dex in extracted_dex_files
- return [
- os.path.join(dex_dir, name) \
- for name in extracted_dex_files if name != desugared_library_dex], \
- os.path.join(dex_dir, desugared_library_dex)
+ marker = extractmarker.extractmarker(options.apk,
+ build=not options.no_build)
+ if '~~L8' not in marker:
+ return [options.apk], None
+ dex_dir = os.path.join(temp, 'dex')
+ dex_predicate = \
+ lambda name : name.startswith('classes') and name.endswith('.dex')
+ extracted_dex_files = \
+ zip_utils.extract_all_that_matches(options.apk, dex_dir, dex_predicate)
+ desugared_library_dex = 'classes%s.dex' % len(extracted_dex_files)
+ assert desugared_library_dex in extracted_dex_files
+ return [
+ os.path.join(dex_dir, name) \
+ for name in extracted_dex_files if name != desugared_library_dex], \
+ os.path.join(dex_dir, desugared_library_dex)
+
def has_desugared_library_dex(options):
- if options.desugared_library == 'auto':
- marker = extractmarker.extractmarker(
- options.apk, build=not options.no_build)
- return '~~L8' in marker
- return options.desugared_library == 'true'
+ if options.desugared_library == 'auto':
+ marker = extractmarker.extractmarker(options.apk,
+ build=not options.no_build)
+ return '~~L8' in marker
+ return options.desugared_library == 'true'
+
def main(argv):
- (options, args) = parse_options(argv)
- with utils.TempDir() as temp:
- dex = os.path.join(temp, 'dex.zip')
- d8_args = [
- '--min-api',
- str(max(apk_utils.get_min_api(options.apk), LOWEST_SUPPORTED_MIN_API)),
- '--output', dex,
- '--no-desugaring',
- '--release']
- if options.profile:
- d8_args.extend(['--startup-profile', options.profile])
- dex_to_relayout, desugared_library_dex = get_dex_to_relayout(options, temp)
- d8_args.extend(dex_to_relayout)
- toolhelper.run(
- 'd8',
- d8_args,
- build=not options.no_build,
- main='com.android.tools.r8.D8')
- if desugared_library_dex is not None:
- dex_files = [name for name in \
- zip_utils.get_names_that_matches(dex, lambda x : True)]
- zip_utils.add_file_to_zip(
- desugared_library_dex, 'classes%s.dex' % str(len(dex_files) + 1), dex)
- apk_masseur.masseur(options.apk, dex=dex, out=options.out)
+ (options, args) = parse_options(argv)
+ with utils.TempDir() as temp:
+ dex = os.path.join(temp, 'dex.zip')
+ d8_args = [
+ '--min-api',
+ str(
+ max(apk_utils.get_min_api(options.apk),
+ LOWEST_SUPPORTED_MIN_API)), '--output', dex,
+ '--no-desugaring', '--release'
+ ]
+ if options.profile:
+ d8_args.extend(['--startup-profile', options.profile])
+ dex_to_relayout, desugared_library_dex = get_dex_to_relayout(
+ options, temp)
+ d8_args.extend(dex_to_relayout)
+ toolhelper.run('d8',
+ d8_args,
+ build=not options.no_build,
+ main='com.android.tools.r8.D8')
+ if desugared_library_dex is not None:
+ dex_files = [name for name in \
+ zip_utils.get_names_that_matches(dex, lambda x : True)]
+ zip_utils.add_file_to_zip(desugared_library_dex,
+ 'classes%s.dex' % str(len(dex_files) + 1),
+ dex)
+ apk_masseur.masseur(options.apk, dex=dex, out=options.out)
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
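A typical invocation of the relayout tool above, with hypothetical paths; --profile is optional, and desugared library handling defaults to 'auto', which inspects the APK's marker for '~~L8':

    python3 tools/startup/relayout.py \
        --apk app-release.apk \
        --profile startup-profile.txt \
        --out app-relayout.apk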
diff --git a/tools/tag_versions.py b/tools/tag_versions.py
index 28a1cae..243625a 100755
--- a/tools/tag_versions.py
+++ b/tools/tag_versions.py
@@ -14,134 +14,141 @@
import utils
# Grep match string for 'Version X.Y.Z[-dev]'
-VERSION_EXP='^Version [[:digit:]]\+.[[:digit:]]\+.[[:digit:]]\+\(\|-dev\)$'
+VERSION_EXP = '^Version [[:digit:]]\+.[[:digit:]]\+.[[:digit:]]\+\(\|-dev\)$'
# R8 is located in the 'builder' library.
-AGP_MAVEN="https://dl.google.com/android/maven2/com/android/tools/build/builder"
+AGP_MAVEN = "https://dl.google.com/android/maven2/com/android/tools/build/builder"
+
def parse_options():
- parser = argparse.ArgumentParser(description='Tag R8 Versions')
- parser.add_argument(
- '--branch',
- help='The R8 branch to tag versions on, eg, origin/3.0')
- parser.add_argument(
- '--agp',
- help='The AGP to compute the tag for, eg, 4.2.0-beta03')
- parser.add_argument(
- '--dry-run',
- default=False,
- action='store_true',
- help='Print the state changing commands without running them.')
- return parser.parse_args()
+ parser = argparse.ArgumentParser(description='Tag R8 Versions')
+ parser.add_argument('--branch',
+ help='The R8 branch to tag versions on, e.g., origin/3.0')
+ parser.add_argument('--agp',
+ help='The AGP to compute the tag for, e.g., 4.2.0-beta03')
+ parser.add_argument(
+ '--dry-run',
+ default=False,
+ action='store_true',
+ help='Print the state changing commands without running them.')
+ return parser.parse_args()
+
def run(options, cmd):
- print(' '.join(cmd))
- if not options.dry_run:
- subprocess.check_call(cmd)
+ print(' '.join(cmd))
+ if not options.dry_run:
+ subprocess.check_call(cmd)
+
def main():
- args = parse_options()
- if args.branch:
- tag_r8_branch(args.branch, args)
- elif args.agp:
- if (args.agp == 'all'):
- tag_all_agp_versions(args)
+ args = parse_options()
+ if args.branch:
+ tag_r8_branch(args.branch, args)
+ elif args.agp:
+ if (args.agp == 'all'):
+ tag_all_agp_versions(args)
+ else:
+ tag_agp_version(args.agp, args)
else:
- tag_agp_version(args.agp, args)
- else:
- print("Should use a top-level option, such as --branch or --agp.")
- return 1
- return 0
+ print("Should use a top-level option, such as --branch or --agp.")
+ return 1
+ return 0
+
def prepare_print_version(dist, temp):
- wrapper_file = os.path.join(
- utils.REPO_ROOT,
- 'src/main/java/com/android/tools/r8/utils/PrintR8Version.java')
- cmd = [
- jdk.GetJavacExecutable(),
- wrapper_file,
- '-d', temp,
- '-cp', dist,
- ]
- utils.PrintCmd(cmd)
- subprocess.check_output(cmd)
- return temp
+ wrapper_file = os.path.join(
+ utils.REPO_ROOT,
+ 'src/main/java/com/android/tools/r8/utils/PrintR8Version.java')
+ cmd = [
+ jdk.GetJavacExecutable(),
+ wrapper_file,
+ '-d',
+ temp,
+ '-cp',
+ dist,
+ ]
+ utils.PrintCmd(cmd)
+ subprocess.check_output(cmd)
+ return temp
+
def get_tag_info_on_origin(tag):
- output = subprocess.check_output(
- ['git', 'ls-remote', '--tags', 'origin', tag]).decode('utf-8')
- if len(output.strip()) == 0:
- return None
- return output
+ output = subprocess.check_output(
+ ['git', 'ls-remote', '--tags', 'origin', tag]).decode('utf-8')
+ if len(output.strip()) == 0:
+ return None
+ return output
+
def tag_all_agp_versions(args):
- with utils.TempDir() as temp:
- url = "%s/maven-metadata.xml" % AGP_MAVEN
- metadata = os.path.join(temp, "maven-metadata.xml")
- try:
- urllib.request.urlretrieve(url, metadata)
- except urllib.error.HTTPError as e:
- print('Could not find maven-metadata.xml for agp')
- print(e)
- return 1
- with open(metadata, 'r') as file:
- data = file.read()
- pattern = r'<version>(.+)</version>'
- matches = re.findall(pattern, data)
- matches.reverse()
- for version in matches:
- print('Tagging agp version ' + version)
- tag_agp_version(version, args)
+ with utils.TempDir() as temp:
+ url = "%s/maven-metadata.xml" % AGP_MAVEN
+ metadata = os.path.join(temp, "maven-metadata.xml")
+ try:
+ urllib.request.urlretrieve(url, metadata)
+ except urllib.error.HTTPError as e:
+ print('Could not find maven-metadata.xml for agp')
+ print(e)
+ return 1
+ with open(metadata, 'r') as file:
+ data = file.read()
+ pattern = r'<version>(.+)</version>'
+ matches = re.findall(pattern, data)
+ matches.reverse()
+ for version in matches:
+ print('Tagging agp version ' + version)
+ tag_agp_version(version, args)
def tag_agp_version(agp, args):
- tag = 'agp-%s' % agp
- result = get_tag_info_on_origin(tag)
- if result:
- print('Tag %s is already present' % tag)
- print(result)
- subprocess.call(['git', 'show', '--oneline', '-s', tag])
- return 0
- with utils.TempDir() as temp:
- url = "%s/%s/builder-%s.jar" % (AGP_MAVEN, agp, agp)
- jar = os.path.join(temp, "agp.jar")
- try:
- urllib.request.urlretrieve(url, jar)
- except urllib.error.HTTPError as e:
- print('Could not find jar for agp %s' % agp)
- print(e)
- return 1
- print_version_helper = prepare_print_version(utils.R8_JAR, temp)
- output = subprocess.check_output([
- jdk.GetJavaExecutable(),
- '-cp', ':'.join([jar, print_version_helper]),
- 'com.android.tools.r8.utils.PrintR8Version'
- ]).decode('utf-8')
- version = output.split(' ')[0]
- run(args, ['git', 'tag', '-f', tag, '-m', tag, '%s^{}' % version])
- run(args, ['git', 'push', 'origin', tag])
+ tag = 'agp-%s' % agp
+ result = get_tag_info_on_origin(tag)
+ if result:
+ print('Tag %s is already present' % tag)
+ print(result)
+ subprocess.call(['git', 'show', '--oneline', '-s', tag])
+ return 0
+ with utils.TempDir() as temp:
+ url = "%s/%s/builder-%s.jar" % (AGP_MAVEN, agp, agp)
+ jar = os.path.join(temp, "agp.jar")
+ try:
+ urllib.request.urlretrieve(url, jar)
+ except urllib.error.HTTPError as e:
+ print('Could not find jar for agp %s' % agp)
+ print(e)
+ return 1
+ print_version_helper = prepare_print_version(utils.R8_JAR, temp)
+ output = subprocess.check_output([
+ jdk.GetJavaExecutable(), '-cp',
+ ':'.join([jar, print_version_helper]),
+ 'com.android.tools.r8.utils.PrintR8Version'
+ ]).decode('utf-8')
+ version = output.split(' ')[0]
+ run(args, ['git', 'tag', '-f', tag, '-m', tag, '%s^{}' % version])
+ run(args, ['git', 'push', 'origin', tag])
+
def tag_r8_branch(branch, args):
- if not branch.startswith('origin/'):
- print('Expected branch to start with origin/')
- return 1
- output = subprocess.check_output([
- 'git', 'log', '--pretty=format:%H\t%s', '--grep', VERSION_EXP, branch
- ]).decode('utf-8')
- for l in output.split('\n'):
- (hash, subject) = l.split('\t')
- m = re.search('Version (.+)', subject)
- if not m:
- print('Unable to find a version for line: %s' % l)
- continue
- version = m.group(1)
- result = get_tag_info_on_origin(version)
- if not result:
- run(args, ['git', 'tag', '-a', version, '-m', version, hash])
- run(args, ['git', 'push', 'origin', version])
- if args.dry_run:
- print('Dry run complete. None of the above have been executed.')
+ if not branch.startswith('origin/'):
+ print('Expected branch to start with origin/')
+ return 1
+ output = subprocess.check_output(
+ ['git', 'log', '--pretty=format:%H\t%s', '--grep', VERSION_EXP,
+ branch]).decode('utf-8')
+ for l in output.split('\n'):
+ (hash, subject) = l.split('\t')
+ m = re.search('Version (.+)', subject)
+ if not m:
+ print('Unable to find a version for line: %s' % l)
+ continue
+ version = m.group(1)
+ result = get_tag_info_on_origin(version)
+ if not result:
+ run(args, ['git', 'tag', '-a', version, '-m', version, hash])
+ run(args, ['git', 'push', 'origin', version])
+ if args.dry_run:
+ print('Dry run complete. None of the above have been executed.')
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
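Example invocations of the tagging tool above, using the sample values from its own help texts; --dry-run prints the state-changing git commands without running them:

    tools/tag_versions.py --branch origin/3.0 --dry-run
    tools/tag_versions.py --agp 4.2.0-beta03
    tools/tag_versions.py --agp all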
diff --git a/tools/test.py b/tools/test.py
index b106d33..efbe5a1 100755
--- a/tools/test.py
+++ b/tools/test.py
@@ -23,23 +23,14 @@
import utils
if utils.is_python3():
- import threading
+ import threading
else:
- import thread
+ import thread
ALL_ART_VMS = [
- "default",
- "14.0.0",
- "13.0.0",
- "12.0.0",
- "10.0.0",
- "9.0.0",
- "8.1.0",
- "7.0.0",
- "6.0.1",
- "5.1.1",
- "4.4.4",
- "4.0.4"]
+ "default", "14.0.0", "13.0.0", "12.0.0", "10.0.0", "9.0.0", "8.1.0",
+ "7.0.0", "6.0.1", "5.1.1", "4.4.4", "4.0.4"
+]
# How often do we check for progress on the bots:
# Should be long enough that a normal run would always have made progress
@@ -55,562 +46,666 @@
REPORTS_PATH = os.path.join(utils.BUILD, 'reports')
REPORT_INDEX = ['tests', 'test', 'index.html']
VALID_RUNTIMES = [
- 'none',
- 'jdk8',
- 'jdk9',
- 'jdk11',
- 'jdk17',
- 'jdk20',
-] + [ 'dex-%s' % dexvm for dexvm in ALL_ART_VMS ]
+ 'none',
+ 'jdk8',
+ 'jdk9',
+ 'jdk11',
+ 'jdk17',
+ 'jdk20',
+] + ['dex-%s' % dexvm for dexvm in ALL_ART_VMS]
+
def ParseOptions():
- result = argparse.ArgumentParser()
- result.add_argument('--no-internal', '--no_internal',
- help='Do not run Google internal tests.',
- default=False, action='store_true')
- result.add_argument('--archive-failures', '--archive_failures',
- help='Upload test results to cloud storage on failure.',
- default=False, action='store_true')
- result.add_argument('--archive-failures-file-name',
- '--archive_failures_file_name',
- help='Set file name for the archived failures file name',
- default=uuid.uuid4())
- result.add_argument('--only-internal', '--only_internal',
- help='Only run Google internal tests.',
- default=False, action='store_true')
- result.add_argument('--all-tests', '--all_tests',
- help='Run tests in all configurations.',
- default=False, action='store_true')
- result.add_argument('--slow-tests', '--slow_tests',
- help='Also run slow tests.',
- default=False, action='store_true')
- result.add_argument('-v', '--verbose',
- help='Print test stdout to, well, stdout.',
- default=False, action='store_true')
- result.add_argument('--dex-vm', '--dex_vm',
- help='The android version of the vm to use. "all" will run the tests on '
- 'all art vm versions (stopping after first failed execution)',
- default="default",
- choices=ALL_ART_VMS + ["all"])
- result.add_argument('--dex-vm-kind', '--dex_vm_kind',
- help='Whether to use host or target version of runtime',
- default="host",
- nargs=1,
- choices=["host", "target"])
- result.add_argument('--one-line-per-test', '--one_line_per_test',
- help='Print a line before a tests starts and after it ends to stdout.',
- default=False, action='store_true')
- result.add_argument('--tool',
- help='Tool to run ART tests with: "r8" (default) or "d8" or "r8cf"'
- ' (r8 w/CF-backend). Ignored if "--all_tests" enabled.',
- default=None, choices=["r8", "d8", "r8cf"])
- result.add_argument('--disable-assertions', '--disable_assertions', '-da',
- help='Disable Java assertions when running the compiler '
- '(default enabled)',
- default=False, action='store_true')
- result.add_argument('--with-code-coverage', '--with_code_coverage',
- help='Enable code coverage with Jacoco.',
- default=False, action='store_true')
- result.add_argument('--test-dir', '--test_dir',
- help='Use a custom directory for the test artifacts instead of a'
- ' temporary (which is automatically removed after the test).'
- ' Note that the directory will not be cleared before the test.')
- result.add_argument('--command-cache-dir', '--command_cache_dir',
- help='Cache command invocations to this directory, speeds up test runs',
- default=os.environ.get('R8_COMMAND_CACHE_DIR'))
- result.add_argument('--command-cache-stats', '--command_cache_stats',
- help='Collect and print statistics about the command cache.',
- default=False, action='store_true')
- result.add_argument('--java-home', '--java_home',
- help='Use a custom java version to run tests.')
- result.add_argument('--java-max-memory-size', '--java_max_memory_size',
- help='Set memory for running tests, default 4G',
- default=os.environ.get('R8_JAVA_MAX_MEMORY_SIZE', '4G'))
- result.add_argument('--test-namespace', '--test_namespace',
- help='Only run tests in this namespace. The namespace is relative to '
- 'com/android/tools/r8, e.g., desugar/desugaredlibrary',
- default=None)
- result.add_argument('--shard-count', '--shard_count',
- help='We are running this many shards.')
- result.add_argument('--shard-number', '--shard_number',
- help='We are running this shard.')
- result.add_argument('--generate-golden-files-to', '--generate_golden_files_to',
- help='Store dex files produced by tests in the specified directory.'
- ' It is aimed to be read on platforms with no host runtime available'
- ' for comparison.')
- result.add_argument('--use-golden-files-in', '--use_golden_files_in',
- help='Download golden files hierarchy for this commit in the specified'
- ' location and use them instead of executing on host runtime.')
- result.add_argument('--no-r8lib', '--no_r8lib',
- default=False, action='store_true',
- help='Run the tests on R8 full with relocated dependencies.')
- result.add_argument('--no-arttests', '--no_arttests',
- default=False, action='store_true',
- help='Do not run the art tests.')
- result.add_argument('--r8lib-no-deps', '--r8lib_no_deps',
- default=False, action='store_true',
- help='Run the tests on r8lib without relocated dependencies.')
- result.add_argument('--failed',
- default=False, action='store_true',
- help='Run the tests that failed last execution.')
- result.add_argument('--fail-fast', '--fail_fast',
- default=False, action='store_true',
- help='Stop on first failure. Passes --fail-fast to gradle test runner.')
- result.add_argument('--worktree',
- default=False, action='store_true',
- help='Tests are run in worktree and should not use gradle user home.')
- result.add_argument('--runtimes',
- default=None,
- help='Test parameter runtimes to use, separated by : (eg, none:jdk9).'
- ' Special values include: all (for all runtimes)'
- ' and empty (for no runtimes).')
- result.add_argument('--print-hanging-stacks', '--print_hanging_stacks',
- default=-1, type=int, help='Print hanging stacks after timeout in seconds')
- result.add_argument('--print-full-stacktraces', '--print_full_stacktraces',
- default=False, action='store_true',
- help='Print the full stacktraces without any filtering applied')
- result.add_argument(
- '--print-obfuscated-stacktraces', '--print_obfuscated_stacktraces',
- default=False, action='store_true',
- help='Print the obfuscated stacktraces')
- result.add_argument(
- '--debug-agent', '--debug_agent',
- help='Enable Java debug agent and suspend compilation (default disabled)',
- default=False,
- action='store_true')
- result.add_argument('--desugared-library-configuration',
- '--desugared_library-configuration',
- help='Use alternative desugared library configuration.')
- result.add_argument('--desugared-library', '--desugared_library',
- help='Build and use desugared library from GitHub.')
- result.add_argument('--print-times', '--print_times',
- help='Print the execution time of the slowest tests..',
- default=False, action='store_true')
- result.add_argument(
- '--testing-state-dir',
- help='Explicitly set the testing state directory '
- '(defaults to build/test-state/<git-branch>).')
- result.add_argument(
- '--rerun',
- help='Rerun tests (implicitly enables testing state).',
- choices=testing_state.CHOICES)
- result.add_argument(
- '--stacktrace',
- help='Pass --stacktrace to the gradle run',
- default=False, action='store_true')
- result.add_argument('--kotlin-compiler-dev',
- help='Specify to download a kotlin dev compiler and run '
- 'tests with that',
- default=False, action='store_true')
- result.add_argument('--kotlin-compiler-old',
- help='Specify to run tests on older kotlin compilers',
- default=False, action='store_true')
- return result.parse_known_args()
+ result = argparse.ArgumentParser()
+ result.add_argument('--no-internal',
+ '--no_internal',
+ help='Do not run Google internal tests.',
+ default=False,
+ action='store_true')
+ result.add_argument('--archive-failures',
+ '--archive_failures',
+ help='Upload test results to cloud storage on failure.',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--archive-failures-file-name',
+ '--archive_failures_file_name',
+ help='Set the file name for the archived failures',
+ default=uuid.uuid4())
+ result.add_argument('--only-internal',
+ '--only_internal',
+ help='Only run Google internal tests.',
+ default=False,
+ action='store_true')
+ result.add_argument('--all-tests',
+ '--all_tests',
+ help='Run tests in all configurations.',
+ default=False,
+ action='store_true')
+ result.add_argument('--slow-tests',
+ '--slow_tests',
+ help='Also run slow tests.',
+ default=False,
+ action='store_true')
+ result.add_argument('-v',
+ '--verbose',
+ help='Print test stdout to, well, stdout.',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--dex-vm',
+ '--dex_vm',
+ help='The android version of the vm to use. "all" will run the tests on '
+ 'all art vm versions (stopping after first failed execution)',
+ default="default",
+ choices=ALL_ART_VMS + ["all"])
+ result.add_argument('--dex-vm-kind',
+ '--dex_vm_kind',
+ help='Whether to use host or target version of runtime',
+ default="host",
+ nargs=1,
+ choices=["host", "target"])
+ result.add_argument(
+ '--one-line-per-test',
+ '--one_line_per_test',
+ help='Print a line before a test starts and after it ends to stdout.',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--tool',
+ help='Tool to run ART tests with: "r8" (default) or "d8" or "r8cf"'
+ ' (r8 w/CF-backend). Ignored if "--all_tests" enabled.',
+ default=None,
+ choices=["r8", "d8", "r8cf"])
+ result.add_argument(
+ '--disable-assertions',
+ '--disable_assertions',
+ '-da',
+ help='Disable Java assertions when running the compiler '
+ '(default enabled)',
+ default=False,
+ action='store_true')
+ result.add_argument('--with-code-coverage',
+ '--with_code_coverage',
+ help='Enable code coverage with Jacoco.',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--test-dir',
+ '--test_dir',
+ help='Use a custom directory for the test artifacts instead of a'
+ ' temporary (which is automatically removed after the test).'
+ ' Note that the directory will not be cleared before the test.')
+ result.add_argument(
+ '--command-cache-dir',
+ '--command_cache_dir',
+ help='Cache command invocations to this directory; speeds up test runs',
+ default=os.environ.get('R8_COMMAND_CACHE_DIR'))
+ result.add_argument(
+ '--command-cache-stats',
+ '--command_cache_stats',
+ help='Collect and print statistics about the command cache.',
+ default=False,
+ action='store_true')
+ result.add_argument('--java-home',
+ '--java_home',
+ help='Use a custom java version to run tests.')
+ result.add_argument('--java-max-memory-size',
+ '--java_max_memory_size',
+ help='Set memory for running tests, default 4G',
+ default=os.environ.get('R8_JAVA_MAX_MEMORY_SIZE', '4G'))
+ result.add_argument(
+ '--test-namespace',
+ '--test_namespace',
+ help='Only run tests in this namespace. The namespace is relative to '
+ 'com/android/tools/r8, e.g., desugar/desugaredlibrary',
+ default=None)
+ result.add_argument('--shard-count',
+ '--shard_count',
+ help='We are running this many shards.')
+ result.add_argument('--shard-number',
+ '--shard_number',
+ help='We are running this shard.')
+ result.add_argument(
+ '--generate-golden-files-to',
+ '--generate_golden_files_to',
+ help='Store dex files produced by tests in the specified directory.'
+ ' It is intended to be read on platforms with no host runtime available'
+ ' for comparison.')
+ result.add_argument(
+ '--use-golden-files-in',
+ '--use_golden_files_in',
+ help='Download golden files hierarchy for this commit in the specified'
+ ' location and use them instead of executing on host runtime.')
+ result.add_argument(
+ '--no-r8lib',
+ '--no_r8lib',
+ default=False,
+ action='store_true',
+ help='Run the tests on R8 full with relocated dependencies.')
+ result.add_argument('--no-arttests',
+ '--no_arttests',
+ default=False,
+ action='store_true',
+ help='Do not run the art tests.')
+ result.add_argument(
+ '--r8lib-no-deps',
+ '--r8lib_no_deps',
+ default=False,
+ action='store_true',
+ help='Run the tests on r8lib without relocated dependencies.')
+ result.add_argument('--failed',
+ default=False,
+ action='store_true',
+ help='Run the tests that failed last execution.')
+ result.add_argument(
+ '--fail-fast',
+ '--fail_fast',
+ default=False,
+ action='store_true',
+ help='Stop on first failure. Passes --fail-fast to gradle test runner.')
+ result.add_argument(
+ '--worktree',
+ default=False,
+ action='store_true',
+ help='Tests are run in worktree and should not use gradle user home.')
+ result.add_argument(
+ '--runtimes',
+ default=None,
+ help='Test parameter runtimes to use, separated by : (e.g., none:jdk9).'
+ ' Special values include: all (for all runtimes)'
+ ' and empty (for no runtimes).')
+ result.add_argument('--print-hanging-stacks',
+ '--print_hanging_stacks',
+ default=-1,
+ type=int,
+ help='Print hanging stacks after timeout in seconds')
+ result.add_argument(
+ '--print-full-stacktraces',
+ '--print_full_stacktraces',
+ default=False,
+ action='store_true',
+ help='Print the full stacktraces without any filtering applied')
+ result.add_argument('--print-obfuscated-stacktraces',
+ '--print_obfuscated_stacktraces',
+ default=False,
+ action='store_true',
+ help='Print the obfuscated stacktraces')
+ result.add_argument(
+ '--debug-agent',
+ '--debug_agent',
+ help=
+ 'Enable Java debug agent and suspend compilation (default disabled)',
+ default=False,
+ action='store_true')
+ result.add_argument('--desugared-library-configuration',
+ '--desugared_library-configuration',
+ help='Use alternative desugared library configuration.')
+ result.add_argument('--desugared-library',
+ '--desugared_library',
+ help='Build and use desugared library from GitHub.')
+ result.add_argument('--print-times',
+ '--print_times',
+ help='Print the execution time of the slowest tests.',
+ default=False,
+ action='store_true')
+ result.add_argument('--testing-state-dir',
+ help='Explicitly set the testing state directory '
+ '(defaults to build/test-state/<git-branch>).')
+ result.add_argument('--rerun',
+ help='Rerun tests (implicitly enables testing state).',
+ choices=testing_state.CHOICES)
+ result.add_argument('--stacktrace',
+ help='Pass --stacktrace to the gradle run',
+ default=False,
+ action='store_true')
+ result.add_argument(
+ '--kotlin-compiler-dev',
+ help='Specify to download a kotlin dev compiler and run '
+ 'tests with that',
+ default=False,
+ action='store_true')
+ result.add_argument('--kotlin-compiler-old',
+ help='Specify to run tests on older kotlin compilers',
+ default=False,
+ action='store_true')
+ return result.parse_known_args()
+
def has_failures(classes_file):
- with open(classes_file) as f:
- contents = f.read()
- # The report has a div tag with the percentage of tests that succeeded.
- assert '<div class="percent">' in contents
- return '<div class="percent">100%</div>' not in contents
+ with open(classes_file) as f:
+ contents = f.read()
+ # The report has a div tag with the percentage of tests that succeeded.
+ assert '<div class="percent">' in contents
+ return '<div class="percent">100%</div>' not in contents
+
def should_upload(filename, absolute_filename):
- # filename is relative to REPO_ROOT/build/reports/tests
- if filename.startswith('test/packages'):
- # We don't upload the package overview
- return False
- if filename.startswith('test/classes'):
- return has_failures(absolute_filename)
- # Always upload index, css and js
- return True
+ # filename is relative to REPO_ROOT/build/reports/tests
+ if filename.startswith('test/packages'):
+ # We don't upload the package overview
+ return False
+ if filename.startswith('test/classes'):
+ return has_failures(absolute_filename)
+ # Always upload index, css and js
+ return True
+
def archive_failures(options):
- upload_dir = os.path.join(utils.REPO_ROOT, 'build', 'reports', 'tests')
- file_name = options.archive_failures_file_name
- destination_dir = 'gs://%s/%s/' % (BUCKET, file_name)
- for (dir_path, dir_names, file_names) in os.walk(upload_dir):
- for f in file_names:
- absolute_file = os.path.join(dir_path, f)
- relative_file = absolute_file[len(upload_dir)+1:]
- if (should_upload(relative_file, absolute_file)):
- utils.upload_file_to_cloud_storage(absolute_file,
- destination_dir + relative_file)
- url = 'https://storage.googleapis.com/%s/%s/test/index.html' % (BUCKET, file_name)
- print('Test results available at: %s' % url)
+ upload_dir = os.path.join(utils.REPO_ROOT, 'build', 'reports', 'tests')
+ file_name = options.archive_failures_file_name
+ destination_dir = 'gs://%s/%s/' % (BUCKET, file_name)
+ for (dir_path, dir_names, file_names) in os.walk(upload_dir):
+ for f in file_names:
+ absolute_file = os.path.join(dir_path, f)
+ relative_file = absolute_file[len(upload_dir) + 1:]
+ if (should_upload(relative_file, absolute_file)):
+ utils.upload_file_to_cloud_storage(
+ absolute_file, destination_dir + relative_file)
+ url = 'https://storage.googleapis.com/%s/%s/test/index.html' % (BUCKET,
+ file_name)
+ print('Test results available at: %s' % url)
+
def Main():
- (options, args) = ParseOptions()
- if utils.is_bot():
- gradle.RunGradle(['--no-daemon', 'clean'])
- print('Running with python ' + str(sys.version_info))
- # Always print stats on bots if command cache is enabled
- options.command_cache_stats = options.command_cache_dir is not None
+ (options, args) = ParseOptions()
+ if utils.is_bot():
+ gradle.RunGradle(['--no-daemon', 'clean'])
+ print('Running with python ' + str(sys.version_info))
+ # Always print stats on bots if command cache is enabled
+ options.command_cache_stats = options.command_cache_dir is not None
- desugar_jdk_json_dir = None
- if options.desugared_library_configuration:
- if options.desugared_library_configuration != 'jdk11':
- print("Only value supported for --desugared-library is 'jdk11'")
- exit(1)
- desugar_jdk_json_dir = 'src/library_desugar/jdk11'
+ desugar_jdk_json_dir = None
+ if options.desugared_library_configuration:
+ if options.desugared_library_configuration != 'jdk11':
+ print("Only value supported for --desugared-library is 'jdk11'")
+ exit(1)
+ desugar_jdk_json_dir = 'src/library_desugar/jdk11'
- desugar_jdk_libs = None
- if options.desugared_library:
- if options.desugared_library != 'HEAD':
- print("Only value supported for --desugared-library is 'HEAD'")
- exit(1)
- desugar_jdk_libs_dir = 'build/desugar_jdk_libs'
- shutil.rmtree(desugar_jdk_libs_dir, ignore_errors=True)
- os.makedirs(desugar_jdk_libs_dir)
- print('Building desugared library.')
- with utils.TempDir() as checkout_dir:
- archive_desugar_jdk_libs.CloneDesugaredLibrary('google', checkout_dir, 'HEAD')
- # Make sure bazel is extracted in third_party.
- utils.DownloadFromGoogleCloudStorage(utils.BAZEL_SHA_FILE)
- utils.DownloadFromGoogleCloudStorage(utils.JAVA8_SHA_FILE)
- utils.DownloadFromGoogleCloudStorage(utils.JAVA11_SHA_FILE)
- (library_jar, maven_zip) = archive_desugar_jdk_libs.BuildDesugaredLibrary(checkout_dir, 'jdk11_legacy' if options.desugared_library_configuration == 'jdk11' else 'jdk8')
- desugar_jdk_libs = os.path.join(desugar_jdk_libs_dir, os.path.basename(library_jar))
- shutil.copyfile(library_jar, desugar_jdk_libs)
- print('Desugared library for test in ' + desugar_jdk_libs)
+ desugar_jdk_libs = None
+ if options.desugared_library:
+ if options.desugared_library != 'HEAD':
+ print("Only value supported for --desugared-library is 'HEAD'")
+ exit(1)
+ desugar_jdk_libs_dir = 'build/desugar_jdk_libs'
+ shutil.rmtree(desugar_jdk_libs_dir, ignore_errors=True)
+ os.makedirs(desugar_jdk_libs_dir)
+ print('Building desugared library.')
+ with utils.TempDir() as checkout_dir:
+ archive_desugar_jdk_libs.CloneDesugaredLibrary(
+ 'google', checkout_dir, 'HEAD')
+ # Make sure bazel is extracted in third_party.
+ utils.DownloadFromGoogleCloudStorage(utils.BAZEL_SHA_FILE)
+ utils.DownloadFromGoogleCloudStorage(utils.JAVA8_SHA_FILE)
+ utils.DownloadFromGoogleCloudStorage(utils.JAVA11_SHA_FILE)
+ (library_jar,
+ maven_zip) = archive_desugar_jdk_libs.BuildDesugaredLibrary(
+ checkout_dir, 'jdk11_legacy' if
+ options.desugared_library_configuration == 'jdk11' else 'jdk8')
+ desugar_jdk_libs = os.path.join(desugar_jdk_libs_dir,
+ os.path.basename(library_jar))
+ shutil.copyfile(library_jar, desugar_jdk_libs)
+ print('Desugared library for test in ' + desugar_jdk_libs)
- gradle_args = []
+ gradle_args = []
- if options.stacktrace or utils.is_bot():
- gradle_args.append('--stacktrace')
+ if options.stacktrace or utils.is_bot():
+ gradle_args.append('--stacktrace')
- if utils.is_bot():
- # Bots don't like dangling processes.
- gradle_args.append('--no-daemon')
+ if utils.is_bot():
+ # Bots don't like dangling processes.
+ gradle_args.append('--no-daemon')
- # Set all necessary Gradle properties and options first.
- if options.shard_count:
- assert options.shard_number
- gradle_args.append('-Pshard_count=%s' % options.shard_count)
- gradle_args.append('-Pshard_number=%s' % options.shard_number)
- if options.verbose:
- gradle_args.append('-Pprint_test_stdout')
- if options.no_internal:
- gradle_args.append('-Pno_internal')
- if options.only_internal:
- gradle_args.append('-Ponly_internal')
- if options.all_tests:
- gradle_args.append('-Pall_tests')
- if options.slow_tests:
- gradle_args.append('-Pslow_tests=1')
- if options.tool:
- gradle_args.append('-Ptool=%s' % options.tool)
- if options.one_line_per_test:
- gradle_args.append('-Pone_line_per_test')
- if options.test_namespace:
- gradle_args.append('-Ptest_namespace=%s' % options.test_namespace)
- if options.disable_assertions:
- gradle_args.append('-Pdisable_assertions')
- if options.with_code_coverage:
- gradle_args.append('-Pwith_code_coverage')
- if options.print_full_stacktraces:
- gradle_args.append('-Pprint_full_stacktraces')
- if options.print_obfuscated_stacktraces:
- gradle_args.append('-Pprint_obfuscated_stacktraces')
- if options.kotlin_compiler_old:
- gradle_args.append('-Pkotlin_compiler_old')
- if options.kotlin_compiler_dev:
- gradle_args.append('-Pkotlin_compiler_dev')
- download_kotlin_dev.download_newest()
- if os.name == 'nt':
- gradle_args.append('-Pno_internal')
- if options.test_dir:
- gradle_args.append('-Ptest_dir=' + options.test_dir)
- if not os.path.exists(options.test_dir):
- os.makedirs(options.test_dir)
- if options.command_cache_dir:
- gradle_args.append('-Pcommand_cache_dir=' + options.command_cache_dir)
- if not os.path.exists(options.command_cache_dir):
- os.makedirs(options.command_cache_dir)
- if options.command_cache_stats:
- stats_dir = os.path.join(options.command_cache_dir, 'stats')
- gradle_args.append('-Pcommand_cache_stats_dir=' + stats_dir)
- if not os.path.exists(stats_dir):
- os.makedirs(stats_dir)
- # Clean out old stats files
- for (_, _, file_names) in os.walk(stats_dir):
- for f in file_names:
- os.remove(os.path.join(stats_dir, f))
- if options.java_home:
- gradle_args.append('-Dorg.gradle.java.home=' + options.java_home)
- if options.java_max_memory_size:
- gradle_args.append('-Ptest_xmx=' + options.java_max_memory_size)
- if options.generate_golden_files_to:
- gradle_args.append('-Pgenerate_golden_files_to=' + options.generate_golden_files_to)
- if not os.path.exists(options.generate_golden_files_to):
- os.makedirs(options.generate_golden_files_to)
- gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
- if options.use_golden_files_in:
- gradle_args.append('-Puse_golden_files_in=' + options.use_golden_files_in)
- if not os.path.exists(options.use_golden_files_in):
- os.makedirs(options.use_golden_files_in)
- gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
- if options.r8lib_no_deps and options.no_r8lib:
- print('Inconsistent arguments: both --no-r8lib and --r8lib-no-deps specified.')
- exit(1)
- if options.r8lib_no_deps:
- gradle_args.append('-Pr8lib_no_deps')
- elif not options.no_r8lib:
- gradle_args.append('-Pr8lib')
- if options.worktree:
- gradle_args.append('-g=' + os.path.join(utils.REPO_ROOT, ".gradle_user_home"))
- gradle_args.append('--no-daemon')
- if options.debug_agent:
- gradle_args.append('--no-daemon')
- if desugar_jdk_json_dir:
- gradle_args.append('-Pdesugar_jdk_json_dir=' + desugar_jdk_json_dir)
- if desugar_jdk_libs:
- gradle_args.append('-Pdesugar_jdk_libs=' + desugar_jdk_libs)
- if options.no_arttests:
- gradle_args.append('-Pno_arttests=true')
-
- if options.rerun:
- testing_state.set_up_test_state(gradle_args, options.rerun, options.testing_state_dir)
-
- # Enable completeness testing of ART profile rewriting.
- gradle_args.append('-Part_profile_rewriting_completeness_check=true')
-
- # Build an R8 with dependencies for bootstrapping tests before adding test sources.
- gradle_args.append(utils.GRADLE_TASK_R8)
- gradle_args.append(utils.GRADLE_TASK_CLEAN_TEST)
- gradle_args.append(utils.GRADLE_TASK_TEST)
- gradle_args.append('--stacktrace')
- gradle_args.append('-Pprint_full_stacktraces')
-
- if options.debug_agent:
- gradle_args.append('--debug-jvm')
- if options.fail_fast:
- gradle_args.append('--fail-fast')
- if options.failed:
- args = compute_failed_tests(args)
- if args is None:
- return 1
- if len(args) == 0:
- print("No failing tests")
- return 0
- # Test filtering. Must always follow the 'test' task.
- testFilterProperty = []
- for testFilter in args:
- gradle_args.append('--tests')
- gradle_args.append(testFilter)
- testFilterProperty.append(testFilter)
- assert not ("|" in testFilter), "| is used as separating character"
- if len(testFilterProperty) > 0:
- gradle_args.append("-Ptestfilter=" + "|".join(testFilterProperty))
- if options.with_code_coverage:
- # Create Jacoco report after tests.
- gradle_args.append('jacocoTestReport')
-
- if options.use_golden_files_in:
- sha1 = '%s' % utils.get_HEAD_sha1()
- with utils.ChangedWorkingDirectory(options.use_golden_files_in):
- utils.download_file_from_cloud_storage(
- 'gs://r8-test-results/golden-files/%s.tar.gz' % sha1,
- '%s.tar.gz' % sha1)
- utils.unpack_archive('%s.tar.gz' % sha1)
-
- print_stacks_timeout = options.print_hanging_stacks
- if (utils.is_bot() and not utils.IsWindows()) or print_stacks_timeout > -1:
- timestamp_file = os.path.join(utils.BUILD, 'last_test_time')
- if os.path.exists(timestamp_file):
- os.remove(timestamp_file)
- gradle_args.append('-Pupdate_test_timestamp=' + timestamp_file)
- print_stacks_timeout = (print_stacks_timeout
- if print_stacks_timeout != -1
- else TIMEOUT_HANDLER_PERIOD)
- if utils.is_python3():
- threading.Thread(
- target=timeout_handler,
- args=(timestamp_file, print_stacks_timeout),
- daemon=True).start()
- else:
- thread.start_new_thread(
- timeout_handler, (timestamp_file, print_stacks_timeout,))
- rotate_test_reports()
-
- # Now run tests on selected runtime(s).
- if options.runtimes:
- if options.dex_vm != 'default':
- print('Unexpected runtimes and dex_vm argument: ' + options.dex_vm)
- sys.exit(1)
- if options.runtimes == 'empty':
- # Set runtimes with no content will configure no runtimes.
- gradle_args.append('-Pruntimes=')
- elif options.runtimes == 'all':
- # An unset runtimes will configure all runtimes
- pass
- else:
- prefixes = [prefix.strip() for prefix in options.runtimes.split(':')]
- runtimes = []
- for prefix in prefixes:
- matches = [ rt for rt in VALID_RUNTIMES if rt.startswith(prefix) ]
- if len(matches) == 0:
- print("Invalid runtime prefix '%s'." % prefix)
- print("Must be just 'all', 'empty'," \
- " or a prefix of %s" % ', '.join(VALID_RUNTIMES))
- sys.exit(1)
- runtimes.extend(matches)
- gradle_args.append('-Pruntimes=%s' % ':'.join(runtimes))
-
- return_code = gradle.RunGradle(gradle_args, throw_on_failure=False)
- return archive_and_return(return_code, options)
-
- # Legacy testing populates the runtimes based on dex_vm.
- vms_to_test = [options.dex_vm] if options.dex_vm != "all" else ALL_ART_VMS
-
- if options.print_times:
- gradle_args.append('-Pprint_times=true')
- for art_vm in vms_to_test:
- vm_suffix = "_" + options.dex_vm_kind if art_vm != "default" else ""
- runtimes = ['dex-' + art_vm]
- # Append the "none" runtime and default JVM if running the "default" DEX VM.
- if art_vm == "default":
- runtimes.extend(['jdk11', 'none'])
- return_code = gradle.RunGradle(
- gradle_args + [
- '-Pdex_vm=%s' % art_vm + vm_suffix,
- '-Pruntimes=%s' % ':'.join(runtimes),
- ],
- throw_on_failure=False)
+ # Set all necessary Gradle properties and options first.
+ if options.shard_count:
+ assert options.shard_number
+ gradle_args.append('-Pshard_count=%s' % options.shard_count)
+ gradle_args.append('-Pshard_number=%s' % options.shard_number)
+ if options.verbose:
+ gradle_args.append('-Pprint_test_stdout')
+ if options.no_internal:
+ gradle_args.append('-Pno_internal')
+ if options.only_internal:
+ gradle_args.append('-Ponly_internal')
+ if options.all_tests:
+ gradle_args.append('-Pall_tests')
+ if options.slow_tests:
+ gradle_args.append('-Pslow_tests=1')
+ if options.tool:
+ gradle_args.append('-Ptool=%s' % options.tool)
+ if options.one_line_per_test:
+ gradle_args.append('-Pone_line_per_test')
+ if options.test_namespace:
+ gradle_args.append('-Ptest_namespace=%s' % options.test_namespace)
+ if options.disable_assertions:
+ gradle_args.append('-Pdisable_assertions')
+ if options.with_code_coverage:
+ gradle_args.append('-Pwith_code_coverage')
+ if options.print_full_stacktraces:
+ gradle_args.append('-Pprint_full_stacktraces')
+ if options.print_obfuscated_stacktraces:
+ gradle_args.append('-Pprint_obfuscated_stacktraces')
+ if options.kotlin_compiler_old:
+ gradle_args.append('-Pkotlin_compiler_old')
+ if options.kotlin_compiler_dev:
+ gradle_args.append('-Pkotlin_compiler_dev')
+ download_kotlin_dev.download_newest()
+ if os.name == 'nt':
+ gradle_args.append('-Pno_internal')
+ if options.test_dir:
+ gradle_args.append('-Ptest_dir=' + options.test_dir)
+ if not os.path.exists(options.test_dir):
+ os.makedirs(options.test_dir)
+ if options.command_cache_dir:
+ gradle_args.append('-Pcommand_cache_dir=' + options.command_cache_dir)
+ if not os.path.exists(options.command_cache_dir):
+ os.makedirs(options.command_cache_dir)
+ if options.command_cache_stats:
+ stats_dir = os.path.join(options.command_cache_dir, 'stats')
+ gradle_args.append('-Pcommand_cache_stats_dir=' + stats_dir)
+ if not os.path.exists(stats_dir):
+ os.makedirs(stats_dir)
+ # Clean out old stats files
+ for (_, _, file_names) in os.walk(stats_dir):
+ for f in file_names:
+ os.remove(os.path.join(stats_dir, f))
+ if options.java_home:
+ gradle_args.append('-Dorg.gradle.java.home=' + options.java_home)
+ if options.java_max_memory_size:
+ gradle_args.append('-Ptest_xmx=' + options.java_max_memory_size)
if options.generate_golden_files_to:
- sha1 = '%s' % utils.get_HEAD_sha1()
- with utils.ChangedWorkingDirectory(options.generate_golden_files_to):
- archive = utils.create_archive(sha1)
- utils.upload_file_to_cloud_storage(archive,
- 'gs://r8-test-results/golden-files/' + archive)
+ gradle_args.append('-Pgenerate_golden_files_to=' +
+ options.generate_golden_files_to)
+ if not os.path.exists(options.generate_golden_files_to):
+ os.makedirs(options.generate_golden_files_to)
+ gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
+ if options.use_golden_files_in:
+ gradle_args.append('-Puse_golden_files_in=' +
+ options.use_golden_files_in)
+ if not os.path.exists(options.use_golden_files_in):
+ os.makedirs(options.use_golden_files_in)
+ gradle_args.append('-PHEAD_sha1=' + utils.get_HEAD_sha1())
+ if options.r8lib_no_deps and options.no_r8lib:
+ print(
+ 'Inconsistent arguments: both --no-r8lib and --r8lib-no-deps specified.'
+ )
+ exit(1)
+ if options.r8lib_no_deps:
+ gradle_args.append('-Pr8lib_no_deps')
+ elif not options.no_r8lib:
+ gradle_args.append('-Pr8lib')
+ if options.worktree:
+ gradle_args.append('-g=' +
+ os.path.join(utils.REPO_ROOT, ".gradle_user_home"))
+ gradle_args.append('--no-daemon')
+ if options.debug_agent:
+ gradle_args.append('--no-daemon')
+ if desugar_jdk_json_dir:
+ gradle_args.append('-Pdesugar_jdk_json_dir=' + desugar_jdk_json_dir)
+ if desugar_jdk_libs:
+ gradle_args.append('-Pdesugar_jdk_libs=' + desugar_jdk_libs)
+ if options.no_arttests:
+ gradle_args.append('-Pno_arttests=true')
- return archive_and_return(return_code, options)
+ if options.rerun:
+ testing_state.set_up_test_state(gradle_args, options.rerun,
+ options.testing_state_dir)
- return 0
+ # Enable completeness testing of ART profile rewriting.
+ gradle_args.append('-Part_profile_rewriting_completeness_check=true')
+
+ # Build an R8 with dependencies for bootstrapping tests before adding test sources.
+ gradle_args.append(utils.GRADLE_TASK_R8)
+ gradle_args.append(utils.GRADLE_TASK_CLEAN_TEST)
+ gradle_args.append(utils.GRADLE_TASK_TEST)
+ gradle_args.append('--stacktrace')
+ gradle_args.append('-Pprint_full_stacktraces')
+
+ if options.debug_agent:
+ gradle_args.append('--debug-jvm')
+ if options.fail_fast:
+ gradle_args.append('--fail-fast')
+ if options.failed:
+ args = compute_failed_tests(args)
+ if args is None:
+ return 1
+ if len(args) == 0:
+ print("No failing tests")
+ return 0
+ # Test filtering. Must always follow the 'test' task.
+ testFilterProperty = []
+ for testFilter in args:
+ gradle_args.append('--tests')
+ gradle_args.append(testFilter)
+ testFilterProperty.append(testFilter)
+ assert not ("|" in testFilter), "| is used as separating character"
+ if len(testFilterProperty) > 0:
+ gradle_args.append("-Ptestfilter=" + "|".join(testFilterProperty))
+ if options.with_code_coverage:
+ # Create Jacoco report after tests.
+ gradle_args.append('jacocoTestReport')
+
+ if options.use_golden_files_in:
+ sha1 = '%s' % utils.get_HEAD_sha1()
+ with utils.ChangedWorkingDirectory(options.use_golden_files_in):
+ utils.download_file_from_cloud_storage(
+ 'gs://r8-test-results/golden-files/%s.tar.gz' % sha1,
+ '%s.tar.gz' % sha1)
+ utils.unpack_archive('%s.tar.gz' % sha1)
+
+ print_stacks_timeout = options.print_hanging_stacks
+ if (utils.is_bot() and not utils.IsWindows()) or print_stacks_timeout > -1:
+ timestamp_file = os.path.join(utils.BUILD, 'last_test_time')
+ if os.path.exists(timestamp_file):
+ os.remove(timestamp_file)
+ gradle_args.append('-Pupdate_test_timestamp=' + timestamp_file)
+ print_stacks_timeout = (print_stacks_timeout if print_stacks_timeout
+ != -1 else TIMEOUT_HANDLER_PERIOD)
+ if utils.is_python3():
+ threading.Thread(target=timeout_handler,
+ args=(timestamp_file, print_stacks_timeout),
+ daemon=True).start()
+ else:
+ thread.start_new_thread(timeout_handler, (
+ timestamp_file,
+ print_stacks_timeout,
+ ))
+ rotate_test_reports()
+
+ # Now run tests on selected runtime(s).
+ if options.runtimes:
+ if options.dex_vm != 'default':
+ print('Unexpected runtimes and dex_vm argument: ' + options.dex_vm)
+ sys.exit(1)
+ if options.runtimes == 'empty':
+            # Setting runtimes to the empty string configures no runtimes.
+ gradle_args.append('-Pruntimes=')
+ elif options.runtimes == 'all':
+            # Leaving runtimes unset configures all runtimes.
+ pass
+ else:
+ prefixes = [
+ prefix.strip() for prefix in options.runtimes.split(':')
+ ]
+ runtimes = []
+ for prefix in prefixes:
+ matches = [rt for rt in VALID_RUNTIMES if rt.startswith(prefix)]
+ if len(matches) == 0:
+ print("Invalid runtime prefix '%s'." % prefix)
+ print("Must be just 'all', 'empty'," \
+ " or a prefix of %s" % ', '.join(VALID_RUNTIMES))
+ sys.exit(1)
+ runtimes.extend(matches)
+ gradle_args.append('-Pruntimes=%s' % ':'.join(runtimes))
+
+ return_code = gradle.RunGradle(gradle_args, throw_on_failure=False)
+ return archive_and_return(return_code, options)
+
+ # Legacy testing populates the runtimes based on dex_vm.
+ vms_to_test = [options.dex_vm] if options.dex_vm != "all" else ALL_ART_VMS
+
+ if options.print_times:
+ gradle_args.append('-Pprint_times=true')
+ for art_vm in vms_to_test:
+ vm_suffix = "_" + options.dex_vm_kind if art_vm != "default" else ""
+ runtimes = ['dex-' + art_vm]
+ # Append the "none" runtime and default JVM if running the "default" DEX VM.
+ if art_vm == "default":
+ runtimes.extend(['jdk11', 'none'])
+ return_code = gradle.RunGradle(gradle_args + [
+ '-Pdex_vm=%s' % art_vm + vm_suffix,
+ '-Pruntimes=%s' % ':'.join(runtimes),
+ ],
+ throw_on_failure=False)
+ if options.generate_golden_files_to:
+ sha1 = '%s' % utils.get_HEAD_sha1()
+ with utils.ChangedWorkingDirectory(
+ options.generate_golden_files_to):
+ archive = utils.create_archive(sha1)
+ utils.upload_file_to_cloud_storage(
+ archive, 'gs://r8-test-results/golden-files/' + archive)
+
+ return archive_and_return(return_code, options)
+
+ return 0
+
def archive_and_return(return_code, options):
- if return_code != 0:
- if options.archive_failures:
- archive_failures(options)
- if options.command_cache_stats:
- stats_dir = os.path.join(options.command_cache_dir, 'stats')
- cache_hit = 0
- cache_miss = 0
- cache_put = 0
- for (_, _, file_names) in os.walk(stats_dir):
- for f in file_names:
- if f.endswith('CACHEHIT'):
- cache_hit += os.stat(os.path.join(stats_dir, f)).st_size
- if f.endswith('CACHEMISS'):
- cache_miss += os.stat(os.path.join(stats_dir, f)).st_size
- if f.endswith('CACHEPUT'):
- cache_put += os.stat(os.path.join(stats_dir, f)).st_size
- print('Command cache stats')
- print(' Cache hits: ' + str(cache_hit))
- print(' Cache miss: ' + str(cache_miss))
- print(' Cache puts: ' + str(cache_put))
- return return_code
+ if return_code != 0:
+ if options.archive_failures:
+ archive_failures(options)
+ if options.command_cache_stats:
+ stats_dir = os.path.join(options.command_cache_dir, 'stats')
+ cache_hit = 0
+ cache_miss = 0
+ cache_put = 0
+ for (_, _, file_names) in os.walk(stats_dir):
+ for f in file_names:
+ if f.endswith('CACHEHIT'):
+ cache_hit += os.stat(os.path.join(stats_dir, f)).st_size
+ if f.endswith('CACHEMISS'):
+ cache_miss += os.stat(os.path.join(stats_dir, f)).st_size
+ if f.endswith('CACHEPUT'):
+ cache_put += os.stat(os.path.join(stats_dir, f)).st_size
+ print('Command cache stats')
+ print(' Cache hits: ' + str(cache_hit))
+ print(' Cache miss: ' + str(cache_miss))
+ print(' Cache puts: ' + str(cache_put))
+ return return_code
+
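
The cache counters above are plain byte-size sums over marker files whose
names end in CACHEHIT, CACHEMISS, or CACHEPUT; the totals are meaningful if
each cache event grows its marker file by one byte, which is what the summing
assumes. A standalone sketch of the same aggregation over a hypothetical
stats_dir:

import os

def summarize_cache_stats(stats_dir):
    # Each marker file encodes its event count as its size in bytes.
    counters = {'CACHEHIT': 0, 'CACHEMISS': 0, 'CACHEPUT': 0}
    for name in os.listdir(stats_dir):
        for suffix in counters:
            if name.endswith(suffix):
                counters[suffix] += os.stat(os.path.join(stats_dir,
                                                         name)).st_size
    return counters
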
def print_jstacks():
- processes = subprocess.check_output(['ps', 'aux']).decode('utf-8')
- for l in processes.splitlines():
- if 'art' in l or 'dalvik' in l:
- print('Running art of dalvik process: \n%s' % l)
- if 'java' in l and 'openjdk' in l:
- print('Running jstack on process: \n%s' % l)
- # Example line:
- # ricow 184313 2.6 0.0 36839068 31808 ? Sl 09:53 0:00 /us..
- columns = l.split()
- pid = columns[1]
- return_value = subprocess.call(['jstack', pid])
- if return_value:
- print('Could not jstack %s' % l)
+ processes = subprocess.check_output(['ps', 'aux']).decode('utf-8')
+ for l in processes.splitlines():
+ if 'art' in l or 'dalvik' in l:
+            print('Running art or dalvik process: \n%s' % l)
+ if 'java' in l and 'openjdk' in l:
+ print('Running jstack on process: \n%s' % l)
+ # Example line:
+ # ricow 184313 2.6 0.0 36839068 31808 ? Sl 09:53 0:00 /us..
+ columns = l.split()
+ pid = columns[1]
+ return_value = subprocess.call(['jstack', pid])
+ if return_value:
+ print('Could not jstack %s' % l)
+
def get_time_from_file(timestamp_file):
- if os.path.exists(timestamp_file):
- timestamp = os.stat(timestamp_file).st_mtime
- print('TIMEOUT HANDLER timestamp: %s' % (timestamp))
- sys.stdout.flush()
- return timestamp
- else:
- print('TIMEOUT HANDLER no timestamp file yet')
- sys.stdout.flush()
- return None
+ if os.path.exists(timestamp_file):
+ timestamp = os.stat(timestamp_file).st_mtime
+ print('TIMEOUT HANDLER timestamp: %s' % (timestamp))
+ sys.stdout.flush()
+ return timestamp
+ else:
+ print('TIMEOUT HANDLER no timestamp file yet')
+ sys.stdout.flush()
+ return None
+
def timeout_handler(timestamp_file, timeout_handler_period):
- last_timestamp = None
- while True:
- time.sleep(timeout_handler_period)
- new_timestamp = get_time_from_file(timestamp_file)
- if last_timestamp and new_timestamp == last_timestamp:
- print_jstacks()
- last_timestamp = new_timestamp
+ last_timestamp = None
+ while True:
+ time.sleep(timeout_handler_period)
+ new_timestamp = get_time_from_file(timestamp_file)
+ if last_timestamp and new_timestamp == last_timestamp:
+ print_jstacks()
+ last_timestamp = new_timestamp
+
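
timeout_handler is a heartbeat watchdog: the Gradle run touches the timestamp
file as tests progress (wired up via -Pupdate_test_timestamp in Main), and if
the file's mtime has not advanced across a whole sleep period the JVM stacks
are dumped. A self-contained sketch of the pattern, with a hypothetical
on_stall callback standing in for print_jstacks:

import os
import threading
import time

def start_watchdog(heartbeat_file, period_seconds, on_stall):
    # Fire on_stall whenever the heartbeat file stops being touched.
    def loop():
        last = None
        while True:
            time.sleep(period_seconds)
            now = (os.stat(heartbeat_file).st_mtime
                   if os.path.exists(heartbeat_file) else None)
            if last is not None and now == last:
                on_stall()
            last = now

    threading.Thread(target=loop, daemon=True).start()
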
def report_dir_path(index):
- if index == 0:
- return REPORTS_PATH
- return '%s%d' % (REPORTS_PATH, index)
+ if index == 0:
+ return REPORTS_PATH
+ return '%s%d' % (REPORTS_PATH, index)
+
def report_index_path(index):
- return os.path.join(report_dir_path(index), *REPORT_INDEX)
+ return os.path.join(report_dir_path(index), *REPORT_INDEX)
+
# Rotate test results so previous results are still accessible.
def rotate_test_reports():
- if not os.path.exists(report_dir_path(0)):
- return
- i = 1
- while i < NUMBER_OF_TEST_REPORTS and os.path.exists(report_dir_path(i)):
- i += 1
- if i == NUMBER_OF_TEST_REPORTS and os.path.exists(report_dir_path(i)):
- shutil.rmtree(report_dir_path(i))
- while i > 0:
- shutil.move(report_dir_path(i - 1), report_dir_path(i))
- i -= 1
+ if not os.path.exists(report_dir_path(0)):
+ return
+ i = 1
+ while i < NUMBER_OF_TEST_REPORTS and os.path.exists(report_dir_path(i)):
+ i += 1
+ if i == NUMBER_OF_TEST_REPORTS and os.path.exists(report_dir_path(i)):
+ shutil.rmtree(report_dir_path(i))
+ while i > 0:
+ shutil.move(report_dir_path(i - 1), report_dir_path(i))
+ i -= 1
+
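
rotate_test_reports keeps a bounded history: the newest report stays at
REPORTS_PATH, older ones shift to suffixed siblings, and the oldest is
deleted once NUMBER_OF_TEST_REPORTS directories exist. A throwaway demo of
the same scheme over a temp directory (names and the keep count of 3 are
illustrative):

import os
import shutil
import tempfile

root = tempfile.mkdtemp()
KEEP = 3  # Stands in for NUMBER_OF_TEST_REPORTS.

def path(i):
    return os.path.join(root, 'report' if i == 0 else 'report%d' % i)

for run in range(5):
    # Rotate existing reports exactly as rotate_test_reports does.
    if os.path.exists(path(0)):
        i = 1
        while i < KEEP and os.path.exists(path(i)):
            i += 1
        if i == KEEP and os.path.exists(path(i)):
            shutil.rmtree(path(i))
        while i > 0:
            shutil.move(path(i - 1), path(i))
            i -= 1
    os.mkdir(path(0))  # The test run would produce a fresh report here.

print(sorted(os.listdir(root)))  # ['report', 'report1', 'report2', 'report3']
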
def compute_failed_tests(args):
- if len(args) > 1:
- print("Running with --failed can take an optional path to a report index (or report number).")
- return None
- report = report_index_path(0)
- # If the default report does not exist, fall back to the previous report as it may be a failed
- # gradle run which has already moved the report to report1, but did not produce a new report.
- if not os.path.exists(report):
- report1 = report_index_path(1)
- if os.path.exists(report1):
- report = report1
- if len(args) == 1:
- try:
- # try to parse the arg as a report index.
- index = int(args[0])
- report = report_index_path(index)
- except ValueError:
- # if integer parsing failed assume it is a report file path.
- report = args[0]
- if not os.path.exists(report):
- print("Can't re-run failing, no report at:", report)
- return None
- print("Reading failed tests in", report)
- failing = set()
- inFailedSection = False
- for line in open(report):
- l = line.strip()
- if l == "<h2>Failed tests</h2>":
- inFailedSection = True
- elif l.startswith("<h2>"):
- inFailedSection = False
- prefix = '<a href="classes/'
- if inFailedSection and l.startswith(prefix):
- href = l[len(prefix):l.index('">')]
- # Ignore enties ending with .html which are test classes, not test methods.
- if not href.endswith('.html'):
- # Remove the .html and anchor separateor, also, a classMethod test is the static
- # setup failing so rerun the full class of tests.
- test = href.replace('.html','').replace('#', '.').replace('.classMethod', '')
- failing.add(test)
- return list(failing)
+ if len(args) > 1:
+ print(
+ "Running with --failed can take an optional path to a report index (or report number)."
+ )
+ return None
+ report = report_index_path(0)
+ # If the default report does not exist, fall back to the previous report as it may be a failed
+ # gradle run which has already moved the report to report1, but did not produce a new report.
+ if not os.path.exists(report):
+ report1 = report_index_path(1)
+ if os.path.exists(report1):
+ report = report1
+ if len(args) == 1:
+ try:
+            # Try to parse the arg as a report index.
+            index = int(args[0])
+            report = report_index_path(index)
+        except ValueError:
+            # If integer parsing failed, assume it is a report file path.
+ report = args[0]
+ if not os.path.exists(report):
+ print("Can't re-run failing, no report at:", report)
+ return None
+ print("Reading failed tests in", report)
+ failing = set()
+ inFailedSection = False
+ for line in open(report):
+ l = line.strip()
+ if l == "<h2>Failed tests</h2>":
+ inFailedSection = True
+ elif l.startswith("<h2>"):
+ inFailedSection = False
+ prefix = '<a href="classes/'
+ if inFailedSection and l.startswith(prefix):
+ href = l[len(prefix):l.index('">')]
+            # Ignore entries ending with .html which are test classes, not test methods.
+            if not href.endswith('.html'):
+                # Remove the .html and anchor separator; a classMethod test means the
+                # static setup failed, so rerun the full class of tests.
+ test = href.replace('.html', '').replace('#', '.').replace(
+ '.classMethod', '')
+ failing.add(test)
+ return list(failing)
+
+
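
The href parsing in compute_failed_tests turns anchors from the report's
'Failed tests' section into Gradle test filters. For a representative line
(contents illustrative, not taken from a real report) the extraction works
like this:

line = '  <a href="classes/com.example.FooTest.html#testBar">testBar</a>'
prefix = '<a href="classes/'
l = line.strip()
href = l[len(prefix):l.index('">')]  # com.example.FooTest.html#testBar
# Method entries do not end in .html, so this one is kept and normalized:
test = href.replace('.html', '').replace('#', '.').replace('.classMethod', '')
print(test)  # com.example.FooTest.testBar
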
if __name__ == '__main__':
- return_code = Main()
- if return_code != 0:
- notify.notify("Tests failed.")
- else:
- notify.notify("Tests passed.")
- sys.exit(return_code)
+ return_code = Main()
+ if return_code != 0:
+ notify.notify("Tests failed.")
+ else:
+ notify.notify("Tests passed.")
+ sys.exit(return_code)
diff --git a/tools/test_r8cfsegments.py b/tools/test_r8cfsegments.py
index 2081ee2..7b21fa5 100755
--- a/tools/test_r8cfsegments.py
+++ b/tools/test_r8cfsegments.py
@@ -27,57 +27,60 @@
import sys
import utils
+
def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'Run R8 or PG on'
- ' third_party/r8/r8.jar.'
- ' Report Golem-compatible CodeSize and RunTimeRaw values.')
- parser.add_argument('--tool',
- choices = ['pg', 'r8'],
- required = True,
- help = 'Compiler tool to use.')
- parser.add_argument('--name',
- required = True,
- help = 'Results will be printed using the specified benchmark name (e.g.'
- ' <NAME>-<segment>(CodeSize): <bytes>), the full size is reported'
- ' with <NAME>-Total(CodeSize)')
- parser.add_argument('--print-memoryuse',
- help = 'Prints the line \'<NAME>-Total(MemoryUse):'
- ' <mem>\' at the end where <mem> is the peak'
- ' peak resident set size (VmHWM) in bytes.',
- default = False,
- action = 'store_true')
- parser.add_argument('--output',
- help = 'Output directory to keep the generated files')
- return parser.parse_args()
+ parser = argparse.ArgumentParser(
+ description='Run R8 or PG on'
+ ' third_party/r8/r8.jar.'
+ ' Report Golem-compatible CodeSize and RunTimeRaw values.')
+ parser.add_argument('--tool',
+ choices=['pg', 'r8'],
+ required=True,
+ help='Compiler tool to use.')
+ parser.add_argument(
+ '--name',
+ required=True,
+ help='Results will be printed using the specified benchmark name (e.g.'
+ ' <NAME>-<segment>(CodeSize): <bytes>), the full size is reported'
+ ' with <NAME>-Total(CodeSize)')
+ parser.add_argument('--print-memoryuse',
+ help='Prints the line \'<NAME>-Total(MemoryUse):'
+                        ' <mem>\' at the end where <mem> is the'
+                        ' peak resident set size (VmHWM) in bytes.',
+ default=False,
+ action='store_true')
+ parser.add_argument('--output',
+ help='Output directory to keep the generated files')
+ return parser.parse_args()
def Main():
- args = parse_arguments()
- utils.check_java_version()
- output_dir = args.output
- with utils.TempDir() as temp_dir:
- if not output_dir:
- output_dir = temp_dir
- track_memory_file = None
- if args.print_memoryuse:
- track_memory_file = os.path.join(output_dir, utils.MEMORY_USE_TMP_FILE)
- if args.tool == 'pg':
- utils.print_cfsegments(args.name, [utils.PINNED_PGR8_JAR])
- else:
- out_file = os.path.join(output_dir, 'out.jar')
- return_code = minify_tool.minify_tool(
- input_jar=utils.PINNED_R8_JAR,
- output_jar=out_file,
- debug=False,
- build=False,
- track_memory_file=track_memory_file,
- benchmark_name=args.name + "-Total")
- if return_code != 0:
- sys.exit(return_code)
+ args = parse_arguments()
+ utils.check_java_version()
+ output_dir = args.output
+ with utils.TempDir() as temp_dir:
+ if not output_dir:
+ output_dir = temp_dir
+ track_memory_file = None
+ if args.print_memoryuse:
+ track_memory_file = os.path.join(output_dir,
+ utils.MEMORY_USE_TMP_FILE)
+ if args.tool == 'pg':
+ utils.print_cfsegments(args.name, [utils.PINNED_PGR8_JAR])
+ else:
+ out_file = os.path.join(output_dir, 'out.jar')
+ return_code = minify_tool.minify_tool(
+ input_jar=utils.PINNED_R8_JAR,
+ output_jar=out_file,
+ debug=False,
+ build=False,
+ track_memory_file=track_memory_file,
+ benchmark_name=args.name + "-Total")
+ if return_code != 0:
+ sys.exit(return_code)
- utils.print_cfsegments(args.name, [out_file])
+ utils.print_cfsegments(args.name, [out_file])
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/testing_state.py b/tools/testing_state.py
index 6cda6ca..b3a4ed4 100644
--- a/tools/testing_state.py
+++ b/tools/testing_state.py
@@ -11,58 +11,70 @@
CHOICES = ["all", "failing", "past-failing", "outstanding"]
DEFAULT_REPORTS_ROOT = os.path.join(utils.BUILD, "testing-state")
+
def set_up_test_state(gradle_args, testing_state_mode, testing_state_path):
- if not testing_state_mode:
- return
- if not testing_state_path:
- testing_state_path = os.path.join(DEFAULT_REPORTS_ROOT, utils.get_HEAD_branch())
- testing_state_path = os.path.abspath(testing_state_path)
- gradle_args.append('-Ptesting-state-mode=%s' % testing_state_mode)
- gradle_args.append('-Ptesting-state-path=%s' % testing_state_path)
- prepare_testing_index(testing_state_mode, testing_state_path)
+ if not testing_state_mode:
+ return
+ if not testing_state_path:
+ testing_state_path = os.path.join(DEFAULT_REPORTS_ROOT,
+ utils.get_HEAD_branch())
+ testing_state_path = os.path.abspath(testing_state_path)
+ gradle_args.append('-Ptesting-state-mode=%s' % testing_state_mode)
+ gradle_args.append('-Ptesting-state-path=%s' % testing_state_path)
+ prepare_testing_index(testing_state_mode, testing_state_path)
+
def fresh_testing_index(testing_state_dir):
- number = 0
- while True:
- freshIndex = os.path.join(testing_state_dir, "index.%d.html" % number)
- number += 1
- if not os.path.exists(freshIndex):
- return freshIndex
+ number = 0
+ while True:
+ freshIndex = os.path.join(testing_state_dir, "index.%d.html" % number)
+ number += 1
+ if not os.path.exists(freshIndex):
+ return freshIndex
+
def prepare_testing_index(testing_state_mode, testing_state_dir):
- if not os.path.exists(testing_state_dir):
- os.makedirs(testing_state_dir)
- index_path = os.path.join(testing_state_dir, "index.html")
- parent_report = None
- resuming = os.path.exists(index_path)
- mode = testing_state_mode if resuming else f"starting (flag: {testing_state_mode})"
- if (resuming):
- parent_report = fresh_testing_index(testing_state_dir)
- os.rename(index_path, parent_report)
- index = open(index_path, "a")
- title = f"Testing: {os.path.basename(testing_state_dir)}"
- # Print a console link to the test report for easy access.
- print("=" * 70)
- print("Test report written to:")
- print(f" file://{index_path}")
- print("=" * 70)
- # Print the new index content.
- index.write(f"<html><head><title>{title}</title>")
- index.write("<style> * { font-family: monospace; }</style>")
- index.write("<meta http-equiv='refresh' content='10' />")
- index.write(f"</head><body><h1>{title}</h1>")
- index.write(f"<h2>Mode: {mode}</h2>")
- # write index links first to avoid jumping when browsing.
- if parent_report:
- index.write(f"<p><a href=\"file://{parent_report}\">Previous result index</a></p>")
- index.write(f"<p><a href=\"file://{index_path}\">Most recent result index</a></p>")
- index.write(f"<p><a href=\"file://{testing_state_dir}\">Test directories</a></p>")
- # git branch/hash and diff for future reference
- index.write(f"<p>Run on: {datetime.datetime.now()}</p>")
- index.write(f"<p>State path: {testing_state_dir}</p>")
- index.write(f"<p>Git branch: {utils.get_HEAD_branch()}")
- index.write(f"</br>Git SHA: {utils.get_HEAD_sha1()}")
- index.write(f'</br>Git diff summary:\n')
- index.write(f'<pre style="background-color: lightgray">{utils.get_HEAD_diff_stat()}</pre></p>')
- # header for the failing tests
- index.write("<h2>Failing tests (refreshing automatically every 10 seconds)</h2><ul>")
+ if not os.path.exists(testing_state_dir):
+ os.makedirs(testing_state_dir)
+ index_path = os.path.join(testing_state_dir, "index.html")
+ parent_report = None
+ resuming = os.path.exists(index_path)
+ mode = testing_state_mode if resuming else f"starting (flag: {testing_state_mode})"
+    if resuming:
+ parent_report = fresh_testing_index(testing_state_dir)
+ os.rename(index_path, parent_report)
+ index = open(index_path, "a")
+ title = f"Testing: {os.path.basename(testing_state_dir)}"
+ # Print a console link to the test report for easy access.
+ print("=" * 70)
+ print("Test report written to:")
+ print(f" file://{index_path}")
+ print("=" * 70)
+ # Print the new index content.
+ index.write(f"<html><head><title>{title}</title>")
+ index.write("<style> * { font-family: monospace; }</style>")
+ index.write("<meta http-equiv='refresh' content='10' />")
+ index.write(f"</head><body><h1>{title}</h1>")
+ index.write(f"<h2>Mode: {mode}</h2>")
+    # Write index links first to avoid jumping when browsing.
+ if parent_report:
+ index.write(
+ f"<p><a href=\"file://{parent_report}\">Previous result index</a></p>"
+ )
+ index.write(
+ f"<p><a href=\"file://{index_path}\">Most recent result index</a></p>")
+ index.write(
+ f"<p><a href=\"file://{testing_state_dir}\">Test directories</a></p>")
+    # Git branch/hash and diff for future reference.
+ index.write(f"<p>Run on: {datetime.datetime.now()}</p>")
+ index.write(f"<p>State path: {testing_state_dir}</p>")
+ index.write(f"<p>Git branch: {utils.get_HEAD_branch()}")
+ index.write(f"</br>Git SHA: {utils.get_HEAD_sha1()}")
+ index.write(f'</br>Git diff summary:\n')
+ index.write(
+ f'<pre style="background-color: lightgray">{utils.get_HEAD_diff_stat()}</pre></p>'
+ )
+    # Header for the failing tests.
+ index.write(
+ "<h2>Failing tests (refreshing automatically every 10 seconds)</h2><ul>"
+ )
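
As used from tools/test.py, set_up_test_state just forwards two Gradle
properties and prepares the HTML index; a hypothetical invocation (mode and
path illustrative):

import testing_state

gradle_args = []
testing_state.set_up_test_state(gradle_args, 'failing',
                                '/tmp/r8-testing-state')
# gradle_args now holds ['-Ptesting-state-mode=failing',
#                        '-Ptesting-state-path=/tmp/r8-testing-state'], and
# /tmp/r8-testing-state/index.html has been (re)written, with any previous
# index preserved as index.<N>.html for the first free N.
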
diff --git a/tools/thread_utils.py b/tools/thread_utils.py
index 5810332..e92d2c8 100755
--- a/tools/thread_utils.py
+++ b/tools/thread_utils.py
@@ -8,6 +8,7 @@
from threading import Thread
import traceback
+
# A thread that is given a list of jobs. The thread will repeatedly take a job
# from the list of jobs (which is shared with other threads) and execute it
# until there are no more jobs.
@@ -21,99 +22,101 @@
# setting an appropriate timeout).
class WorkerThread(Thread):
- # The initialization of a WorkerThread is never run concurrently with the
- # initialization of other WorkerThreads.
- def __init__(self, jobs, jobs_lock, stop_on_first_failure, worker_id):
- Thread.__init__(self)
- self.completed = False
- self.jobs = jobs
- self.jobs_lock = jobs_lock
- self.number_of_jobs = len(jobs)
- self.stop_on_first_failure = stop_on_first_failure
- self.success = True
- self.worker_id = worker_id
+ # The initialization of a WorkerThread is never run concurrently with the
+ # initialization of other WorkerThreads.
+ def __init__(self, jobs, jobs_lock, stop_on_first_failure, worker_id):
+ Thread.__init__(self)
+ self.completed = False
+ self.jobs = jobs
+ self.jobs_lock = jobs_lock
+ self.number_of_jobs = len(jobs)
+ self.stop_on_first_failure = stop_on_first_failure
+ self.success = True
+ self.worker_id = worker_id
- def run(self):
- print_thread("Starting worker", self.worker_id)
- while True:
- (job, job_id) = self.take_job(self.jobs, self.jobs_lock)
- if job is None:
- break
- try:
- print_thread(
- "Starting job %s/%s" % (job_id, self.number_of_jobs),
- self.worker_id)
- exit_code = job(self.worker_id)
- print_thread(
- "Job %s finished with exit code %s"
- % (job_id, exit_code),
- self.worker_id)
- if exit_code:
- self.success = False
- if self.stop_on_first_failure:
- self.clear_jobs(self.jobs, self.jobs_lock)
- break
- except:
- print_thread("Job %s crashed" % job_id, self.worker_id)
- print_thread(traceback.format_exc(), self.worker_id)
- self.success = False
- if self.stop_on_first_failure:
- self.clear_jobs(self.jobs, self.jobs_lock)
- break
- print_thread("Exiting", self.worker_id)
- self.completed = True
+ def run(self):
+ print_thread("Starting worker", self.worker_id)
+ while True:
+ (job, job_id) = self.take_job(self.jobs, self.jobs_lock)
+ if job is None:
+ break
+ try:
+ print_thread(
+ "Starting job %s/%s" % (job_id, self.number_of_jobs),
+ self.worker_id)
+ exit_code = job(self.worker_id)
+ print_thread(
+ "Job %s finished with exit code %s" % (job_id, exit_code),
+ self.worker_id)
+ if exit_code:
+ self.success = False
+ if self.stop_on_first_failure:
+ self.clear_jobs(self.jobs, self.jobs_lock)
+ break
+ except:
+ print_thread("Job %s crashed" % job_id, self.worker_id)
+ print_thread(traceback.format_exc(), self.worker_id)
+ self.success = False
+ if self.stop_on_first_failure:
+ self.clear_jobs(self.jobs, self.jobs_lock)
+ break
+ print_thread("Exiting", self.worker_id)
+ self.completed = True
- def take_job(self, jobs, jobs_lock):
- jobs_lock.acquire()
- job_id = self.number_of_jobs - len(jobs) + 1
- job = jobs.pop(0) if jobs else None
- jobs_lock.release()
- return (job, job_id)
+ def take_job(self, jobs, jobs_lock):
+ jobs_lock.acquire()
+ job_id = self.number_of_jobs - len(jobs) + 1
+ job = jobs.pop(0) if jobs else None
+ jobs_lock.release()
+ return (job, job_id)
- def clear_jobs(self, jobs, jobs_lock):
- jobs_lock.acquire()
- jobs.clear()
- jobs_lock.release()
+ def clear_jobs(self, jobs, jobs_lock):
+ jobs_lock.acquire()
+ jobs.clear()
+ jobs_lock.release()
+
def run_in_parallel(jobs, number_of_workers, stop_on_first_failure):
- assert number_of_workers > 0
- if number_of_workers > len(jobs):
- number_of_workers = len(jobs)
- if number_of_workers == 1:
- return run_in_sequence(jobs, stop_on_first_failure)
- jobs_lock = threading.Lock()
- threads = []
- for worker_id in range(1, number_of_workers + 1):
- threads.append(
- WorkerThread(jobs, jobs_lock, stop_on_first_failure, worker_id))
- for thread in threads:
- thread.start()
- for thread in threads:
- thread.join()
- for thread in threads:
- if not thread.completed or not thread.success:
- return 1
- return 0
+ assert number_of_workers > 0
+ if number_of_workers > len(jobs):
+ number_of_workers = len(jobs)
+ if number_of_workers == 1:
+ return run_in_sequence(jobs, stop_on_first_failure)
+ jobs_lock = threading.Lock()
+ threads = []
+ for worker_id in range(1, number_of_workers + 1):
+ threads.append(
+ WorkerThread(jobs, jobs_lock, stop_on_first_failure, worker_id))
+ for thread in threads:
+ thread.start()
+ for thread in threads:
+ thread.join()
+ for thread in threads:
+ if not thread.completed or not thread.success:
+ return 1
+ return 0
+
def run_in_sequence(jobs, stop_on_first_failure):
- combined_exit_code = 0
- worker_id = None
- for job in jobs:
- try:
- exit_code = job(worker_id)
- if exit_code:
- combined_exit_code = exit_code
- if stop_on_first_failure:
- break
- except:
- print(traceback.format_exc())
- combined_exit_code = 1
- if stop_on_first_failure:
- break
- return combined_exit_code
+ combined_exit_code = 0
+ worker_id = None
+ for job in jobs:
+ try:
+ exit_code = job(worker_id)
+ if exit_code:
+ combined_exit_code = exit_code
+ if stop_on_first_failure:
+ break
+ except:
+ print(traceback.format_exc())
+ combined_exit_code = 1
+ if stop_on_first_failure:
+ break
+ return combined_exit_code
+
def print_thread(msg, worker_id):
- if worker_id is None:
- print(msg)
- else:
- print('WORKER %s: %s' % (worker_id, msg))
\ No newline at end of file
+ if worker_id is None:
+ print(msg)
+ else:
+ print('WORKER %s: %s' % (worker_id, msg))
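
Jobs handed to run_in_parallel are callables that take a worker_id (1-based
for parallel workers, None on the sequential fallback) and return an exit
code, with non-zero marking failure; a hypothetical usage sketch:

import subprocess
import thread_utils

def make_job(test_name):
    def job(worker_id):
        # Return the subprocess exit code; non-zero marks the job failed.
        return subprocess.call(['echo', 'running', test_name])
    return job

jobs = [make_job(name) for name in ['testA', 'testB', 'testC']]
exit_code = thread_utils.run_in_parallel(jobs,
                                         number_of_workers=2,
                                         stop_on_first_failure=True)
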
diff --git a/tools/toolhelper.py b/tools/toolhelper.py
index f63b3ef..5a6b8ee 100644
--- a/tools/toolhelper.py
+++ b/tools/toolhelper.py
@@ -12,100 +12,121 @@
import utils
-def run(tool, args, build=None, debug=True,
- profile=False, track_memory_file=None, extra_args=None,
- stderr=None, stdout=None, return_stdout=False, timeout=0, quiet=False,
- cmd_prefix=None, jar=None, main=None, time_consumer=None,
- debug_agent=None, worker_id=None):
- cmd = []
- if cmd_prefix:
- cmd.extend(cmd_prefix)
- if build is None:
- build, args = extract_build_from_args(args)
- if build:
- gradle.RunGradle([
- utils.GRADLE_TASK_R8LIB if tool.startswith('r8lib')
- else utils.GRADLE_TASK_R8])
- if track_memory_file:
- cmd.extend(['tools/track_memory.sh', track_memory_file])
- cmd.append(jdk.GetJavaExecutable())
- if extra_args:
- cmd.extend(extra_args)
- if debug_agent is None:
- debug_agent, args = extract_debug_agent_from_args(args)
- if debug_agent:
- cmd.append(
- '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005')
- if debug:
- cmd.append('-ea')
- if profile:
- cmd.append('-agentlib:hprof=cpu=samples,interval=1,depth=8')
- if jar:
- cmd.extend(['-cp', jar, main])
- elif tool == 'r8lib-d8':
- cmd.extend(['-cp', utils.R8LIB_JAR, 'com.android.tools.r8.D8'])
- elif tool == 'r8lib-l8':
- cmd.extend(['-cp', utils.R8LIB_JAR, 'com.android.tools.r8.L8'])
- elif tool == 'r8lib-r8':
- cmd.extend(['-cp', utils.R8LIB_JAR, 'com.android.tools.r8.R8'])
- elif tool == 'r8lib-tracereferences':
- cmd.extend(['-cp', utils.R8LIB_JAR, 'com.android.tools.r8.tracereferences.TraceReferences'])
- else:
- cmd.extend(['-jar', utils.R8_JAR, tool])
- lib, args = extract_lib_from_args(args)
- if lib:
- cmd.extend(["--lib", lib])
- cmd.extend(args)
- utils.PrintCmd(cmd, quiet=quiet, worker_id=worker_id)
- start = time.time()
- if timeout > 0:
- kill = lambda process: process.kill()
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- timer = Timer(timeout, kill, [proc])
- try:
- timer.start()
- stdout, stderr = proc.communicate()
- finally:
- timer.cancel()
- result = stdout.decode('utf-8') if return_stdout else proc.returncode
- else:
- result = (
- subprocess.check_output(cmd).decode('utf-8')
- if return_stdout
- else subprocess.call(cmd, stdout=stdout, stderr=stderr))
- duration = int((time.time() - start) * 1000)
- if time_consumer:
- time_consumer(duration)
- return result
+def run(tool,
+ args,
+ build=None,
+ debug=True,
+ profile=False,
+ track_memory_file=None,
+ extra_args=None,
+ stderr=None,
+ stdout=None,
+ return_stdout=False,
+ timeout=0,
+ quiet=False,
+ cmd_prefix=None,
+ jar=None,
+ main=None,
+ time_consumer=None,
+ debug_agent=None,
+ worker_id=None):
+ cmd = []
+ if cmd_prefix:
+ cmd.extend(cmd_prefix)
+ if build is None:
+ build, args = extract_build_from_args(args)
+ if build:
+ gradle.RunGradle([
+ utils.GRADLE_TASK_R8LIB
+ if tool.startswith('r8lib') else utils.GRADLE_TASK_R8
+ ])
+ if track_memory_file:
+ cmd.extend(['tools/track_memory.sh', track_memory_file])
+ cmd.append(jdk.GetJavaExecutable())
+ if extra_args:
+ cmd.extend(extra_args)
+ if debug_agent is None:
+ debug_agent, args = extract_debug_agent_from_args(args)
+ if debug_agent:
+ cmd.append(
+ '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
+ )
+ if debug:
+ cmd.append('-ea')
+ if profile:
+ cmd.append('-agentlib:hprof=cpu=samples,interval=1,depth=8')
+ if jar:
+ cmd.extend(['-cp', jar, main])
+ elif tool == 'r8lib-d8':
+ cmd.extend(['-cp', utils.R8LIB_JAR, 'com.android.tools.r8.D8'])
+ elif tool == 'r8lib-l8':
+ cmd.extend(['-cp', utils.R8LIB_JAR, 'com.android.tools.r8.L8'])
+ elif tool == 'r8lib-r8':
+ cmd.extend(['-cp', utils.R8LIB_JAR, 'com.android.tools.r8.R8'])
+ elif tool == 'r8lib-tracereferences':
+ cmd.extend([
+ '-cp', utils.R8LIB_JAR,
+ 'com.android.tools.r8.tracereferences.TraceReferences'
+ ])
+ else:
+ cmd.extend(['-jar', utils.R8_JAR, tool])
+ lib, args = extract_lib_from_args(args)
+ if lib:
+ cmd.extend(["--lib", lib])
+ cmd.extend(args)
+ utils.PrintCmd(cmd, quiet=quiet, worker_id=worker_id)
+ start = time.time()
+ if timeout > 0:
+ kill = lambda process: process.kill()
+ proc = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ timer = Timer(timeout, kill, [proc])
+ try:
+ timer.start()
+ stdout, stderr = proc.communicate()
+ finally:
+ timer.cancel()
+ result = stdout.decode('utf-8') if return_stdout else proc.returncode
+ else:
+ result = (subprocess.check_output(cmd).decode('utf-8') if return_stdout
+ else subprocess.call(cmd, stdout=stdout, stderr=stderr))
+ duration = int((time.time() - start) * 1000)
+ if time_consumer:
+ time_consumer(duration)
+ return result
+
def extract_build_from_args(input_args):
- build = True
- args = []
- for arg in input_args:
- if arg in ("--build", "--no-build"):
- build = arg == "--build"
- else:
- args.append(arg)
- return build, args
+ build = True
+ args = []
+ for arg in input_args:
+ if arg in ("--build", "--no-build"):
+ build = arg == "--build"
+ else:
+ args.append(arg)
+ return build, args
+
def extract_lib_from_args(input_args):
- lib = None
- args = []
- for arg in input_args:
- if arg == '--lib-android':
- lib = utils.get_android_jar(28)
- elif arg == '--lib-java':
- lib = utils.RT_JAR
- else:
- args.append(arg)
- return lib, args
+ lib = None
+ args = []
+ for arg in input_args:
+ if arg == '--lib-android':
+ lib = utils.get_android_jar(28)
+ elif arg == '--lib-java':
+ lib = utils.RT_JAR
+ else:
+ args.append(arg)
+ return lib, args
+
def extract_debug_agent_from_args(input_args):
- agent = False
- args = []
- for arg in input_args:
- if arg in ('--debug-agent', '--debug_agent'):
- agent = True
- else:
- args.append(arg)
- return agent, args
+ agent = False
+ args = []
+ for arg in input_args:
+ if arg in ('--debug-agent', '--debug_agent'):
+ agent = True
+ else:
+ args.append(arg)
+ return agent, args
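
The timeout branch of run is the standard threading.Timer kill pattern: arm
a timer that kills the process, call communicate(), and cancel the timer if
the process finishes first. A standalone sketch (the sleep command is
illustrative):

import subprocess
from threading import Timer

def run_with_timeout(cmd, timeout_seconds):
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    timer = Timer(timeout_seconds, proc.kill)
    try:
        timer.start()
        stdout, stderr = proc.communicate()
    finally:
        timer.cancel()
    return proc.returncode, stdout, stderr

# A command that outlives the timeout is killed and exits non-zero.
print(run_with_timeout(['sleep', '10'], 2)[0])
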
diff --git a/tools/trigger.py b/tools/trigger.py
index b3e09f2..0c44d24 100755
--- a/tools/trigger.py
+++ b/tools/trigger.py
@@ -27,141 +27,152 @@
DESUGAR_JDK8_BOT = 'lib_desugar-archive-jdk8'
SMALI_BOT = 'smali'
-def ParseOptions():
- result = optparse.OptionParser()
- result.add_option('--release',
- help='Run on the release branch builders.',
- default=False, action='store_true')
- result.add_option('--cl',
- metavar=('<url>'),
- help='Run the specified cl on the bots. This should be '
- 'the full url, e.g., '
- 'https://r8-review.googlesource.com/c/r8/+/37420/1')
- result.add_option('--desugar-jdk11',
- help='Run the jdk11 library desugar and archiving bot.',
- default=False, action='store_true')
- result.add_option('--desugar-jdk11-legacy',
- help='Run the jdk11 legacy library desugar and archiving bot.',
- default=False, action='store_true')
- result.add_option('--desugar-jdk8',
- help='Run the jdk8 library desugar and archiving bot.',
- default=False, action='store_true')
- result.add_option('--smali',
- metavar=('<version>'),
- help='Build smali version <version>.')
- result.add_option('--builder', help='Trigger specific builder')
- return result.parse_args()
+def ParseOptions():
+ result = optparse.OptionParser()
+ result.add_option('--release',
+ help='Run on the release branch builders.',
+ default=False,
+ action='store_true')
+ result.add_option('--cl',
+ metavar=('<url>'),
+ help='Run the specified cl on the bots. This should be '
+ 'the full url, e.g., '
+ 'https://r8-review.googlesource.com/c/r8/+/37420/1')
+ result.add_option('--desugar-jdk11',
+ help='Run the jdk11 library desugar and archiving bot.',
+ default=False,
+ action='store_true')
+ result.add_option(
+ '--desugar-jdk11-legacy',
+ help='Run the jdk11 legacy library desugar and archiving bot.',
+ default=False,
+ action='store_true')
+ result.add_option('--desugar-jdk8',
+ help='Run the jdk8 library desugar and archiving bot.',
+ default=False,
+ action='store_true')
+ result.add_option('--smali',
+ metavar=('<version>'),
+ help='Build smali version <version>.')
+
+ result.add_option('--builder', help='Trigger specific builder')
+ return result.parse_args()
+
def get_builders():
- is_release = False
- main_builders = []
- release_builders = []
- with open(LUCI_SCHEDULE, 'r') as fp:
- lines = fp.readlines()
- for line in lines:
- if 'branch-gitiles' in line:
- is_release = True
- if 'main-gitiles-trigger' in line:
- is_release = False
- match = re.match(TRIGGERS_RE, line)
- if match:
- builder = match.group(1)
- if is_release:
- assert 'release' in builder, builder
- release_builders.append(builder)
- else:
- assert 'release' not in builder, builder
- main_builders.append(builder)
- print('Desugar jdk11 builder:\n ' + DESUGAR_JDK11_BOT)
- print('Desugar jdk11 legacy builder:\n ' + DESUGAR_JDK11_LEGACY_BOT)
- print('Desugar jdk8 builder:\n ' + DESUGAR_JDK8_BOT)
- print('Smali builder:\n ' + SMALI_BOT)
- print('Main builders:\n ' + '\n '.join(main_builders))
- print('Release builders:\n ' + '\n '.join(release_builders))
- return (main_builders, release_builders)
+ is_release = False
+ main_builders = []
+ release_builders = []
+ with open(LUCI_SCHEDULE, 'r') as fp:
+ lines = fp.readlines()
+ for line in lines:
+ if 'branch-gitiles' in line:
+ is_release = True
+ if 'main-gitiles-trigger' in line:
+ is_release = False
+ match = re.match(TRIGGERS_RE, line)
+ if match:
+ builder = match.group(1)
+ if is_release:
+ assert 'release' in builder, builder
+ release_builders.append(builder)
+ else:
+ assert 'release' not in builder, builder
+ main_builders.append(builder)
+ print('Desugar jdk11 builder:\n ' + DESUGAR_JDK11_BOT)
+ print('Desugar jdk11 legacy builder:\n ' + DESUGAR_JDK11_LEGACY_BOT)
+ print('Desugar jdk8 builder:\n ' + DESUGAR_JDK8_BOT)
+ print('Smali builder:\n ' + SMALI_BOT)
+ print('Main builders:\n ' + '\n '.join(main_builders))
+ print('Release builders:\n ' + '\n '.join(release_builders))
+ return (main_builders, release_builders)
+
def sanity_check_url(url):
- a = urlopen(url)
- if a.getcode() != 200:
- raise Exception('Url: %s \n returned %s' % (url, a.getcode()))
+ a = urlopen(url)
+ if a.getcode() != 200:
+        raise Exception('Url: %s\nreturned %s' % (url, a.getcode()))
+
def trigger_builders(builders, commit):
- commit_url = 'https://r8.googlesource.com/r8/+/%s' % commit
- sanity_check_url(commit_url)
- for builder in builders:
- cmd = ['bb', 'add', 'r8/ci/%s' % builder , '-commit', commit_url]
- subprocess.check_call(cmd)
+ commit_url = 'https://r8.googlesource.com/r8/+/%s' % commit
+ sanity_check_url(commit_url)
+ for builder in builders:
+ cmd = ['bb', 'add', 'r8/ci/%s' % builder, '-commit', commit_url]
+ subprocess.check_call(cmd)
+
def trigger_smali_builder(version):
- utils.check_basic_semver_version(
- version,
- 'use semantic version of the smali version to built (pre-releases are not supported)',
- allowPrerelease = False)
- cmd = [
- 'bb',
- 'add',
- 'r8/ci/%s' % SMALI_BOT,
- '-p',
- 'test_options=["--version", "%s"]' % version
- ]
- subprocess.check_call(cmd)
-
-def trigger_cl(builders, cl_url):
- for builder in builders:
- cmd = ['bb', 'add', 'r8/ci/%s' % builder , '-cl', cl_url]
+ utils.check_basic_semver_version(
+ version,
+        'use semantic version of the smali version to build (pre-releases are not supported)',
+ allowPrerelease=False)
+ cmd = [
+ 'bb', 'add',
+ 'r8/ci/%s' % SMALI_BOT, '-p',
+ 'test_options=["--version", "%s"]' % version
+ ]
subprocess.check_call(cmd)
+
+def trigger_cl(builders, cl_url):
+ for builder in builders:
+ cmd = ['bb', 'add', 'r8/ci/%s' % builder, '-cl', cl_url]
+ subprocess.check_call(cmd)
+
+
def Main():
- (options, args) = ParseOptions()
- desugar = options.desugar_jdk11 or options.desugar_jdk11_legacy or options.desugar_jdk8
- requires_commit = not options.cl and not desugar and not options.smali
- if len(args) != 1 and requires_commit:
- print('Takes exactly one argument, the commit to run')
- return 1
+ (options, args) = ParseOptions()
+ desugar = options.desugar_jdk11 or options.desugar_jdk11_legacy or options.desugar_jdk8
+ requires_commit = not options.cl and not desugar and not options.smali
+ if len(args) != 1 and requires_commit:
+ print('Takes exactly one argument, the commit to run')
+ return 1
- if options.cl and options.release:
- print('You can\'t run cls on the release bots')
- return 1
+ if options.cl and options.release:
+ print('You can\'t run cls on the release bots')
+ return 1
- if options.cl and desugar:
- print('You can\'t run cls on the desugar bot')
- return 1
+ if options.cl and desugar:
+ print('You can\'t run cls on the desugar bot')
+ return 1
- if options.cl and options.smali:
- print('You can\'t run cls on the smali bot')
- return 1
+ if options.cl and options.smali:
+ print('You can\'t run cls on the smali bot')
+ return 1
- if options.smali:
- if not options.release:
- print('Only release versions of smali can be built')
- return 1
+ if options.smali:
+ if not options.release:
+ print('Only release versions of smali can be built')
+ return 1
- trigger_smali_builder(options.smali)
- return
+ trigger_smali_builder(options.smali)
+ return
- commit = None if not requires_commit else args[0]
- (main_builders, release_builders) = get_builders()
- builders = release_builders if options.release else main_builders
- if options.builder:
- builder = options.builder
- assert builder in main_builders or builder in release_builders
- builders = [options.builder]
- if desugar:
- assert options.desugar_jdk11 or options.desugar_jdk11_legacy or options.desugar_jdk8
- if options.desugar_jdk11:
- builders = [DESUGAR_JDK11_BOT]
- elif options.desugar_jdk11_legacy:
- builders = [DESUGAR_JDK11_LEGACY_BOT]
+ commit = None if not requires_commit else args[0]
+ (main_builders, release_builders) = get_builders()
+ builders = release_builders if options.release else main_builders
+ if options.builder:
+ builder = options.builder
+ assert builder in main_builders or builder in release_builders
+ builders = [options.builder]
+ if desugar:
+ assert options.desugar_jdk11 or options.desugar_jdk11_legacy or options.desugar_jdk8
+ if options.desugar_jdk11:
+ builders = [DESUGAR_JDK11_BOT]
+ elif options.desugar_jdk11_legacy:
+ builders = [DESUGAR_JDK11_LEGACY_BOT]
+ else:
+ builders = [DESUGAR_JDK8_BOT]
+ commit = git_utils.GetHeadRevision(utils.REPO_ROOT, use_main=True)
+ if options.cl:
+ trigger_cl(builders, options.cl)
else:
- builders = [DESUGAR_JDK8_BOT]
- commit = git_utils.GetHeadRevision(utils.REPO_ROOT, use_main=True)
- if options.cl:
- trigger_cl(builders, options.cl)
- else:
- assert commit
- trigger_builders(builders, commit)
+ assert commit
+ trigger_builders(builders, commit)
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/update_prebuilds_in_android.py b/tools/update_prebuilds_in_android.py
index ea2c32e..31ee23e 100755
--- a/tools/update_prebuilds_in_android.py
+++ b/tools/update_prebuilds_in_android.py
@@ -12,129 +12,138 @@
import archive
JAR_TARGETS_MAP = {
- 'full': [
- (utils.R8, 'r8'),
- ],
- 'lib': [
- (utils.R8LIB, 'r8'),
- ],
+ 'full': [(utils.R8, 'r8'),],
+ 'lib': [(utils.R8LIB, 'r8'),],
}
OTHER_TARGETS = ["LICENSE"]
KEEPANNO_JAR = 'keepanno-annotations.jar'
+
def parse_arguments():
- parser = argparse.ArgumentParser(
- description = 'Build and copy jars to an Android tree.')
- parser.add_argument('android_root', nargs=1,
- help='Android checkout root.')
- parser.add_argument('--commit_hash', default=None, help='Commit hash')
- parser.add_argument('--version', default=None, help='The version to download')
- parser.add_argument(
- '--targets',
- required=True,
- choices=['full', 'lib'],
- help="Use 'full' to download the full, non-optimized jars (legacy" +
- " behaviour) and 'lib' for the R8-processed, optimized jars.",
- )
- parser.add_argument(
- '--maps',
- action='store_true',
- help="Download proguard maps for jars, use only with '--target lib'.",
- )
- parser.add_argument(
- '--keepanno',
- action='store_true',
- help="Download keepanno-annotations library.",
- )
- parser.add_argument(
- '--java-max-memory-size',
- '--java_max_memory_size',
- help='Use a custom max memory size for the gradle java instance, eg, 4g')
- return parser.parse_args()
+ parser = argparse.ArgumentParser(
+ description='Build and copy jars to an Android tree.')
+ parser.add_argument('android_root', nargs=1, help='Android checkout root.')
+ parser.add_argument('--commit_hash', default=None, help='Commit hash')
+ parser.add_argument('--version',
+ default=None,
+ help='The version to download')
+ parser.add_argument(
+ '--targets',
+ required=True,
+ choices=['full', 'lib'],
+ help="Use 'full' to download the full, non-optimized jars (legacy" +
+ " behaviour) and 'lib' for the R8-processed, optimized jars.",
+ )
+ parser.add_argument(
+ '--maps',
+ action='store_true',
+ help="Download proguard maps for jars, use only with '--target lib'.",
+ )
+ parser.add_argument(
+ '--keepanno',
+ action='store_true',
+ help="Download keepanno-annotations library.",
+ )
+ parser.add_argument(
+ '--java-max-memory-size',
+ '--java_max_memory_size',
+        help='Use a custom max memory size for the gradle java instance, e.g., 4g'
+ )
+ return parser.parse_args()
+
def copy_targets(root, target_root, srcs, dests, maps=False):
- assert len(srcs) == len(dests)
- for i in range(len(srcs)):
- src = os.path.join(root, srcs[i])
- dest = os.path.join(target_root, 'prebuilts', 'r8', dests[i])
- if os.path.exists(dest):
- print('Copying: ' + src + ' -> ' + dest)
- copyfile(src, dest)
- if maps:
- print('Copying: ' + src + '.map -> ' + dest + '.map')
- copyfile(src + '.map', dest + '.map')
- else:
- print('WARNING: Not copying ' + src + ' -> ' + dest +
- ', as' + dest + ' does not exist already')
+ assert len(srcs) == len(dests)
+ for i in range(len(srcs)):
+ src = os.path.join(root, srcs[i])
+ dest = os.path.join(target_root, 'prebuilts', 'r8', dests[i])
+ if os.path.exists(dest):
+ print('Copying: ' + src + ' -> ' + dest)
+ copyfile(src, dest)
+ if maps:
+ print('Copying: ' + src + '.map -> ' + dest + '.map')
+ copyfile(src + '.map', dest + '.map')
+ else:
+            print('WARNING: Not copying ' + src + ' -> ' + dest + ', as ' +
+                  dest + ' does not already exist')
+
def copy_jar_targets(root, target_root, jar_targets, maps):
- srcs = list(map((lambda t: t[0] + '.jar'), jar_targets))
- dests = list(map((lambda t: t[1] + '.jar'), jar_targets))
- copy_targets(root, target_root, srcs, dests, maps=maps)
+ srcs = list(map((lambda t: t[0] + '.jar'), jar_targets))
+ dests = list(map((lambda t: t[1] + '.jar'), jar_targets))
+ copy_targets(root, target_root, srcs, dests, maps=maps)
+
def copy_other_targets(root, target_root):
- copy_targets(root, target_root, OTHER_TARGETS, OTHER_TARGETS)
+ copy_targets(root, target_root, OTHER_TARGETS, OTHER_TARGETS)
+
def download_hash(root, commit_hash, target, quiet=False):
- download_target(root, target, commit_hash, True, quiet=quiet)
+ download_target(root, target, commit_hash, True, quiet=quiet)
+
def download_version(root, version, target):
- download_target(root, target, version, False)
+ download_target(root, target, version, False)
+
def download_target(root, target, hash_or_version, is_hash, quiet=False):
- download_path = os.path.join(root, target)
- url = archive.GetUploadDestination(
- hash_or_version,
- target,
- is_hash)
- if not quiet:
- print('Downloading: ' + url + ' -> ' + download_path)
- utils.download_file_from_cloud_storage(url, download_path, quiet=quiet)
+ download_path = os.path.join(root, target)
+ url = archive.GetUploadDestination(hash_or_version, target, is_hash)
+ if not quiet:
+ print('Downloading: ' + url + ' -> ' + download_path)
+ utils.download_file_from_cloud_storage(url, download_path, quiet=quiet)
+
def main_download(hash, maps, targets, target_root, version, keepanno=False):
- jar_targets = JAR_TARGETS_MAP[targets]
- final_targets = list(map((lambda t: t[0] + '.jar'), jar_targets)) + OTHER_TARGETS
- with utils.TempDir() as root:
- for target in final_targets:
- if hash:
- download_hash(root, hash, target)
- if maps and target not in OTHER_TARGETS:
- download_hash(root, hash, target + '.map')
+ jar_targets = JAR_TARGETS_MAP[targets]
+ final_targets = list(map(
+ (lambda t: t[0] + '.jar'), jar_targets)) + OTHER_TARGETS
+ with utils.TempDir() as root:
+ for target in final_targets:
+ if hash:
+ download_hash(root, hash, target)
+ if maps and target not in OTHER_TARGETS:
+ download_hash(root, hash, target + '.map')
+ if keepanno:
+ download_hash(root, hash, KEEPANNO_JAR)
+ else:
+ assert version
+ download_version(root, version, target)
+ if maps and target not in OTHER_TARGETS:
+ download_version(root, version, target + '.map')
+ if keepanno:
+ download_version(root, version, KEEPANNO_JAR)
+ copy_jar_targets(root, target_root, jar_targets, maps)
+ copy_other_targets(root, target_root)
if keepanno:
- download_hash(root, hash, KEEPANNO_JAR)
- else:
- assert version
- download_version(root, version, target)
- if maps and target not in OTHER_TARGETS:
- download_version(root, version, target + '.map')
- if keepanno:
- download_version(root, version, KEEPANNO_JAR)
- copy_jar_targets(root, target_root, jar_targets, maps)
- copy_other_targets(root, target_root)
- if keepanno:
- copy_targets(root, target_root, [KEEPANNO_JAR], [KEEPANNO_JAR])
+ copy_targets(root, target_root, [KEEPANNO_JAR], [KEEPANNO_JAR])
+
def main_build(maps, max_memory_size, targets, target_root):
- jar_targets = JAR_TARGETS_MAP[targets]
- gradle_args = [utils.GRADLE_TASK_R8LIB if targets == 'lib'
- else utils.GRADLE_TASK_R8]
- if max_memory_size:
- gradle_args.append('-Dorg.gradle.jvmargs=-Xmx' + max_memory_size)
- gradle.RunGradle(gradle_args)
- copy_jar_targets(utils.LIBS, target_root, jar_targets, maps)
- copy_other_targets(utils.GENERATED_LICENSE_DIR, target_root)
+ jar_targets = JAR_TARGETS_MAP[targets]
+ gradle_args = [
+ utils.GRADLE_TASK_R8LIB if targets == 'lib' else utils.GRADLE_TASK_R8
+ ]
+ if max_memory_size:
+ gradle_args.append('-Dorg.gradle.jvmargs=-Xmx' + max_memory_size)
+ gradle.RunGradle(gradle_args)
+ copy_jar_targets(utils.LIBS, target_root, jar_targets, maps)
+ copy_other_targets(utils.GENERATED_LICENSE_DIR, target_root)
+
def main(args):
- if args.maps and args.targets != 'lib':
- raise Exception("Use '--maps' only with '--targets lib.")
- target_root = args.android_root[0]
- if args.commit_hash == None and args.version == None:
- main_build(args.maps, args.java_max_memory_size, args.targets, target_root)
- else:
- assert args.commit_hash == None or args.version == None
- main_download(
- args.commit_hash, args.maps, args.targets, target_root, args.version, args.keepanno)
+ if args.maps and args.targets != 'lib':
+ raise Exception("Use '--maps' only with '--targets lib.")
+ target_root = args.android_root[0]
+    if args.commit_hash is None and args.version is None:
+ main_build(args.maps, args.java_max_memory_size, args.targets,
+ target_root)
+ else:
+        assert args.commit_hash is None or args.version is None
+ main_download(args.commit_hash, args.maps, args.targets, target_root,
+ args.version, args.keepanno)
+
if __name__ == '__main__':
- sys.exit(main(parse_arguments()))
+ sys.exit(main(parse_arguments()))
diff --git a/tools/upload_to_x20.py b/tools/upload_to_x20.py
index e6c293d..3ef7b73 100755
--- a/tools/upload_to_x20.py
+++ b/tools/upload_to_x20.py
@@ -17,30 +17,36 @@
GMSCORE_DEPS = '/google/data/rw/teams/r8/deps'
+
def parse_options():
- return optparse.OptionParser().parse_args()
+ return optparse.OptionParser().parse_args()
+
def uploadFile(filename, dest):
- print('Uploading to %s' % dest)
- shutil.copyfile(filename, dest)
- subprocess.check_call(['chmod', '664', dest])
+ print('Uploading to %s' % dest)
+ shutil.copyfile(filename, dest)
+ subprocess.check_call(['chmod', '664', dest])
+
def Main():
- (options, args) = parse_options()
- assert len(args) == 1
- name = args[0]
- print('Creating archive for %s' % name)
- if not name in os.listdir('.'):
- print('You must be standing directly below the directory you are uploading')
- return 1
- filename = utils.create_archive(name)
- sha1 = utils.get_sha1(filename)
- dest = os.path.join(GMSCORE_DEPS, sha1)
- uploadFile(filename, dest)
- sha1_file = '%s.sha1' % filename
- with open(sha1_file, 'w') as output:
- output.write(sha1)
- print('Sha (%s) written to: %s' % (sha1, sha1_file))
+ (options, args) = parse_options()
+ assert len(args) == 1
+ name = args[0]
+ print('Creating archive for %s' % name)
+    if name not in os.listdir('.'):
+        print('You must run this script from the parent directory of the '
+              'directory you are uploading')
+ return 1
+ filename = utils.create_archive(name)
+ sha1 = utils.get_sha1(filename)
+ dest = os.path.join(GMSCORE_DEPS, sha1)
+ uploadFile(filename, dest)
+ sha1_file = '%s.sha1' % filename
+ with open(sha1_file, 'w') as output:
+ output.write(sha1)
+ print('Sha (%s) written to: %s' % (sha1, sha1_file))
+
if __name__ == '__main__':
- sys.exit(Main())
+ sys.exit(Main())
diff --git a/tools/utils.py b/tools/utils.py
index 2fd1140..db9844e 100644
--- a/tools/utils.py
+++ b/tools/utils.py
@@ -34,8 +34,8 @@
BUILD = os.path.join(REPO_ROOT, 'build')
BUILD_JAVA_MAIN_DIR = os.path.join(BUILD, 'classes', 'java', 'main')
LIBS = os.path.join(BUILD, 'libs')
-CUSTOM_CONVERSION_DIR = os.path.join(
- THIRD_PARTY, 'openjdk', 'custom_conversion')
+CUSTOM_CONVERSION_DIR = os.path.join(THIRD_PARTY, 'openjdk',
+ 'custom_conversion')
GENERATED_LICENSE_DIR = os.path.join(BUILD, 'generatedLicense')
SRC_ROOT = os.path.join(REPO_ROOT, 'src', 'main', 'java')
REPO_SOURCE = 'https://r8.googlesource.com/r8'
@@ -78,30 +78,33 @@
CUSTOM_CONVERSION_DIR, 'library_desugar_conversions.jar')
KEEPANNO_ANNOTATIONS_JAR = os.path.join(LIBS, 'keepanno-annotations.jar')
-DESUGAR_CONFIGURATION = os.path.join(
- 'src', 'library_desugar', 'desugar_jdk_libs.json')
-DESUGAR_IMPLEMENTATION = os.path.join(
- 'third_party', 'openjdk', 'desugar_jdk_libs', 'desugar_jdk_libs.jar')
+DESUGAR_CONFIGURATION = os.path.join('src', 'library_desugar',
+ 'desugar_jdk_libs.json')
+DESUGAR_IMPLEMENTATION = os.path.join('third_party', 'openjdk',
+ 'desugar_jdk_libs',
+ 'desugar_jdk_libs.jar')
DESUGAR_CONFIGURATION_JDK11_LEGACY = os.path.join(
- 'src', 'library_desugar', 'jdk11', 'desugar_jdk_libs_legacy.json')
+ 'src', 'library_desugar', 'jdk11', 'desugar_jdk_libs_legacy.json')
DESUGAR_CONFIGURATION_JDK11_MINIMAL = os.path.join(
- 'src', 'library_desugar', 'jdk11', 'desugar_jdk_libs_minimal.json')
-DESUGAR_CONFIGURATION_JDK11 = os.path.join(
- 'src', 'library_desugar', 'jdk11', 'desugar_jdk_libs.json')
-DESUGAR_CONFIGURATION_JDK11_NIO = os.path.join(
- 'src', 'library_desugar', 'jdk11', 'desugar_jdk_libs_nio.json')
-DESUGAR_IMPLEMENTATION_JDK11 = os.path.join(
- 'third_party', 'openjdk', 'desugar_jdk_libs_11', 'desugar_jdk_libs.jar')
+ 'src', 'library_desugar', 'jdk11', 'desugar_jdk_libs_minimal.json')
+DESUGAR_CONFIGURATION_JDK11 = os.path.join('src', 'library_desugar', 'jdk11',
+ 'desugar_jdk_libs.json')
+DESUGAR_CONFIGURATION_JDK11_NIO = os.path.join('src', 'library_desugar',
+ 'jdk11',
+ 'desugar_jdk_libs_nio.json')
+DESUGAR_IMPLEMENTATION_JDK11 = os.path.join('third_party', 'openjdk',
+ 'desugar_jdk_libs_11',
+ 'desugar_jdk_libs.jar')
DESUGAR_CONFIGURATION_MAVEN_ZIP = os.path.join(
- LIBS, 'desugar_jdk_libs_configuration.zip')
+ LIBS, 'desugar_jdk_libs_configuration.zip')
DESUGAR_CONFIGURATION_JDK11_LEGACY_MAVEN_ZIP = os.path.join(
- LIBS, 'desugar_jdk_libs_configuration_jdk11_legacy.zip')
+ LIBS, 'desugar_jdk_libs_configuration_jdk11_legacy.zip')
DESUGAR_CONFIGURATION_JDK11_MINIMAL_MAVEN_ZIP = os.path.join(
- LIBS, 'desugar_jdk_libs_configuration_jdk11_minimal.zip')
+ LIBS, 'desugar_jdk_libs_configuration_jdk11_minimal.zip')
DESUGAR_CONFIGURATION_JDK11_MAVEN_ZIP = os.path.join(
- LIBS, 'desugar_jdk_libs_configuration_jdk11.zip')
+ LIBS, 'desugar_jdk_libs_configuration_jdk11.zip')
DESUGAR_CONFIGURATION_JDK11_NIO_MAVEN_ZIP = os.path.join(
- LIBS, 'desugar_jdk_libs_configuration_jdk11_nio.zip')
+ LIBS, 'desugar_jdk_libs_configuration_jdk11_nio.zip')
GENERATED_LICENSE = os.path.join(GENERATED_LICENSE_DIR, 'LICENSE')
RT_JAR = os.path.join(REPO_ROOT, 'third_party/openjdk/openjdk-rt-1.8/rt.jar')
R8LIB_KEEP_RULES = os.path.join(REPO_ROOT, 'src/main/keep.txt')
@@ -113,10 +116,14 @@
INTERNAL_DUMPS_DIR = os.path.join(THIRD_PARTY, 'internal-apps')
BAZEL_SHA_FILE = os.path.join(THIRD_PARTY, 'bazel.tar.gz.sha1')
BAZEL_TOOL = os.path.join(THIRD_PARTY, 'bazel')
-JAVA8_SHA_FILE = os.path.join(THIRD_PARTY, 'openjdk', 'jdk8', 'linux-x86.tar.gz.sha1')
-JAVA11_SHA_FILE = os.path.join(THIRD_PARTY, 'openjdk', 'jdk-11', 'linux.tar.gz.sha1')
-DESUGAR_JDK_LIBS_11_SHA_FILE = os.path.join(THIRD_PARTY, 'openjdk', 'desugar_jdk_libs_11.tar.gz.sha1')
-IGNORE_WARNINGS_RULES = os.path.join(REPO_ROOT, 'src', 'test', 'ignorewarnings.rules')
+JAVA8_SHA_FILE = os.path.join(THIRD_PARTY, 'openjdk', 'jdk8',
+ 'linux-x86.tar.gz.sha1')
+JAVA11_SHA_FILE = os.path.join(THIRD_PARTY, 'openjdk', 'jdk-11',
+ 'linux.tar.gz.sha1')
+DESUGAR_JDK_LIBS_11_SHA_FILE = os.path.join(THIRD_PARTY, 'openjdk',
+ 'desugar_jdk_libs_11.tar.gz.sha1')
+IGNORE_WARNINGS_RULES = os.path.join(REPO_ROOT, 'src', 'test',
+ 'ignorewarnings.rules')
ANDROID_HOME_ENVIROMENT_NAME = "ANDROID_HOME"
ANDROID_TOOLS_VERSION_ENVIRONMENT_NAME = "ANDROID_TOOLS_VERSION"
USER_HOME = os.path.expanduser('~')
@@ -124,573 +131,639 @@
R8_TEST_RESULTS_BUCKET = 'r8-test-results'
R8_INTERNAL_TEST_RESULTS_BUCKET = 'r8-internal-test-results'
+
def archive_file(name, gs_dir, src_file):
- gs_file = '%s/%s' % (gs_dir, name)
- upload_file_to_cloud_storage(src_file, gs_file)
+ gs_file = '%s/%s' % (gs_dir, name)
+ upload_file_to_cloud_storage(src_file, gs_file)
+
def archive_value(name, gs_dir, value):
- with TempDir() as temp:
- temparchive = os.path.join(temp, name)
- with open(temparchive, 'w') as f:
- f.write(str(value))
- archive_file(name, gs_dir, temparchive)
+ with TempDir() as temp:
+ temparchive = os.path.join(temp, name)
+ with open(temparchive, 'w') as f:
+ f.write(str(value))
+ archive_file(name, gs_dir, temparchive)
+
def find_cloud_storage_file_from_options(name, options, orElse=None):
- # Import archive on-demand since archive depends on utils.
- from archive import GetUploadDestination
- hash_or_version = find_hash_or_version_from_options(options)
- if not hash_or_version:
- return orElse
- is_hash = options.commit_hash is not None
- download_path = GetUploadDestination(hash_or_version, name, is_hash)
- if file_exists_on_cloud_storage(download_path):
- out = tempfile.NamedTemporaryFile().name
- download_file_from_cloud_storage(download_path, out)
- return out
- else:
- raise Exception('Could not find file {} from hash/version: {}.'
- .format(name, hash_or_version))
+ # Import archive on-demand since archive depends on utils.
+ from archive import GetUploadDestination
+ hash_or_version = find_hash_or_version_from_options(options)
+ if not hash_or_version:
+ return orElse
+ is_hash = options.commit_hash is not None
+ download_path = GetUploadDestination(hash_or_version, name, is_hash)
+ if file_exists_on_cloud_storage(download_path):
+ out = tempfile.NamedTemporaryFile().name
+ download_file_from_cloud_storage(download_path, out)
+ return out
+ else:
+ raise Exception('Could not find file {} from hash/version: {}.'.format(
+ name, hash_or_version))
+
def find_r8_jar_from_options(options):
- return find_cloud_storage_file_from_options('r8.jar', options)
+ return find_cloud_storage_file_from_options('r8.jar', options)
def find_hash_or_version_from_options(options):
- if options.tag:
- return find_hash_or_version_from_tag(options.tag)
- else:
- return options.commit_hash or options.version
+ if options.tag:
+ return find_hash_or_version_from_tag(options.tag)
+ else:
+ return options.commit_hash or options.version
+
def find_hash_or_version_from_tag(tag_or_hash):
- info = subprocess.check_output([
- 'git',
- 'show',
- tag_or_hash,
- '-s',
- '--format=oneline']).decode('utf-8').splitlines()[-1].split()
- # The info should be on the following form [hash,"Version",version]
- if len(info) == 3 and len(info[0]) == 40 and info[1] == "Version":
- return info[2]
- return None
+ info = subprocess.check_output(
+ ['git', 'show', tag_or_hash, '-s',
+ '--format=oneline']).decode('utf-8').splitlines()[-1].split()
+    # The info should be of the form [hash, "Version", version]
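+    # e.g. a oneline entry '<40-hex-hash> Version 4.0.48' would yield '4.0.48'
+    # (illustrative version number).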
+ if len(info) == 3 and len(info[0]) == 40 and info[1] == "Version":
+ return info[2]
+ return None
+
def getAndroidHome():
- return os.environ.get(
- ANDROID_HOME_ENVIROMENT_NAME, os.path.join(USER_HOME, 'Android', 'Sdk'))
+ return os.environ.get(ANDROID_HOME_ENVIROMENT_NAME,
+ os.path.join(USER_HOME, 'Android', 'Sdk'))
+
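+# E.g. ANDROID_TOOLS_VERSION=33.0.1 selects $ANDROID_HOME/build-tools/33.0.1;
+# without it, the known versions below are probed in order.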
def getAndroidBuildTools():
- if ANDROID_TOOLS_VERSION_ENVIRONMENT_NAME in os.environ:
- version = os.environ.get(ANDROID_TOOLS_VERSION_ENVIRONMENT_NAME)
- build_tools_dir = os.path.join(getAndroidHome(), 'build-tools', version)
- assert os.path.exists(build_tools_dir)
- return build_tools_dir
- else:
- versions = ['33.0.1', '32.0.0']
- for version in versions:
- build_tools_dir = os.path.join(getAndroidHome(), 'build-tools', version)
- if os.path.exists(build_tools_dir):
+ if ANDROID_TOOLS_VERSION_ENVIRONMENT_NAME in os.environ:
+ version = os.environ.get(ANDROID_TOOLS_VERSION_ENVIRONMENT_NAME)
+ build_tools_dir = os.path.join(getAndroidHome(), 'build-tools', version)
+ assert os.path.exists(build_tools_dir)
return build_tools_dir
- raise Exception('Unable to find Android build-tools')
+ else:
+ versions = ['33.0.1', '32.0.0']
+ for version in versions:
+ build_tools_dir = os.path.join(getAndroidHome(), 'build-tools',
+ version)
+ if os.path.exists(build_tools_dir):
+ return build_tools_dir
+ raise Exception('Unable to find Android build-tools')
+
def is_python3():
- return sys.version_info.major == 3
+ return sys.version_info.major == 3
+
def Print(s, quiet=False):
- if quiet:
- return
- print(s)
+ if quiet:
+ return
+ print(s)
+
def Warn(message):
- CRED = '\033[91m'
- CEND = '\033[0m'
- print(CRED + message + CEND)
+ CRED = '\033[91m'
+ CEND = '\033[0m'
+ print(CRED + message + CEND)
+
def PrintCmd(cmd, env=None, quiet=False, worker_id=None):
- if quiet:
- return
- if type(cmd) is list:
- cmd = ' '.join(cmd)
- if env:
- env = ' '.join(['{}=\"{}\"'.format(x, y) for x, y in env.iteritems()])
- print_thread('Running: {} {}'.format(env, cmd), worker_id)
- else:
- print_thread('Running: {}'.format(cmd), worker_id)
- # I know this will hit os on windows eventually if we don't do this.
- sys.stdout.flush()
+ if quiet:
+ return
+ if type(cmd) is list:
+ cmd = ' '.join(cmd)
+ if env:
+        env = ' '.join(['{}=\"{}\"'.format(x, y) for x, y in env.items()])
+ print_thread('Running: {} {}'.format(env, cmd), worker_id)
+ else:
+ print_thread('Running: {}'.format(cmd), worker_id)
+    # Flush explicitly; unflushed output will eventually bite us on Windows.
+ sys.stdout.flush()
+
class ProgressLogger(object):
- CLEAR_LINE = '\033[K'
- UP = '\033[F'
+ CLEAR_LINE = '\033[K'
+ UP = '\033[F'
- def __init__(self, quiet=False):
- self._count = 0
- self._has_printed = False
- self._quiet = quiet
+ def __init__(self, quiet=False):
+ self._count = 0
+ self._has_printed = False
+ self._quiet = quiet
- def log(self, text):
- if len(text.strip()) == 0:
- return
- if self._quiet:
- if self._has_printed:
- sys.stdout.write(ProgressLogger.UP + ProgressLogger.CLEAR_LINE)
- if len(text) > 140:
- text = text[0:140] + '...'
- print(text)
- self._has_printed = True
+ def log(self, text):
+ if len(text.strip()) == 0:
+ return
+ if self._quiet:
+ if self._has_printed:
+ sys.stdout.write(ProgressLogger.UP + ProgressLogger.CLEAR_LINE)
+ if len(text) > 140:
+ text = text[0:140] + '...'
+ print(text)
+ self._has_printed = True
- def done(self):
- if self._quiet and self._has_printed:
- sys.stdout.write(ProgressLogger.UP + ProgressLogger.CLEAR_LINE)
- print('')
- sys.stdout.write(ProgressLogger.UP)
+ def done(self):
+ if self._quiet and self._has_printed:
+ sys.stdout.write(ProgressLogger.UP + ProgressLogger.CLEAR_LINE)
+ print('')
+ sys.stdout.write(ProgressLogger.UP)
+
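+# Example (illustrative): RunCmd(['ls', '-l']) echoes output while running and
+# returns the captured stdout lines; failures raise CalledProcessError.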
def RunCmd(cmd, env_vars=None, quiet=False, fail=True, logging=True):
- PrintCmd(cmd, env=env_vars, quiet=quiet)
- env = os.environ.copy()
- if env_vars:
- env.update(env_vars)
- process = subprocess.Popen(
- cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- stdout = []
- logger = ProgressLogger(quiet=quiet) if logging else None
- failed = False
- while True:
- line = process.stdout.readline().decode('utf-8')
- if line != '':
- stripped = line.rstrip()
- stdout.append(stripped)
- if logger:
- logger.log(stripped)
- # TODO(christofferqa): r8 should fail with non-zero exit code.
- if ('AssertionError:' in stripped
- or 'CompilationError:' in stripped
- or 'CompilationFailedException:' in stripped
- or 'Compilation failed' in stripped
- or 'FAILURE:' in stripped
- or 'org.gradle.api.ProjectConfigurationException' in stripped
- or 'BUILD FAILED' in stripped):
- failed = True
- else:
- if logger:
- logger.done()
- exit_code = process.poll()
- if exit_code or failed:
- for line in stdout:
- Warn(line)
- if fail:
- raise subprocess.CalledProcessError(
- exit_code or -1, cmd, output='\n'.join(stdout))
- return stdout
+ PrintCmd(cmd, env=env_vars, quiet=quiet)
+ env = os.environ.copy()
+ if env_vars:
+ env.update(env_vars)
+ process = subprocess.Popen(cmd,
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ stdout = []
+ logger = ProgressLogger(quiet=quiet) if logging else None
+ failed = False
+ while True:
+ line = process.stdout.readline().decode('utf-8')
+ if line != '':
+ stripped = line.rstrip()
+ stdout.append(stripped)
+ if logger:
+ logger.log(stripped)
+ # TODO(christofferqa): r8 should fail with non-zero exit code.
+ if ('AssertionError:' in stripped or
+ 'CompilationError:' in stripped or
+ 'CompilationFailedException:' in stripped or
+ 'Compilation failed' in stripped or
+ 'FAILURE:' in stripped or
+ 'org.gradle.api.ProjectConfigurationException' in stripped
+ or 'BUILD FAILED' in stripped):
+ failed = True
+ else:
+ if logger:
+ logger.done()
+ exit_code = process.poll()
+ if exit_code or failed:
+ for line in stdout:
+ Warn(line)
+ if fail:
+ raise subprocess.CalledProcessError(
+ exit_code or -1, cmd, output='\n'.join(stdout))
+ return stdout
+
def IsWindows():
- return defines.IsWindows()
+ return defines.IsWindows()
+
def EnsureDepFromGoogleCloudStorage(dep, tgz, sha1, msg):
- if (not os.path.exists(dep)
- or not os.path.exists(tgz)
- or os.path.getmtime(tgz) < os.path.getmtime(sha1)):
- DownloadFromGoogleCloudStorage(sha1)
- # Update the mtime of the tar file to make sure we do not run again unless
- # there is an update.
- os.utime(tgz, None)
- else:
- print('Ensure cloud dependency:', msg, 'present')
+ if (not os.path.exists(dep) or not os.path.exists(tgz) or
+ os.path.getmtime(tgz) < os.path.getmtime(sha1)):
+ DownloadFromGoogleCloudStorage(sha1)
+ # Update the mtime of the tar file to make sure we do not run again unless
+ # there is an update.
+ os.utime(tgz, None)
+ else:
+ print('Ensure cloud dependency:', msg, 'present')
+
def DownloadFromX20(sha1_file):
- download_script = os.path.join(REPO_ROOT, 'tools', 'download_from_x20.py')
- cmd = [download_script, sha1_file]
- PrintCmd(cmd)
- subprocess.check_call(cmd)
-
-def DownloadFromGoogleCloudStorage(sha1_file, bucket='r8-deps', auth=False,
- quiet=False):
- suffix = '.bat' if IsWindows() else ''
- download_script = 'download_from_google_storage%s' % suffix
- cmd = [download_script]
- if not auth:
- cmd.append('-n')
- cmd.extend(['-b', bucket, '-u', '-s', sha1_file])
- if not quiet:
+ download_script = os.path.join(REPO_ROOT, 'tools', 'download_from_x20.py')
+ cmd = [download_script, sha1_file]
PrintCmd(cmd)
subprocess.check_call(cmd)
- else:
- subprocess.check_output(cmd)
+
+
+def DownloadFromGoogleCloudStorage(sha1_file,
+ bucket='r8-deps',
+ auth=False,
+ quiet=False):
+ suffix = '.bat' if IsWindows() else ''
+ download_script = 'download_from_google_storage%s' % suffix
+ cmd = [download_script]
+ if not auth:
+ cmd.append('-n')
+ cmd.extend(['-b', bucket, '-u', '-s', sha1_file])
+ if not quiet:
+ PrintCmd(cmd)
+ subprocess.check_call(cmd)
+ else:
+ subprocess.check_output(cmd)
+
def get_sha1(filename):
- sha1 = hashlib.sha1()
- with open(filename, 'rb') as f:
- while True:
- chunk = f.read(1024*1024)
- if not chunk:
- break
- sha1.update(chunk)
- return sha1.hexdigest()
+ sha1 = hashlib.sha1()
+ with open(filename, 'rb') as f:
+ while True:
+ chunk = f.read(1024 * 1024)
+ if not chunk:
+ break
+ sha1.update(chunk)
+ return sha1.hexdigest()
+
def get_HEAD_branch():
- result = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode('utf-8')
- return result.strip()
+ result = subprocess.check_output(
+ ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode('utf-8')
+ return result.strip()
+
def get_HEAD_sha1():
- return get_HEAD_sha1_for_checkout(REPO_ROOT)
+ return get_HEAD_sha1_for_checkout(REPO_ROOT)
+
def get_HEAD_diff_stat():
- return subprocess.check_output(['git', 'diff', '--stat']).decode('utf-8')
+ return subprocess.check_output(['git', 'diff', '--stat']).decode('utf-8')
+
def get_HEAD_sha1_for_checkout(checkout):
- cmd = ['git', 'rev-parse', 'HEAD']
- PrintCmd(cmd)
- with ChangedWorkingDirectory(checkout):
- return subprocess.check_output(cmd).decode('utf-8').strip()
+ cmd = ['git', 'rev-parse', 'HEAD']
+ PrintCmd(cmd)
+ with ChangedWorkingDirectory(checkout):
+ return subprocess.check_output(cmd).decode('utf-8').strip()
+
def makedirs_if_needed(path):
- try:
- os.makedirs(path)
- except OSError:
- if not os.path.isdir(path):
- raise
+ try:
+ os.makedirs(path)
+ except OSError:
+ if not os.path.isdir(path):
+ raise
+
def get_gsutil():
- return 'gsutil.py' if os.name != 'nt' else 'gsutil.py.bat'
+ return 'gsutil.py' if os.name != 'nt' else 'gsutil.py.bat'
+
def upload_file_to_cloud_storage(source, destination):
- cmd = [get_gsutil(), 'cp']
- cmd += [source, destination]
- PrintCmd(cmd)
- subprocess.check_call(cmd)
+ cmd = [get_gsutil(), 'cp']
+ cmd += [source, destination]
+ PrintCmd(cmd)
+ subprocess.check_call(cmd)
+
def delete_file_from_cloud_storage(destination):
- cmd = [get_gsutil(), 'rm', destination]
- PrintCmd(cmd)
- subprocess.check_call(cmd)
+ cmd = [get_gsutil(), 'rm', destination]
+ PrintCmd(cmd)
+ subprocess.check_call(cmd)
+
def ls_files_on_cloud_storage(destination):
- cmd = [get_gsutil(), 'ls', destination]
- PrintCmd(cmd)
- return subprocess.check_output(cmd).decode('utf-8')
+ cmd = [get_gsutil(), 'ls', destination]
+ PrintCmd(cmd)
+ return subprocess.check_output(cmd).decode('utf-8')
+
def cat_file_on_cloud_storage(destination, ignore_errors=False):
- cmd = [get_gsutil(), 'cat', destination]
- PrintCmd(cmd)
- try:
- return subprocess.check_output(cmd).decode('utf-8').strip()
- except subprocess.CalledProcessError as e:
- if ignore_errors:
- return ''
- else:
- raise e
+ cmd = [get_gsutil(), 'cat', destination]
+ PrintCmd(cmd)
+ try:
+ return subprocess.check_output(cmd).decode('utf-8').strip()
+ except subprocess.CalledProcessError as e:
+ if ignore_errors:
+ return ''
+ else:
+ raise e
+
def file_exists_on_cloud_storage(destination):
- cmd = [get_gsutil(), 'ls', destination]
- PrintCmd(cmd)
- return subprocess.call(cmd) == 0
+ cmd = [get_gsutil(), 'ls', destination]
+ PrintCmd(cmd)
+ return subprocess.call(cmd) == 0
+
def download_file_from_cloud_storage(source, destination, quiet=False):
- cmd = [get_gsutil(), 'cp', source, destination]
- PrintCmd(cmd, quiet=quiet)
- subprocess.check_call(cmd)
+ cmd = [get_gsutil(), 'cp', source, destination]
+ PrintCmd(cmd, quiet=quiet)
+ subprocess.check_call(cmd)
+
def create_archive(name, sources=None):
- if not sources:
- sources = [name]
- tarname = '%s.tar.gz' % name
- with tarfile.open(tarname, 'w:gz') as tar:
- for source in sources:
- tar.add(source)
- return tarname
+ if not sources:
+ sources = [name]
+ tarname = '%s.tar.gz' % name
+ with tarfile.open(tarname, 'w:gz') as tar:
+ for source in sources:
+ tar.add(source)
+ return tarname
+
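+# Example (illustrative): create_archive('deps') produces 'deps.tar.gz', and
+# extract_dir('deps.tar.gz') gives back 'deps'.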
def extract_dir(filename):
- return filename[0:len(filename) - len('.tar.gz')]
+ return filename[0:len(filename) - len('.tar.gz')]
+
def unpack_archive(filename):
- dest_dir = extract_dir(filename)
- if os.path.exists(dest_dir):
- print('Deleting existing dir %s' % dest_dir)
- shutil.rmtree(dest_dir)
- dirname = os.path.dirname(os.path.abspath(filename))
- with tarfile.open(filename, 'r:gz') as tar:
- tar.extractall(path=dirname)
+ dest_dir = extract_dir(filename)
+ if os.path.exists(dest_dir):
+ print('Deleting existing dir %s' % dest_dir)
+ shutil.rmtree(dest_dir)
+ dirname = os.path.dirname(os.path.abspath(filename))
+ with tarfile.open(filename, 'r:gz') as tar:
+ tar.extractall(path=dirname)
+
def check_gcert():
- status = subprocess.call(['gcertstatus'])
- if status != 0:
- subprocess.check_call(['gcert'])
+ status = subprocess.call(['gcertstatus'])
+ if status != 0:
+ subprocess.check_call(['gcert'])
+
# Note that GCS is eventually consistent with regard to list operations.
# This is not a problem in our case, but don't ever use this method
# for synchronization.
def cloud_storage_exists(destination):
- cmd = [get_gsutil(), 'ls', destination]
- PrintCmd(cmd)
- exit_code = subprocess.call(cmd)
- return exit_code == 0
+ cmd = [get_gsutil(), 'ls', destination]
+ PrintCmd(cmd)
+ exit_code = subprocess.call(cmd)
+ return exit_code == 0
+
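+# Example usage (illustrative):
+#   with TempDir(prefix='r8-') as temp:
+#       ...  # the directory is removed on exit unless delete=False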
class TempDir(object):
- def __init__(self, prefix='', delete=True):
- self._temp_dir = None
- self._prefix = prefix
- self._delete = delete
- def __enter__(self):
- self._temp_dir = tempfile.mkdtemp(self._prefix)
- return self._temp_dir
+ def __init__(self, prefix='', delete=True):
+ self._temp_dir = None
+ self._prefix = prefix
+ self._delete = delete
- def __exit__(self, *_):
- if self._delete:
- shutil.rmtree(self._temp_dir, ignore_errors=True)
+ def __enter__(self):
+        self._temp_dir = tempfile.mkdtemp(prefix=self._prefix)
+ return self._temp_dir
+
+ def __exit__(self, *_):
+ if self._delete:
+ shutil.rmtree(self._temp_dir, ignore_errors=True)
+
class ChangedWorkingDirectory(object):
- def __init__(self, working_directory, quiet=False):
- self._quiet = quiet
- self._working_directory = working_directory
- def __enter__(self):
- self._old_cwd = os.getcwd()
- if not self._quiet:
- print('Enter directory:', self._working_directory)
- os.chdir(self._working_directory)
+ def __init__(self, working_directory, quiet=False):
+ self._quiet = quiet
+ self._working_directory = working_directory
- def __exit__(self, *_):
- if not self._quiet:
- print('Enter directory:', self._old_cwd)
- os.chdir(self._old_cwd)
+ def __enter__(self):
+ self._old_cwd = os.getcwd()
+ if not self._quiet:
+ print('Enter directory:', self._working_directory)
+ os.chdir(self._working_directory)
+
+ def __exit__(self, *_):
+ if not self._quiet:
+ print('Enter directory:', self._old_cwd)
+ os.chdir(self._old_cwd)
+
# Reading Android CTS test_result.xml
+
class CtsModule(object):
- def __init__(self, module_name):
- self.name = module_name
+
+ def __init__(self, module_name):
+ self.name = module_name
+
class CtsTestCase(object):
- def __init__(self, test_case_name):
- self.name = test_case_name
+
+ def __init__(self, test_case_name):
+ self.name = test_case_name
+
class CtsTest(object):
- def __init__(self, test_name, outcome):
- self.name = test_name
- self.outcome = outcome
+
+ def __init__(self, test_name, outcome):
+ self.name = test_name
+ self.outcome = outcome
+
# Generator yielding CtsModule, CtsTestCase or CtsTest from
# reading through a CTS test_result.xml file.
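+# Matched lines look like, e.g. (illustrative):
+#   <Module name="CtsExampleTestCases">
+#   <TestCase name="ExampleTest">
+#   <Test result="pass" name="testFoo">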
def read_cts_test_result(file_xml):
- re_module = re.compile('<Module name="([^"]*)"')
- re_test_case = re.compile('<TestCase name="([^"]*)"')
- re_test = re.compile('<Test result="(pass|fail)" name="([^"]*)"')
- with open(file_xml) as f:
- for line in f:
- m = re_module.search(line)
- if m:
- yield CtsModule(m.groups()[0])
- continue
- m = re_test_case.search(line)
- if m:
- yield CtsTestCase(m.groups()[0])
- continue
- m = re_test.search(line)
- if m:
- outcome = m.groups()[0]
- assert outcome in ['fail', 'pass']
- yield CtsTest(m.groups()[1], outcome == 'pass')
+ re_module = re.compile('<Module name="([^"]*)"')
+ re_test_case = re.compile('<TestCase name="([^"]*)"')
+ re_test = re.compile('<Test result="(pass|fail)" name="([^"]*)"')
+ with open(file_xml) as f:
+ for line in f:
+ m = re_module.search(line)
+ if m:
+ yield CtsModule(m.groups()[0])
+ continue
+ m = re_test_case.search(line)
+ if m:
+ yield CtsTestCase(m.groups()[0])
+ continue
+ m = re_test.search(line)
+ if m:
+ outcome = m.groups()[0]
+ assert outcome in ['fail', 'pass']
+ yield CtsTest(m.groups()[1], outcome == 'pass')
+
def grep_memoryuse(logfile):
- re_vmhwm = re.compile('^VmHWM:[ \t]*([0-9]+)[ \t]*([a-zA-Z]*)')
- result = None
- with open(logfile) as f:
- for line in f:
- m = re_vmhwm.search(line)
- if m:
- groups = m.groups()
- s = len(groups)
- if s >= 1:
- result = int(groups[0])
- if s >= 2:
- unit = groups[1]
- if unit == 'kB':
- result *= 1024
- elif unit != '':
- raise Exception('Unrecognized unit in memory usage log: {}'
- .format(unit))
- if result is None:
- raise Exception('No memory usage found in log: {}'.format(logfile))
- return result
+ re_vmhwm = re.compile('^VmHWM:[ \t]*([0-9]+)[ \t]*([a-zA-Z]*)')
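+    # Matches /proc/<pid>/status lines such as 'VmHWM:  123456 kB'
+    # (illustrative value); a 'kB' unit scales the result to bytes below.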
+ result = None
+ with open(logfile) as f:
+ for line in f:
+ m = re_vmhwm.search(line)
+ if m:
+ groups = m.groups()
+ s = len(groups)
+ if s >= 1:
+ result = int(groups[0])
+ if s >= 2:
+ unit = groups[1]
+ if unit == 'kB':
+ result *= 1024
+ elif unit != '':
+ raise Exception(
+ 'Unrecognized unit in memory usage log: {}'.
+ format(unit))
+ if result is None:
+ raise Exception('No memory usage found in log: {}'.format(logfile))
+ return result
+
# Return a dictionary: {segment_name -> segments_size}
def getDexSegmentSizes(dex_files):
- assert len(dex_files) > 0
- cmd = [jdk.GetJavaExecutable(), '-jar', R8_JAR, 'dexsegments']
- cmd.extend(dex_files)
- PrintCmd(cmd)
- output = subprocess.check_output(cmd).decode('utf-8')
+ assert len(dex_files) > 0
+ cmd = [jdk.GetJavaExecutable(), '-jar', R8_JAR, 'dexsegments']
+ cmd.extend(dex_files)
+ PrintCmd(cmd)
+ output = subprocess.check_output(cmd).decode('utf-8')
- matches = DEX_SEGMENTS_RESULT_PATTERN.findall(output)
+ matches = DEX_SEGMENTS_RESULT_PATTERN.findall(output)
- if matches is None or len(matches) == 0:
- raise Exception('DexSegments failed to return any output for' \
- ' these files: {}'.format(dex_files))
+ if matches is None or len(matches) == 0:
+ raise Exception('DexSegments failed to return any output for' \
+ ' these files: {}'.format(dex_files))
- result = {}
+ result = {}
- for match in matches:
- result[match[0]] = int(match[1])
+ for match in matches:
+ result[match[0]] = int(match[1])
- return result
+ return result
+
# Return a dictionary: {segment_name -> segments_size}
def getCfSegmentSizes(cfFile):
- cmd = [jdk.GetJavaExecutable(),
- '-cp',
- CF_SEGMENTS_TOOL,
- 'com.android.tools.r8.cf_segments.MeasureLib',
- cfFile]
- PrintCmd(cmd)
- output = subprocess.check_output(cmd).decode('utf-8')
+ cmd = [
+ jdk.GetJavaExecutable(), '-cp', CF_SEGMENTS_TOOL,
+ 'com.android.tools.r8.cf_segments.MeasureLib', cfFile
+ ]
+ PrintCmd(cmd)
+ output = subprocess.check_output(cmd).decode('utf-8')
- matches = DEX_SEGMENTS_RESULT_PATTERN.findall(output)
+ matches = DEX_SEGMENTS_RESULT_PATTERN.findall(output)
- if matches is None or len(matches) == 0:
- raise Exception('CfSegments failed to return any output for' \
- ' the file: ' + cfFile)
+ if matches is None or len(matches) == 0:
+ raise Exception('CfSegments failed to return any output for' \
+ ' the file: ' + cfFile)
- result = {}
+ result = {}
- for match in matches:
- result[match[0]] = int(match[1])
+ for match in matches:
+ result[match[0]] = int(match[1])
- return result
+ return result
+
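+# Example (illustrative): get_maven_path('r8', '8.2.42') returns
+# 'com/android/tools/r8/8.2.42' on POSIX systems.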
def get_maven_path(artifact, version):
- return os.path.join('com', 'android', 'tools', artifact, version)
+ return os.path.join('com', 'android', 'tools', artifact, version)
+
def print_cfsegments(prefix, cf_files):
- for cf_file in cf_files:
- for segment_name, size in getCfSegmentSizes(cf_file).items():
- print('{}-{}(CodeSize): {}'
- .format(prefix, segment_name, size))
+ for cf_file in cf_files:
+ for segment_name, size in getCfSegmentSizes(cf_file).items():
+ print('{}-{}(CodeSize): {}'.format(prefix, segment_name, size))
+
def print_dexsegments(prefix, dex_files, worker_id=None):
- for segment_name, size in getDexSegmentSizes(dex_files).items():
- print_thread(
- '{}-{}(CodeSize): {}'.format(prefix, segment_name, size),
- worker_id)
+ for segment_name, size in getDexSegmentSizes(dex_files).items():
+ print_thread('{}-{}(CodeSize): {}'.format(prefix, segment_name, size),
+ worker_id)
+
# Ensure that we are not benchmarking with a google jvm.
def check_java_version():
- cmd= [jdk.GetJavaExecutable(), '-version']
- output = subprocess.check_output(cmd, stderr = subprocess.STDOUT).decode('utf-8')
- m = re.search('openjdk version "([^"]*)"', output)
- if m is None:
- raise Exception("Can't check java version: no version string in output"
- " of 'java -version': '{}'".format(output))
- version = m.groups(0)[0]
- m = re.search('google', version)
- if m is not None:
- raise Exception("Do not use google JVM for benchmarking: " + version)
+ cmd = [jdk.GetJavaExecutable(), '-version']
+ output = subprocess.check_output(cmd,
+ stderr=subprocess.STDOUT).decode('utf-8')
+ m = re.search('openjdk version "([^"]*)"', output)
+ if m is None:
+ raise Exception("Can't check java version: no version string in output"
+ " of 'java -version': '{}'".format(output))
+ version = m.groups(0)[0]
+ m = re.search('google', version)
+ if m is not None:
+ raise Exception("Do not use google JVM for benchmarking: " + version)
+
def get_android_jar_dir(api):
- return os.path.join(REPO_ROOT, ANDROID_JAR_DIR.format(api=api))
+ return os.path.join(REPO_ROOT, ANDROID_JAR_DIR.format(api=api))
+
def get_android_jar(api):
- return os.path.join(REPO_ROOT, ANDROID_JAR.format(api=api))
+ return os.path.join(REPO_ROOT, ANDROID_JAR.format(api=api))
+
def is_bot():
- return 'SWARMING_BOT_ID' in os.environ
+ return 'SWARMING_BOT_ID' in os.environ
+
def uncompressed_size(path):
- return sum(z.file_size for z in zipfile.ZipFile(path).infolist())
+ return sum(z.file_size for z in zipfile.ZipFile(path).infolist())
+
def desugar_configuration_name_and_version(configuration, is_for_maven):
- name = 'desugar_jdk_libs_configuration'
- with open(configuration, 'r') as f:
- configuration_json = json.loads(f.read())
- configuration_format_version = \
- configuration_json.get('configuration_format_version')
- if (not configuration_format_version):
- raise Exception(
- 'No "configuration_format_version" found in ' + configuration)
- if (configuration_format_version != 3
- and configuration_format_version != 5
- and configuration_format_version != (200 if is_for_maven else 100)):
- raise Exception(
- 'Unsupported "configuration_format_version" "%s" found in %s'
- % (configuration_format_version, configuration))
- version = configuration_json.get('version')
- if not version:
- if configuration_format_version == (200 if is_for_maven else 100):
- identifier = configuration_json.get('identifier')
- if not identifier:
- raise Exception(
- 'No "identifier" found in ' + configuration)
- identifier_split = identifier.split(':')
- if (len(identifier_split) != 3):
- raise Exception('Invalid "identifier" found in ' + configuration)
- if (identifier_split[0] != 'com.tools.android'):
- raise Exception('Invalid "identifier" found in ' + configuration)
- if not identifier_split[1].startswith('desugar_jdk_libs_configuration'):
- raise Exception('Invalid "identifier" found in ' + configuration)
- name = identifier_split[1]
- version = identifier_split[2]
- else:
- raise Exception(
- 'No "version" found in ' + configuration)
- else:
- if configuration_format_version == (200 if is_for_maven else 100):
- raise Exception(
- 'No "version" expected in ' + configuration)
- # Disallow prerelease, as older R8 versions cannot parse it causing hard to
- # understand errors.
- check_basic_semver_version(version, 'in ' + configuration, allowPrerelease = False)
- return (name, version)
+ name = 'desugar_jdk_libs_configuration'
+ with open(configuration, 'r') as f:
+ configuration_json = json.loads(f.read())
+ configuration_format_version = \
+ configuration_json.get('configuration_format_version')
+ if (not configuration_format_version):
+ raise Exception('No "configuration_format_version" found in ' +
+ configuration)
+ if (configuration_format_version != 3 and
+ configuration_format_version != 5 and
+ configuration_format_version != (200 if is_for_maven else 100)):
+ raise Exception(
+ 'Unsupported "configuration_format_version" "%s" found in %s' %
+ (configuration_format_version, configuration))
+ version = configuration_json.get('version')
+ if not version:
+ if configuration_format_version == (200 if is_for_maven else 100):
+ identifier = configuration_json.get('identifier')
+ if not identifier:
+ raise Exception('No "identifier" found in ' + configuration)
+ identifier_split = identifier.split(':')
+ if (len(identifier_split) != 3):
+ raise Exception('Invalid "identifier" found in ' +
+ configuration)
+ if (identifier_split[0] != 'com.tools.android'):
+ raise Exception('Invalid "identifier" found in ' +
+ configuration)
+ if not identifier_split[1].startswith(
+ 'desugar_jdk_libs_configuration'):
+ raise Exception('Invalid "identifier" found in ' +
+ configuration)
+ name = identifier_split[1]
+ version = identifier_split[2]
+ else:
+ raise Exception('No "version" found in ' + configuration)
+ else:
+ if configuration_format_version == (200 if is_for_maven else 100):
+ raise Exception('No "version" expected in ' + configuration)
+    # Disallow prerelease, as older R8 versions cannot parse it, causing
+    # hard-to-understand errors.
+ check_basic_semver_version(version,
+ 'in ' + configuration,
+ allowPrerelease=False)
+ return (name, version)
+
class SemanticVersion:
- def __init__(self, major, minor, patch, prerelease):
- self.major = major
- self.minor = minor
- self.patch = patch
- self.prerelease = prerelease
- # Build metadata currently not suppported
- def larger_than(self, other):
- if self.prerelease or other.prerelease:
- raise Exception("Comparison with prerelease not implemented")
- if self.major > other.major:
- return True
- if self.major == other.major and self.minor > other.minor:
- return True
- if self.patch:
- return (self.major == other.major
- and self.minor == other.minor
- and self.patch > other.patch)
- else:
- return False
+ def __init__(self, major, minor, patch, prerelease):
+ self.major = major
+ self.minor = minor
+ self.patch = patch
+ self.prerelease = prerelease
+        # Build metadata is currently not supported.
+
+ def larger_than(self, other):
+ if self.prerelease or other.prerelease:
+ raise Exception("Comparison with prerelease not implemented")
+ if self.major > other.major:
+ return True
+ if self.major == other.major and self.minor > other.minor:
+ return True
+ if self.patch:
+ return (self.major == other.major and self.minor == other.minor and
+ self.patch > other.patch)
+ else:
+ return False
# Check that the passed string is formatted as a basic semver version (x.y.z or x.y.z-prerelease
# depending on the value of allowPrerelease).
# See https://semver.org/. The regexp parts used are not all compliant with what is suggested
# on https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string.
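+# For example, with the default components=3: '1.2.3' is accepted, '1.2' is
+# not, and '1.2.3-alpha.1' is accepted only when allowPrerelease is True.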
-def check_basic_semver_version(version, error_context = '', components = 3, allowPrerelease = False):
+def check_basic_semver_version(version,
+ error_context='',
+ components=3,
+ allowPrerelease=False):
regexp = '^'
for x in range(components):
- regexp += '([0-9]+)'
- if x < components - 1:
- regexp += '\\.'
+ regexp += '([0-9]+)'
+ if x < components - 1:
+ regexp += '\\.'
if allowPrerelease:
- # This part is from
- # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
- regexp += r'(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?'
+ # This part is from
+ # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
+ regexp += r'(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?'
regexp += '$'
reg = re.compile(regexp)
match = reg.match(version)
if not match:
- raise Exception("Invalid version '"
- + version
- + "'"
- + (' ' + error_context) if len(error_context) > 0 else '')
+ raise Exception("Invalid version '" + version + "'" +
+ (' ' + error_context) if len(error_context) > 0 else '')
if components == 2:
- return SemanticVersion(int(match.group(1)), int(match.group(2)), None, None)
+ return SemanticVersion(int(match.group(1)), int(match.group(2)), None,
+ None)
elif components == 3 and not allowPrerelease:
- return SemanticVersion(
- int(match.group(1)), int(match.group(2)), int(match.group(3)), None)
+ return SemanticVersion(int(match.group(1)), int(match.group(2)),
+ int(match.group(3)), None)
elif components == 3 and allowPrerelease:
- return SemanticVersion(
- int(match.group(1)), int(match.group(2)), int(match.group(3)), match.group('prerelease'))
+ return SemanticVersion(int(match.group(1)), int(match.group(2)),
+ int(match.group(3)), match.group('prerelease'))
else:
- raise Exception('Argument "components" must be 2 or 3')
+ raise Exception('Argument "components" must be 2 or 3')
diff --git a/tools/utils_aosp.py b/tools/utils_aosp.py
index 216e43d..e8b0ffb 100644
--- a/tools/utils_aosp.py
+++ b/tools/utils_aosp.py
@@ -3,7 +3,6 @@
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
-
from os.path import join
from subprocess import check_call
@@ -17,19 +16,22 @@
DEFAULT_ROOT = join(utils.REPO_ROOT, 'build', 'aosp')
+
def add_root_argument(parser):
- parser.add_argument('--aosp-root',
- help='Root of the AOSP checkout. ' +
- 'Defaults to ' + DEFAULT_ROOT +'.',
- default=DEFAULT_ROOT)
+ parser.add_argument('--aosp-root',
+ help='Root of the AOSP checkout. ' + 'Defaults to ' +
+ DEFAULT_ROOT + '.',
+ default=DEFAULT_ROOT)
+
def add_common_arguments(parser):
- add_root_argument(parser)
- parser.add_argument('--lunch',
- help='Build menu. ' +
- 'Defaults to ' + DEFAULT_LUNCH + '.',
- default=DEFAULT_LUNCH)
+ add_root_argument(parser)
+ parser.add_argument('--lunch',
+ help='Build menu. ' + 'Defaults to ' + DEFAULT_LUNCH +
+ '.',
+ default=DEFAULT_LUNCH)
+
def run_through_aosp_helper(lunch, args, cwd):
- args[0:0] = [AOSP_HELPER_SH, lunch]
- check_call(args, cwd = cwd)
+ args[0:0] = [AOSP_HELPER_SH, lunch]
+ check_call(args, cwd=cwd)
diff --git a/tools/youtube_data.py b/tools/youtube_data.py
index 3631691..37385c8 100644
--- a/tools/youtube_data.py
+++ b/tools/youtube_data.py
@@ -15,53 +15,65 @@
LATEST_VERSION = '17.19'
VERSIONS = {
- '17.19': {
- 'deploy' : {
- 'sanitize_libraries': False,
- 'inputs': ['%s_deploy.jar' % V17_19_PREFIX],
- 'libraries' : [
- os.path.join(
- V17_19_BASE,
- 'legacy_YouTubeRelease_combined_library_jars_filtered.jar')],
- 'pgconf': [
- '%s_proguard.config' % V17_19_PREFIX,
- '%s_proguard_extra.config' % V17_19_PREFIX,
- '%s/proguardsettings/YouTubeRelease_proguard.config' % utils.THIRD_PARTY,
- utils.IGNORE_WARNINGS_RULES],
- 'min-api' : ANDROID_M_API,
- 'system-properties': [
- # TODO(b/235169948): Reenable -checkenumunboxed.
- # '-Dcom.android.tools.r8.experimental.enablecheckenumunboxed=1',
- '-Dcom.android.tools.r8.experimental.enableconvertchecknotnull=1'],
- 'android_java8_libs': {
- 'config': '%s/desugar_jdk_libs/full_desugar_jdk_libs.json' % V17_19_BASE,
- # Intentionally not adding desugar_jdk_libs_configuration.jar since it
- # is part of jdk_libs_to_desugar.jar in YouTube 17.19.
- 'program': ['%s/desugar_jdk_libs/jdk_libs_to_desugar.jar' % V17_19_BASE],
- 'library': '%s/android_jar/lib-v33/android.jar' % utils.THIRD_PARTY,
- 'pgconf': [
- '%s/desugar_jdk_libs/base.pgcfg' % V17_19_BASE,
- '%s/desugar_jdk_libs/minify_desugar_jdk_libs.pgcfg' % V17_19_BASE
- ]
- }
+ '17.19': {
+ 'deploy': {
+ 'sanitize_libraries': False,
+ 'inputs': ['%s_deploy.jar' % V17_19_PREFIX],
+ 'libraries': [
+ os.path.join(
+ V17_19_BASE,
+ 'legacy_YouTubeRelease_combined_library_jars_filtered.jar')
+ ],
+ 'pgconf': [
+ '%s_proguard.config' % V17_19_PREFIX,
+ '%s_proguard_extra.config' % V17_19_PREFIX,
+ '%s/proguardsettings/YouTubeRelease_proguard.config' %
+ utils.THIRD_PARTY, utils.IGNORE_WARNINGS_RULES
+ ],
+ 'min-api': ANDROID_M_API,
+ 'system-properties': [
+ # TODO(b/235169948): Reenable -checkenumunboxed.
+ # '-Dcom.android.tools.r8.experimental.enablecheckenumunboxed=1',
+ '-Dcom.android.tools.r8.experimental.enableconvertchecknotnull=1'
+ ],
+ 'android_java8_libs': {
+ 'config':
+ '%s/desugar_jdk_libs/full_desugar_jdk_libs.json' %
+ V17_19_BASE,
+ # Intentionally not adding desugar_jdk_libs_configuration.jar since it
+ # is part of jdk_libs_to_desugar.jar in YouTube 17.19.
+ 'program': [
+ '%s/desugar_jdk_libs/jdk_libs_to_desugar.jar' % V17_19_BASE
+ ],
+ 'library':
+ '%s/android_jar/lib-v33/android.jar' % utils.THIRD_PARTY,
+ 'pgconf': [
+ '%s/desugar_jdk_libs/base.pgcfg' % V17_19_BASE,
+ '%s/desugar_jdk_libs/minify_desugar_jdk_libs.pgcfg' %
+ V17_19_BASE
+ ]
+ }
+ },
},
- },
}
+
def GetLatestVersion():
- return LATEST_VERSION
+ return LATEST_VERSION
+
def GetName():
- return 'youtube'
+ return 'youtube'
+
def GetMemoryData(version):
- assert version == '16.20'
- return {
- 'find-xmx-min': 3150,
- 'find-xmx-max': 3300,
- 'find-xmx-range': 64,
- 'oom-threshold': 3100,
- # TODO(b/143431825): Youtube can OOM randomly in memory configurations
- # that should work.
- 'skip-find-xmx-max': True,
- }
+ assert version == '16.20'
+ return {
+ 'find-xmx-min': 3150,
+ 'find-xmx-max': 3300,
+ 'find-xmx-range': 64,
+ 'oom-threshold': 3100,
+ # TODO(b/143431825): Youtube can OOM randomly in memory configurations
+ # that should work.
+ 'skip-find-xmx-max': True,
+ }
diff --git a/tools/zip_utils.py b/tools/zip_utils.py
index 219b443..21077be 100644
--- a/tools/zip_utils.py
+++ b/tools/zip_utils.py
@@ -10,27 +10,32 @@
import utils
+
def add_file_to_zip(file, destination, zip_file):
- with zipfile.ZipFile(zip_file, 'a') as zip:
- zip.write(file, destination)
+ with zipfile.ZipFile(zip_file, 'a') as zip:
+ zip.write(file, destination)
+
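+# Example (illustrative): extract_all_that_matches('app.zip', out_dir,
+# lambda name: name.endswith('.dex')) extracts only the .dex entries.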
def extract_all_that_matches(zip_file, destination, predicate):
- with zipfile.ZipFile(zip_file) as zip:
- names_to_extract = [name for name in zip.namelist() if predicate(name)]
- zip.extractall(path=destination, members=names_to_extract)
- return names_to_extract
+ with zipfile.ZipFile(zip_file) as zip:
+ names_to_extract = [name for name in zip.namelist() if predicate(name)]
+ zip.extractall(path=destination, members=names_to_extract)
+ return names_to_extract
+
def extract_member(zip_file, member, destination):
- with zipfile.ZipFile(zip_file) as zip:
- with utils.TempDir() as temp:
- zip.extract(member, path=temp)
- shutil.move(os.path.join(temp, member), destination)
+ with zipfile.ZipFile(zip_file) as zip:
+ with utils.TempDir() as temp:
+ zip.extract(member, path=temp)
+ shutil.move(os.path.join(temp, member), destination)
+
def get_names_that_matches(zip_file, predicate):
- with zipfile.ZipFile(zip_file) as zip:
- return [name for name in zip.namelist() if predicate(name)]
+ with zipfile.ZipFile(zip_file) as zip:
+ return [name for name in zip.namelist() if predicate(name)]
+
def remove_files_from_zip(files, zip_file):
- assert os.path.exists(zip_file)
- cmd = ['zip', '-d', zip_file] + files
- subprocess.run(cmd)
+ assert os.path.exists(zip_file)
+ cmd = ['zip', '-d', zip_file] + files
+ subprocess.run(cmd)