Store results from perf.py as a JSON file

Change-Id: I9b03ffacea4dff35bafc326c1db2532ac7784a3a
diff --git a/tools/perf.py b/tools/perf.py
index a1de1b9..9827bd0 100755
--- a/tools/perf.py
+++ b/tools/perf.py
@@ -4,14 +4,22 @@
 # BSD-style license that can be found in the LICENSE file.
 
 import argparse
+import json
 import os
+import shutil
 import subprocess
 import sys
 
 import utils
 
-DEFAULT_ITERATIONS = 10
 BUCKET = "r8-perf-results"
+SAMPLE_BENCHMARK_RESULT_JSON = {
+    'benchmark_name': '<benchmark_name>',
+    'results': [{
+        'code_size': 0,
+        'runtime': 0
+    }]
+}
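+# For illustration, a merged result for two iterations could look like this
+# (hypothetical values):
+#   {
+#       'benchmark_name': 'NowInAndroidApp',
+#       'results': [
+#           {'code_size': 5102196, 'runtime': 39154},
+#           {'code_size': 5102196, 'runtime': 39012}
+#       ]
+#   }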
 
 # Result structure on cloud storage
 # gs://bucket/benchmark_results/APP/TARGET/GIT_HASH/results
@@ -19,68 +27,121 @@
-# where results simply contains the result lines and
+# where results contains the per-iteration results as JSON and
 # meta contains information about the execution (machine)
 
+
 def ParseOptions():
     result = argparse.ArgumentParser()
     result.add_argument('--app',
-                        help='Specific app to measure.',
-                        default='NowInAndroidApp')
+                        help='Specific app(s) to measure.',
+                        action='append')
+    result.add_argument('--iterations',
+                        help='How many iterations to run.',
+                        type=int,
+                        default=10)
+    result.add_argument('--outdir',
+                        help='Output directory for running locally.')
     result.add_argument('--target',
                         help='Specific target to run on.',
                         default='r8-full',
                         choices=['d8', 'r8-full', 'r8-force', 'r8-compat'])
-    result.add_argument('--iterations',
-                        help='How many iterations to run.',
-                        type=int,
-                        default=DEFAULT_ITERATIONS)
-    return result.parse_known_args()
+    result.add_argument('--verbose',
+                        help='Enable verbose logging.',
+                        action='store_true',
+                        default=False)
+    options, args = result.parse_known_args()
+    options.apps = options.app or ['NowInAndroidApp', 'TiviApp']
+    options.quiet = not options.verbose
+    del options.app
+    return options, args
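+# For example (illustrative): `tools/perf.py --app TiviApp --app NowInAndroidApp`
+# yields options.apps == ['TiviApp', 'NowInAndroidApp'], while omitting --app
+# falls back to the two defaults above.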
 
 
-def ParseOutput(output, options, log_array):
-    for line in output.decode('utf-8').splitlines():
-        print("   -- " + line)
-        # Output lines look like:
-        #    Benchmark results for NowInAndroidApp on target r8-full
-        #      warmup reporting mode: average
-        #      warmup iterations: 1
-        #      warmup total time: 58580 ms
-        #      benchmark reporting mode: average
-        #      benchmark iterations: 1
-        #      benchmark total time: 39613 ms
-        #    NowInAndroidApp(RunTimeRaw): 39154 ms
-        #    NowInAndroidApp(CodeSize): 5102196
-        if line.startswith(options.app + "("):
-            log_array.append(line)
+def MergeBenchmarkResultJsonFiles(benchmark_result_json_files):
+    merged_benchmark_result_json = None
+    for benchmark_result_json_file in benchmark_result_json_files:
+        benchmark_result_json = ParseBenchmarkResultJsonFile(
+            benchmark_result_json_file)
+        if merged_benchmark_result_json is None:
+            merged_benchmark_result_json = benchmark_result_json
+        else:
+            MergeBenchmarkResultJsonFile(merged_benchmark_result_json,
+                                         benchmark_result_json)
+    return merged_benchmark_result_json
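+# Usage sketch (hypothetical paths): given per-iteration files written via
+# run_benchmark.py's --output flag,
+#   merged = MergeBenchmarkResultJsonFiles(
+#       ['/tmp/result_file_0', '/tmp/result_file_1'])
+# returns a single dict whose 'results' list contains both iterations.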
 
 
-def GetGSLocation(app, target, filename):
-    return "gs://%s/%s/%s/%s/%s" % (BUCKET, app, target,
-                                    utils.get_HEAD_sha1(), filename)
+def MergeBenchmarkResultJsonFile(merged_benchmark_result_json,
+                                 benchmark_result_json):
+    assert benchmark_result_json.keys() == SAMPLE_BENCHMARK_RESULT_JSON.keys()
+    assert merged_benchmark_result_json[
+        'benchmark_name'] == benchmark_result_json['benchmark_name']
+    merged_benchmark_result_json['results'].extend(
+        benchmark_result_json['results'])
+
+
+def ParseBenchmarkResultJsonFile(result_json_file):
+    with open(result_json_file, 'r') as f:
+        return json.load(f)
+
+
+def GetArtifactLocation(app, target, filename):
+    return f'{app}/{target}/{utils.get_HEAD_sha1()}/{filename}'
+
+
+def GetGSLocation(filename):
+    return f'gs://{BUCKET}/{filename}'
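+# For example (with a hypothetical HEAD sha1 of 'abc123'):
+#   GetGSLocation(GetArtifactLocation('NowInAndroidApp', 'r8-full', 'results'))
+# evaluates to 'gs://r8-perf-results/NowInAndroidApp/r8-full/abc123/results'.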
+
+
+def ArchiveOutputFile(file, dest, options):
+    if options.outdir:
+        dest_in_outdir = os.path.join(options.outdir, dest)
+        os.makedirs(os.path.dirname(dest_in_outdir), exist_ok=True)
+        shutil.copyfile(file, dest_in_outdir)
+    else:
+        utils.upload_file_to_cloud_storage(file, GetGSLocation(dest))
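+# Sketch of the two archiving modes: with --outdir /tmp/perf (hypothetical
+# path) the file is copied to /tmp/perf/<app>/<target>/<sha1>/<filename>;
+# without --outdir it is uploaded to the gs:// location above.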
+
 
 def main():
-    (options, args) = ParseOptions()
-    cmd = ['tools/run_benchmark.py', '--target', options.target,
-           '--benchmark', options.app]
-    log_array = []
-    # Build and warmup
-    output = subprocess.check_output(cmd)
-    ParseOutput(output, options, log_array)
-    cmd.append('--no-build')
-    for i in range(options.iterations):
-        output = subprocess.check_output(cmd)
-        ParseOutput(output, options, log_array)
+    options, args = ParseOptions()
     with utils.TempDir() as temp:
-        result_file = os.path.join(temp, "result_file")
-        with open(result_file, 'w') as f:
-            for l in log_array:
-                f.write(l + "\n")
-        utils.upload_file_to_cloud_storage(result_file,
-            GetGSLocation(options.app, options.target, 'results'))
-        if os.environ.get('SWARMING_BOT_ID'):
-            meta_file = os.path.join(temp, "meta")
-            with open(meta_file, 'w') as f:
-                f.write("Produced by: " + os.environ.get('SWARMING_BOT_ID'))
-            utils.upload_file_to_cloud_storage(meta_file,
-                GetGSLocation(options.app, options.target, 'meta'))
+        for app in options.apps:
+            cmd = [
+                'tools/run_benchmark.py', '--benchmark', app, '--iterations',
+                '1', '--target', options.target
+            ]
+
+            # Build and warmup
+            utils.Print(f'Preparing {app}', quiet=options.quiet)
+            subprocess.check_output(cmd)
+
+            # Run benchmark.
+            benchmark_result_json_files = []
+            for i in range(options.iterations):
+                utils.Print(f'Benchmarking {app} ({i+1}/{options.iterations})',
+                            quiet=options.quiet)
+                benchmark_result_file = os.path.join(temp, f'result_file_{i}')
+                iteration_cmd = cmd + [
+                    '--output', benchmark_result_file, '--no-build'
+                ]
+                subprocess.check_output(iteration_cmd)
+                benchmark_result_json_files.append(benchmark_result_file)
+
+            # Merge results and write output.
+            result_file = os.path.join(temp, 'result_file')
+            with open(result_file, 'w') as f:
+                json.dump(
+                    MergeBenchmarkResultJsonFiles(benchmark_result_json_files),
+                    f)
+            ArchiveOutputFile(
+                result_file, GetArtifactLocation(app, options.target,
+                                                 'results'), options)
+
+            # Write metadata.
+            if os.environ.get('SWARMING_BOT_ID'):
+                meta_file = os.path.join(temp, "meta")
+                with open(meta_file, 'w') as f:
+                    f.write("Produced by: " + os.environ.get('SWARMING_BOT_ID'))
+                ArchiveOutputFile(
+                    meta_file, GetArtifactLocation(app, options.target, 'meta'),
+                    options)
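+# Example invocation (illustrative):
+#   tools/perf.py --target r8-full --iterations 3 --outdir /tmp/perf
+# benchmarks each default app three times and stores the merged JSON locally.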
 
 
 if __name__ == '__main__':
diff --git a/tools/utils.py b/tools/utils.py
index 249c10b..4bd6910 100644
--- a/tools/utils.py
+++ b/tools/utils.py
@@ -68,8 +68,10 @@
 R8_SRC_JAR = os.path.join(LIBS, 'r8-src.jar')
 R8LIB_EXCLUDE_DEPS_JAR = os.path.join(LIBS, 'r8lib-exclude-deps.jar')
 R8_FULL_EXCLUDE_DEPS_JAR = os.path.join(LIBS, 'r8-full-exclude-deps.jar')
-THREADING_MODULE_BLOCKING_JAR = os.path.join(LIBS, 'threading-module-blocking.jar')
-THREADING_MODULE_SINGLE_THREADED_JAR = os.path.join(LIBS, 'threading-module-single-threaded.jar')
+THREADING_MODULE_BLOCKING_JAR = os.path.join(LIBS,
+                                             'threading-module-blocking.jar')
+THREADING_MODULE_SINGLE_THREADED_JAR = os.path.join(
+    LIBS, 'threading-module-single-threaded.jar')
 R8_TESTS_JAR = os.path.join(LIBS, 'r8tests.jar')
 R8_TESTBASE_JAR = os.path.join(LIBS, 'r8test_base.jar')
 R8LIB_TESTBASE_JAR = os.path.join(LIBS, 'r8libtestbase-cf.jar')
@@ -82,7 +84,8 @@
 LIBRARY_DESUGAR_CONVERSIONS_ZIP = os.path.join(
     CUSTOM_CONVERSION_DIR, 'library_desugar_conversions.jar')
 KEEPANNO_ANNOTATIONS_JAR = os.path.join(LIBS, 'keepanno-annotations.jar')
-KEEPANNO_ANNOTATIONS_DOC = os.path.join('d8_r8', 'keepanno', 'build', 'docs', 'javadoc')
+KEEPANNO_ANNOTATIONS_DOC = os.path.join('d8_r8', 'keepanno', 'build', 'docs',
+                                        'javadoc')
 
 DESUGAR_CONFIGURATION = os.path.join('src', 'library_desugar',
                                      'desugar_jdk_libs.json')
@@ -397,6 +400,7 @@
     PrintCmd(cmd)
     subprocess.check_call(cmd)
 
+
 def check_dir_args(source, destination):
     # We require that the dirname of the paths coincide, e.g., src/dirname and dst/dirname
     # The target is then stripped so the upload command will be: cp -R src/dirname dst/
@@ -407,10 +411,11 @@
             f'{source} and {destination}')
     if len(destination_parent.strip()) == 0:
         raise Exception(
-            'Attempt to upload directory to empty destination directory: '
-            + destination)
+            'Attempt to upload directory to empty destination directory: ' +
+            destination)
     return destination_parent
 
+
 def upload_directory_to_cloud_storage(source, destination, parallel=True):
     destination_parent = check_dir_args(source, destination)
     cmd = [get_gsutil()]
@@ -421,6 +426,7 @@
     PrintCmd(cmd)
     subprocess.check_call(cmd)
 
+
 def rsync_directory_to_cloud_storage(source, destination, parallel=True):
     check_dir_args(source, destination)
     cmd = [get_gsutil()]
@@ -431,6 +437,7 @@
     PrintCmd(cmd)
     subprocess.check_call(cmd)
 
+
 def delete_file_from_cloud_storage(destination):
     cmd = [get_gsutil(), 'rm', destination]
     PrintCmd(cmd)