Use StartupTime metric to measure benchmark warmup phase.

Change-Id: Iba88a6464afdf8e858a01e077418e0d77fe7d955
diff --git a/src/test/java/com/android/tools/r8/ToolHelper.java b/src/test/java/com/android/tools/r8/ToolHelper.java
index 89584dc..112e1b9 100644
--- a/src/test/java/com/android/tools/r8/ToolHelper.java
+++ b/src/test/java/com/android/tools/r8/ToolHelper.java
@@ -1292,7 +1292,7 @@
     R8.runForTesting(command.getInputApp(), internalOptions);
     if (benchmarkResults != null) {
       long end = System.nanoTime();
-      benchmarkResults.addRuntimeRawResult(end - start);
+      benchmarkResults.addRuntimeResult(end - start);
     }
   }
 
@@ -1385,7 +1385,7 @@
     D8.runForTesting(command.getInputApp(), options);
     if (benchmarkResults != null) {
       long end = System.nanoTime();
-      benchmarkResults.addRuntimeRawResult(end - start);
+      benchmarkResults.addRuntimeResult(end - start);
     }
     return compatSink.build();
   }
diff --git a/src/test/java/com/android/tools/r8/benchmarks/BenchmarkCollectionPrinter.java b/src/test/java/com/android/tools/r8/benchmarks/BenchmarkCollectionPrinter.java
index 77c8eed..c1fbad4 100644
--- a/src/test/java/com/android/tools/r8/benchmarks/BenchmarkCollectionPrinter.java
+++ b/src/test/java/com/android/tools/r8/benchmarks/BenchmarkCollectionPrinter.java
@@ -110,7 +110,6 @@
       throws IOException {
     // Common properties that must be consistent among all the benchmark variants.
     String suite = BenchmarkConfig.getCommonSuite(benchmarkVariants).getDartName();
-    boolean hasWarmup = BenchmarkConfig.getCommonTimeWarmupRuns(benchmarkVariants);
     List<String> metrics =
         new ArrayList<>(
             ListUtils.map(
@@ -118,12 +117,12 @@
     metrics.sort(String::compareTo);
     printSemi("final name = " + quote(benchmarkName));
     printSemi("final metrics = " + StringUtils.join(", ", metrics, BraceType.SQUARE));
-    printSemi("final group = new GroupBenchmark(name + \"Group\", metrics)");
+    printSemi("final benchmark = new StandardBenchmark(name, metrics)");
     for (BenchmarkConfig benchmark : benchmarkVariants) {
       scopeBraces(
           () -> {
             printSemi("final target = " + quote(benchmark.getTarget().getGolemName()));
-            printSemi("final options = group.addTargets(noImplementation, [target])");
+            printSemi("final options = benchmark.addTargets(noImplementation, [target])");
             printSemi("options.cpus = cpus");
             printSemi("options.isScript = true");
             printSemi("options.fromRevision = " + benchmark.getFromRevision());
@@ -148,13 +147,7 @@
             }
           });
     }
-    printSemi("group.addBenchmark(name, metrics)");
     printSemi(suite + ".addBenchmark(name)");
-    if (hasWarmup) {
-      printSemi("final warmupName = name + \"Warmup\"");
-      printSemi("group.addBenchmark(warmupName, [Metric.RunTimeRaw])");
-      printSemi(suite + ".addBenchmark(warmupName)");
-    }
   }
 
   private void addGolemResource(String name, Path tarball) throws IOException {
diff --git a/src/test/java/com/android/tools/r8/benchmarks/BenchmarkConfig.java b/src/test/java/com/android/tools/r8/benchmarks/BenchmarkConfig.java
index 1191758..6daaa7a 100644
--- a/src/test/java/com/android/tools/r8/benchmarks/BenchmarkConfig.java
+++ b/src/test/java/com/android/tools/r8/benchmarks/BenchmarkConfig.java
@@ -39,10 +39,6 @@
     return getConsistentRepresentative(variants).getSuite();
   }
 
-  public static boolean getCommonTimeWarmupRuns(List<BenchmarkConfig> variants) {
-    return getConsistentRepresentative(variants).hasTimeWarmupRuns();
-  }
-
   private static BenchmarkConfig getConsistentRepresentative(List<BenchmarkConfig> variants) {
     if (variants.isEmpty()) {
       throw new BenchmarkConfigError("Unexpected attempt to check consistency of empty collection");
@@ -64,8 +60,6 @@
     private Collection<BenchmarkDependency> dependencies = new ArrayList<>();
     private int fromRevision = -1;
 
-    private boolean timeWarmupRuns = false;
-
     private Builder() {}
 
     public BenchmarkConfig build() {
@@ -87,9 +81,6 @@
       if (fromRevision < 0) {
         throw new Unreachable("Benchmark must specify from which golem revision it is valid");
       }
-      if (timeWarmupRuns && !metrics.contains(BenchmarkMetric.RunTimeRaw)) {
-        throw new Unreachable("Benchmark with warmup time must measure RunTimeRaw");
-      }
       return new BenchmarkConfig(
           name,
           method,
@@ -97,7 +88,6 @@
           ImmutableSet.copyOf(metrics),
           suite,
           fromRevision,
-          timeWarmupRuns,
           dependencies);
     }
 
@@ -116,7 +106,7 @@
       return this;
     }
 
-    public Builder measureRunTimeRaw() {
+    public Builder measureRunTime() {
       metrics.add(BenchmarkMetric.RunTimeRaw);
       return this;
     }
@@ -126,6 +116,11 @@
       return this;
     }
 
+    public Builder measureWarmup() {
+      metrics.add(BenchmarkMetric.StartupTime);
+      return this;
+    }
+
     public Builder setSuite(BenchmarkSuite suite) {
       this.suite = suite;
       return this;
@@ -136,11 +131,6 @@
       return this;
     }
 
-    public Builder timeWarmupRuns() {
-      this.timeWarmupRuns = true;
-      return this;
-    }
-
     public Builder addDependency(BenchmarkDependency dependency) {
       dependencies.add(dependency);
       return this;
@@ -157,7 +147,6 @@
   private final BenchmarkSuite suite;
   private final Collection<BenchmarkDependency> dependencies;
   private final int fromRevision;
-  private final boolean timeWarmupRuns;
 
   private BenchmarkConfig(
       String name,
@@ -166,14 +155,12 @@
       ImmutableSet<BenchmarkMetric> metrics,
       BenchmarkSuite suite,
       int fromRevision,
-      boolean timeWarmupRuns,
       Collection<BenchmarkDependency> dependencies) {
     this.id = new BenchmarkIdentifier(name, target);
     this.method = benchmarkMethod;
     this.metrics = metrics;
     this.suite = suite;
     this.fromRevision = fromRevision;
-    this.timeWarmupRuns = timeWarmupRuns;
     this.dependencies = dependencies;
   }
 
@@ -185,13 +172,6 @@
     return id.getName();
   }
 
-  public String getWarmupName() {
-    if (!timeWarmupRuns) {
-      throw new BenchmarkConfigError("Invalid attempt at getting warmup benchmark name");
-    }
-    return getName() + "Warmup";
-  }
-
   public BenchmarkTarget getTarget() {
     return id.getTarget();
   }
@@ -213,7 +193,7 @@
   }
 
   public boolean hasTimeWarmupRuns() {
-    return timeWarmupRuns;
+    return hasMetric(BenchmarkMetric.StartupTime);
   }
 
   public Collection<BenchmarkDependency> getDependencies() {
diff --git a/src/test/java/com/android/tools/r8/benchmarks/BenchmarkMetric.java b/src/test/java/com/android/tools/r8/benchmarks/BenchmarkMetric.java
index 9483499..62bc0ba 100644
--- a/src/test/java/com/android/tools/r8/benchmarks/BenchmarkMetric.java
+++ b/src/test/java/com/android/tools/r8/benchmarks/BenchmarkMetric.java
@@ -5,7 +5,8 @@
 
 public enum BenchmarkMetric {
   RunTimeRaw,
-  CodeSize;
+  CodeSize,
+  StartupTime;
 
   public String getDartType() {
     return "Metric." + name();
diff --git a/src/test/java/com/android/tools/r8/benchmarks/BenchmarkResults.java b/src/test/java/com/android/tools/r8/benchmarks/BenchmarkResults.java
index 1dd5968..51f9f3a 100644
--- a/src/test/java/com/android/tools/r8/benchmarks/BenchmarkResults.java
+++ b/src/test/java/com/android/tools/r8/benchmarks/BenchmarkResults.java
@@ -9,8 +9,8 @@
 
 public class BenchmarkResults {
 
-  private final boolean isWarmupResults;
-  private final LongList runtimeRawResults = new LongArrayList();
+  private final BenchmarkMetric runtimeMetric;
+  private final LongList runtimeResults = new LongArrayList();
   private final LongList codeSizeResults = new LongArrayList();
 
   public static BenchmarkResults create() {
@@ -22,15 +22,19 @@
   }
 
   private BenchmarkResults(boolean isWarmupResults) {
-    this.isWarmupResults = isWarmupResults;
+    this.runtimeMetric = isWarmupResults ? BenchmarkMetric.StartupTime : BenchmarkMetric.RunTimeRaw;
+  }
+
+  private boolean isWarmupResults() {
+    return runtimeMetric == BenchmarkMetric.StartupTime;
   }
 
   private String getName(BenchmarkConfig config) {
-    return isWarmupResults ? config.getWarmupName() : config.getName();
+    return config.getName();
   }
 
-  public void addRuntimeRawResult(long result) {
-    runtimeRawResults.add(result);
+  public void addRuntimeResult(long result) {
+    runtimeResults.add(result);
   }
 
   public void addCodeSizeResult(long result) {
@@ -53,7 +57,7 @@
     verifyMetric(
         BenchmarkMetric.RunTimeRaw,
         config.getMetrics().contains(BenchmarkMetric.RunTimeRaw),
-        !runtimeRawResults.isEmpty());
+        !runtimeResults.isEmpty());
     verifyMetric(
         BenchmarkMetric.CodeSize,
         config.getMetrics().contains(BenchmarkMetric.CodeSize),
@@ -64,8 +68,9 @@
     return "" + (nanoTime / 1000000) + " ms";
   }
 
-  private void printRunTimeRaw(BenchmarkConfig config, long duration) {
-    System.out.println(getName(config) + "(RunTimeRaw): " + prettyTime(duration));
+  private void printRunTime(BenchmarkConfig config, long duration) {
+    String metric = runtimeMetric.name();
+    System.out.println(getName(config) + "(" + metric + "): " + prettyTime(duration));
   }
 
   private void printCodeSize(BenchmarkConfig config, long bytes) {
@@ -74,15 +79,15 @@
 
   public void printResults(ResultMode mode, BenchmarkConfig config) {
     verifyConfigAndResults(config);
-    if (config.hasMetric(BenchmarkMetric.RunTimeRaw)) {
-      long sum = runtimeRawResults.stream().mapToLong(l -> l).sum();
+    if (config.hasMetric(runtimeMetric)) {
+      long sum = runtimeResults.stream().mapToLong(l -> l).sum();
       if (mode == ResultMode.SUM) {
-        printRunTimeRaw(config, sum);
+        printRunTime(config, sum);
       } else if (mode == ResultMode.AVERAGE) {
-        printRunTimeRaw(config, sum / runtimeRawResults.size());
+        printRunTime(config, sum / runtimeResults.size());
       }
     }
-    if (!isWarmupResults && config.hasMetric(BenchmarkMetric.CodeSize)) {
+    if (!isWarmupResults() && config.hasMetric(BenchmarkMetric.CodeSize)) {
       long size = codeSizeResults.getLong(0);
       for (int i = 1; i < codeSizeResults.size(); i++) {
         if (size != codeSizeResults.getLong(i)) {
diff --git a/src/test/java/com/android/tools/r8/benchmarks/helloworld/HelloWorldBenchmark.java b/src/test/java/com/android/tools/r8/benchmarks/helloworld/HelloWorldBenchmark.java
index f6f481d..a838108 100644
--- a/src/test/java/com/android/tools/r8/benchmarks/helloworld/HelloWorldBenchmark.java
+++ b/src/test/java/com/android/tools/r8/benchmarks/helloworld/HelloWorldBenchmark.java
@@ -45,6 +45,7 @@
   }
 
   // Options/parameter setup to define variants of the benchmark above.
+  // Other benchmarks may not need options like these; they only help create the variants.
   private static class Options {
     final BenchmarkTarget target;
     final Backend backend;
@@ -78,7 +79,7 @@
                 .setName(options.getName())
                 .setTarget(target)
                 // The benchmark is required to have at least one metric.
-                .measureRunTimeRaw()
+                .measureRunTime()
                 .measureCodeSize()
                 // The benchmark is required to have a runner method which defines the actual
                 // execution.
@@ -88,7 +89,7 @@
                 .setFromRevision(12150)
                 // The benchmark can optionally time the warmup. This is not needed to use a warmup
                 // in the actual run, only to include it as its own benchmark entry on golem.
-                .timeWarmupRuns();
+                .measureWarmup();
         // If compiling with a library it needs to be added as a dependency.
         if (options.library != null) {
           builder.addDependency(options.library);
@@ -110,7 +111,7 @@
                         .setMinApi(options.minApi)
                         .addLibraryFiles(getLibraryFiles(options, environment))
                         .addProgramClasses(TestClass.class)
-                        // Compile and emit RunTimeRaw measure.
+                        // Compile and measure the run time.
                         .benchmarkCompile(results)
                         // Measure the output size.
                         .benchmarkCodeSize(results));
@@ -130,7 +131,7 @@
                         .setMinApi(options.minApi)
                         .addProgramClasses(TestClass.class)
                         .addKeepMainRule(TestClass.class)
-                        // Compile and emit RunTimeRaw measure.
+                        // Compile and measure the run time.
                         .benchmarkCompile(results)
                         // Measure the output size.
                         .benchmarkCodeSize(results));