Dry-run of benchmarks exits when some benchmark fails #9397

Merged: 10 commits, Mar 14, 2024
1 change: 1 addition & 0 deletions build.sbt
@@ -1850,6 +1850,7 @@ lazy val `runtime-benchmarks` =
"-Xlint:unchecked"
),
Compile / compile := (Compile / compile)
.dependsOn(`runtime-fat-jar` / assembly)
.dependsOn(Def.task { (Compile / sourceManaged).value.mkdirs })
.value,
parallelExecution := false,
2 changes: 1 addition & 1 deletion build/build/src/engine/context.rs
@@ -453,7 +453,7 @@ impl RunContext {
// TODO [mwu] It should be possible to run them through context config option.
if self.config.build_benchmarks {
tasks.extend([
"runtime/Benchmark/compile",
"runtime-benchmarks/compile",
"language-server/Benchmark/compile",
"searcher/Benchmark/compile",
"std-benchmarks/Benchmark/compile",
Utils.java
@@ -8,6 +8,7 @@
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.logging.Level;
import org.enso.interpreter.runtime.EnsoContext;
import org.enso.polyglot.LanguageInfo;
@@ -21,6 +22,9 @@ public class Utils {
public static Context.Builder createDefaultContextBuilder() {
return Context.newBuilder()
.allowExperimentalOptions(true)
.option(
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
Paths.get("../../distribution/component").toFile().getAbsolutePath())
.option(RuntimeOptions.LOG_LEVEL, Level.WARNING.getName())
.option(RuntimeOptions.DISABLE_IR_CACHES, "true")
.option(RuntimeOptions.STRICT_ERRORS, "true")
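Since LANGUAGE_HOME_OVERRIDE now lives in Utils.createDefaultContextBuilder(), individual benchmarks only need to set the options they actually care about. A minimal sketch of such a benchmark, assuming it sits in the same package as Utils; the class name ExampleBench and its option choices are illustrative, not part of this PR:

import org.enso.polyglot.RuntimeOptions;
import org.graalvm.polyglot.Context;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;

@State(Scope.Benchmark)
public class ExampleBench {
  private Context context;

  @Setup
  public void setup() {
    // LANGUAGE_HOME_OVERRIDE is already set by the shared builder; only
    // benchmark-specific options are configured here.
    this.context =
        Utils.createDefaultContextBuilder()
            .option(RuntimeOptions.DISABLE_IR_CACHES, "false")
            .build();
  }

  @TearDown
  public void tearDown() {
    this.context.close();
  }
}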
(another benchmark file; name not shown in this view)
@@ -3,7 +3,6 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Paths;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@@ -93,9 +92,6 @@ public void setup(BenchmarkParams params) throws IOException {
this.out = new ByteArrayOutputStream();
this.context =
Utils.createDefaultContextBuilder()
.option(
RuntimeOptions.LANGUAGE_HOME_OVERRIDE,
Paths.get("../../distribution/component").toFile().getAbsolutePath())
// Enable IR caches - we don't want to compile the imported modules from the standard
// libraries
.option(RuntimeOptions.DISABLE_IR_CACHES, "false")
IfVsCaseBenchmarks.java
@@ -25,8 +25,8 @@

@BenchmarkMode(Mode.AverageTime)
@Fork(1)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 3, time = 3)
@Warmup(iterations = 5)
@Measurement(iterations = 3)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
public class IfVsCaseBenchmarks {
BenchmarksRunner.java
@@ -3,13 +3,9 @@
import jakarta.xml.bind.JAXBException;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.openjdk.jmh.results.RunResult;
import org.openjdk.jmh.runner.BenchmarkList;
import org.openjdk.jmh.runner.BenchmarkListEntry;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.CommandLineOptionException;
@@ -21,15 +17,6 @@
public class BenchmarksRunner {
public static final File REPORT_FILE = new File("./bench-report.xml");

/**
* @return A list of qualified names of all benchmarks visible to JMH.
*/
public List<String> getAvailable() {
return BenchmarkList.defaultList().getAll(null, new ArrayList<>()).stream()
.map(BenchmarkListEntry::getUsername)
.collect(Collectors.toList());
}

public static void run(String[] args) throws RunnerException {
CommandLineOptions cmdOpts = null;
try {
@@ -75,35 +62,35 @@ public static void run(String[] args) throws RunnerException {
}
}

private static Collection<RunResult> runCompileOnly(List<String> includes)
throws RunnerException {
System.out.println("Running benchmarks " + includes + " in compileOnly mode");
/**
* Results from compileOnly mode are not reported. Moreover, if any benchmark in this mode
* fails, the whole process immediately fails. This behavior differs from *normal* benchmark
* runs, where a single failure does not stop the whole process.
*/
private static void runCompileOnly(List<String> includes) {
if (includes.isEmpty()) {
System.out.println("Running all benchmarks in compileOnly mode");
} else {
System.out.println("Running benchmarks " + includes + " in compileOnly mode");
}
var optsBuilder =
new OptionsBuilder()
.measurementTime(TimeValue.seconds(1))
.measurementIterations(1)
.warmupIterations(0)
.shouldFailOnError(true)
.forks(0);
includes.forEach(optsBuilder::include);
var opts = optsBuilder.build();
var runner = new Runner(opts);
return runner.run();
}

public static BenchmarkItem runSingle(String label) throws RunnerException, JAXBException {
String includeRegex = "^" + label + "$";
if (Boolean.getBoolean("bench.compileOnly")) {
var results = runCompileOnly(List.of(includeRegex));
var firstResult = results.iterator().next();
return reportResult(label, firstResult);
} else {
var opts =
new OptionsBuilder()
.jvmArgsAppend("-Xss16M", "-Dpolyglot.engine.MultiTier=false")
.include(includeRegex)
.build();
RunResult benchmarksResult = new Runner(opts).runSingle();
return reportResult(label, benchmarksResult);
try {
runner.run();
System.out.println(
"benchmarks run successfully in compileOnly mode. Results are not reported.");
} catch (RunnerException e) {
System.err.println("Benchmark run failed: " + e.getMessage());
e.printStackTrace(System.err);
System.exit(1);
}
}
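The Javadoc added above describes the key behavioral difference: with shouldFailOnError(true), a single failing benchmark aborts the whole dry run, whereas a normal JMH run records the error and moves on to the remaining benchmarks. A self-contained sketch of that contrast, using a hypothetical FailingBench class that is not part of this PR:

import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;

@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
public class FailingBench {
  @Benchmark
  public void alwaysThrows() {
    throw new IllegalStateException("boom");
  }

  public static void main(String[] args) throws RunnerException {
    var opts =
        new OptionsBuilder()
            .include(FailingBench.class.getSimpleName())
            .forks(0)
            .warmupIterations(0)
            .measurementIterations(1)
            .measurementTime(TimeValue.seconds(1))
            // Without this line JMH would log the exception and continue with
            // any remaining benchmarks; with it, Runner.run() throws.
            .shouldFailOnError(true)
            .build();
    new Runner(opts).run(); // fails fast, mirroring the dry-run behavior above
  }
}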