diff --git a/docs/configuration-options.md b/docs/configuration-options.md index 1f54a55c..31a76d61 100644 --- a/docs/configuration-options.md +++ b/docs/configuration-options.md @@ -60,15 +60,23 @@ The options listed in the following sections allow you to tailor the benchmark e | `advanced("nativeGCAfterIteration", value)` | Whether to trigger garbage collection after each iteration. | `true`, `false` | `false` | ### Kotlin/JVM -| Option | Description | Possible Values | Default Value | -|---------------------------------------------|------------------------------------------------------------|----------------------------------------|----------------| -| `advanced("jvmForks", value)` | Specifies the number of times the harness should fork. | Non-negative Integer, `"definedByJmh"` | `1` | +| Option | Description | Possible Values | Default Value | +|---------------------------------------------|--------------------------------------------------------|----------------------------------------|----------------------| +| `advanced("jvmForks", value)` | Specifies the number of times the harness should fork. | Non-negative Integer, `"definedByJmh"` | `1` | +| `advanced("jvmProfiler", "value")` | Specifies the profiler to be used during benchmarking. | String identifier of the profiler | `null` (No profiler) | **Notes on "jvmForks":** - **0** - "no fork", i.e., no subprocesses are forked to run benchmarks. - A positive integer value – the amount used for all benchmarks in this configuration. - **"definedByJmh"** – Let JMH determine the amount, using the value in the [`@Fork` annotation](https://javadoc.io/doc/org.openjdk.jmh/jmh-core/latest/org/openjdk/jmh/annotations/Fork.html) for the benchmark function or its enclosing class. If not specified by `@Fork`, it defaults to [Defaults.MEASUREMENT_FORKS (`5`)](https://javadoc.io/doc/org.openjdk.jmh/jmh-core/latest/org/openjdk/jmh/runner/Defaults.html#MEASUREMENT_FORKS). 
+**Notes on "jvmProfiler":** +- This option corresponds to the `-prof` command line option when running JMH benchmarks from console. + Some examples of possible values include `gc`, `stack`, `cl`, `perf`, and `dtraceasm`. +- You can learn about profilers provided by the latest JMH [here](https://javadoc.io/doc/org.openjdk.jmh/jmh-core/latest/org/openjdk/jmh/profile/package-summary.html). +- A sample illustrating the output format of the profilers can be found [here](https://github.com/openjdk/jmh/blob/1.37/jmh-samples/src/main/java/org/openjdk/jmh/samples/JMHSample_35_Profilers.java). +- Some profilers may require root privileges. + The library offers the flexibility to specify the version of the Java Microbenchmark Harness (JMH) to use when running benchmarks on the JVM. The default version is set to `1.21`, but you can customize it while registering a JVM target for benchmarking: diff --git a/integration/src/main/kotlin/kotlinx/benchmark/integration/BenchmarkConfiguration.kt b/integration/src/main/kotlin/kotlinx/benchmark/integration/BenchmarkConfiguration.kt index 3360994e..bda141a3 100644 --- a/integration/src/main/kotlin/kotlinx/benchmark/integration/BenchmarkConfiguration.kt +++ b/integration/src/main/kotlin/kotlinx/benchmark/integration/BenchmarkConfiguration.kt @@ -11,7 +11,7 @@ class BenchmarkConfiguration { private var includes: MutableList<String> = mutableListOf() private var excludes: MutableList<String> = mutableListOf() private var params: MutableMap<String, MutableList<String>> = mutableMapOf() - private var advanced: MutableMap<String, Any> = mutableMapOf() + private var advanced: MutableMap<String, Any?> = mutableMapOf() fun include(pattern: String) { includes.add(pattern) @@ -26,7 +26,7 @@ class BenchmarkConfiguration { values.addAll(value) } - fun advanced(name: String, value: Any) { + fun advanced(name: String, value: Any?)
{ advanced[name] = value } diff --git a/integration/src/test/kotlin/kotlinx/benchmark/integration/JvmProfilerTest.kt b/integration/src/test/kotlin/kotlinx/benchmark/integration/JvmProfilerTest.kt new file mode 100644 index 00000000..2b3b52f7 --- /dev/null +++ b/integration/src/test/kotlin/kotlinx/benchmark/integration/JvmProfilerTest.kt @@ -0,0 +1,74 @@ +package kotlinx.benchmark.integration + +import kotlin.test.* + +class JvmProfilerTest : GradleTest() { + + @Test + fun testGcProfiler() { + val runner = project("kotlin-multiplatform") { + configuration("gcProfiler") { + iterations = 1 + iterationTime = 100 + iterationTimeUnit = "ms" + advanced("jvmProfiler", "gc") + } + } + + runner.run("jvmGcProfilerBenchmark") { + assertOutputContains("gc.alloc.rate") + assertOutputContains("BUILD SUCCESSFUL") + } + } + + @Test + fun testStackProfilerEffect() { + val runner = project("kotlin-multiplatform") { + configuration("stackProfiler") { + iterations = 1 + iterationTime = 100 + iterationTimeUnit = "ms" + advanced("jvmProfiler", "stack") + } + } + + runner.run("jvmStackProfilerBenchmark") { + assertOutputContains("stack") + assertOutputContains("BUILD SUCCESSFUL") + } + } + + @Test + fun testClProfiler() { + val runner = project("kotlin-multiplatform") { + configuration("clProfiler") { + iterations = 1 + iterationTime = 100 + iterationTimeUnit = "ms" + advanced("jvmProfiler", "cl") + } + } + + runner.run("jvmClProfilerBenchmark") { + assertOutputContains("class.unload.norm") + assertOutputContains("BUILD SUCCESSFUL") + } + } + + @Test + fun testCompProfilerEffect() { + val runner = project("kotlin-multiplatform") { + configuration("compProfiler") { + iterations = 1 + iterationTime = 100 + iterationTimeUnit = "ms" + advanced("jvmProfiler", "comp") + } + } + + runner.run("jvmCompProfilerBenchmark") { + assertOutputContains("compiler.time.profiled") + assertOutputContains("BUILD SUCCESSFUL") + } + } +} diff --git 
a/integration/src/test/kotlin/kotlinx/benchmark/integration/OptionsValidationTest.kt b/integration/src/test/kotlin/kotlinx/benchmark/integration/OptionsValidationTest.kt index f4940978..0631dff3 100644 --- a/integration/src/test/kotlin/kotlinx/benchmark/integration/OptionsValidationTest.kt +++ b/integration/src/test/kotlin/kotlinx/benchmark/integration/OptionsValidationTest.kt @@ -206,6 +206,13 @@ class OptionsValidationTest : GradleTest() { advanced(" ", "value") } + configuration("blankAdvancedConfigValue") { + iterations = 1 + iterationTime = 100 + iterationTimeUnit = "ms" + advanced("name", " ") + } + configuration("invalidAdvancedConfigName") { iterations = 1 iterationTime = 100 @@ -240,13 +247,31 @@ class OptionsValidationTest : GradleTest() { iterationTimeUnit = "ms" advanced("jsUseBridge", "x") } + + configuration("nullJvmProfiler") { + iterations = 1 + iterationTime = 100 + iterationTimeUnit = "ms" + advanced("jvmProfiler", null) + } + + configuration("notStringJvmProfiler") { + iterations = 1 + iterationTime = 100 + iterationTimeUnit = "ms" + advanced("jvmProfiler", 1) + } } runner.runAndFail("blankAdvancedConfigNameBenchmark") { assertOutputContains("Invalid advanced option name: ' '. It must not be blank.") } + runner.runAndFail("blankAdvancedConfigValueBenchmark") { + assertOutputContains("Invalid value for advanced option 'name': ' '. Value should not be blank.") + } runner.runAndFail("invalidAdvancedConfigNameBenchmark") { - assertOutputContains("Invalid advanced option name: 'jsFork'. Accepted options: \"nativeFork\", \"nativeGCAfterIteration\", \"jvmForks\", \"jsUseBridge\".") + assertOutputContains("Invalid advanced option name: 'jsFork'. Accepted options: \"nativeFork\", " + + "\"nativeGCAfterIteration\", \"jvmForks\", \"jsUseBridge\", \"jvmProfiler\".") } runner.runAndFail("invalidNativeForkBenchmark") { assertOutputContains("Invalid value for 'nativeFork': 'x'. 
Accepted values: ${ValidOptions.nativeForks.joinToString(", ")}.") @@ -260,6 +285,12 @@ class OptionsValidationTest : GradleTest() { runner.runAndFail("invalidJsUseBridgeBenchmark") { assertOutputContains("Invalid value for 'jsUseBridge': 'x'. Expected a Boolean value.") } + runner.runAndFail("nullJvmProfilerBenchmark") { + assertOutputContains("Invalid value for 'jvmProfiler': 'null'. Expected a String value.") + } + runner.runAndFail("notStringJvmProfilerBenchmark") { + assertOutputContains("Invalid value for 'jvmProfiler': '1'. Expected a String value.") + } } } diff --git a/plugin/main/src/kotlinx/benchmark/gradle/Utils.kt b/plugin/main/src/kotlinx/benchmark/gradle/Utils.kt index fb77d6bd..0354375c 100644 --- a/plugin/main/src/kotlinx/benchmark/gradle/Utils.kt +++ b/plugin/main/src/kotlinx/benchmark/gradle/Utils.kt @@ -243,7 +243,12 @@ private fun validateConfig(config: BenchmarkConfiguration) { "jsUseBridge" -> require(value is Boolean) { "Invalid value for 'jsUseBridge': '$value'. Expected a Boolean value." } - else -> throw IllegalArgumentException("Invalid advanced option name: '$param'. Accepted options: \"nativeFork\", \"nativeGCAfterIteration\", \"jvmForks\", \"jsUseBridge\".") + "jvmProfiler" -> require(value is String) { + "Invalid value for 'jvmProfiler': '$value'. Expected a String value." + } + else -> throw IllegalArgumentException( + "Invalid advanced option name: '$param'. Accepted options: ${ValidOptions.advancedOptions.joinToString(", ") { "\"$it\"" }}." + ) } } } @@ -259,6 +264,7 @@ private object ValidOptions { ) val modes = setOf("thrpt", "avgt", "Throughput", "AverageTime") val nativeForks = setOf("perBenchmark", "perIteration") + val advancedOptions = setOf("nativeFork", "nativeGCAfterIteration", "jvmForks", "jsUseBridge", "jvmProfiler") } internal fun Project.getSystemProperty(key: String): String?
{ diff --git a/runtime/commonMain/src/kotlinx/benchmark/BenchmarkProgress.kt b/runtime/commonMain/src/kotlinx/benchmark/BenchmarkProgress.kt index 7d543ca2..946be55e 100644 --- a/runtime/commonMain/src/kotlinx/benchmark/BenchmarkProgress.kt +++ b/runtime/commonMain/src/kotlinx/benchmark/BenchmarkProgress.kt @@ -105,7 +105,7 @@ class ConsoleBenchmarkProgress : BenchmarkProgress() { } override fun endBenchmark(suite: String, benchmark: String, status: FinishStatus, message: String) { - println(" $status: $message") + println(message) } override fun endBenchmarkException(suite: String, benchmark: String, error: String, stacktrace: String) { diff --git a/runtime/jvmMain/src/kotlinx/benchmark/jvm/JvmBenchmarkRunner.kt b/runtime/jvmMain/src/kotlinx/benchmark/jvm/JvmBenchmarkRunner.kt index ba8c5dd4..dd617d86 100644 --- a/runtime/jvmMain/src/kotlinx/benchmark/jvm/JvmBenchmarkRunner.kt +++ b/runtime/jvmMain/src/kotlinx/benchmark/jvm/JvmBenchmarkRunner.kt @@ -42,9 +42,17 @@ fun main(args: Array<String>) { jmhOptions.param(key, *value.toTypedArray()) } + val reportFormat = ResultFormatType.valueOf(config.reportFormat.uppercase()) + val reporter = BenchmarkProgress.create(config.traceFormat) + val output = JmhOutputFormat(reporter, config.name) + + // "libasyncProfiler" is passed when a benchmark task is run from the IntelliJ Gradle panel with an embedded profiler. val runtimeMXBean = ManagementFactory.getRuntimeMXBean() val jvmArgs = runtimeMXBean.inputArguments - if (jvmArgs.any { it.contains("libasyncProfiler") }) { + val hasAttachedProfiler = jvmArgs.any { it.contains("libasyncProfiler") } + if (hasAttachedProfiler) { + // The attached profiler profiles this process, so don't fork the benchmark run to a separate process.
+ output.println("Warning: an IDE profiler is attached to this process, not forking benchmark run to a separate process.") jmhOptions.forks(0) } else { when (val jvmForks = config.advanced["jvmForks"]) { @@ -58,9 +66,14 @@ fun main(args: Array) { } } - val reportFormat = ResultFormatType.valueOf(config.reportFormat.uppercase()) - val reporter = BenchmarkProgress.create(config.traceFormat) - val output = JmhOutputFormat(reporter, config.name) + val profilerName = config.advanced["jvmProfiler"] + if (profilerName != null) { + if (hasAttachedProfiler) { + output.println("Warning: an IDE profiler is attached to this process, ignoring jvmProfiler = $profilerName.") + } else { + jmhOptions.addProfiler(profilerName) + } + } try { val runner = Runner(jmhOptions.build(), output) val results = runner.run() @@ -105,8 +118,19 @@ class JmhOutputFormat(private val reporter: BenchmarkProgress, private val suite override fun endBenchmark(result: BenchmarkResult?) { if (result != null) { val benchmarkId = getBenchmarkId(result.params) - val value = result.primaryResult - val message = value.extendedInfo().trim() + val message = buildString { + appendLine("Result \"${result.params.benchmark}\":") + appendLine(result.primaryResult.extendedInfo()) + + for (r in result.secondaryResults.values) { + val info = r.extendedInfo() + if (info.trim().isNotEmpty()) { + appendLine("Secondary result \"${result.params.benchmark}:${r.label}\":") + appendLine(info) + } + } + } + reporter.endBenchmark(suiteName, benchmarkId, BenchmarkProgress.FinishStatus.Success, message) } else { reporter.endBenchmarkException(suiteName, lastBenchmarkStart, "", "") @@ -133,7 +157,25 @@ class JmhOutputFormat(private val reporter: BenchmarkProgress, private val suite ) { when (params.type) { IterationType.WARMUP -> println("Warm-up $iteration: ${data.primaryResult}") - IterationType.MEASUREMENT -> println("Iteration $iteration: ${data.primaryResult}") + IterationType.MEASUREMENT -> { + val message = buildString { + 
appendLine("Iteration $iteration: ${data.primaryResult}") + + if (data.secondaryResults.isNotEmpty()) { + val prefix = " ".repeat(16) + val maxKeyLen = data.secondaryResults.maxOf { it.key.length } + + for ((key, value) in data.secondaryResults) { + append(prefix) + append("%-${maxKeyLen + 1}s ".format("$key:")) + appendLine(value) + } + appendLine() + } + } + + print(message) + } null -> throw UnsupportedOperationException("Iteration type not set") } flush()