
Commit fe9c6d5

Add support for running jmh benchmarks via bash (#5559)
1 parent 17c39f8 commit fe9c6d5

3 files changed: +345 -0 lines changed

AGENTS.md

Lines changed: 4 additions & 0 deletions
@@ -77,6 +77,10 @@ PIOSEE → Traceability trio mapping
For documentation‑only edits and other Routine B cases, still run PIOSEE briefly to confirm neutrality and reversibility.

### Benchmarking workflow (repository-wide)

The `scripts/run-single-benchmark.sh` helper is the supported path for spot-checking performance optimisations. It builds the chosen module with the `benchmarks` profile, restricts the benchmark selection to a single `@Benchmark` method, and, when `--enable-jfr` is supplied, enforces repeatable profiling defaults (no warmup, ten 10-second measurement iterations, one fork) while reporting where the generated JFR recording is written. Lean on this script whenever you need a reproducible measurement harness.
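A minimal invocation, using the benchmark exercised by the smoke test added in this commit (the module, class, and method are illustrative, not required values), looks like the following; with `--dry-run` the script only prints the Maven build and JMH launch commands instead of executing them:

bash scripts/run-single-benchmark.sh \
  --module testsuites/benchmark \
  --class org.eclipse.rdf4j.benchmark.ReasoningBenchmark \
  --method forwardChainingSchemaCachingRDFSInferencer \
  --dry-run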
## Proportionality Model (Think before you test)

Score the change on these lenses. If any are **High**, prefer **Routine A**.

scripts/run-single-benchmark.sh

Lines changed: 244 additions & 0 deletions
@@ -0,0 +1,244 @@
#!/usr/bin/env bash
set -euo pipefail

usage() {
    cat <<USAGE
Usage: $0 --module <modulePath> --class <fullyQualifiedClass> --method <methodName> [options]

Options:
  --dry-run                          Print the Maven and JMH commands without executing them
  --warmup-iterations <number>       Number of warmup iterations (default: 1)
  --measurement-iterations <number>  Number of measurement iterations (default: 3)
  --forks <number>                   Number of forks (default: 1)
  --jvm-arg <value>                  Append a JVM argument (can be repeated)
  --jmh-arg <value>                  Append a raw JMH argument (can be repeated)
  --enable-jfr                       Enable JFR profiling with fixed iteration and timing settings
  --enable-jfr-cpu-times             Include Java 25 CPU time JFR options (requires --enable-jfr)
  --jfr-output <path>                Override the destination file for the JFR recording
  --                                 Treat the remaining arguments as raw JMH arguments
USAGE
}

SCRIPT_DIR="$(cd -- "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd -- "${SCRIPT_DIR}/.." && pwd)"

module=""
benchmark_class=""
benchmark_method=""
dry_run=false
warmup_iterations=1
measurement_iterations=3
forks=1
jmh_extra_args=()
jvm_args=()
measurement_time=""
enable_jfr=false
enable_jfr_cpu_times=false
jfr_output=""
warmup_overridden=false
measurement_overridden=false
forks_overridden=false
jfr_notice=""

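# Parse command-line options; anything after -- is passed straight through to JMH.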
while [[ $# -gt 0 ]]; do
    case "$1" in
        --module|-m)
            module="$2"
            shift 2
            ;;
        --class|-c)
            benchmark_class="$2"
            shift 2
            ;;
        --method|-b|--benchmark)
            benchmark_method="$2"
            shift 2
            ;;
        --warmup-iterations)
            warmup_iterations="$2"
            warmup_overridden=true
            shift 2
            ;;
        --measurement-iterations)
            measurement_iterations="$2"
            measurement_overridden=true
            shift 2
            ;;
        --forks)
            forks="$2"
            forks_overridden=true
            shift 2
            ;;
        --jvm-arg)
            jvm_args+=("$2")
            shift 2
            ;;
        --jmh-arg)
            jmh_extra_args+=("$2")
            shift 2
            ;;
        --enable-jfr)
            enable_jfr=true
            shift
            ;;
        --enable-jfr-cpu-times)
            enable_jfr_cpu_times=true
            shift
            ;;
        --jfr-output)
            jfr_output="$2"
            shift 2
            ;;
        --dry-run)
            dry_run=true
            shift
            ;;
        --help|-h)
            usage
            exit 0
            ;;
        --)
            shift
            while [[ $# -gt 0 ]]; do
                jmh_extra_args+=("$1")
                shift
            done
            ;;
        *)
            echo "Unknown option: $1" >&2
            usage >&2
            exit 1
            ;;
    esac
done

if [[ -z "${module}" || -z "${benchmark_class}" || -z "${benchmark_method}" ]]; then
    echo "Error: --module, --class, and --method are required." >&2
    usage >&2
    exit 1
fi

module_dir="${REPO_ROOT}/${module}"
if [[ ! -d "${module_dir}" ]]; then
    echo "Error: Module directory '${module}' does not exist." >&2
    exit 1
fi

if ${enable_jfr_cpu_times} && ! ${enable_jfr}; then
    echo "Error: --enable-jfr-cpu-times requires --enable-jfr." >&2
    exit 1
fi

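# --enable-jfr pins the run to a repeatable profile (no warmup, ten 10-second measurement
# iterations, a single fork) and attaches a Flight Recorder recording, defaulting to the
# module's target directory unless --jfr-output overrides it.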
if ${enable_jfr}; then
    if (( ${#jmh_extra_args[@]} > 0 )); then
        echo "Error: --enable-jfr cannot be combined with additional JMH arguments." >&2
        exit 1
    fi

    if ${warmup_overridden} && [[ "${warmup_iterations}" != "0" ]]; then
        echo "Error: --enable-jfr requires 0 warmup iterations." >&2
        exit 1
    fi

    if ${measurement_overridden} && [[ "${measurement_iterations}" != "10" ]]; then
        echo "Error: --enable-jfr requires 10 measurement iterations." >&2
        exit 1
    fi

    if ${forks_overridden} && [[ "${forks}" != "1" ]]; then
        echo "Error: --enable-jfr requires a single fork." >&2
        exit 1
    fi

    warmup_iterations=0
    measurement_iterations=10
    measurement_time="10s"
    forks=1

    if [[ -z "${jfr_output}" ]]; then
        local_class="${benchmark_class##*.}"
        sanitized_class="${local_class//[^A-Za-z0-9_]/_}"
        sanitized_method="${benchmark_method//[^A-Za-z0-9_]/_}"
        jfr_output="${module_dir}/target/${sanitized_class}.${sanitized_method}.jfr"
    elif [[ "${jfr_output}" != /* ]]; then
        jfr_output="${REPO_ROOT}/${jfr_output}"
    fi

    jvm_args+=("-XX:StartFlightRecording=settings=profile,dumponexit=true,filename=${jfr_output},duration=120s")

    if ${enable_jfr_cpu_times}; then
        jvm_args+=("-XX:FlightRecorderOptions=enableThreadCpuTime=true,enableProcessCpuTime=true")
    fi

    jfr_notice="JFR profiling enabled: enforcing warmup=0, measurement=10 iterations of 10s, forks=1. Recording will be written to ${jfr_output}."
fi

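# Assemble the Maven build command and the JMH argument list; each --jvm-arg is forwarded
# to the forked JVM via -jvmArgsAppend.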
mvn_cmd=(mvn "-pl" "${module}" "-am" "-P" "benchmarks" "-DskipTests" package)

benchmark_pattern="${benchmark_class}.${benchmark_method}"
jmh_args=(-wi "${warmup_iterations}" -i "${measurement_iterations}" -f "${forks}")
if [[ -n "${measurement_time}" ]]; then
    jmh_args+=(-r "${measurement_time}")
fi
for arg in "${jvm_args[@]}"; do
    jmh_args+=("-jvmArgsAppend" "${arg}")
done
for arg in "${jmh_extra_args[@]}"; do
    jmh_args+=("${arg}")
done

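# Locate the benchmark jar under the module's target directory, preferring jars that are not
# original-* shaded leftovers; during a dry run a missing jar falls back to target/jmh.jar
# instead of failing.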
find_benchmark_jar() {
    local module_path="$1"
    local require_existing="$2"
    local target_dir="${module_path}/target"
    mapfile -t candidates < <(find "${target_dir}" -maxdepth 2 -type f \( -name '*jmh*.jar' -o -name '*benchmark*.jar' \) 2>/dev/null | sort)
    if [[ ${#candidates[@]} -gt 0 ]]; then
        for jar in "${candidates[@]}"; do
            if [[ "$(basename "${jar}")" != original-* ]]; then
                printf '%s\n' "${jar}"
                return 0
            fi
        done
        printf '%s\n' "${candidates[0]}"
        return 0
    fi

    if [[ "${require_existing}" == "true" ]]; then
        echo "Error: Unable to locate a benchmark jar in '${target_dir}'." >&2
        exit 1
    fi

    printf '%s\n' "${module_path}/target/jmh.jar"
}

print_command() {
    printf '%q ' "$@"
    printf '\n'
}

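# Dry runs only print the (shell-quoted) Maven and java command lines and exit without
# building or running anything.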
if ${dry_run}; then
    if ${enable_jfr}; then
        echo "${jfr_notice}"
    fi
    jar_path="$(find_benchmark_jar "${module_dir}" false)"
    print_command "${mvn_cmd[@]}"
    java_cmd=(java -jar "${jar_path}" "${jmh_args[@]}" "${benchmark_pattern}")
    print_command "${java_cmd[@]}"
    exit 0
fi

(
    cd "${REPO_ROOT}"
    "${mvn_cmd[@]}"
)

jar_path="$(find_benchmark_jar "${module_dir}" true)"
java_cmd=(java -jar "${jar_path}" "${jmh_args[@]}" "${benchmark_pattern}")

if ${enable_jfr}; then
    echo "${jfr_notice}"
    mkdir -p "$(dirname "${jfr_output}")"
fi

printf 'Running benchmark with jar %s\n' "${jar_path}"
"${java_cmd[@]}"
Lines changed: 97 additions & 0 deletions
@@ -0,0 +1,97 @@
#!/usr/bin/env bash
set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd -- "${SCRIPT_DIR}/../.." && pwd)"
SCRIPT="${REPO_ROOT}/scripts/run-single-benchmark.sh"

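# Plain dry run: the Maven build command and the fully qualified benchmark method must
# appear in the output.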
set +e
OUTPUT="$(bash "${SCRIPT}" --dry-run --module testsuites/benchmark --class org.eclipse.rdf4j.benchmark.ReasoningBenchmark --method forwardChainingSchemaCachingRDFSInferencer 2>&1)"
STATUS=$?
set -e

echo "${OUTPUT}"

if [[ ${STATUS} -ne 0 ]]; then
    exit ${STATUS}
fi

if [[ "${OUTPUT}" != *"mvn -pl testsuites/benchmark -am -P benchmarks -DskipTests package"* ]]; then
    echo "Expected Maven command not found in output" >&2
    exit 1
fi

if [[ "${OUTPUT}" != *"ReasoningBenchmark.forwardChainingSchemaCachingRDFSInferencer"* ]]; then
    echo "Expected benchmark method not found in output" >&2
    exit 1
fi

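# JFR dry run: the enforced profiling defaults and the recording destination must be reported.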
set +e
JFR_OUTPUT="$(bash "${SCRIPT}" --dry-run --module testsuites/benchmark --class org.eclipse.rdf4j.benchmark.ReasoningBenchmark --method forwardChainingSchemaCachingRDFSInferencer --enable-jfr 2>&1)"
JFR_STATUS=$?
set -e

echo "${JFR_OUTPUT}"

if [[ ${JFR_STATUS} -ne 0 ]]; then
    exit ${JFR_STATUS}
fi

if [[ "${JFR_OUTPUT}" != *"JFR profiling enabled:"* ]]; then
    echo "Expected JFR guidance banner when profiling is enabled" >&2
    exit 1
fi

EXPECTED_JFR_PATH="testsuites/benchmark/target/ReasoningBenchmark.forwardChainingSchemaCachingRDFSInferencer.jfr"
if [[ "${JFR_OUTPUT}" != *"${EXPECTED_JFR_PATH}"* ]]; then
    echo "Expected JFR banner to include the recording destination" >&2
    exit 1
fi

if [[ "${JFR_OUTPUT}" != *"-wi 0"* ]]; then
    echo "Expected JFR run to disable warmup iterations" >&2
    exit 1
fi

if [[ "${JFR_OUTPUT}" != *"-i 10"* ]]; then
    echo "Expected JFR run to force 10 measurement iterations" >&2
    exit 1
fi

if [[ "${JFR_OUTPUT}" != *"-r 10s"* ]]; then
    echo "Expected JFR run to set measurement time to 10 seconds" >&2
    exit 1
fi

if [[ "${JFR_OUTPUT}" != *"-f 1"* ]]; then
    echo "Expected JFR run to enforce a single fork" >&2
    exit 1
fi

if [[ "${JFR_OUTPUT}" != *"-XX:StartFlightRecording=settings=profile\\,dumponexit=true"* ]]; then
    echo "Expected JFR run to enable JFR profiling" >&2
    exit 1
fi

if [[ "${JFR_OUTPUT}" != *"testsuites/benchmark/target/ReasoningBenchmark.forwardChainingSchemaCachingRDFSInferencer.jfr"* ]]; then
    echo "Expected JFR run to emit recording into the module target directory" >&2
    exit 1
fi

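# CPU-time variant: the Java 25 FlightRecorderOptions CPU time flags must be appended to
# the JVM arguments.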
set +e
JFR_CPU_OUTPUT="$(bash "${SCRIPT}" --dry-run --module testsuites/benchmark --class org.eclipse.rdf4j.benchmark.ReasoningBenchmark --method forwardChainingSchemaCachingRDFSInferencer --enable-jfr --enable-jfr-cpu-times 2>&1)"
JFR_CPU_STATUS=$?
set -e

echo "${JFR_CPU_OUTPUT}"

if [[ ${JFR_CPU_STATUS} -ne 0 ]]; then
    exit ${JFR_CPU_STATUS}
fi

if [[ "${JFR_CPU_OUTPUT}" != *"-XX:FlightRecorderOptions=enableThreadCpuTime=true\\,enableProcessCpuTime=true"* ]]; then
    echo "Expected CPU time options to be appended when requested" >&2
    exit 1
fi

exit 0
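
For orientation, the dry-run output the assertions above inspect amounts to roughly the following commands (paths shown relative to the repository root; the jar name is the `target/jmh.jar` fallback reported when the module has not been built yet, so the real shaded jar name may differ). The arguments in the actual output are shell-quoted by `printf %q`, which is why the expected patterns include backslash-escaped commas:

mvn -pl testsuites/benchmark -am -P benchmarks -DskipTests package
java -jar testsuites/benchmark/target/jmh.jar -wi 1 -i 3 -f 1 \
  org.eclipse.rdf4j.benchmark.ReasoningBenchmark.forwardChainingSchemaCachingRDFSInferencer
java -jar testsuites/benchmark/target/jmh.jar -wi 0 -i 10 -f 1 -r 10s \
  -jvmArgsAppend -XX:StartFlightRecording=settings=profile,dumponexit=true,filename=testsuites/benchmark/target/ReasoningBenchmark.forwardChainingSchemaCachingRDFSInferencer.jfr,duration=120s \
  org.eclipse.rdf4j.benchmark.ReasoningBenchmark.forwardChainingSchemaCachingRDFSInferencer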
