Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,10 @@ PIOSEE → Traceability trio mapping

For documentation‑only edits and other Routine B cases, still run PIOSEE briefly to confirm neutrality and reversibility.

### Benchmarking workflow (repository-wide)

The `scripts/run-single-benchmark.sh` helper is the supported path for spot-checking performance optimisations. It builds the chosen module with the `benchmarks` profile, constrains the benchmark selection to a single `@Benchmark` method, and when `--enable-jfr` is supplied it enforces repeatable profiling defaults (no warmup, ten 10-second measurements, one fork) while clearly reporting the destination of the generated JFR recording. Lean on this script whenever you need a reproducible measurement harness.

## Proportionality Model (Think before you test)

Score the change on these lenses. If any are **High**, prefer **Routine A**.
Expand Down
244 changes: 244 additions & 0 deletions scripts/run-single-benchmark.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,244 @@
#!/usr/bin/env bash
set -euo pipefail

# Print usage information for this helper to stdout.
# Called for --help (exit 0) and, redirected to stderr, on argument errors.
usage() {
  # Here-doc body stays at column 0: with plain << (not <<-) any added
  # indentation would leak into the printed text.
  cat <<USAGE
Usage: $0 --module <modulePath> --class <fullyQualifiedClass> --method <methodName> [options]

Options:
--dry-run Print the Maven and JMH commands without executing them
--warmup-iterations <number> Number of warmup iterations (default: 1)
--measurement-iterations <number> Number of measurement iterations (default: 3)
--forks <number> Number of forks (default: 1)
--jvm-arg <value> Append a JVM argument (can be repeated)
--jmh-arg <value> Append a raw JMH argument (can be repeated)
--enable-jfr Enable JFR profiling with fixed iteration and timing settings
--enable-jfr-cpu-times Include Java 25 CPU time JFR options (requires --enable-jfr)
--jfr-output <path> Override the destination file for the JFR recording
-- Treat the remaining arguments as raw JMH arguments
USAGE
}

# Resolve the repository root relative to this script's location so the
# helper can be invoked from any working directory.
SCRIPT_DIR="$(cd -- "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd -- "${SCRIPT_DIR}/.." && pwd)"

# --- Option state with defaults -------------------------------------------
module=""                    # required: module path relative to the repo root
benchmark_class=""           # required: fully-qualified benchmark class name
benchmark_method=""          # required: single @Benchmark method name
dry_run=false                # --dry-run: print commands instead of executing
warmup_iterations=1          # JMH -wi default
measurement_iterations=3     # JMH -i default
forks=1                      # JMH -f default
jmh_extra_args=()            # raw JMH args (--jmh-arg and everything after --)
jvm_args=()                  # forwarded to JMH via -jvmArgsAppend (--jvm-arg)
measurement_time=""          # JMH -r; only set when JFR mode enforces 10s
enable_jfr=false             # --enable-jfr
enable_jfr_cpu_times=false   # --enable-jfr-cpu-times (requires --enable-jfr)
jfr_output=""                # --jfr-output; derived from class/method if empty
# Track explicit user overrides so JFR mode can reject conflicting values
# instead of silently replacing them.
warmup_overridden=false
measurement_overridden=false
forks_overridden=false
jfr_notice=""                # human-readable banner printed in JFR mode

# Parse CLI arguments. Value-carrying options consume two positions.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --module|-m)
      module="$2"
      shift 2
      ;;
    --class|-c)
      benchmark_class="$2"
      shift 2
      ;;
    --method|-b|--benchmark)
      benchmark_method="$2"
      shift 2
      ;;
    # Iteration/fork overrides also record that the user set them explicitly,
    # so JFR mode can later reject values that conflict with its fixed harness.
    --warmup-iterations)
      warmup_iterations="$2"
      warmup_overridden=true
      shift 2
      ;;
    --measurement-iterations)
      measurement_iterations="$2"
      measurement_overridden=true
      shift 2
      ;;
    --forks)
      forks="$2"
      forks_overridden=true
      shift 2
      ;;
    --jvm-arg)
      jvm_args+=("$2")
      shift 2
      ;;
    --jmh-arg)
      jmh_extra_args+=("$2")
      shift 2
      ;;
    --enable-jfr)
      enable_jfr=true
      shift
      ;;
    --enable-jfr-cpu-times)
      enable_jfr_cpu_times=true
      shift
      ;;
    --jfr-output)
      jfr_output="$2"
      shift 2
      ;;
    --dry-run)
      dry_run=true
      shift
      ;;
    --help|-h)
      usage
      exit 0
      ;;
    # Everything after a literal `--` is forwarded verbatim to JMH.
    --)
      shift
      while [[ $# -gt 0 ]]; do
        jmh_extra_args+=("$1")
        shift
      done
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done

# Validate required arguments before doing any work.
if [[ -z "${module}" || -z "${benchmark_class}" || -z "${benchmark_method}" ]]; then
  echo "Error: --module, --class, and --method are required." >&2
  usage >&2
  exit 1
fi

module_dir="${REPO_ROOT}/${module}"
if [[ ! -d "${module_dir}" ]]; then
  echo "Error: Module directory '${module}' does not exist." >&2
  exit 1
fi

# CPU-time JFR events are only meaningful when a recording is requested.
if ${enable_jfr_cpu_times} && ! ${enable_jfr}; then
  echo "Error: --enable-jfr-cpu-times requires --enable-jfr." >&2
  exit 1
fi

if ${enable_jfr}; then
  # JFR mode enforces a fixed, repeatable harness; reject user-supplied
  # settings that deviate from it rather than silently overriding them.
  if (( ${#jmh_extra_args[@]} > 0 )); then
    echo "Error: --enable-jfr cannot be combined with additional JMH arguments." >&2
    exit 1
  fi

  if ${warmup_overridden} && [[ "${warmup_iterations}" != "0" ]]; then
    echo "Error: --enable-jfr requires 0 warmup iterations." >&2
    exit 1
  fi

  if ${measurement_overridden} && [[ "${measurement_iterations}" != "10" ]]; then
    echo "Error: --enable-jfr requires 10 measurement iterations." >&2
    exit 1
  fi

  if ${forks_overridden} && [[ "${forks}" != "1" ]]; then
    echo "Error: --enable-jfr requires a single fork." >&2
    exit 1
  fi

  # Fixed profiling harness: no warmup, ten 10-second measurements, one fork.
  warmup_iterations=0
  measurement_iterations=10
  measurement_time="10s"
  forks=1

  if [[ -z "${jfr_output}" ]]; then
    # Default recording path: <module>/target/<Class>.<method>.jfr, with any
    # character outside [A-Za-z0-9_] replaced to keep the filename safe.
    local_class="${benchmark_class##*.}"
    sanitized_class="${local_class//[^A-Za-z0-9_]/_}"
    sanitized_method="${benchmark_method//[^A-Za-z0-9_]/_}"
    jfr_output="${module_dir}/target/${sanitized_class}.${sanitized_method}.jfr"
  elif [[ "${jfr_output}" != /* ]]; then
    # Relative override paths are resolved against the repository root.
    jfr_output="${REPO_ROOT}/${jfr_output}"
  fi

  # NOTE(review): duration=120s caps the recording, while 10 x 10s
  # measurements alone take ~100s plus JVM startup — confirm the cap cannot
  # truncate the profile (dumponexit=true dumps whatever was captured).
  jvm_args+=("-XX:StartFlightRecording=settings=profile,dumponexit=true,filename=${jfr_output},duration=120s")

  if ${enable_jfr_cpu_times}; then
    # NOTE(review): these flags are described as Java 25 CPU-time options in
    # the usage text — verify against the target JDK's
    # -XX:FlightRecorderOptions documentation.
    jvm_args+=("-XX:FlightRecorderOptions=enableThreadCpuTime=true,enableProcessCpuTime=true")
  fi

  jfr_notice="JFR profiling enabled: enforcing warmup=0, measurement=10 iterations of 10s, forks=1. Recording will be written to ${jfr_output}."
fi

# Assemble the Maven build command: build only the target module (plus its
# in-repo dependencies via -am) under the benchmarks profile, skipping tests.
mvn_cmd=(mvn "-pl" "${module}" "-am" "-P" "benchmarks" "-DskipTests" package)

# Fully-qualified JMH benchmark selector, e.g. com.example.FooBenchmark.bar.
benchmark_pattern="${benchmark_class}.${benchmark_method}"

# Base JMH arguments: warmup iterations, measurement iterations, forks.
jmh_args=(-wi "${warmup_iterations}" -i "${measurement_iterations}" -f "${forks}")
if [[ -n "${measurement_time}" ]]; then
  jmh_args+=(-r "${measurement_time}")
fi
# Guard the expansions: under `set -u`, bash before 4.4 treats expanding an
# empty array as an unbound-variable error, which would abort the script
# whenever no --jvm-arg/--jmh-arg options were supplied.
if (( ${#jvm_args[@]} > 0 )); then
  for arg in "${jvm_args[@]}"; do
    jmh_args+=("-jvmArgsAppend" "${arg}")
  done
fi
if (( ${#jmh_extra_args[@]} > 0 )); then
  for arg in "${jmh_extra_args[@]}"; do
    jmh_args+=("${arg}")
  done
fi

# Locate the runnable benchmark jar produced by the benchmarks profile.
#   $1 - module directory (absolute path)
#   $2 - "true" to fail hard when no jar exists yet (after the build),
#        anything else to fall back to a predicted path (dry-run mode).
# Outputs the chosen jar path on stdout; exits 1 when $2 is "true" and no
# candidate jar is found.
find_benchmark_jar() {
  local module_path="$1"
  local require_existing="$2"
  local target_dir="${module_path}/target"
  # Declare the scratch variables local so they do not leak into the
  # caller's scope (the original left `candidates` and `jar` global).
  local -a candidates=()
  local jar
  # Sort so the choice is deterministic when several jars match.
  mapfile -t candidates < <(find "${target_dir}" -maxdepth 2 -type f \( -name '*jmh*.jar' -o -name '*benchmark*.jar' \) 2>/dev/null | sort)
  if (( ${#candidates[@]} > 0 )); then
    # Prefer a shaded jar: 'original-*' is the pre-shade artifact and is not
    # runnable standalone.
    for jar in "${candidates[@]}"; do
      if [[ "$(basename "${jar}")" != original-* ]]; then
        printf '%s\n' "${jar}"
        return 0
      fi
    done
    printf '%s\n' "${candidates[0]}"
    return 0
  fi

  if [[ "${require_existing}" == "true" ]]; then
    echo "Error: Unable to locate a benchmark jar in '${target_dir}'." >&2
    exit 1
  fi

  # Dry-run fallback: predict where the jar would land after the build.
  printf '%s\n' "${module_path}/target/jmh.jar"
}

# Render the given command as one shell-quoted line on stdout, so a user can
# copy-paste what a dry run would have executed.
print_command() {
  local quoted
  quoted=$(printf '%q ' "$@")
  printf '%s\n' "${quoted}"
}

# Dry-run mode: print the JFR banner (if enabled) and the exact Maven and
# java commands without executing them. The jar path is only a prediction
# here, since the build has not run yet (require_existing=false).
if ${dry_run}; then
  if ${enable_jfr}; then
    echo "${jfr_notice}"
  fi
  jar_path="$(find_benchmark_jar "${module_dir}" false)"
  print_command "${mvn_cmd[@]}"
  java_cmd=(java -jar "${jar_path}" "${jmh_args[@]}" "${benchmark_pattern}")
  print_command "${java_cmd[@]}"
  exit 0
fi

# Build from the repository root; the subshell keeps the caller's working
# directory untouched.
(
  cd "${REPO_ROOT}"
  "${mvn_cmd[@]}"
)

# After the build the benchmark jar must exist (require_existing=true).
jar_path="$(find_benchmark_jar "${module_dir}" true)"
java_cmd=(java -jar "${jar_path}" "${jmh_args[@]}" "${benchmark_pattern}")

if ${enable_jfr}; then
  echo "${jfr_notice}"
  # Ensure the recording's destination directory exists before the JVM
  # tries to write the .jfr file there.
  mkdir -p "$(dirname "${jfr_output}")"
fi

printf 'Running benchmark with jar %s\n' "${jar_path}"
"${java_cmd[@]}"
97 changes: 97 additions & 0 deletions testsuites/benchmark/test-run-single-benchmark.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
#!/usr/bin/env bash
# Smoke test for scripts/run-single-benchmark.sh. Every scenario uses
# --dry-run, so no Maven build or JMH benchmark is actually executed; the
# test only inspects the commands the helper prints.
set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd -- "${SCRIPT_DIR}/../.." && pwd)"
SCRIPT="${REPO_ROOT}/scripts/run-single-benchmark.sh"

# Single benchmark targeted by every scenario below.
MODULE="testsuites/benchmark"
CLASS="org.eclipse.rdf4j.benchmark.ReasoningBenchmark"
METHOD="forwardChainingSchemaCachingRDFSInferencer"

# Fail with message $3 on stderr unless haystack $1 contains substring $2.
require_contains() {
  local haystack="$1"
  local needle="$2"
  local message="$3"
  if [[ "${haystack}" != *"${needle}"* ]]; then
    echo "${message}" >&2
    exit 1
  fi
}

# --- Scenario 1: plain dry run --------------------------------------------
set +e
OUTPUT="$(bash "${SCRIPT}" --dry-run --module "${MODULE}" --class "${CLASS}" --method "${METHOD}" 2>&1)"
STATUS=$?
set -e

echo "${OUTPUT}"

if [[ ${STATUS} -ne 0 ]]; then
  exit ${STATUS}
fi

require_contains "${OUTPUT}" "mvn -pl testsuites/benchmark -am -P benchmarks -DskipTests package" \
  "Expected Maven command not found in output"
require_contains "${OUTPUT}" "ReasoningBenchmark.forwardChainingSchemaCachingRDFSInferencer" \
  "Expected benchmark method not found in output"

# --- Scenario 2: dry run with JFR profiling enabled ------------------------
set +e
JFR_OUTPUT="$(bash "${SCRIPT}" --dry-run --module "${MODULE}" --class "${CLASS}" --method "${METHOD}" --enable-jfr 2>&1)"
JFR_STATUS=$?
set -e

echo "${JFR_OUTPUT}"

if [[ ${JFR_STATUS} -ne 0 ]]; then
  exit ${JFR_STATUS}
fi

EXPECTED_JFR_PATH="testsuites/benchmark/target/ReasoningBenchmark.forwardChainingSchemaCachingRDFSInferencer.jfr"

require_contains "${JFR_OUTPUT}" "JFR profiling enabled:" \
  "Expected JFR guidance banner when profiling is enabled"
require_contains "${JFR_OUTPUT}" "${EXPECTED_JFR_PATH}" \
  "Expected JFR banner to include the recording destination"
require_contains "${JFR_OUTPUT}" "-wi 0" \
  "Expected JFR run to disable warmup iterations"
require_contains "${JFR_OUTPUT}" "-i 10" \
  "Expected JFR run to force 10 measurement iterations"
require_contains "${JFR_OUTPUT}" "-r 10s" \
  "Expected JFR run to set measurement time to 10 seconds"
require_contains "${JFR_OUTPUT}" "-f 1" \
  "Expected JFR run to enforce a single fork"
# The java command is rendered with printf %q, so commas appear escaped.
require_contains "${JFR_OUTPUT}" "-XX:StartFlightRecording=settings=profile\\,dumponexit=true" \
  "Expected JFR run to enable JFR profiling"
require_contains "${JFR_OUTPUT}" "${EXPECTED_JFR_PATH}" \
  "Expected JFR run to emit recording into the module target directory"

# --- Scenario 3: dry run with JFR CPU-time options -------------------------
set +e
JFR_CPU_OUTPUT="$(bash "${SCRIPT}" --dry-run --module "${MODULE}" --class "${CLASS}" --method "${METHOD}" --enable-jfr --enable-jfr-cpu-times 2>&1)"
JFR_CPU_STATUS=$?
set -e

echo "${JFR_CPU_OUTPUT}"

if [[ ${JFR_CPU_STATUS} -ne 0 ]]; then
  exit ${JFR_CPU_STATUS}
fi

require_contains "${JFR_CPU_OUTPUT}" "-XX:FlightRecorderOptions=enableThreadCpuTime=true\\,enableProcessCpuTime=true" \
  "Expected CPU time options to be appended when requested"

exit 0
Loading