Skip to content

Commit 60b54b5

Browse files
committed
Switch from JuliaFormatter to Runic.jl for code formatting
- Update CI workflow to use fredrikekre/runic-action@v1
- Remove .JuliaFormatter.toml configuration
- Format all source files with Runic.jl

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent 35b7d75 commit 60b54b5

76 files changed

Lines changed: 2482 additions & 1537 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.JuliaFormatter.toml

Lines changed: 0 additions & 3 deletions
This file was deleted.

.github/workflows/FormatCheck.yml

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,19 @@
1-
name: "Format Check"
1+
name: format-check
22

33
on:
44
push:
55
branches:
66
- 'master'
7+
- 'main'
8+
- 'release-'
79
tags: '*'
810
pull_request:
911

1012
jobs:
11-
format-check:
12-
name: "Format Check"
13-
uses: "SciML/.github/.github/workflows/format-check.yml@v1"
13+
runic:
14+
runs-on: ubuntu-latest
15+
steps:
16+
- uses: actions/checkout@v4
17+
- uses: fredrikekre/runic-action@v1
18+
with:
19+
version: '1'

benchmarks/extended_jump_array.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,11 +10,11 @@ benchmark_out = ExtendedJumpArray(zeros(500000), zeros(500000))
1010
benchmark_in = ExtendedJumpArray(rand(rng, 500000), rand(rng, 500000))
1111

1212
function test_single_dot(out, array)
13-
@inbounds @. out = array + 1.23 * array
13+
return @inbounds @. out = array + 1.23 * array
1414
end
1515

1616
function test_double_dot(out, array)
17-
@inbounds @.. out = array + 1.23 * array
17+
return @inbounds @.. out = array + 1.23 * array
1818
end
1919

2020
println("Base-case normal broadcasting")

docs/make.jl

Lines changed: 22 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -11,23 +11,33 @@ cp(joinpath(docpath, "Project.toml"), joinpath(assetpath, "Project.toml"), force
1111

1212
include("pages.jl")
1313

14-
mathengine = MathJax3(Dict(:loader => Dict("load" => ["[tex]/require", "[tex]/mathtools"]), :tex => Dict("inlineMath" => [["\$", "\$"], ["\\(", "\\)"]],
15-
"packages" => [
16-
"base",
17-
"ams",
18-
"autoload",
19-
"mathtools",
20-
"require"
21-
])))
14+
mathengine = MathJax3(
15+
Dict(
16+
:loader => Dict("load" => ["[tex]/require", "[tex]/mathtools"]), :tex => Dict(
17+
"inlineMath" => [["\$", "\$"], ["\\(", "\\)"]],
18+
"packages" => [
19+
"base",
20+
"ams",
21+
"autoload",
22+
"mathtools",
23+
"require",
24+
]
25+
)
26+
)
27+
)
2228

23-
makedocs(sitename = "JumpProcesses.jl", authors = "Chris Rackauckas", modules = [JumpProcesses],
29+
makedocs(
30+
sitename = "JumpProcesses.jl", authors = "Chris Rackauckas", modules = [JumpProcesses],
2431
clean = true, doctest = false, linkcheck = true, warnonly = [:missing_docs],
25-
format = Documenter.HTML(; assets = ["assets/favicon.ico"],
32+
format = Documenter.HTML(;
33+
assets = ["assets/favicon.ico"],
2634
canonical = "https://docs.sciml.ai/JumpProcesses/",
2735
prettyurls = (get(ENV, "CI", nothing) == "true"),
2836
mathengine,
2937
edit_link = "master",
30-
repolink = "https://github.com/SciML/JumpProcesses.jl"),
31-
pages = pages)
38+
repolink = "https://github.com/SciML/JumpProcesses.jl"
39+
),
40+
pages = pages
41+
)
3242

3343
deploydocs(repo = "github.com/SciML/JumpProcesses.jl.git"; push_preview = true)

docs/pages.jl

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,19 @@
11
# Put in a separate page so it can be used by SciMLDocs.jl
22

3-
pages = ["index.md",
4-
"Tutorials" => Any["tutorials/simple_poisson_process.md",
3+
pages = [
4+
"index.md",
5+
"Tutorials" => Any[
6+
"tutorials/simple_poisson_process.md",
57
"tutorials/discrete_stochastic_example.md",
68
"tutorials/point_process_simulation.md",
79
"tutorials/jump_diffusion.md",
8-
"tutorials/spatial.md"],
10+
"tutorials/spatial.md",
11+
],
912
"Applications" => Any["applications/advanced_point_process.md"],
10-
"Type Documentation" => Any["Jumps, JumpProblem, and Aggregators" => "jump_types.md",
11-
"Jump solvers" => "jump_solve.md"],
13+
"Type Documentation" => Any[
14+
"Jumps, JumpProblem, and Aggregators" => "jump_types.md",
15+
"Jump solvers" => "jump_solve.md",
16+
],
1217
"FAQ" => "faq.md",
13-
"API" => "api.md"
18+
"API" => "api.md",
1419
]

ext/JumpProcessesKernelAbstractionsExt.jl

Lines changed: 58 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -5,20 +5,24 @@ using KernelAbstractions, Adapt
55
using StaticArrays
66
using PoissonRandom, Random
77

8-
function SciMLBase.__solve(ensembleprob::SciMLBase.AbstractEnsembleProblem,
8+
function SciMLBase.__solve(
9+
ensembleprob::SciMLBase.AbstractEnsembleProblem,
910
alg::SimpleTauLeaping,
1011
ensemblealg::EnsembleGPUKernel;
1112
trajectories,
1213
seed = nothing,
1314
dt = error("dt is required for SimpleTauLeaping."),
14-
kwargs...)
15+
kwargs...
16+
)
1517
if trajectories == 1
16-
return SciMLBase.__solve(ensembleprob, alg, EnsembleSerial(); trajectories = 1,
17-
seed, dt, kwargs...)
18+
return SciMLBase.__solve(
19+
ensembleprob, alg, EnsembleSerial(); trajectories = 1,
20+
seed, dt, kwargs...
21+
)
1822
end
1923

2024
ensemblealg.backend === nothing ? backend = CPU() :
21-
backend = ensemblealg.backend
25+
backend = ensemblealg.backend
2226

2327
jump_prob = ensembleprob.prob
2428

@@ -31,36 +35,42 @@ function SciMLBase.__solve(ensembleprob::SciMLBase.AbstractEnsembleProblem,
3135

3236
# Run vectorized solve
3337
ts,
34-
us = vectorized_solve(
35-
probs, jump_prob, SimpleTauLeaping(); backend, trajectories, seed, dt)
38+
us = vectorized_solve(
39+
probs, jump_prob, SimpleTauLeaping(); backend, trajectories, seed, dt
40+
)
3641

3742
# Convert to CPU for inspection
3843
_ts = Array(ts)
3944
_us = Array(us)
4045

41-
time = @elapsed sol = [begin
42-
ts = @view _ts[:, i]
43-
us = @view _us[:, :, i]
44-
sol_idx = findlast(x -> x != probs[i].prob.tspan[1], ts)
45-
if sol_idx === nothing
46-
@error "No solution found" tspan=probs[i].tspan[1] ts
47-
error("Batch solve failed")
48-
end
49-
@views ensembleprob.output_func(
50-
SciMLBase.build_solution(probs[i].prob,
51-
alg,
52-
ts[1:sol_idx],
53-
[us[j, :] for j in 1:sol_idx],
54-
k = nothing,
55-
stats = nothing,
56-
calculate_error = false,
57-
retcode = sol_idx !=
58-
length(ts) ?
59-
ReturnCode.Terminated :
60-
ReturnCode.Success),
61-
i)[1]
62-
end
63-
for i in eachindex(probs)]
46+
time = @elapsed sol = [
47+
begin
48+
ts = @view _ts[:, i]
49+
us = @view _us[:, :, i]
50+
sol_idx = findlast(x -> x != probs[i].prob.tspan[1], ts)
51+
if sol_idx === nothing
52+
@error "No solution found" tspan = probs[i].tspan[1] ts
53+
error("Batch solve failed")
54+
end
55+
@views ensembleprob.output_func(
56+
SciMLBase.build_solution(
57+
probs[i].prob,
58+
alg,
59+
ts[1:sol_idx],
60+
[us[j, :] for j in 1:sol_idx],
61+
k = nothing,
62+
stats = nothing,
63+
calculate_error = false,
64+
retcode = sol_idx !=
65+
length(ts) ?
66+
ReturnCode.Terminated :
67+
ReturnCode.Success
68+
),
69+
i
70+
)[1]
71+
end
72+
for i in eachindex(probs)
73+
]
6474
return SciMLBase.EnsembleSolution(sol, time, true)
6575
end
6676

@@ -82,7 +92,8 @@ end
8292
@kernel function simple_tau_leaping_kernel(
8393
@Const(probs_data), _us, _ts, dt, @Const(rj_data),
8494
current_u_buf, rate_cache_buf, counts_buf, local_dc_buf,
85-
seed::UInt64)
95+
seed::UInt64
96+
)
8697
i = @index(Global, Linear)
8798

8899
# Get thread-local buffers
@@ -114,7 +125,7 @@ end
114125

115126
# Get input/output arrays
116127
ts_view = @inbounds view(_ts, :, i)
117-
us_view = @inbounds view(_us,:,:,i)
128+
us_view = @inbounds view(_us, :, :, i)
118129

119130
# Initialize first time step and state
120131
@inbounds ts_view[1] = tspan[1]
@@ -124,7 +135,7 @@ end
124135

125136
# Main loop
126137
for j in 2:n
127-
tprev = tspan[1] + (j-2) * dt
138+
tprev = tspan[1] + (j - 2) * dt
128139

129140
# Compute rates and scale by dt
130141
rate(rate_cache, current_u, p, tprev)
@@ -143,20 +154,24 @@ end
143154
@inbounds for k in 1:state_dim
144155
us_view[j, k] = current_u[k]
145156
end
146-
@inbounds ts_view[j] = tspan[1] + (j-1) * dt
157+
@inbounds ts_view[j] = tspan[1] + (j - 1) * dt
147158
end
148159
end
149160

150161
# Vectorized solve function
151-
function vectorized_solve(probs, prob::JumpProblem, alg::SimpleTauLeaping;
152-
backend, trajectories, seed, dt, kwargs...)
162+
function vectorized_solve(
163+
probs, prob::JumpProblem, alg::SimpleTauLeaping;
164+
backend, trajectories, seed, dt, kwargs...
165+
)
153166
# Extract common jump data
154167
rj = prob.regular_jump
155168
rj_data = JumpData(rj.rate, rj.c, rj.numjumps)
156169

157170
# Extract trajectory-specific data without static typing
158-
probs_data = [TrajectoryData(SA{eltype(p.prob.u0)}[p.prob.u0...], p.prob.p, p.prob.tspan)
159-
for p in probs]
171+
probs_data = [
172+
TrajectoryData(SA{eltype(p.prob.u0)}[p.prob.u0...], p.prob.p, p.prob.tspan)
173+
for p in probs
174+
]
160175

161176
# Adapt to GPU
162177
probs_data_gpu = adapt(backend, probs_data)
@@ -197,13 +212,15 @@ function vectorized_solve(probs, prob::JumpProblem, alg::SimpleTauLeaping;
197212
KernelAbstractions.synchronize(backend)
198213

199214
# Seed for Poisson sampling
200-
seed = seed === nothing ? UInt64(12345) : UInt64(seed);
215+
seed = seed === nothing ? UInt64(12345) : UInt64(seed)
201216

202217
# Launch main kernel
203218
kernel = simple_tau_leaping_kernel(backend)
204-
main_event = kernel(probs_data_gpu, us, ts, dt, rj_data_gpu,
219+
main_event = kernel(
220+
probs_data_gpu, us, ts, dt, rj_data_gpu,
205221
current_u_buf, rate_cache_buf, counts_buf, local_dc_buf, seed;
206-
ndrange = n_trajectories)
222+
ndrange = n_trajectories
223+
)
207224
KernelAbstractions.synchronize(backend)
208225

209226
return ts, us

src/JumpProcesses.jl

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -33,18 +33,18 @@ import SymbolicIndexingInterface as SII
3333

3434
# Import additional types and functions from DiffEqBase and SciMLBase
3535
using DiffEqBase: DiffEqBase, CallbackSet, ContinuousCallback, DAEFunction,
36-
DDEFunction, DiscreteProblem, ODEFunction, ODEProblem,
37-
ODESolution, ReturnCode, SDEFunction, SDEProblem, add_tstop!,
38-
deleteat!, isinplace, remake, savevalues!, step!,
39-
u_modified!
36+
DDEFunction, DiscreteProblem, ODEFunction, ODEProblem,
37+
ODESolution, ReturnCode, SDEFunction, SDEProblem, add_tstop!,
38+
deleteat!, isinplace, remake, savevalues!, step!,
39+
u_modified!
4040
using SciMLBase: SciMLBase, DEIntegrator
4141

4242
abstract type AbstractJump end
4343
abstract type AbstractMassActionJump <: AbstractJump end
4444
abstract type AbstractAggregatorAlgorithm end
4545
abstract type AbstractJumpAggregator end
4646
abstract type AbstractSSAIntegrator{Alg, IIP, U, T} <:
47-
DEIntegrator{Alg, IIP, U, T} end
47+
DEIntegrator{Alg, IIP, U, T} end
4848

4949
const DEFAULT_RNG = Random.default_rng()
5050

@@ -126,7 +126,7 @@ export init, solve, solve!
126126
include("SSA_stepper.jl")
127127
export SSAStepper
128128

129-
# leaping:
129+
# leaping:
130130
include("simple_regular_solve.jl")
131131
export SimpleTauLeaping, EnsembleGPUKernel
132132

0 commit comments

Comments (0)