diff --git a/benchmark/README.md b/benchmark/README.md
new file mode 100644
index 000000000..989a1865f
--- /dev/null
+++ b/benchmark/README.md
@@ -0,0 +1,53 @@
+# Benchmarking Suite
+
+The following benchmarking suite uses `PkgBenchmark.jl` and `BenchmarkTools.jl` to compare the performance of different branches of Gridap.
+
+## Running the benchmarks
+
+### Running as CI job
+
+The benchmarks are set up as a manual GitHub Actions workflow in `.github/workflows/benchmark.yml`. To run the workflow, you will need administrator access to the Gridap repository; then follow the instructions [here](https://docs.github.com/en/actions/managing-workflow-runs-and-deployments/managing-workflow-runs/manually-running-a-workflow).
+
+The workflow has two inputs: `target` and `base`, which are the branches/tags/commits you want to compare. The workflow will run the benchmarks on the `target` branch and compare them with the `base` branch (`master` by default).
+
+### Running Locally
+
+To run the benchmarks locally, have a look at the [documentation for `PkgBenchmark.jl`](https://juliaci.github.io/PkgBenchmark.jl/stable/run_benchmarks/).
+
+Alternatively, you can run the CI script from a local copy of the repository. From the Gridap root directory, run the following commands:
+
+```bash
+# Instantiate Gridap and Gridap/benchmark
+julia -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()'
+julia --project=benchmark/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()'
+# Run the benchmarks
+export BM_TARGET="your target branch"
+export BM_BASE="your reference branch"
+julia --project=benchmark/ --color=yes benchmark/run_benchmarks.jl
+```
+
+where `BM_TARGET` and `BM_BASE` are the branches/tags/commits you want to compare.
+
+## Adding a new benchmark
+
+To add a new benchmark suite `xyz`, create a new file `bm/bm_xyz.jl` with the following structure:
+
+```julia
+module bm_xyz
+
+using PkgBenchmark, BenchmarkTools
+
+const SUITE = BenchmarkGroup()
+
+[... Add your benchmarks here ...]
+
+end # module
+```
+
+Then, add the following line to the `benchmarks.jl` file:
+
+```julia
+@include_bm SUITE "bm_xyz"
+```
+
+This should automatically include the new benchmarks into the global benchmarking suite.
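To make the skeleton above concrete, a filled-in suite might look like the sketch below. It mirrors the pattern used by `bm_assembly.jl` further down; the module name `bm_xyz`, the `build_model` helper, and the mesh sizes are hypothetical and not part of this PR.

```julia
module bm_xyz

using PkgBenchmark, BenchmarkTools
using Gridap

# Hypothetical helper: build an n x n Cartesian model of the unit square.
build_model(n) = CartesianDiscreteModel((0,1,0,1), (n,n))

const SUITE = BenchmarkGroup()

# One benchmark per mesh size; the loop variable is interpolated with $
# so each benchmark captures its own value of n.
for n in (10, 100)
  SUITE["cartesian_model_$(n)"] = @benchmarkable build_model($n)
end

end # module
```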
diff --git a/benchmark/benchmarks.jl b/benchmark/benchmarks.jl
index e14106e0a..ad8aa0aa6 100644
--- a/benchmark/benchmarks.jl
+++ b/benchmark/benchmarks.jl
@@ -1,29 +1,14 @@
 using BenchmarkTools
+using PkgBenchmark
 using Gridap
 
-include("drivers.jl")
+macro include_bm(SUITE,name)
+  quote
+    include("bm/$($name).jl")
+    SUITE["$($name)"] = $(Symbol(name)).SUITE
+  end
+end
 
 const SUITE = BenchmarkGroup()
 
-for (D,ncells) in [(2,20),(3,8)]
-  for order in [1,2,3]
-    basis_cases = [
-      ("lagrangian",lagrangian,Float64,order),
-      ("vector_lagragian",lagrangian,VectorValue{D,Float64},order),
-      ("raviart_thomas",raviart_thomas,Float64,order-1),
-    ]
-    for (basis_name,basis,T,degree) in basis_cases
-      biform_cases = [
-        ("mass",mass,2*order),
-        ("laplacian",laplacian,2*(order-1)),
-      ]
-      for (biform_name,biform,qdegree) in biform_cases
-        reffe = ReferenceFE(basis, T, degree)
-        name = "assembly_$(D)D_$(basis_name)_$(biform_name)_$(order)"
-        SUITE[name] = @benchmarkable bm_matrix_assembly(
-          $(D),$(ncells),$(reffe),$(qdegree),$(biform)
-        )
-      end
-    end
-  end
-end
+@include_bm SUITE "bm_assembly"
diff --git a/benchmark/bm/bm_assembly.jl b/benchmark/bm/bm_assembly.jl
new file mode 100644
index 000000000..80b378163
--- /dev/null
+++ b/benchmark/bm/bm_assembly.jl
@@ -0,0 +1,59 @@
+module bm_assembly
+
+using PkgBenchmark, BenchmarkTools
+using Gridap
+using Gridap.Geometry
+
+mass(u,v,dΩ) = ∫(u⋅v)dΩ
+laplacian(u,v,dΩ) = ∫(∇(u)⊙∇(v))dΩ
+graddiv(u,v,dΩ) = ∫((∇⋅u)⋅(∇⋅v))dΩ
+
+function driver(
+  Ω :: Triangulation,
+  reffe :: Tuple,
+  qdegree :: Integer,
+  biform :: Function
+)
+  model = get_background_model(Ω)
+  dΩ = Measure(Ω,qdegree)
+  V = TestFESpace(model, reffe)
+  a(u,v) = biform(u,v,dΩ)
+  A = assemble_matrix(a, V, V)
+end
+
+const SUITE = BenchmarkGroup()
+
+for (D,n) in [(2,10),(3,6)]
+  domain = Tuple([repeat([0,1], D)...])
+  partition = Tuple(fill(n, D))
+  model = UnstructuredDiscreteModel(CartesianDiscreteModel(domain, partition))
+  trian_cases = [
+    ("bulk",Triangulation(model)),
+    ("view",Triangulation(model,collect(1:div(n^D,2)))), # About half of the cells
+  ]
+  for (trian_name, trian) in trian_cases
+    for order in [1,2,3]
+      basis_cases = [
+        ("lagrangian",lagrangian,Float64,order),
+        ("vector_lagrangian",lagrangian,VectorValue{D,Float64},order),
+        ("raviart_thomas",raviart_thomas,Float64,order-1),
+      ]
+      for (basis_name,basis,T,degree) in basis_cases
+        biform_cases = [
+          ("mass",mass,2*order),
+          ("laplacian",laplacian,2*(order-1)),
+          ("graddiv",graddiv,2*(order-1)),
+        ]
+        for (biform_name,biform,qdegree) in biform_cases
+          reffe = ReferenceFE(basis, T, degree)
+          name = "assembly_$(D)D_$(trian_name)_$(basis_name)_$(biform_name)_$(order)"
+          SUITE[name] = @benchmarkable driver(
+            $(trian),$(reffe),$(qdegree),$(biform)
+          )
+        end
+      end
+    end
+  end
+end
+
+end # module
\ No newline at end of file
diff --git a/benchmark/drivers.jl b/benchmark/drivers.jl
deleted file mode 100644
index 6ac5bf6a5..000000000
--- a/benchmark/drivers.jl
+++ /dev/null
@@ -1,22 +0,0 @@
-
-using Gridap.Geometry
-
-mass(u,v,dΩ) = ∫(u⋅v)dΩ
-laplacian(u,v,dΩ) = ∫(∇(u)⊙∇(v))dΩ
-
-function bm_matrix_assembly(
-  D :: Integer,
-  n :: Integer,
-  reffe :: Tuple,
-  qdegree :: Integer,
-  biform :: Function
-)
-  domain = Tuple([repeat([0,1], D)...])
-  partition = Tuple(fill(n, D))
-  model = UnstructuredDiscreteModel(CartesianDiscreteModel(domain, partition))
-  Ω = Triangulation(model)
-  dΩ = Measure(Ω,qdegree)
-  V = TestFESpace(model, reffe)
-  a(u,v) = biform(u,v,dΩ)
-  A = assemble_matrix(a, V, V)
-end
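For quick local comparisons, one might also drive `PkgBenchmark.jl` directly from the REPL instead of going through the CI script. The following is a minimal sketch of that workflow, assuming Gridap is developed in the active environment; the branch names and output file are placeholders, and this is not the content of `benchmark/run_benchmarks.jl` (which is not shown in this diff).

```julia
using PkgBenchmark

# Configurations for the two revisions to compare (placeholder branch names).
target = BenchmarkConfig(id = "my-feature-branch")
base   = BenchmarkConfig(id = "master")

# Run the benchmark suite on both revisions and compare the results.
comparison = judge("Gridap", target, base)

# Write a human-readable report of regressions and improvements.
export_markdown("benchmark_report.md", comparison)
```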