Merge pull request #50 from JuliaConstraints/newdocs
Improves doc for PerfChecker
Azzaare authored Jun 18, 2024
2 parents 8936812 + 3266c18 commit 90e6640
Showing 8 changed files with 316 additions and 27 deletions.
44 changes: 25 additions & 19 deletions docs/Manifest.toml
@@ -216,15 +216,15 @@ version = "0.1.2"

[[deps.ConstraintCommons]]
deps = ["Dictionaries", "TestItemRunner", "TestItems"]
git-tree-sha1 = "866bd8d55f57037d7542c9f1d8d17285f44b78dd"
git-tree-sha1 = "4698469c1292bea5573dc294a180c364d6ebfa81"
repo-rev = "main"
repo-url = "https://github.com/JuliaConstraints/ConstraintCommons.jl.git"
uuid = "e37357d9-0691-492f-a822-e5ea6a920954"
version = "0.2.0"
version = "0.2.1"

[[deps.ConstraintDomains]]
deps = ["ConstraintCommons", "Dictionaries", "Intervals", "PatternFolds", "StatsBase", "TestItemRunner", "TestItems"]
git-tree-sha1 = "0d087d2580392660948480a0484dbb17b0141e83"
git-tree-sha1 = "50801b71002e4b8c7d943980a8b3008dcbc2aa33"
repo-rev = "main"
repo-url = "https://github.com/JuliaConstraints/ConstraintDomains.jl.git"
uuid = "5800fd60-8556-4464-8d61-84ebf7a0bedb"
@@ -374,9 +374,9 @@ version = "1.4.1"

[[deps.DocumenterVitepress]]
deps = ["ANSIColoredPrinters", "Base64", "DocStringExtensions", "Documenter", "IOCapture", "Markdown", "NodeJS_20_jll", "REPL"]
git-tree-sha1 = "733cf5d8819a583b2753a59e820e9de130c2df4e"
git-tree-sha1 = "a58b1681c9d3ef9f5842ac86c13f9f5b8ff2eee6"
uuid = "4710194d-e776-4893-9690-8d956a29c365"
version = "0.1.0"
version = "0.1.1"

[deps.DocumenterVitepress.extensions]
DocumenterVitepressDocumenterCitationsExt = "DocumenterCitations"
@@ -461,9 +461,9 @@ version = "2.23.1"

[[deps.Flux]]
deps = ["Adapt", "ChainRulesCore", "Compat", "Functors", "LinearAlgebra", "MLUtils", "MacroTools", "NNlib", "OneHotArrays", "Optimisers", "Preferences", "ProgressLogging", "Random", "Reexport", "SparseArrays", "SpecialFunctions", "Statistics", "Zygote"]
git-tree-sha1 = "a5475163b611812d073171583982c42ea48d22b0"
git-tree-sha1 = "edacf029ed6276301e455e34d7ceeba8cc34078a"
uuid = "587475ba-b771-5e3f-ad9e-33799f191a9c"
version = "0.14.15"
version = "0.14.16"

[deps.Flux.extensions]
FluxAMDGPUExt = "AMDGPU"
@@ -499,9 +499,9 @@ uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820"

[[deps.GPUArrays]]
deps = ["Adapt", "GPUArraysCore", "LLVM", "LinearAlgebra", "Printf", "Random", "Reexport", "Serialization", "Statistics"]
git-tree-sha1 = "38cb19b8a3e600e509dc36a6396ac74266d108c1"
git-tree-sha1 = "c154546e322a9c73364e8a60430b0f79b812d320"
uuid = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7"
version = "10.1.1"
version = "10.2.0"

[[deps.GPUArraysCore]]
deps = ["Adapt"]
@@ -528,9 +528,9 @@ version = "1.3.1"

[[deps.IOCapture]]
deps = ["Logging", "Random"]
git-tree-sha1 = "8b72179abc660bfab5e28472e019392b97d0985c"
git-tree-sha1 = "b6d6bfdd7ce25b0f9b2f6b3dd56b2673a66c8770"
uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89"
version = "0.2.4"
version = "0.2.5"

[[deps.IRTools]]
deps = ["InteractiveUtils", "MacroTools"]
@@ -550,9 +550,15 @@ version = "0.3.1"

[[deps.InlineStrings]]
deps = ["Parsers"]
git-tree-sha1 = "9cc2baf75c6d09f9da536ddf58eb2f29dedaf461"
git-tree-sha1 = "86356004f30f8e737eff143d57d41bd580e437aa"
uuid = "842dd82b-1e85-43dc-bf29-5d0ee9dffc48"
version = "1.4.0"
version = "1.4.1"

[deps.InlineStrings.extensions]
ArrowTypesExt = "ArrowTypes"

[deps.InlineStrings.weakdeps]
ArrowTypes = "31f734f8-188a-4ce0-8406-c8a06bd891cd"

[[deps.InteractiveUtils]]
deps = ["Markdown"]
@@ -593,9 +599,9 @@ version = "0.21.4"

[[deps.JuMP]]
deps = ["LinearAlgebra", "MacroTools", "MathOptInterface", "MutableArithmetics", "OrderedCollections", "PrecompileTools", "Printf", "SparseArrays"]
git-tree-sha1 = "28f9313ba6603e0d2850fc3eae617e769c99bf83"
git-tree-sha1 = "7e10a0d8b534f2d8e9f712b33488584254624fb1"
uuid = "4076af6c-e467-56ae-b986-b466b2749572"
version = "1.22.1"
version = "1.22.2"

[deps.JuMP.extensions]
JuMPDimensionalDataExt = "DimensionalData"
@@ -813,9 +819,9 @@ version = "2023.1.10"

[[deps.MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "a3589efe0005fc4718775d8641b2de9060d23f73"
git-tree-sha1 = "898c56fbf8bf71afb0c02146ef26f3a454e88873"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "1.4.4"
version = "1.4.5"

[[deps.NLSolversBase]]
deps = ["DiffResults", "Distributed", "FiniteDiff", "ForwardDiff"]
@@ -930,8 +936,8 @@ version = "0.2.4"

[[deps.PerfChecker]]
deps = ["CSV", "CoverageTools", "CpuId", "JSON", "Malt", "Pkg", "Profile", "TOML", "TypedTables", "UUIDs"]
git-tree-sha1 = "113b402af7d1d4d1d1a4fe176f7d4881822872b1"
repo-rev = "main"
git-tree-sha1 = "58979ea0b3de20e82626de04aca6328e1a81c3fd"
repo-rev = "docstuff"
repo-url = "https://github.com/JuliaConstraints/PerfChecker.jl.git"
uuid = "6309bf6b-a531-4b08-891e-8ee981e5c424"
version = "0.2.1"
8 changes: 6 additions & 2 deletions docs/make.jl
@@ -38,7 +38,8 @@ makedocs(;
repo = "https://github.com/JuliaConstraints/JuliaConstraints.github.io",
devurl = "dev",
deploy_url = "https://juliaconstraints.github.io/",
description = "Resources about Constraint Programming in Julia"
description = "Resources about Constraint Programming in Julia",
build_vitepress = false
),
pages=[
"Home" => "index.md",
@@ -106,8 +107,11 @@ makedocs(;
],
"Performance Checking" => [
"PerfChecker.jl" => "perf/perf_checker.md",
"BenchmarkToolsExt" => "perf/benchmark_ext.md",
"Tutorial" => "perf/tutorial.md",
"Interface" => "perf/perf_interface.md",
"BenchmarkToolsExt" => "perf/benchmark_ext.md",
"ChairmarksExt" => "perf/chairmarks_ext.md",
"API" => "perf/api.md"
],
"API" => [
"Public" => "public_api.md",
7 changes: 7 additions & 0 deletions docs/src/perf/api.md
@@ -0,0 +1,7 @@
# API

Here's the API reference for PerfChecker.jl.

```@autodocs
Modules=[PerfChecker]
```
35 changes: 34 additions & 1 deletion docs/src/perf/benchmark_ext.md
@@ -1,3 +1,36 @@
# BenchmarkTools Extension

A benchmarking extension, based on `BenchmarkTools.jl`, has been interfaced with `PerfChecker.jl`. This section (will) provides some usage examples, documentation, and links to related notebooks.
A benchmarking extension, based on `BenchmarkTools.jl`, has been interfaced with `PerfChecker.jl`.
This section will provide some usage examples, documentation, and links to related notebooks.

## Usage

Like all other extensions, the `BenchmarkTools` extension can be used in the following way:

```julia
julia> using BenchmarkTools, PerfChecker

julia> @check :benchmark Dict(:option1 => "value1", :option2 => "value2", :PATH => @__DIR__) begin
# the preliminary code goes here
using Example
end begin
# the code you want to be benchmarked
Example.domath(10) # returns x + 5, 15 in this case.
end
```

## Options

Options specific to this backend, along with their default values, are defined as follows:
```julia
:threads => 1
:track => "none"
:samples => BenchmarkTools.DEFAULT_PARAMETERS.samples
:seconds => BenchmarkTools.DEFAULT_PARAMETERS.seconds
:evals => BenchmarkTools.DEFAULT_PARAMETERS.evals
:overhead => BenchmarkTools.DEFAULT_PARAMETERS.overhead
:gctrial => BenchmarkTools.DEFAULT_PARAMETERS.gctrial
:gcsample => BenchmarkTools.DEFAULT_PARAMETERS.gcsample
:time_tolerance => BenchmarkTools.DEFAULT_PARAMETERS.time_tolerance
:memory_tolerance => BenchmarkTools.DEFAULT_PARAMETERS.memory_tolerance
```
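
For example, to shrink the benchmark's time budget and sample count, the corresponding keys can simply be set in the config dictionary passed to `@check`. The following is a minimal sketch; the values are illustrative, not recommendations:

```julia
using BenchmarkTools, PerfChecker

config = Dict(
    :PATH => @__DIR__,  # environment path, as in the usage example above
    :evals => 10,       # evaluations per sample
    :samples => 100,    # number of samples to collect
    :seconds => 2.0,    # time budget for the benchmark
    :threads => 2       # number of threads to start Julia with
)

results = @check :benchmark config begin
    using Example
end begin
    Example.domath(10)
end
```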
32 changes: 32 additions & 0 deletions docs/src/perf/chairmarks_ext.md
@@ -0,0 +1,32 @@
# Chairmarks Extension

A benchmarking extension, based on `Chairmarks.jl`, has been interfaced with `PerfChecker.jl`.
This section will provide some usage examples, documentation, and links to related notebooks.

## Usage

Like all other extensions, the `Chairmarks` extension can be used in the following way:

```julia
julia> using Chairmarks, PerfChecker

julia> @check :chairmark Dict(:option1 => "value1", :option2 => "value2", :PATH => @__DIR__) begin
# the preliminary code goes here
using Example
end begin
# the code you want to be benchmarked
Example.domath(10) # returns x + 5, 15 in this case.
end
```

## Options

Options specific to this backend, along with their default values, are defined as follows:
```julia
:threads => 1
:track => "none"
:evals => nothing
:seconds => 1
:samples => nothing
:gc => true
```
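
Overriding these defaults works the same way as for the other backends: set the corresponding keys in the config dictionary. A minimal sketch with illustrative values:

```julia
using Chairmarks, PerfChecker

config = Dict(:PATH => @__DIR__, :seconds => 0.5, :gc => false)

results = @check :chairmark config begin
    using Example
end begin
    Example.domath(10)
end
```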
47 changes: 44 additions & 3 deletions docs/src/perf/perf_checker.md
@@ -1,7 +1,48 @@
# PerfChecker.jl

Documentation for `PerfChecker.jl`.
PerfChecker.jl is a package designed to help package authors easily test the performance of their packages.
To achieve that, it provides the following features:

```@autodocs
Modules=[PerfChecker]
- The main macro `@check`, which provides an easy-to-use interface over various benchmarking backends, configurable via a dictionary.
- (WIP) A CI workflow for reproducible performance testing.
- Visualization of different metrics from `@check` using Makie.jl.

## Usage

The primary usage of PerfChecker.jl looks like this:

```julia
using PerfChecker
# optionally load a custom backend such as BenchmarkTools, Chairmarks, etc.
config = Dict(:option1 => "value1", :option2 => :value2)

results = @check :name_of_backend config begin
# preparatory code goes here
end begin
# the code block to be performance tested goes here
end

# Visualization of the results
using Makie
checkres_to_scatterlines(results)
```

The config dictionary can take many options, depending on the backend.

Some of the commonly used options are:
- `:PATH` => The path to the default Julia environment to use when creating a new process.
- `:pkgs` => A list of package versions to test performance for. It is defined as a `Tuple`, `(name::String, option::Symbol, versions::Vector{VersionNumber}, last_or_first::Bool)`, whose fields are as follows (see the example config at the end of this section):
- `name` is the name of the package.
- `option` is one of the 5 symbols:
- `:patches`: last patch or first patch of a version
- `:breaking`: last breaking or next breaking version
- `:major`: previous or next major version
- `:minor`: previous or next minor version
- `:custom`: custom version numbers (provide any boolean value for `last_or_first` in this case as it doesn't matter)
- `versions`: the version numbers used as input for the chosen `option`.
- `last_or_first`: a `Bool` selecting between the two alternatives of the chosen `option` (for example, last vs. first patch).
- `:tags` => A list of tags (a vector of symbols) to easily tag performance tests.
- `:devops` => A custom input passed to `Pkg.develop`, intended for testing the performance of a local development branch of a package against previous versions. It can often be used as simply as `:devops => "MyPackageName"`.
- `:threads` => An integer to select the number of threads to start Julia with.

Check out the documentation of the individual backends for more options and their default values.
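
As an illustration of the options above, a config checking the patches of a few earlier versions of a hypothetical package `MyPackage` could look like the following sketch (package name, version numbers, and tags are placeholders):

```julia
config = Dict(
    :PATH => @__DIR__,
    # (name, option, versions, last_or_first), as described above
    :pkgs => ("MyPackage", :patches, [v"0.1", v"0.2", v"0.3"], true),
    :tags => [:regression, :ci],
    :devops => "MyPackage",  # also check the local development version
    :threads => 2
)
```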
102 changes: 100 additions & 2 deletions docs/src/perf/perf_interface.md
@@ -1,3 +1,101 @@
# Interfacing PerfChecker
# Extending PerfChecker

PerfChecker was build as an easy to extend interface. This section will cover the few method required.
PerfChecker was built as an easy-to-extend interface. A good reference example for this is the Chairmarks extension.

Extending PerfChecker works via Julia's package extensions feature. There are 6 essential functions that need to be overloaded inside the package extension.
Each extension has a keyword symbol associated with it, which users pass to `@check` to select that backend.

## The Default Options

Method to be overloaded: `PerfChecker.default_options(::Val{:myperfextension})::Dict`

PerfChecker works via a config dictionary. Users can populate this dictionary with options and provide it to the main `@check` macro to customize the performance testing to their liking.

For Chairmarks.jl, it looks like this:
```julia
function PerfChecker.default_options(::Val{:chairmark})
return Dict(
:threads => 1,
:track => "none",
:evals => nothing,
:seconds => 1,
:samples => nothing,
:gc => true
)
end
```


## Package initialization
Method to be overloaded: `PerfChecker.initpkgs(::Val{:myperfextension})::Expr`

This method simply loads the main package(s) associated with the custom backend. In the case of Chairmarks.jl, it looks like this:
```julia
PerfChecker.initpkgs(::Val{:chairmark}) = quote
using Chairmarks
end
```

## Preparatory Code
Method to be overloaded: `PerfChecker.prep(config_dict::Dict, block::Expr, ::Val{:myperfextension})::Expr`

This method exists to run arbitrary "preparatory" code (represented by the `block` parameter here) before running the code to be performance tested.

The output from this step is stored under the `:prep_result` key of the configuration dictionary.

Example for Chairmarks.jl:
```julia
PerfChecker.prep(::Dict, block::Expr, ::Val{:chairmark}) = quote
$block
nothing
end
```

This just runs the code in `block` provided by the user.

## Main Code to be Performance Tested
Method to be overloaded: `PerfChecker.check(config_dict::Dict, block::Expr, ::Val{:myperfextension})::Expr`.

This method runs the performance testing on the user's code. For Chairmarks.jl, it looks like this:
```julia
function PerfChecker.check(d::Dict, block::Expr, ::Val{:chairmark})
quote
d = $d
return @be $block evals=d[:evals] seconds=d[:seconds] samples=d[:samples] gc=d[:gc]
end
end
```

The output from this step is stored under the `:check_result` key of the configuration dictionary.

## Post Performance Testing Code
Method to be overloaded: `PerfChecker.post(config_dict::Dict, ::Val{:myperfextension})`

The code to be run after the performance testing is done. Its output is converted into a table via the overloaded `to_table` method.

In the case of Chairmarks.jl:
```julia
PerfChecker.post(d::Dict, ::Val{:chairmark}) = d[:check_result]
```

## Converting the result into a Table
Method to be overloaded: `PerfChecker.to_table`

Converts the output of the `post` function into an appropriate table.

In the case of Chairmarks.jl:
```julia
function PerfChecker.to_table(chair::Chairmarks.Benchmark)
l = length(chair.samples)
times = [chair.samples[i].time for i in 1:l]
gctimes = [chair.samples[i].gc_fraction for i in 1:l]
bytes = [chair.samples[i].bytes for i in 1:l]
allocs = [chair.samples[i].allocs for i in 1:l]
return Table(times = times, gctimes = gctimes, bytes = bytes, allocs = allocs)
end
```
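
Putting the six overloads together, a skeleton for a hypothetical `:mytool` backend could look like the sketch below. Everything named `MyTool*` is a placeholder for the package being wrapped, and `Table` is assumed to come from TypedTables.jl, as in the Chairmarks example above:

```julia
module MyToolExt

using PerfChecker, MyTool   # MyTool is the hypothetical benchmarking package
using TypedTables: Table    # assumed source of `Table`

PerfChecker.default_options(::Val{:mytool}) = Dict(:threads => 1, :runs => 10)

PerfChecker.initpkgs(::Val{:mytool}) = quote
    using MyTool
end

PerfChecker.prep(::Dict, block::Expr, ::Val{:mytool}) = quote
    $block
    nothing
end

function PerfChecker.check(d::Dict, block::Expr, ::Val{:mytool})
    quote
        d = $d
        # placeholder call into the hypothetical backend
        MyTool.benchmark(() -> $block; runs = d[:runs])
    end
end

PerfChecker.post(d::Dict, ::Val{:mytool}) = d[:check_result]

# Convert the hypothetical result object into a Table for PerfChecker
function PerfChecker.to_table(res::MyTool.Result)
    return Table(times = res.times, gctimes = res.gctimes,
        bytes = res.bytes, allocs = res.allocs)
end

end
```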


---

There are also other functions that can be overloaded, mostly related to plotting, but the ones above are the basic functions needed to extend PerfChecker for a custom backend.