
Commit d00fb95

st-- and devmotion authored
Introduce JuliaFormatter with style=blue (JuliaGaussianProcesses#220)
Co-authored-by: David Widmann <[email protected]>
1 parent 4b1961e commit d00fb95


92 files changed (+845, -683 lines)

.JuliaFormatter.toml (+1)

@@ -0,0 +1 @@
+style = "blue"

.github/workflows/ci.yml (+20)

@@ -47,3 +47,23 @@ jobs:
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           path-to-lcov: ./lcov.info
+
+  format-check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: julia-actions/setup-julia@latest
+        with:
+          version: 1
+      - uses: actions/checkout@v1
+      - name: Install JuliaFormatter and format
+        run: |
+          julia --project=test/ -e 'using Pkg; Pkg.instantiate()'
+          julia --project=test/ -e 'using JuliaFormatter; format(".", verbose=true)'
+      - name: Format check
+        run: |
+          CHANGED="$(git diff --name-only)"
+          if [ ! -z $CHANGED ]; then
+            >&2 echo "Some files have not been formatted !!!"
+            echo "$CHANGED"
+            exit 1
+          fi
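
The job above reformats the checkout and then fails if `git diff` reports any modified files. For a local check that leaves files untouched, one could instead rely on the Bool that `format` returns (reportedly true when everything already conforms to the configured style); a rough sketch, not part of this commit, assuming JuliaFormatter's `overwrite` keyword behaves as described:

    using JuliaFormatter

    # Check-only sketch: overwrite=false keeps files as they are; the return value
    # reports whether the repository already matches the blue style.
    if !format("."; overwrite=false)
        error("Some files have not been formatted!")
    end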

benchmark/MLKernels.jl (+6, -4)

@@ -3,16 +3,18 @@ using MLKernels
 SUITE["MLKernels"] = BenchmarkGroup()

 mlkernelnames = ["SquaredExponentialKernel"]
-kernels=Dict{String,MLKernels.Kernel}()
+kernels = Dict{String,MLKernels.Kernel}()
 for k in mlkernelnames
     SUITE["MLKernels"][k] = BenchmarkGroup()
-    kernels[k] = eval(Meta.parse("MLKernels."*k*"(alpha)"))
+    kernels[k] = eval(Meta.parse("MLKernels." * k * "(alpha)"))
 end

 for k in mlkernelnames
-    SUITE["MLKernels"][k]["k(X,Y)"] = @benchmarkable MLKernels.kernelmatrix($(kernels[k]),$X,$Y)
+    SUITE["MLKernels"][k]["k(X,Y)"] = @benchmarkable MLKernels.kernelmatrix(
+        $(kernels[k]), $X, $Y
+    )
     # SUITE["MLKernels"][k][kt]["k!(X,Y)"] = @benchmarkable MLKernels.kernelmatrix!(KXY,$(kernels[k][kt]),$X,$Y) setup=(KXY=copy($KXY))
-    SUITE["MLKernels"][k]["k(X)"] = @benchmarkable MLKernels.kernelmatrix($(kernels[k]),$X)
+    SUITE["MLKernels"][k]["k(X)"] = @benchmarkable MLKernels.kernelmatrix($(kernels[k]), $X)
     # SUITE["MLKernels"][k][kt]["k!(X)"] = @benchmarkable MLKernels.kernelmatrix!(KX,$(kernels[k][kt]),$X) setup=(KX=copy($KX))
     # SUITE["MLKernels"][k][kt]["kdiag(X)"] = @benchmarkable MLKernels.kerneldiagmatrix($(kernels[k][kt]),$X)
     # SUITE["MLKernels"][k][kt]["kdiag!(X)"] = @benchmarkable MLKernels.kerneldiagmatrix!(kX,$(kernels[k][kt]),$X) setup=(kX=copy($kX))

benchmark/benchmarks.jl (+8, -7)

@@ -7,16 +7,17 @@ const SUITE = BenchmarkGroup()
 Random.seed!(1234)

 dim = 50
-N1 = 1000; N2 = 500;
+N1 = 1000;
+N2 = 500;
 alpha = 2.0

-X = rand(Float64,N1,dim)
-Y = rand(Float64,N2,dim)
+X = rand(Float64, N1, dim)
+Y = rand(Float64, N2, dim)

-KXY = rand(Float64,N1,N2)
-KX = rand(Float64,N1,N1)
-sKX = Symmetric(rand(Float64,N1,N1))
-kX = rand(Float64,N1)
+KXY = rand(Float64, N1, N2)
+KX = rand(Float64, N1, N1)
+sKX = Symmetric(rand(Float64, N1, N1))
+kX = rand(Float64, N1)

 include("kernelmatrix.jl")
 include("MLKernels.jl")

benchmark/kernelmatrix.jl (+17, -5)

@@ -3,22 +3,34 @@ using KernelFunctions
 SUITE["KernelFunctions"] = BenchmarkGroup()

 kernelnames = ["SqExponentialKernel"]
-kerneltypes = ["ARD","ISO"]
-kernels=Dict{String,Dict{String,KernelFunctions.Kernel}}()
+kerneltypes = ["ARD", "ISO"]
+kernels = Dict{String,Dict{String,KernelFunctions.Kernel}}()
 for k in kernelnames
     kernels[k] = Dict{String,KernelFunctions.Kernel}()
     SUITE["KernelFunctions"][k] = BenchmarkGroup()
     for kt in kerneltypes
         SUITE["KernelFunctions"][k][kt] = BenchmarkGroup()
-        kernels[k][kt] = eval(Meta.parse("KernelFunctions."*k*"("*(kt == "ARD" ? "alpha*ones(Float64,dim)" : "alpha" )*")"))
+        kernels[k][kt] = eval(
+            Meta.parse(
+                "KernelFunctions." *
+                k *
+                "(" *
+                (kt == "ARD" ? "alpha*ones(Float64,dim)" : "alpha") *
+                ")",
+            ),
+        )
     end
 end

 for k in kernelnames
     for kt in kerneltypes
-        SUITE["KernelFunctions"][k][kt]["k(X,Y)"] = @benchmarkable KernelFunctions.kernelmatrix($(kernels[k][kt]),$X,$Y,obsdim=1)
+        SUITE["KernelFunctions"][k][kt]["k(X,Y)"] = @benchmarkable KernelFunctions.kernelmatrix(
+            $(kernels[k][kt]), $X, $Y; obsdim=1
+        )
         # SUITE["KernelFunctions"][k][kt]["k!(X,Y)"] = @benchmarkable KernelFunctions.kernelmatrix!(KXY,$(kernels[k][kt]),$X,$Y) setup=(KXY=copy($KXY))
-        SUITE["KernelFunctions"][k][kt]["k(X)"] = @benchmarkable KernelFunctions.kernelmatrix($(kernels[k][kt]),$X,obsdim=1)
+        SUITE["KernelFunctions"][k][kt]["k(X)"] = @benchmarkable KernelFunctions.kernelmatrix(
+            $(kernels[k][kt]), $X; obsdim=1
+        )
         # SUITE["KernelFunctions"][k][kt]["k!(X)"] = @benchmarkable KernelFunctions.kernelmatrix!(KX,$(kernels[k][kt]),$X) setup=(KX=copy($KX))
         # SUITE["KernelFunctions"][k][kt]["kdiag(X)"] = @benchmarkable KernelFunctions.kerneldiagmatrix($(kernels[k][kt]),$X)
         # SUITE["KernelFunctions"][k][kt]["kdiag!(X)"] = @benchmarkable KernelFunctions.kerneldiagmatrix!(kX,$(kernels[k][kt]),$X) setup=(kX=copy($kX))

docs/create_kernel_plots.jl (+83, -54)

@@ -1,77 +1,106 @@
-using Plots; pyplot();
+using Plots;
+pyplot();
 using Distributions
 using LinearAlgebra
 using KernelFunctions
 # Translational invariants kernels

-default(lw=3.0,titlefontsize=28,tickfontsize=18)
+default(; lw=3.0, titlefontsize=28, tickfontsize=18)

-x₀ = 0.0; l = 0.1
+x₀ = 0.0;
+l = 0.1;
 n_grid = 101
-fill(x₀,n_grid,1)
-xrange = reshape(collect(range(-3,3,length=n_grid)),:,1)
+fill(x₀, n_grid, 1)
+xrange = reshape(collect(range(-3, 3; length=n_grid)), :, 1)

-k = transform(SqExponentialKernel(),1.0)
-K1 = kernelmatrix(k,xrange,obsdim=1)
-p = heatmap(K1,yflip=true,colorbar=false,framestyle=:none,background_color=RGBA(0.0,0.0,0.0,0.0))
-savefig(joinpath(@__DIR__,"src","assets","heatmap_sqexp.png"))
+k = transform(SqExponentialKernel(), 1.0)
+K1 = kernelmatrix(k, xrange; obsdim=1)
+p = heatmap(
+    K1;
+    yflip=true,
+    colorbar=false,
+    framestyle=:none,
+    background_color=RGBA(0.0, 0.0, 0.0, 0.0),
+)
+savefig(joinpath(@__DIR__, "src", "assets", "heatmap_sqexp.png"))

+k = @kernel Matern32Kernel FunctionTransform(x -> (sin.(x)) .^ 2)
+K2 = kernelmatrix(k, xrange; obsdim=1)
+p = heatmap(
+    K2;
+    yflip=true,
+    colorbar=false,
+    framestyle=:none,
+    background_color=RGBA(0.0, 0.0, 0.0, 0.0),
+)
+savefig(joinpath(@__DIR__, "src", "assets", "heatmap_matern.png"))

-k = @kernel Matern32Kernel FunctionTransform(x->(sin.(x)).^2)
-K2 = kernelmatrix(k,xrange,obsdim=1)
-p = heatmap(K2,yflip=true,colorbar=false,framestyle=:none,background_color=RGBA(0.0,0.0,0.0,0.0))
-savefig(joinpath(@__DIR__,"src","assets","heatmap_matern.png"))
+k = transform(PolynomialKernel(; c=0.0, d=2.0), LinearTransform(randn(3, 1)))
+K3 = kernelmatrix(k, xrange; obsdim=1)
+p = heatmap(
+    K3;
+    yflip=true,
+    colorbar=false,
+    framestyle=:none,
+    background_color=RGBA(0.0, 0.0, 0.0, 0.0),
+)
+savefig(joinpath(@__DIR__, "src", "assets", "heatmap_poly.png"))

+k =
+    0.5 * SqExponentialKernel() * transform(LinearKernel(), 0.5) +
+    0.4 * (@kernel Matern32Kernel() FunctionTransform(x -> sin.(x)))
+K4 = kernelmatrix(k, xrange; obsdim=1)
+p = heatmap(
+    K4;
+    yflip=true,
+    colorbar=false,
+    framestyle=:none,
+    background_color=RGBA(0.0, 0.0, 0.0, 0.0),
+)
+savefig(joinpath(@__DIR__, "src", "assets", "heatmap_prodsum.png"))

-k = transform(PolynomialKernel(c=0.0,d=2.0), LinearTransform(randn(3,1)))
-K3 = kernelmatrix(k,xrange,obsdim=1)
-p = heatmap(K3,yflip=true,colorbar=false,framestyle=:none,background_color=RGBA(0.0,0.0,0.0,0.0))
-savefig(joinpath(@__DIR__,"src","assets","heatmap_poly.png"))
-
-k = 0.5*SqExponentialKernel()*transform(LinearKernel(),0.5) + 0.4*(@kernel Matern32Kernel() FunctionTransform(x->sin.(x)))
-K4 = kernelmatrix(k,xrange,obsdim=1)
-p = heatmap(K4,yflip=true,colorbar=false,framestyle=:none,background_color=RGBA(0.0,0.0,0.0,0.0))
-savefig(joinpath(@__DIR__,"src","assets","heatmap_prodsum.png"))
-
-plot(heatmap.([K1,K2,K3,K4],yflip=true,colorbar=false)...,layout=(2,2))
-savefig(joinpath(@__DIR__,"src","assets","heatmap_combination.png"))
+plot(heatmap.([K1, K2, K3, K4], yflip=true, colorbar=false)...; layout=(2, 2))
+savefig(joinpath(@__DIR__, "src", "assets", "heatmap_combination.png"))

 ##

-for k in [SqExponentialKernel,ExponentialKernel]
-    K = kernelmatrix(k(),xrange,obsdim=1)
-    v = rand(MvNormal(K+1e-7I))
-    plot(xrange,v,lab="",title="f(x)",framestyle=:none) |> display
-    savefig(joinpath(@__DIR__,"src","assets","GP_sample_$(k).png"))
-    plot(xrange,kernel.(k(),x₀,xrange),lab="",ylims=(0,1.1),title="k(0,x)") |> display
-    savefig(joinpath(@__DIR__,"src","assets","kappa_function_$(k).png"))
+for k in [SqExponentialKernel, ExponentialKernel]
+    K = kernelmatrix(k(), xrange; obsdim=1)
+    v = rand(MvNormal(K + 1e-7I))
+    display(plot(xrange, v; lab="", title="f(x)", framestyle=:none))
+    savefig(joinpath(@__DIR__, "src", "assets", "GP_sample_$(k).png"))
+    display(plot(xrange, kernel.(k(), x₀, xrange); lab="", ylims=(0, 1.1), title="k(0,x)"))
+    savefig(joinpath(@__DIR__, "src", "assets", "kappa_function_$(k).png"))
 end

-for k in [GammaExponentialKernel(1.0,1.5)]
-    sparse =1
-    while !isposdef(kernelmatrix(k,xrange*sparse,obsdim=1) + 1e-5I); sparse += 1; end
-    v = rand(MvNormal(kernelmatrix(k,xrange*sparse,obsdim=1)+1e-7I))
-    plot(xrange,v,lab="",title="f(x)",framestyle=:none) |> display
-    savefig(joinpath(@__DIR__,"src","assets","GP_sample_GammaExponentialKernel.png"))
-    plot(xrange,kernel.(k,x₀,xrange),lab="",ylims=(0,1.1),title="k(0,x)") |> display
-    savefig(joinpath(@__DIR__,"src","assets","kappa_function_GammaExponentialKernel.png"))
+for k in [GammaExponentialKernel(1.0, 1.5)]
+    sparse = 1
+    while !isposdef(kernelmatrix(k, xrange * sparse; obsdim=1) + 1e-5I)
+        sparse += 1
+    end
+    v = rand(MvNormal(kernelmatrix(k, xrange * sparse; obsdim=1) + 1e-7I))
+    display(plot(xrange, v; lab="", title="f(x)", framestyle=:none))
+    savefig(joinpath(@__DIR__, "src", "assets", "GP_sample_GammaExponentialKernel.png"))
+    display(plot(xrange, kernel.(k, x₀, xrange); lab="", ylims=(0, 1.1), title="k(0,x)"))
+    savefig(
+        joinpath(@__DIR__, "src", "assets", "kappa_function_GammaExponentialKernel.png")
+    )
 end

-for k in [MaternKernel,Matern32Kernel,Matern52Kernel]
-    K = kernelmatrix(k(),xrange,obsdim=1)
-    v = rand(MvNormal(K+1e-7I))
-    plot(xrange,v,lab="",title="f(x)",framestyle=:none) |> display
-    savefig(joinpath(@__DIR__,"src","assets","GP_sample_$(k).png"))
-    plot(xrange,kernel.(k(),x₀,xrange),lab="",ylims=(0,1.1),title="k(0,x)") |> display
-    savefig(joinpath(@__DIR__,"src","assets","kappa_function_$(k).png"))
+for k in [MaternKernel, Matern32Kernel, Matern52Kernel]
+    K = kernelmatrix(k(), xrange; obsdim=1)
+    v = rand(MvNormal(K + 1e-7I))
+    display(plot(xrange, v; lab="", title="f(x)", framestyle=:none))
+    savefig(joinpath(@__DIR__, "src", "assets", "GP_sample_$(k).png"))
+    display(plot(xrange, kernel.(k(), x₀, xrange); lab="", ylims=(0, 1.1), title="k(0,x)"))
+    savefig(joinpath(@__DIR__, "src", "assets", "kappa_function_$(k).png"))
 end

-
 for k in [RationalQuadraticKernel]
-    K = kernelmatrix(k(),xrange,obsdim=1)
-    v = rand(MvNormal(K+1e-7I))
-    plot(xrange,v,lab="",title="f(x)",framestyle=:none) |> display
-    savefig(joinpath(@__DIR__,"src","assets","GP_sample_$(k).png"))
-    plot(xrange,kernel.(k(),x₀,xrange),lab="",ylims=(0,1.1),title="k(0,x)") |> display
-    savefig(joinpath(@__DIR__,"src","assets","kappa_function_$(k).png"))
+    K = kernelmatrix(k(), xrange; obsdim=1)
+    v = rand(MvNormal(K + 1e-7I))
+    display(plot(xrange, v; lab="", title="f(x)", framestyle=:none))
+    savefig(joinpath(@__DIR__, "src", "assets", "GP_sample_$(k).png"))
+    display(plot(xrange, kernel.(k(), x₀, xrange); lab="", ylims=(0, 1.1), title="k(0,x)"))
+    savefig(joinpath(@__DIR__, "src", "assets", "kappa_function_$(k).png"))
 end

docs/make.jl (+16, -15)

@@ -8,22 +8,23 @@ DocMeta.setdocmeta!(
     recursive=true,
 )

-makedocs(
-    sitename = "KernelFunctions",
-    format = Documenter.HTML(),
-    modules = [KernelFunctions],
-    pages = ["Home"=>"index.md",
-        "User Guide" => "userguide.md",
-        "Examples"=>"example.md",
-        "Kernel Functions"=>"kernels.md",
-        "Input Transforms"=>"transform.md",
-        "Metrics"=>"metrics.md",
-        "Theory"=>"theory.md",
-        "Custom Kernels"=>"create_kernel.md",
-        "API"=>"api.md"]
+makedocs(;
+    sitename="KernelFunctions",
+    format=Documenter.HTML(),
+    modules=[KernelFunctions],
+    pages=[
+        "Home" => "index.md",
+        "User Guide" => "userguide.md",
+        "Examples" => "example.md",
+        "Kernel Functions" => "kernels.md",
+        "Input Transforms" => "transform.md",
+        "Metrics" => "metrics.md",
+        "Theory" => "theory.md",
+        "Custom Kernels" => "create_kernel.md",
+        "API" => "api.md",
+    ],
 )

 deploydocs(;
-    repo = "github.com/JuliaGaussianProcesses/KernelFunctions.jl.git",
-    push_preview = true,
+    repo="github.com/JuliaGaussianProcesses/KernelFunctions.jl.git", push_preview=true
 )

examples/deepkernellearning.jl (+19, -14)

@@ -10,26 +10,31 @@ Flux.@functor KernelSum
 Flux.@functor Matern32Kernel
 Flux.@functor FunctionTransform

-neuralnet = Chain(Dense(1,3),Dense(3,2))
+neuralnet = Chain(Dense(1, 3), Dense(3, 2))
 k = SqExponentialKernel(FunctionTransform(neuralnet))
-xmin = -3; xmax = 3
-x = range(xmin,xmax,length=100)
-x_test = rand(Uniform(xmin,xmax),200)
-x,y = noisy_function(sinc,x;noise=0.1)
-X = reshape(x,:,1)
+xmin = -3;
+xmax = 3;
+x = range(xmin, xmax; length=100)
+x_test = rand(Uniform(xmin, xmax), 200)
+x, y = noisy_function(sinc, x; noise=0.1)
+X = reshape(x, :, 1)
 λ = [0.1]
-f(x,k,λ) = kernelmatrix(k,X,x,obsdim=1)*inv(kernelmatrix(k,X,obsdim=1)+exp(λ[1])*I)*y
-f(X,k,1.0)
-loss(k,λ) = f(X,k,λ) |> ŷ->sum(y-ŷ)/length(y)+exp(λ[1])*norm(ŷ)
-loss(k,λ)
+function f(x, k, λ)
+    return kernelmatrix(k, X, x; obsdim=1) *
+           inv(kernelmatrix(k, X; obsdim=1) + exp(λ[1]) * I) *
+           y
+end
+f(X, k, 1.0)
+loss(k, λ) = (ŷ -> sum(y - ŷ) / length(y) + exp(λ[1]) * norm(ŷ))(f(X, k, λ))
+loss(k, λ)
 ps = Flux.params(k)
 # push!(ps,λ)
 opt = Flux.Momentum(1.0)
 ##
 for i in 1:10
-    grads = Zygote.gradient(()->loss(k,λ),ps)
-    Flux.Optimise.update!(opt,ps,grads)
-    p = Plots.scatter(x,y,lab="data",title="Loss = $(loss(k,λ))")
-    Plots.plot!(x,f(X,k,λ),lab="Prediction",lw=3.0)
+    grads = Zygote.gradient(() -> loss(k, λ), ps)
+    Flux.Optimise.update!(opt, ps, grads)
+    p = Plots.scatter(x, y; lab="data", title="Loss = $(loss(k,λ))")
+    Plots.plot!(x, f(X, k, λ); lab="Prediction", lw=3.0)
     display(p)
 end
