Commit

CompatHelper: bump compat for LogDensityProblems to 2, (keep existing compat) (#1917)

* CompatHelper: bump compat for LogDensityProblems to 2, (keep existing compat)

* Use LogDensityProblemsAD

* Fix tests for Gibbs (#1920)

* Update ad.jl

* Update ad.jl

Co-authored-by: CompatHelper Julia <[email protected]>
Co-authored-by: David Widmann <[email protected]>
Co-authored-by: Hong Ge <[email protected]>
4 people authored Dec 10, 2022
1 parent 51fddaf commit 61b06f6
Showing 14 changed files with 35 additions and 27 deletions.
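
The substance of the change: with LogDensityProblems 2, the automatic-differentiation wrappers were split out into the separate LogDensityProblemsAD package, so every LogDensityProblems.ADgradient call in Turing becomes a LogDensityProblemsAD.ADgradient call. A minimal before/after sketch, using the same arguments that appear in the diffs below:

    # Before: AD wrappers lived in LogDensityProblems itself.
    ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))

    # After: construction goes through LogDensityProblemsAD; evaluation via
    # LogDensityProblems.logdensity(_and_gradient) is unchanged.
    import LogDensityProblemsAD
    ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
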
6 changes: 4 additions & 2 deletions Project.toml
@@ -1,6 +1,6 @@
name = "Turing"
uuid = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
version = "0.23.0"
version = "0.23.1"

[deps]
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
@@ -20,6 +20,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Libtask = "6f1fad26-d15e-5dc8-ae53-837a1d7b8c9f"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1"
MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
NamedArrays = "86f7a689-2022-50b4-a561-43c23ac3c673"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
@@ -49,8 +50,9 @@ DocStringExtensions = "0.8, 0.9"
DynamicPPL = "0.21"
EllipticalSliceSampling = "0.5, 1"
ForwardDiff = "0.10.3"
LogDensityProblems = "0.12, 1"
Libtask = "0.7, 0.8"
LogDensityProblems = "2"
LogDensityProblemsAD = "1"
MCMCChains = "5"
NamedArrays = "0.9"
Reexport = "0.2, 1"
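
For context, Julia reads [compat] entries as semver caret bounds, so the bumped entries pin the packages as follows (an illustrative sketch, not part of the diff):

    [compat]
    LogDensityProblems = "2"    # any 2.x release: >= 2.0.0, < 3.0.0
    LogDensityProblemsAD = "1"  # any 1.x release: >= 1.0.0, < 2.0.0
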
4 changes: 2 additions & 2 deletions src/contrib/inference/dynamichmc.jl
@@ -44,7 +44,7 @@ function gibbs_state(
varinfo::AbstractVarInfo,
)
# Update the log density function and its cached evaluation.
-ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(varinfo, model, spl, DynamicPPL.DefaultContext()))
+ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(varinfo, model, spl, DynamicPPL.DefaultContext()))
Q = DynamicHMC.evaluate_ℓ(ℓ, varinfo[spl])
return DynamicNUTSState(ℓ, varinfo, Q, state.metric, state.stepsize)
end
@@ -65,7 +65,7 @@ function DynamicPPL.initialstep(
end

# Define log-density function.
-ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
+ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))

# Perform initial step.
results = DynamicHMC.mcmc_keep_warmup(
4 changes: 2 additions & 2 deletions src/contrib/inference/sghmc.jl
@@ -62,7 +62,7 @@ function DynamicPPL.initialstep(

# Compute initial sample and state.
sample = Transition(vi)
-ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
+ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
state = SGHMCState(ℓ, vi, zero(vi[spl]))

return sample, state
@@ -215,7 +215,7 @@ function DynamicPPL.initialstep(

# Create first sample and state.
sample = SGLDTransition(vi, zero(spl.alg.stepsize(0)))
-ℓ = LogDensityProblems.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
+ℓ = LogDensityProblemsAD.ADgradient(Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext()))
state = SGLDState(ℓ, vi, 1)

return sample, state
1 change: 1 addition & 0 deletions src/essential/Essential.jl
@@ -16,6 +16,7 @@ using StatsFuns: logsumexp, softmax

import AdvancedPS
import LogDensityProblems
+import LogDensityProblemsAD

include("container.jl")
include("ad.jl")
20 changes: 10 additions & 10 deletions src/essential/ad.jl
@@ -78,11 +78,11 @@ Find the autodifferentiation backend of the algorithm `alg`.
getADbackend(spl::Sampler) = getADbackend(spl.alg)
getADbackend(::SampleFromPrior) = ADBackend()()

-function LogDensityProblems.ADgradient(ℓ::Turing.LogDensityFunction)
-    return LogDensityProblems.ADgradient(getADbackend(ℓ.sampler), ℓ)
+function LogDensityProblemsAD.ADgradient(ℓ::Turing.LogDensityFunction)
+    return LogDensityProblemsAD.ADgradient(getADbackend(ℓ.sampler), ℓ)
end

-function LogDensityProblems.ADgradient(ad::ForwardDiffAD, ℓ::Turing.LogDensityFunction)
+function LogDensityProblemsAD.ADgradient(ad::ForwardDiffAD, ℓ::Turing.LogDensityFunction)
θ = ℓ.varinfo[ℓ.sampler]
f = Base.Fix1(LogDensityProblems.logdensity, ℓ)

@@ -99,21 +99,21 @@ function LogDensityProblems.ADgradient(ad::ForwardDiffAD, ℓ::Turing.LogDensityFunction)
ForwardDiff.GradientConfig(f, θ, ForwardDiff.Chunk(length(θ), chunk_size), tag)
end

-return LogDensityProblems.ADgradient(Val(:ForwardDiff), ℓ; gradientconfig=config)
+return LogDensityProblemsAD.ADgradient(Val(:ForwardDiff), ℓ; gradientconfig=config)
end

-function LogDensityProblems.ADgradient(::TrackerAD, ℓ::Turing.LogDensityFunction)
-    return LogDensityProblems.ADgradient(Val(:Tracker), ℓ)
+function LogDensityProblemsAD.ADgradient(::TrackerAD, ℓ::Turing.LogDensityFunction)
+    return LogDensityProblemsAD.ADgradient(Val(:Tracker), ℓ)
end

-function LogDensityProblems.ADgradient(::ZygoteAD, ℓ::Turing.LogDensityFunction)
-    return LogDensityProblems.ADgradient(Val(:Zygote), ℓ)
+function LogDensityProblemsAD.ADgradient(::ZygoteAD, ℓ::Turing.LogDensityFunction)
+    return LogDensityProblemsAD.ADgradient(Val(:Zygote), ℓ)
end

for cache in (:true, :false)
@eval begin
-function LogDensityProblems.ADgradient(::ReverseDiffAD{$cache}, ℓ::Turing.LogDensityFunction)
-    return LogDensityProblems.ADgradient(Val(:ReverseDiff), ℓ; compile=Val($cache))
+function LogDensityProblemsAD.ADgradient(::ReverseDiffAD{$cache}, ℓ::Turing.LogDensityFunction)
+    return LogDensityProblemsAD.ADgradient(Val(:ReverseDiff), ℓ; compile=Val($cache))
end
end
end
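
For reference, a self-contained sketch of what these wrappers do under LogDensityProblems 2 (ToyProblem is illustrative, not Turing code): any object implementing the interface can be handed to LogDensityProblemsAD.ADgradient and then queried for gradients.

    import LogDensityProblems, LogDensityProblemsAD
    import ForwardDiff  # load the backend selected via Val(:ForwardDiff)

    # A toy log-density: an isotropic standard normal, up to an additive constant.
    struct ToyProblem
        dim::Int
    end
    LogDensityProblems.logdensity(::ToyProblem, x) = -sum(abs2, x) / 2
    LogDensityProblems.dimension(p::ToyProblem) = p.dim
    LogDensityProblems.capabilities(::Type{ToyProblem}) = LogDensityProblems.LogDensityOrder{0}()

    ℓ = LogDensityProblemsAD.ADgradient(Val(:ForwardDiff), ToyProblem(2))
    LogDensityProblems.logdensity_and_gradient(ℓ, [1.0, 2.0])  # (-2.5, [-1.0, -2.0])
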
1 change: 1 addition & 0 deletions src/inference/Inference.jl
@@ -31,6 +31,7 @@ import BangBang
import ..Essential: getADbackend
import EllipticalSliceSampling
import LogDensityProblems
+import LogDensityProblemsAD
import Random
import MCMCChains
import StatsBase: predict
4 changes: 2 additions & 2 deletions src/inference/hmc.jl
@@ -158,7 +158,7 @@ function DynamicPPL.initialstep(
# Create a Hamiltonian.
metricT = getmetricT(spl.alg)
metric = metricT(length(theta))
-ℓ = LogDensityProblems.ADgradient(
+ℓ = LogDensityProblemsAD.ADgradient(
Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext())
)
logπ = Base.Fix1(LogDensityProblems.logdensity, ℓ)
@@ -264,7 +264,7 @@ end

function get_hamiltonian(model, spl, vi, state, n)
metric = gen_metric(n, spl, state)
-ℓ = LogDensityProblems.ADgradient(
+ℓ = LogDensityProblemsAD.ADgradient(
Turing.LogDensityFunction(vi, model, spl, DynamicPPL.DefaultContext())
)
ℓπ = Base.Fix1(LogDensityProblems.logdensity, ℓ)
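
Note that only wrapper construction moved to LogDensityProblemsAD; evaluation still goes through LogDensityProblems, as the unchanged Base.Fix1 lines above show. As a reminder, Base.Fix1(f, a) is the partially applied function x -> f(a, x):

    # logπ(x) is equivalent to LogDensityProblems.logdensity(ℓ, x)
    logπ = Base.Fix1(LogDensityProblems.logdensity, ℓ)
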
3 changes: 2 additions & 1 deletion src/modes/ModeEstimation.jl
@@ -12,6 +12,7 @@ using DynamicPPL: Model, AbstractContext, VarInfo, VarName,
get_and_set_val!, istrans

import LogDensityProblems
+import LogDensityProblemsAD

export constrained_space,
MAP,
@@ -111,7 +112,7 @@ function (f::OptimLogDensity)(F, G, z)
if G !== nothing
# Calculate negative log joint and its gradient.
# TODO: Make OptimLogDensity already an LogDensityProblems.ADgradient? Allow to specify AD?
-ℓ = LogDensityProblems.ADgradient(f)
+ℓ = LogDensityProblemsAD.ADgradient(f)
neglogp, ∇neglogp = LogDensityProblems.logdensity_and_gradient(ℓ, z)

# Save the gradient to the pre-allocated array.
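
The (F, G, z) calling convention of OptimLogDensity follows the fg! protocol used by Optim.jl: when G is not nothing, the gradient is written into it in place; when F is not nothing, the objective value is returned. A minimal sketch of that protocol with a toy quadratic objective (not Turing code):

    using Optim

    function fg!(F, G, z)
        G !== nothing && copyto!(G, 2 .* z)  # in-place gradient of sum(abs2, z)
        F !== nothing && return sum(abs2, z)
        return nothing
    end

    optimize(Optim.only_fg!(fg!), randn(3), LBFGS())
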
4 changes: 3 additions & 1 deletion test/Project.toml
@@ -12,6 +12,7 @@ FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1"
MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d"
NamedArrays = "86f7a689-2022-50b4-a561-43c23ac3c673"
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
@@ -42,7 +43,8 @@ DynamicHMC = "2.1.6, 3.0"
DynamicPPL = "0.21"
FiniteDifferences = "0.10.8, 0.11, 0.12"
ForwardDiff = "0.10.12 - 0.10.32"
LogDensityProblems = "0.12, 1"
LogDensityProblems = "2"
LogDensityProblemsAD = "1"
MCMCChains = "5"
NamedArrays = "0.9.4"
Optim = "0.22, 1.0"
8 changes: 4 additions & 4 deletions test/essential/ad.jl
@@ -30,14 +30,14 @@
ℓ = Turing.LogDensityFunction(vi, ad_test_f, SampleFromPrior(), DynamicPPL.DefaultContext())
x = map(x->Float64(x), vi[SampleFromPrior()])

-trackerℓ = LogDensityProblems.ADgradient(TrackerAD(), ℓ)
-@test trackerℓ isa LogDensityProblems.TrackerGradientLogDensity
+trackerℓ = LogDensityProblemsAD.ADgradient(TrackerAD(), ℓ)
+@test trackerℓ isa LogDensityProblemsAD.TrackerGradientLogDensity
@test trackerℓ.ℓ === ℓ
∇E1 = LogDensityProblems.logdensity_and_gradient(trackerℓ, x)[2]
@test sort(∇E1) ≈ grad_FWAD atol=1e-9

-zygoteℓ = LogDensityProblems.ADgradient(ZygoteAD(), ℓ)
-@test zygoteℓ isa LogDensityProblems.ZygoteGradientLogDensity
+zygoteℓ = LogDensityProblemsAD.ADgradient(ZygoteAD(), ℓ)
+@test zygoteℓ isa LogDensityProblemsAD.ZygoteGradientLogDensity
@test zygoteℓ.ℓ === ℓ
∇E2 = LogDensityProblems.logdensity_and_gradient(zygoteℓ, x)[2]
@test sort(∇E2) ≈ grad_FWAD atol=1e-9
2 changes: 1 addition & 1 deletion test/inference/Inference.jl
@@ -89,7 +89,7 @@
chn3 = sample(gdemo_default, alg3, 5_000; save_state=true)
check_gdemo(chn3)

-chn3_contd = sample(gdemo_default, alg3, 2_000; resume_from=chn3)
+chn3_contd = sample(gdemo_default, alg3, 5_000; resume_from=chn3)
check_gdemo(chn3_contd)
end
@testset "Contexts" begin
1 change: 1 addition & 0 deletions test/runtests.jl
@@ -42,6 +42,7 @@ using Turing.Essential: TuringDenseMvNormal, TuringDiagMvNormal
using Turing.Variational: TruncatedADAGrad, DecayedADAGrad, AdvancedVI

import LogDensityProblems
+import LogDensityProblemsAD

setprogress!(false)

2 changes: 1 addition & 1 deletion test/skipped/unit_test_helper.jl
@@ -8,7 +8,7 @@ function test_grad(turing_model, grad_f; trans=Dict())
end
d = length(vi.vals)
@testset "Gradient using random inputs" begin
-ℓ = LogDensityProblems.ADgradient(
+ℓ = LogDensityProblemsAD.ADgradient(
TrackerAD(),
Turing.LogDensityFunction(vi, model_f, SampleFromPrior(), DynamicPPL.DefaultContext()),
)
2 changes: 1 addition & 1 deletion test/test_utils/ad_utils.jl
@@ -93,7 +93,7 @@ function test_model_ad(model, f, syms::Vector{Symbol})
# Compare with `logdensity_and_gradient`.
z = vi[SampleFromPrior()]
for chunksize in (0, 1, 10), standardtag in (true, false, 0, 3)
-ℓ = LogDensityProblems.ADgradient(
+ℓ = LogDensityProblemsAD.ADgradient(
ForwardDiffAD{chunksize, standardtag}(),
Turing.LogDensityFunction(vi, model, SampleFromPrior(), DynamicPPL.DefaultContext()),
)

2 comments on commit 61b06f6

@devmotion (Member)


@JuliaRegistrator

Registration pull request created: JuliaRegistries/General/73850

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the GitHub interface, or via:

git tag -a v0.23.1 -m "<description of version>" 61b06f642872522ca14133bb5c86fc603841dbba
git push origin v0.23.1
