Skip to content

Commit 74869f4

Browse files
yebai, CompatHelper Julia, harisorgn, torfjelde, devmotion
authored
Bump bijectors compat (#2052)
* CompatHelper: bump compat for Bijectors to 0.13, (keep existing compat) * Update Project.toml * Replacement for #2039 (#2040) * Fix testset for external samplers * Update abstractmcmc.jl * Update test/contrib/inference/abstractmcmc.jl Co-authored-by: Tor Erlend Fjelde <[email protected]> * Update test/contrib/inference/abstractmcmc.jl Co-authored-by: Tor Erlend Fjelde <[email protected]> * Update FillArrays compat to 1.4.1 (#2035) * Update FillArrays compat to 1.4.0 * Update test compat * Try to enable ReverseDiff tests * Update Project.toml * Update Project.toml * Bump version * Revert dependencies on FillArrays (#2042) * Update Project.toml * Update Project.toml * Fix redundant definition of `getstats` (#2044) * Fix redundant definition of `getstats` * Update Inference.jl * Revert "Update Inference.jl" This reverts commit e4f51c2. * Bump version --------- Co-authored-by: Hong Ge <[email protected]> * Transfer some test utility function into DynamicPPL (#2049) * Update OptimInterface.jl * Only run optimisation tests in numerical stage. * fix function lookup after moving functions --------- Co-authored-by: Xianda Sun <[email protected]> * Move Optim support to extension (#2051) * Move Optim support to extension * More imports * Update Project.toml --------- Co-authored-by: Hong Ge <[email protected]> --------- Co-authored-by: CompatHelper Julia <[email protected]> Co-authored-by: haris organtzidis <[email protected]> Co-authored-by: Tor Erlend Fjelde <[email protected]> Co-authored-by: David Widmann <[email protected]> Co-authored-by: Xianda Sun <[email protected]> Co-authored-by: Cameron Pfiffer <[email protected]>
1 parent 4ab5939 commit 74869f4

File tree

8 files changed

+97
-117
lines changed

8 files changed

+97
-117
lines changed

Diff for: Project.toml

+12-4
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
name = "Turing"
22
uuid = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
3-
version = "0.26.4"
3+
version = "0.27"
44

55
[deps]
66
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
@@ -16,7 +16,6 @@ DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c"
1616
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
1717
DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
1818
EllipticalSliceSampling = "cad2338a-1db2-11e9-3401-43bc07c9ede2"
19-
FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
2019
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
2120
Libtask = "6f1fad26-d15e-5dc8-ae53-837a1d7b8c9f"
2221
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
@@ -44,20 +43,20 @@ AdvancedMH = "0.6.8, 0.7"
4443
AdvancedPS = "0.4"
4544
AdvancedVI = "0.2"
4645
BangBang = "0.3"
47-
Bijectors = "0.12"
46+
Bijectors = "0.13.2"
4847
DataStructures = "0.18"
4948
Distributions = "0.23.3, 0.24, 0.25"
5049
DistributionsAD = "0.6"
5150
DocStringExtensions = "0.8, 0.9"
5251
DynamicPPL = "0.23"
5352
EllipticalSliceSampling = "0.5, 1"
54-
FillArrays = "=1.0.0"
5553
ForwardDiff = "0.10.3"
5654
Libtask = "0.7, 0.8"
5755
LogDensityProblems = "2"
5856
LogDensityProblemsAD = "1.4"
5957
MCMCChains = "5, 6"
6058
NamedArrays = "0.9"
59+
Optim = "1"
6160
Reexport = "0.2, 1"
6261
Requires = "0.5, 1.0"
6362
SciMLBase = "1.37.1"
@@ -68,3 +67,12 @@ StatsBase = "0.32, 0.33, 0.34"
6867
StatsFuns = "0.8, 0.9, 1"
6968
Tracker = "0.2.3"
7069
julia = "1.7"
70+
71+
[weakdeps]
72+
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
73+
74+
[extensions]
75+
TuringOptimExt = "Optim"
76+
77+
[extras]
78+
Optim = "429524aa-4258-5aef-a3af-852621145aeb"

Diff for: src/modes/OptimInterface.jl renamed to ext/TuringOptimExt.jl

+47-45
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
1-
using Setfield
2-
using DynamicPPL: DefaultContext, LikelihoodContext
3-
using DynamicPPL: DynamicPPL
4-
import .Optim
5-
import .Optim: optimize
6-
import ..ForwardDiff
7-
import NamedArrays
8-
import StatsBase
9-
import Printf
10-
import StatsAPI
11-
1+
module TuringOptimExt
2+
3+
if isdefined(Base, :get_extension)
4+
import Turing
5+
import Turing: Distributions, DynamicPPL, ForwardDiff, NamedArrays, Printf, Setfield, Statistics, StatsAPI, StatsBase
6+
import Optim
7+
else
8+
import ..Turing
9+
import ..Turing: Distributions, DynamicPPL, ForwardDiff, NamedArrays, Printf, Setfield, Statistics, StatsAPI, StatsBase
10+
import ..Optim
11+
end
1212

1313
"""
1414
ModeResult{
@@ -23,7 +23,7 @@ A wrapper struct to store various results from a MAP or MLE estimation.
2323
struct ModeResult{
2424
V<:NamedArrays.NamedArray,
2525
O<:Optim.MultivariateOptimizationResults,
26-
M<:OptimLogDensity
26+
M<:Turing.OptimLogDensity
2727
} <: StatsBase.StatisticalModel
2828
"A vector with the resulting point estimates."
2929
values::V
@@ -57,10 +57,10 @@ function StatsBase.coeftable(m::ModeResult; level::Real=0.95)
5757
estimates = m.values.array[:, 1]
5858
stderrors = StatsBase.stderror(m)
5959
zscore = estimates ./ stderrors
60-
p = map(z -> StatsAPI.pvalue(Normal(), z; tail=:both), zscore)
60+
p = map(z -> StatsAPI.pvalue(Distributions.Normal(), z; tail=:both), zscore)
6161

6262
# Confidence interval (CI)
63-
q = quantile(Normal(), (1 + level) / 2)
63+
q = Statistics.quantile(Distributions.Normal(), (1 + level) / 2)
6464
ci_low = estimates .- q .* stderrors
6565
ci_high = estimates .+ q .* stderrors
6666

@@ -80,7 +80,7 @@ function StatsBase.informationmatrix(m::ModeResult; hessian_function=ForwardDiff
8080
# Hessian is computed with respect to the untransformed parameters.
8181
linked = DynamicPPL.istrans(m.f.varinfo)
8282
if linked
83-
@set! m.f.varinfo = invlink!!(m.f.varinfo, m.f.model)
83+
Setfield.@set! m.f.varinfo = DynamicPPL.invlink!!(m.f.varinfo, m.f.model)
8484
end
8585

8686
# Calculate the Hessian.
@@ -90,7 +90,7 @@ function StatsBase.informationmatrix(m::ModeResult; hessian_function=ForwardDiff
9090

9191
# Link it back if we invlinked it.
9292
if linked
93-
@set! m.f.varinfo = link!!(m.f.varinfo, m.f.model)
93+
Setfield.@set! m.f.varinfo = DynamicPPL.link!!(m.f.varinfo, m.f.model)
9494
end
9595

9696
return NamedArrays.NamedArray(info, (varnames, varnames))
@@ -126,18 +126,18 @@ mle = optimize(model, MLE())
126126
mle = optimize(model, MLE(), NelderMead())
127127
```
128128
"""
129-
function Optim.optimize(model::Model, ::MLE, options::Optim.Options=Optim.Options(); kwargs...)
129+
function Optim.optimize(model::DynamicPPL.Model, ::Turing.MLE, options::Optim.Options=Optim.Options(); kwargs...)
130130
return _mle_optimize(model, options; kwargs...)
131131
end
132-
function Optim.optimize(model::Model, ::MLE, init_vals::AbstractArray, options::Optim.Options=Optim.Options(); kwargs...)
132+
function Optim.optimize(model::DynamicPPL.Model, ::Turing.MLE, init_vals::AbstractArray, options::Optim.Options=Optim.Options(); kwargs...)
133133
return _mle_optimize(model, init_vals, options; kwargs...)
134134
end
135-
function Optim.optimize(model::Model, ::MLE, optimizer::Optim.AbstractOptimizer, options::Optim.Options=Optim.Options(); kwargs...)
135+
function Optim.optimize(model::DynamicPPL.Model, ::Turing.MLE, optimizer::Optim.AbstractOptimizer, options::Optim.Options=Optim.Options(); kwargs...)
136136
return _mle_optimize(model, optimizer, options; kwargs...)
137137
end
138138
function Optim.optimize(
139-
model::Model,
140-
::MLE,
139+
model::DynamicPPL.Model,
140+
::Turing.MLE,
141141
init_vals::AbstractArray,
142142
optimizer::Optim.AbstractOptimizer,
143143
options::Optim.Options=Optim.Options();
@@ -146,9 +146,9 @@ function Optim.optimize(
146146
return _mle_optimize(model, init_vals, optimizer, options; kwargs...)
147147
end
148148

149-
function _mle_optimize(model::Model, args...; kwargs...)
150-
ctx = OptimizationContext(DynamicPPL.LikelihoodContext())
151-
return _optimize(model, OptimLogDensity(model, ctx), args...; kwargs...)
149+
function _mle_optimize(model::DynamicPPL.Model, args...; kwargs...)
150+
ctx = Turing.OptimizationContext(DynamicPPL.LikelihoodContext())
151+
return _optimize(model, Turing.OptimLogDensity(model, ctx), args...; kwargs...)
152152
end
153153

154154
"""
@@ -172,18 +172,18 @@ map_est = optimize(model, MAP(), NelderMead())
172172
```
173173
"""
174174

175-
function Optim.optimize(model::Model, ::MAP, options::Optim.Options=Optim.Options(); kwargs...)
175+
function Optim.optimize(model::DynamicPPL.Model, ::Turing.MAP, options::Optim.Options=Optim.Options(); kwargs...)
176176
return _map_optimize(model, options; kwargs...)
177177
end
178-
function Optim.optimize(model::Model, ::MAP, init_vals::AbstractArray, options::Optim.Options=Optim.Options(); kwargs...)
178+
function Optim.optimize(model::DynamicPPL.Model, ::Turing.MAP, init_vals::AbstractArray, options::Optim.Options=Optim.Options(); kwargs...)
179179
return _map_optimize(model, init_vals, options; kwargs...)
180180
end
181-
function Optim.optimize(model::Model, ::MAP, optimizer::Optim.AbstractOptimizer, options::Optim.Options=Optim.Options(); kwargs...)
181+
function Optim.optimize(model::DynamicPPL.Model, ::Turing.MAP, optimizer::Optim.AbstractOptimizer, options::Optim.Options=Optim.Options(); kwargs...)
182182
return _map_optimize(model, optimizer, options; kwargs...)
183183
end
184184
function Optim.optimize(
185-
model::Model,
186-
::MAP,
185+
model::DynamicPPL.Model,
186+
::Turing.MAP,
187187
init_vals::AbstractArray,
188188
optimizer::Optim.AbstractOptimizer,
189189
options::Optim.Options=Optim.Options();
@@ -192,9 +192,9 @@ function Optim.optimize(
192192
return _map_optimize(model, init_vals, optimizer, options; kwargs...)
193193
end
194194

195-
function _map_optimize(model::Model, args...; kwargs...)
196-
ctx = OptimizationContext(DynamicPPL.DefaultContext())
197-
return _optimize(model, OptimLogDensity(model, ctx), args...; kwargs...)
195+
function _map_optimize(model::DynamicPPL.Model, args...; kwargs...)
196+
ctx = Turing.OptimizationContext(DynamicPPL.DefaultContext())
197+
return _optimize(model, Turing.OptimLogDensity(model, ctx), args...; kwargs...)
198198
end
199199

200200
"""
@@ -203,8 +203,8 @@ end
203203
Estimate a mode, i.e., compute a MLE or MAP estimate.
204204
"""
205205
function _optimize(
206-
model::Model,
207-
f::OptimLogDensity,
206+
model::DynamicPPL.Model,
207+
f::Turing.OptimLogDensity,
208208
optimizer::Optim.AbstractOptimizer=Optim.LBFGS(),
209209
args...;
210210
kwargs...
@@ -213,8 +213,8 @@ function _optimize(
213213
end
214214

215215
function _optimize(
216-
model::Model,
217-
f::OptimLogDensity,
216+
model::DynamicPPL.Model,
217+
f::Turing.OptimLogDensity,
218218
options::Optim.Options=Optim.Options(),
219219
args...;
220220
kwargs...
@@ -223,8 +223,8 @@ function _optimize(
223223
end
224224

225225
function _optimize(
226-
model::Model,
227-
f::OptimLogDensity,
226+
model::DynamicPPL.Model,
227+
f::Turing.OptimLogDensity,
228228
init_vals::AbstractArray=DynamicPPL.getparams(f),
229229
options::Optim.Options=Optim.Options(),
230230
args...;
@@ -234,8 +234,8 @@ function _optimize(
234234
end
235235

236236
function _optimize(
237-
model::Model,
238-
f::OptimLogDensity,
237+
model::DynamicPPL.Model,
238+
f::Turing.OptimLogDensity,
239239
init_vals::AbstractArray=DynamicPPL.getparams(f),
240240
optimizer::Optim.AbstractOptimizer=Optim.LBFGS(),
241241
options::Optim.Options=Optim.Options(),
@@ -244,8 +244,8 @@ function _optimize(
244244
)
245245
# Convert the initial values, since it is assumed that users provide them
246246
# in the constrained space.
247-
@set! f.varinfo = DynamicPPL.unflatten(f.varinfo, init_vals)
248-
@set! f.varinfo = DynamicPPL.link!!(f.varinfo, model)
247+
Setfield.@set! f.varinfo = DynamicPPL.unflatten(f.varinfo, init_vals)
248+
Setfield.@set! f.varinfo = DynamicPPL.link!!(f.varinfo, model)
249249
init_vals = DynamicPPL.getparams(f)
250250

251251
# Optimize!
@@ -258,10 +258,10 @@ function _optimize(
258258

259259
# Get the VarInfo at the MLE/MAP point, and run the model to ensure
260260
# correct dimensionality.
261-
@set! f.varinfo = DynamicPPL.unflatten(f.varinfo, M.minimizer)
262-
@set! f.varinfo = invlink!!(f.varinfo, model)
261+
Setfield.@set! f.varinfo = DynamicPPL.unflatten(f.varinfo, M.minimizer)
262+
Setfield.@set! f.varinfo = DynamicPPL.invlink!!(f.varinfo, model)
263263
vals = DynamicPPL.getparams(f)
264-
@set! f.varinfo = link!!(f.varinfo, model)
264+
Setfield.@set! f.varinfo = DynamicPPL.link!!(f.varinfo, model)
265265

266266
# Make one transition to get the parameter names.
267267
ts = [Turing.Inference.Transition(
@@ -275,3 +275,5 @@ function _optimize(
275275

276276
return ModeResult(vmat, M, -M.minimum, f)
277277
end
278+
279+
end # module

Diff for: src/Turing.jl

+24-17
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,12 @@ import AdvancedVI
1111
using DynamicPPL: DynamicPPL, LogDensityFunction
1212
import DynamicPPL: getspace, NoDist, NamedDist
1313
import LogDensityProblems
14+
import NamedArrays
15+
import Setfield
16+
import StatsAPI
17+
import StatsBase
18+
19+
import Printf
1420
import Random
1521

1622
const PROGRESS = Ref(true)
@@ -48,26 +54,9 @@ using .Inference
4854
include("variational/VariationalInference.jl")
4955
using .Variational
5056

51-
@init @require DynamicHMC="bbc10e6e-7c05-544b-b16e-64fede858acb" begin
52-
@eval Inference begin
53-
import ..DynamicHMC
54-
55-
if isdefined(DynamicHMC, :mcmc_with_warmup)
56-
include("contrib/inference/dynamichmc.jl")
57-
else
58-
error("Please update DynamicHMC, v1.x is no longer supported")
59-
end
60-
end
61-
end
62-
6357
include("modes/ModeEstimation.jl")
6458
using .ModeEstimation
6559

66-
@init @require Optim="429524aa-4258-5aef-a3af-852621145aeb" @eval begin
67-
include("modes/OptimInterface.jl")
68-
export optimize
69-
end
70-
7160
###########
7261
# Exports #
7362
###########
@@ -146,4 +135,22 @@ export @model, # modelling
146135
optim_objective,
147136
optim_function,
148137
optim_problem
138+
139+
function __init__()
140+
@static if !isdefined(Base, :get_extension)
141+
@require Optim="429524aa-4258-5aef-a3af-852621145aeb" include("../ext/TuringOptimExt.jl")
142+
end
143+
@require DynamicHMC="bbc10e6e-7c05-544b-b16e-64fede858acb" begin
144+
@eval Inference begin
145+
import ..DynamicHMC
146+
147+
if isdefined(DynamicHMC, :mcmc_with_warmup)
148+
include("contrib/inference/dynamichmc.jl")
149+
else
150+
error("Please update DynamicHMC, v1.x is no longer supported")
151+
end
152+
end
153+
end
154+
end
155+
149156
end

Diff for: src/contrib/inference/abstractmcmc.jl

-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ getparams(transition::AdvancedHMC.Transition) = transition.z.θ
1919
getstats(transition::AdvancedHMC.Transition) = transition.stat
2020

2121
getparams(transition::AdvancedMH.Transition) = transition.params
22-
getstats(transition) = NamedTuple()
2322

2423
getvarinfo(f::DynamicPPL.LogDensityFunction) = f.varinfo
2524
getvarinfo(f::LogDensityProblemsAD.ADGradientWrapper) = getvarinfo(parent(f))

Diff for: test/Project.toml

+1-3
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
88
DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c"
99
DynamicHMC = "bbc10e6e-7c05-544b-b16e-64fede858acb"
1010
DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
11-
FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
1211
FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
1312
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
1413
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
@@ -42,14 +41,13 @@ Distributions = "0.25"
4241
DistributionsAD = "0.6.3"
4342
DynamicHMC = "2.1.6, 3.0"
4443
DynamicPPL = "0.23"
45-
FillArrays = "=1.0.0"
4644
FiniteDifferences = "0.10.8, 0.11, 0.12"
4745
ForwardDiff = "0.10.12 - 0.10.32, 0.10"
4846
LogDensityProblems = "2"
4947
LogDensityProblemsAD = "1.4"
5048
MCMCChains = "5, 6"
5149
NamedArrays = "0.9.4"
52-
Optim = "0.22, 1.0"
50+
Optim = "1"
5351
Optimization = "3.5"
5452
OptimizationOptimJL = "0.1"
5553
PDMats = "0.10, 0.11"

Diff for: test/contrib/inference/abstractmcmc.jl

+6-4
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ function initialize_mh(model)
4141
end
4242

4343
@testset "External samplers" begin
44-
@testset "AdvancedHMC.jl" begin
44+
@turing_testset "AdvancedHMC.jl" begin
4545
for model in DynamicPPL.TestUtils.DEMO_MODELS
4646
# Need some functionality to initialize the sampler.
4747
# TODO: Remove this once the constructors in the respective packages become "lazy".
@@ -52,12 +52,13 @@ end
5252
5_000;
5353
nadapts=1_000,
5454
discard_initial=1_000,
55-
rtol=0.2
55+
rtol=0.2,
56+
sampler_name="AdvancedHMC"
5657
)
5758
end
5859
end
5960

60-
@testset "AdvancedMH.jl" begin
61+
@turing_testset "AdvancedMH.jl" begin
6162
for model in DynamicPPL.TestUtils.DEMO_MODELS
6263
# Need some functionality to initialize the sampler.
6364
# TODO: Remove this once the constructors in the respective packages become "lazy".
@@ -68,7 +69,8 @@ end
6869
10_000;
6970
discard_initial=1_000,
7071
thinning=10,
71-
rtol=0.2
72+
rtol=0.2,
73+
sampler_name="AdvancedMH"
7274
)
7375
end
7476
end

0 commit comments

Comments
 (0)