
Commit d8beaf0

Authored by github-actions[bot], CompatHelper Julia, yebai, harisorgn, and torfjelde
CompatHelper: bump compat for Bijectors to 0.13, (keep existing compat) (#2018)
* CompatHelper: bump compat for Bijectors to 0.13, (keep existing compat)
* Update Project.toml
* Replacement for #2039 (#2040)
  * Fix testset for external samplers
  * Update abstractmcmc.jl
  * Update test/contrib/inference/abstractmcmc.jl (Co-authored-by: Tor Erlend Fjelde <[email protected]>)
  * Update test/contrib/inference/abstractmcmc.jl (Co-authored-by: Tor Erlend Fjelde <[email protected]>)
  * Transfer some test utility functions into DynamicPPL (#2049)
  * Update OptimInterface.jl
  * Only run optimisation tests in the numerical stage.
  * Fix function lookup after moving functions
  (Co-authored-by: Xianda Sun <[email protected]>)
* Remove Tracker tests.
* Update Project.toml
* Update Project.toml
* Update Project.toml

Co-authored-by: CompatHelper Julia <[email protected]>
Co-authored-by: Hong Ge <[email protected]>
Co-authored-by: haris organtzidis <[email protected]>
Co-authored-by: Tor Erlend Fjelde <[email protected]>
Co-authored-by: Xianda Sun <[email protected]>
Co-authored-by: Cameron Pfiffer <[email protected]>
Parent commit: 1b67694

File tree: 4 files changed, +10 / -49 lines


Project.toml
Lines changed: 1 addition & 1 deletion

@@ -51,7 +51,7 @@ AdvancedMH = "0.6.8, 0.7"
 AdvancedPS = "0.4"
 AdvancedVI = "0.2"
 BangBang = "0.3"
-Bijectors = "0.12"
+Bijectors = "0.13.6"
 DataStructures = "0.18"
 Distributions = "0.23.3, 0.24, 0.25"
 DistributionsAD = "0.6"

test/contrib/inference/abstractmcmc.jl
Lines changed: 6 additions & 4 deletions

@@ -41,7 +41,7 @@ function initialize_mh(model)
 end
 
 @testset "External samplers" begin
-    @testset "AdvancedHMC.jl" begin
+    @turing_testset "AdvancedHMC.jl" begin
         for model in DynamicPPL.TestUtils.DEMO_MODELS
             # Need some functionality to initialize the sampler.
             # TODO: Remove this once the constructors in the respective packages become "lazy".
@@ -52,12 +52,13 @@ end
                 5_000;
                 n_adapts=1_000,
                 discard_initial=1_000,
-                rtol=0.2
+                rtol=0.2,
+                sampler_name="AdvancedHMC"
             )
         end
     end
 
-    @testset "AdvancedMH.jl" begin
+    @turing_testset "AdvancedMH.jl" begin
         for model in DynamicPPL.TestUtils.DEMO_MODELS
             # Need some functionality to initialize the sampler.
             # TODO: Remove this once the constructors in the respective packages become "lazy".
@@ -68,7 +69,8 @@ end
                 10_000;
                 discard_initial=1_000,
                 thinning=10,
-                rtol=0.2
+                rtol=0.2,
+                sampler_name="AdvancedMH"
            )
        end
    end
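For orientation, here is a minimal sketch of what the updated AdvancedMH testset looks like after this change. The sample count and keyword arguments (including the new `sampler_name`) are taken from the hunks above; `test_external_sampler` is a hypothetical stand-in for the helper whose name sits outside the visible diff, and `initialize_mh` is the initializer shown in the first hunk's context.

```julia
# Sketch only: `@turing_testset` comes from Turing's test utilities, and
# `test_external_sampler` is a hypothetical name for the helper called here.
@turing_testset "AdvancedMH.jl" begin
    for model in DynamicPPL.TestUtils.DEMO_MODELS
        # Need some functionality to initialize the sampler (see the diff context).
        sampler = initialize_mh(model)
        test_external_sampler(
            model,
            sampler,
            10_000;                      # number of samples
            discard_initial=1_000,       # burn-in
            thinning=10,
            rtol=0.2,                    # numerical tolerance for the checks
            sampler_name="AdvancedMH",   # new keyword added by this commit
        )
    end
end
```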

test/essential/ad.jl
Lines changed: 0 additions & 6 deletions

@@ -84,8 +84,6 @@
     @model function dir()
         theta ~ Dirichlet(1 ./ fill(4, 4))
     end
-    Turing.setadbackend(:tracker)
-    sample(dir(), HMC(0.01, 1), 1000);
     Turing.setadbackend(:zygote)
     sample(dir(), HMC(0.01, 1), 1000)
     Turing.setadbackend(:reversediff)
@@ -99,8 +97,6 @@
     @model function wishart()
         theta ~ Wishart(4, Matrix{Float64}(I, 4, 4))
     end
-    Turing.setadbackend(:tracker)
-    sample(wishart(), HMC(0.01, 1), 1000);
     Turing.setadbackend(:reversediff)
     sample(wishart(), HMC(0.01, 1), 1000);
     Turing.setadbackend(:zygote)
@@ -109,8 +105,6 @@
     @model function invwishart()
         theta ~ InverseWishart(4, Matrix{Float64}(I, 4, 4))
     end
-    Turing.setadbackend(:tracker)
-    sample(invwishart(), HMC(0.01, 1), 1000);
     Turing.setadbackend(:reversediff)
     sample(invwishart(), HMC(0.01, 1), 1000);
     Turing.setadbackend(:zygote)
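After this change the Dirichlet, Wishart, and InverseWishart HMC smoke tests no longer touch the Tracker backend. A condensed sketch of the remaining pattern for the Dirichlet case, assembled from the unchanged lines in the hunk above (the trailing `sample` call after switching to `:reversediff` is assumed from the pattern rather than shown in the diff):

```julia
using Turing

@model function dir()
    theta ~ Dirichlet(1 ./ fill(4, 4))
end

# Tracker is no longer exercised; the remaining AD backends are cycled like this.
Turing.setadbackend(:zygote)
sample(dir(), HMC(0.01, 1), 1000)

Turing.setadbackend(:reversediff)
sample(dir(), HMC(0.01, 1), 1000)  # assumed continuation of the pattern
```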

test/modes/OptimInterface.jl
Lines changed: 3 additions & 38 deletions

@@ -1,38 +1,3 @@
-# TODO: Remove these once the equivalent is present in `DynamicPPL.TestUtils.
-function likelihood_optima(::DynamicPPL.TestUtils.UnivariateAssumeDemoModels)
-    return (s=1/16, m=7/4)
-end
-function posterior_optima(::DynamicPPL.TestUtils.UnivariateAssumeDemoModels)
-    # TODO: Figure out exact for `s`.
-    return (s=0.907407, m=7/6)
-end
-
-function likelihood_optima(model::DynamicPPL.TestUtils.MultivariateAssumeDemoModels)
-    # Get some containers to fill.
-    vals = Random.rand(model)
-
-    # NOTE: These are "as close to zero as we can get".
-    vals.s[1] = 1e-32
-    vals.s[2] = 1e-32
-
-    vals.m[1] = 1.5
-    vals.m[2] = 2.0
-
-    return vals
-end
-function posterior_optima(model::DynamicPPL.TestUtils.MultivariateAssumeDemoModels)
-    # Get some containers to fill.
-    vals = Random.rand(model)
-
-    # TODO: Figure out exact for `s[1]`.
-    vals.s[1] = 0.890625
-    vals.s[2] = 1
-    vals.m[1] = 3/4
-    vals.m[2] = 1
-
-    return vals
-end
-
 # Used for testing how well it works with nested contexts.
 struct OverrideContext{C,T1,T2} <: DynamicPPL.AbstractContext
     context::C
@@ -57,7 +22,7 @@ function DynamicPPL.tilde_observe(context::OverrideContext, right, left, vi)
     return context.loglikelihood_weight, vi
 end
 
-@testset "OptimInterface.jl" begin
+@numerical_testset "OptimInterface.jl" begin
     @testset "MLE" begin
         Random.seed!(222)
         true_value = [0.0625, 1.75]
@@ -157,7 +122,7 @@ end
     # FIXME: Some models doesn't work for Tracker and ReverseDiff.
     if Turing.Essential.ADBACKEND[] === :forwarddiff
         @testset "MAP for $(model.f)" for model in DynamicPPL.TestUtils.DEMO_MODELS
-            result_true = posterior_optima(model)
+            result_true = DynamicPPL.TestUtils.posterior_optima(model)
 
             @testset "$(nameof(typeof(optimizer)))" for optimizer in [LBFGS(), NelderMead()]
                 result = optimize(model, MAP(), optimizer)
@@ -188,7 +153,7 @@ end
        DynamicPPL.TestUtils.demo_dot_assume_matrix_dot_observe_matrix,
    ]
    @testset "MLE for $(model.f)" for model in DynamicPPL.TestUtils.DEMO_MODELS
-        result_true = likelihood_optima(model)
+        result_true = DynamicPPL.TestUtils.likelihood_optima(model)
 
        # `NelderMead` seems to struggle with convergence here, so we exclude it.
        @testset "$(nameof(typeof(optimizer)))" for optimizer in [LBFGS(),]
