-
Notifications
You must be signed in to change notification settings - Fork 231
Closed
Labels
Description
Minimal working example
# Load the PPL, distributions, and RNG utilities in one statement.
using Turing, Distributions, Random

# Explicit RNG handle for reproducible sampling calls.
rng = Random.default_rng()

# Observations with one missing entry — Turing treats the missing
# element as a latent variable to be inferred.
X = [1.0, missing, 2.0, 3.0]
# Simple hierarchical model: a shared mean μX with a standard-normal
# prior, and each entry of X drawn iid from Normal(μX, 1). Missing
# entries of X become latent variables sampled by the inference engine.
@model function linear_regression(X)
    μX ~ Normal(0, 1)
    # Likelihood / prior on each element of X (observed or latent).
    for idx in eachindex(X)
        X[idx] ~ Normal(μX, 1)
    end
end
# Instantiate the model on the partially-missing data.
model = linear_regression(X)

# MCMC works fine: NUTS, 1000 iterations.
chain = sample(rng, model, NUTS(), 1000)

# Variational inference with a mean-field Gaussian family.
q_init = q_meanfield_gaussian(model);
n_iters = 100
# BUG FIX: `vi` returns a 3-tuple `(q_avg, info, state)` — see the
# BoundsError "at index [4]" in the stack trace, where `indexed_iterate`
# fails on a Tuple with exactly three elements (the transformed
# distribution, the per-iteration info vector, and the state NamedTuple).
# Destructuring into four variables (`q_avg, q_last, info, state`) is
# what crashed; unpack three instead.
q_avg, info, state = vi(model, q_init, n_iters)
Hi,
This simple example crashes when using the `q_meanfield_gaussian` VI algorithm, although it runs fine with MCMC. Is there anything wrong with the code?
Full stacktrace:
BoundsError: attempt to access Tuple{Bijectors.MultivariateTransformed{AdvancedVI.MvLocationScale{LinearAlgebra.Diagonal{Float64, Vector{Float64}}, Normal{Float64}, Vector{Float64}}, Bijectors.Stacked{Vector{typeof(identity)}, Vector{UnitRange{Int64}}}}, Vector{@NamedTuple{elbo::Float64, iteration::Int64}}, @NamedTuple{prob::LogDensityFunction{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}, DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{Tuple{DynamicPPL.LogDensityAt{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, Vector{Float64}, Tuple{}}, ForwardDiff.GradientConfig{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2}}}, Tuple{}}, Vector{Float64}}, q::AdvancedVI.MvLocationScale{LinearAlgebra.Diagonal{Float64, Vector{Float64}}, Normal{Float64}, Vector{Float64}}, iteration::Int64, grad_buf::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, opt_st::Optimisers.Leaf{AdvancedVI.DoWG{Float64}, Tuple{Vector{Float64}, Float64, Float64}}, obj_st::AdvancedVI.RepGradELBOState{AdvancedVI.MixedADLogDensityProblem{LogDensityFunction{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}, 
DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{Tuple{DynamicPPL.LogDensityAt{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, Vector{Float64}, Tuple{}}, ForwardDiff.GradientConfig{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2}}}, Tuple{}}, Vector{Float64}}}, DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{Tuple{typeof(AdvancedVI.estimate_repgradelbo_ad_forward), AutoForwardDiff{nothing, Nothing}, Vector{Float64}, Tuple{DifferentiationInterface.Constant{@NamedTuple{rng::TaskLocalRNG, adtype::AutoForwardDiff{nothing, Nothing}, obj::AdvancedVI.RepGradELBO{AdvancedVI.ClosedFormEntropyZeroGradient}, problem::AdvancedVI.MixedADLogDensityProblem{LogDensityFunction{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}, DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{Tuple{DynamicPPL.LogDensityAt{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, Vector{Float64}, Tuple{}}, ForwardDiff.GradientConfig{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2}}}, Tuple{}}, 
Vector{Float64}}}, restructure::AdvancedVI.RestructureMeanField{LinearAlgebra.Diagonal{Float64, Vector{Float64}}, Normal{Float64}, Vector{Float64}}, q_stop::AdvancedVI.MvLocationScale{LinearAlgebra.Diagonal{Float64, Vector{Float64}}, Normal{Float64}, Vector{Float64}}}}}}, ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(AdvancedVI.estimate_repgradelbo_ad_forward), Float64}, Float64, 4, Vector{ForwardDiff.Dual{ForwardDiff.Tag{typeof(AdvancedVI.estimate_repgradelbo_ad_forward), Float64}, Float64, 4}}}, Tuple{Nothing}}}, avg_st::Tuple{Vector{Float64}, Int64}}} at index [4]
Stacktrace:
[1] indexed_iterate(t::Tuple{Bijectors.MultivariateTransformed{AdvancedVI.MvLocationScale{LinearAlgebra.Diagonal{Float64, Vector{Float64}}, Normal{Float64}, Vector{Float64}}, Bijectors.Stacked{Vector{typeof(identity)}, Vector{UnitRange{Int64}}}}, Vector{@NamedTuple{elbo::Float64, iteration::Int64}}, @NamedTuple{prob::LogDensityFunction{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}, DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{Tuple{DynamicPPL.LogDensityAt{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, Vector{Float64}, Tuple{}}, ForwardDiff.GradientConfig{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2}}}, Tuple{}}, Vector{Float64}}, q::AdvancedVI.MvLocationScale{LinearAlgebra.Diagonal{Float64, Vector{Float64}}, Normal{Float64}, Vector{Float64}}, iteration::Int64, grad_buf::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, opt_st::Optimisers.Leaf{AdvancedVI.DoWG{Float64}, Tuple{Vector{Float64}, Float64, Float64}}, obj_st::AdvancedVI.RepGradELBOState{AdvancedVI.MixedADLogDensityProblem{LogDensityFunction{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}, 
DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{Tuple{DynamicPPL.LogDensityAt{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, Vector{Float64}, Tuple{}}, ForwardDiff.GradientConfig{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2}}}, Tuple{}}, Vector{Float64}}}, DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{Tuple{typeof(AdvancedVI.estimate_repgradelbo_ad_forward), AutoForwardDiff{nothing, Nothing}, Vector{Float64}, Tuple{DifferentiationInterface.Constant{@NamedTuple{rng::TaskLocalRNG, adtype::AutoForwardDiff{nothing, Nothing}, obj::AdvancedVI.RepGradELBO{AdvancedVI.ClosedFormEntropyZeroGradient}, problem::AdvancedVI.MixedADLogDensityProblem{LogDensityFunction{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}, DifferentiationInterfaceForwardDiffExt.ForwardDiffGradientPrep{Tuple{DynamicPPL.LogDensityAt{true, DynamicPPL.Model{typeof(linear_regression), (:X,), (), (), Tuple{Vector{Union{Missing, Float64}}}, Tuple{}, DynamicPPL.DefaultContext, false}, typeof(DynamicPPL.getlogjoint_internal), @NamedTuple{μX::DynamicPPL.RangeAndLinked}}, AutoForwardDiff{2, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}}, Vector{Float64}, Tuple{}}, ForwardDiff.GradientConfig{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2}}}, Tuple{}}, 
Vector{Float64}}}, restructure::AdvancedVI.RestructureMeanField{LinearAlgebra.Diagonal{Float64, Vector{Float64}}, Normal{Float64}, Vector{Float64}}, q_stop::AdvancedVI.MvLocationScale{LinearAlgebra.Diagonal{Float64, Vector{Float64}}, Normal{Float64}, Vector{Float64}}}}}}, ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(AdvancedVI.estimate_repgradelbo_ad_forward), Float64}, Float64, 4, Vector{ForwardDiff.Dual{ForwardDiff.Tag{typeof(AdvancedVI.estimate_repgradelbo_ad_forward), Float64}, Float64, 4}}}, Tuple{Nothing}}}, avg_st::Tuple{Vector{Float64}, Int64}}}, i::Int64, state::Int64)
@ Base ./tuple.jl:162
[2] top-level scope
@ REPL[48]:1
[3] eval(m::Module, e::Any)
@ Core ./boot.jl:489
[4] eval
@ ./Base_compiler.jl:146 [inlined]
[5] repleval(m::Module, code::Expr, ::String)
@ VSCodeServer ~/.vscode/extensions/julialang.language-julia-1.158.2/scripts/packages/VSCodeServer/src/repl.jl:231
[6] #evalrepl##2
@ ~/.vscode/extensions/julialang.language-julia-1.158.2/scripts/packages/VSCodeServer/src/repl.jl:194 [inlined]
[7] with_logstate(f::VSCodeServer.var"#evalrepl##2#evalrepl##3"{Module, Expr, REPL.LineEditREPL, REPL.LineEdit.Prompt}, logstate::Base.CoreLogging.LogState)
@ Base.CoreLogging ./logging/logging.jl:540
[8] with_logger
@ ./logging/logging.jl:651 [inlined]
[9] (::VSCodeServer.var"#evalrepl##0#evalrepl##1"{Module, Expr, REPL.LineEditREPL, REPL.LineEdit.Prompt})()
@ VSCodeServer ~/.vscode/extensions/julialang.language-julia-1.158.2/scripts/packages/VSCodeServer/src/repl.jl:195
[10] (::VSCodeServer.var"#start_eval_backend##0#start_eval_backend##1")()
@ VSCodeServer ~/.vscode/extensions/julialang.language-julia-1.158.2/scripts/packages/VSCodeServer/src/eval.jl:34

Julia version info
versioninfo()
1.12