From 2409aa1c14522b18051ae2aea7be4800d8f589c1 Mon Sep 17 00:00:00 2001
From: Penelope Yong
Date: Tue, 21 Jan 2025 02:35:48 +0000
Subject: [PATCH] Replace Turing.Model -> DynamicPPL.Model

---
 src/mcmc/Inference.jl     | 6 ++++--
 test/mcmc/abstractmcmc.jl | 2 +-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/mcmc/Inference.jl b/src/mcmc/Inference.jl
index f38220201..947d422ef 100644
--- a/src/mcmc/Inference.jl
+++ b/src/mcmc/Inference.jl
@@ -767,13 +767,15 @@ julia> [first(t.θ.x) for t in transitions] # extract samples for `x`
 [-1.704630494695469]
 ```
 """
-function transitions_from_chain(model::Turing.Model, chain::MCMCChains.Chains; kwargs...)
+function transitions_from_chain(
+    model::DynamicPPL.Model, chain::MCMCChains.Chains; kwargs...
+)
     return transitions_from_chain(Random.default_rng(), model, chain; kwargs...)
 end
 
 function transitions_from_chain(
     rng::Random.AbstractRNG,
-    model::Turing.Model,
+    model::DynamicPPL.Model,
     chain::MCMCChains.Chains;
     sampler=DynamicPPL.SampleFromPrior(),
 )
diff --git a/test/mcmc/abstractmcmc.jl b/test/mcmc/abstractmcmc.jl
index 4de63aa94..50334fc51 100644
--- a/test/mcmc/abstractmcmc.jl
+++ b/test/mcmc/abstractmcmc.jl
@@ -18,7 +18,7 @@ using Test: @test, @test_throws, @testset
 using Turing
 using Turing.Inference: AdvancedHMC
 
-function initialize_nuts(model::Turing.Model)
+function initialize_nuts(model::DynamicPPL.Model)
     # Create a log-density function with an implementation of the
     # gradient so we ensure that we're using the same AD backend as in Turing.
     f = LogDensityProblemsAD.ADgradient(DynamicPPL.LogDensityFunction(model))