From c755471cd277792a5dd47fcb2c1dcd7eb4fad0a1 Mon Sep 17 00:00:00 2001
From: Hong Ge <3279477+yebai@users.noreply.github.com>
Date: Thu, 8 Aug 2024 17:55:22 +0100
Subject: [PATCH 1/5] Update DynamicPPLReverseDiffExt.jl

---
 ext/DynamicPPLReverseDiffExt.jl | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ext/DynamicPPLReverseDiffExt.jl b/ext/DynamicPPLReverseDiffExt.jl
index b2b378d45..4700c5266 100644
--- a/ext/DynamicPPLReverseDiffExt.jl
+++ b/ext/DynamicPPLReverseDiffExt.jl
@@ -9,12 +9,12 @@ else
 end
 
 function LogDensityProblemsAD.ADgradient(
-    ad::ADTypes.AutoReverseDiff, ℓ::DynamicPPL.LogDensityFunction
-)
+    ad::ADTypes.AutoReverseDiff{Tcompile}, ℓ::DynamicPPL.LogDensityFunction
+) where Tcompile
     return LogDensityProblemsAD.ADgradient(
         Val(:ReverseDiff),
         ℓ;
-        compile=Val(ad.compile),
+        compile=Val(Tcompile)
         # `getparams` can return `Vector{Real}`, in which case, `ReverseDiff` will initialize the gradients to Integer 0
         # because at https://github.com/JuliaDiff/ReverseDiff.jl/blob/c982cde5494fc166965a9d04691f390d9e3073fd/src/tracked.jl#L473
         # `zero(D)` will return 0 when D is Real.

From 87bb7cad03d03a8127f10bcbd8b3c8a121408aa6 Mon Sep 17 00:00:00 2001
From: Hong Ge <3279477+yebai@users.noreply.github.com>
Date: Thu, 8 Aug 2024 17:56:09 +0100
Subject: [PATCH 2/5] Update Project.toml

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index 7ee7d2f97..f21aa442f 100644
--- a/Project.toml
+++ b/Project.toml
@@ -41,7 +41,7 @@ DynamicPPLReverseDiffExt = ["ReverseDiff"]
 DynamicPPLZygoteRulesExt = ["ZygoteRules"]
 
 [compat]
-ADTypes = "0.2, 1"
+ADTypes = "1"
 AbstractMCMC = "5"
 AbstractPPL = "0.8.4"
 Accessors = "0.1"

From 24cebf39b4705e813ba27efe87e824f62012b04a Mon Sep 17 00:00:00 2001
From: Hong Ge <3279477+yebai@users.noreply.github.com>
Date: Thu, 8 Aug 2024 17:58:11 +0100
Subject: [PATCH 3/5] Update ext/DynamicPPLReverseDiffExt.jl

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 ext/DynamicPPLReverseDiffExt.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ext/DynamicPPLReverseDiffExt.jl b/ext/DynamicPPLReverseDiffExt.jl
index 4700c5266..1d33a0368 100644
--- a/ext/DynamicPPLReverseDiffExt.jl
+++ b/ext/DynamicPPLReverseDiffExt.jl
@@ -10,7 +10,7 @@ end
 
 function LogDensityProblemsAD.ADgradient(
     ad::ADTypes.AutoReverseDiff{Tcompile}, ℓ::DynamicPPL.LogDensityFunction
-) where Tcompile
+) where {Tcompile}
     return LogDensityProblemsAD.ADgradient(
         Val(:ReverseDiff),
         ℓ;

From 397367f8623600e6138b24f9d9bb7cc66ffb18be Mon Sep 17 00:00:00 2001
From: Hong Ge <3279477+yebai@users.noreply.github.com>
Date: Thu, 8 Aug 2024 18:22:49 +0100
Subject: [PATCH 4/5] Update DynamicPPLReverseDiffExt.jl

---
 ext/DynamicPPLReverseDiffExt.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ext/DynamicPPLReverseDiffExt.jl b/ext/DynamicPPLReverseDiffExt.jl
index 1d33a0368..3fd174ed1 100644
--- a/ext/DynamicPPLReverseDiffExt.jl
+++ b/ext/DynamicPPLReverseDiffExt.jl
@@ -14,7 +14,7 @@ function LogDensityProblemsAD.ADgradient(
     return LogDensityProblemsAD.ADgradient(
         Val(:ReverseDiff),
         ℓ;
-        compile=Val(Tcompile)
+        compile=Val(Tcompile),
         # `getparams` can return `Vector{Real}`, in which case, `ReverseDiff` will initialize the gradients to Integer 0
         # because at https://github.com/JuliaDiff/ReverseDiff.jl/blob/c982cde5494fc166965a9d04691f390d9e3073fd/src/tracked.jl#L473
         # `zero(D)` will return 0 when D is Real.
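Patches 1, 3, and 4 switch the extension from reading a runtime `ad.compile` field to dispatching on the `compile` type parameter of `ADTypes.AutoReverseDiff`, which ADTypes 1.x carries in the type itself; the `ADTypes = "1"` compat bump in patch 2 reflects that. A minimal sketch of the pattern (a standalone illustration, not part of the patches; `extract_compile` is a hypothetical helper mirroring the patched method):

```julia
using ADTypes  # assumes ADTypes 1.x, where the flag is a type parameter: AutoReverseDiff{compile}

# Hypothetical helper showing the dispatch pattern: the compile flag is
# recovered from the *type*, so Val(Tcompile) is statically inferable,
# unlike Val(ad.compile) built from a runtime field.
extract_compile(::ADTypes.AutoReverseDiff{Tcompile}) where {Tcompile} = Val(Tcompile)

ad = AutoReverseDiff(; compile=true)  # ad isa AutoReverseDiff{true}
extract_compile(ad)                   # returns Val{true}()
```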
From 01df7324914a7c79ceadb00a0803fccbfe3e701c Mon Sep 17 00:00:00 2001
From: Hong Ge <3279477+yebai@users.noreply.github.com>
Date: Thu, 8 Aug 2024 21:03:22 +0100
Subject: [PATCH 5/5] Update Project.toml

---
 Project.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Project.toml b/Project.toml
index f21aa442f..e0da1b0c0 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,6 @@
 name = "DynamicPPL"
 uuid = "366bfd00-2699-11ea-058f-f148b4cae6d8"
-version = "0.28.2"
+version = "0.28.3"
 
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
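Taken together, the series means a DynamicPPL log-density can again request a compiled ReverseDiff tape under ADTypes 1.x. A minimal usage sketch under assumed versions (DynamicPPL 0.28.3 plus the packages imported below; `demo` is a made-up single-parameter model, not from the patches):

```julia
using ADTypes, Distributions, DynamicPPL, LogDensityProblems, LogDensityProblemsAD, ReverseDiff

@model function demo()
    x ~ Normal()  # single standard-normal parameter
end

# Wrap the model as a log-density, then attach a ReverseDiff gradient with a
# compiled tape; the compile flag now flows through as Val(Tcompile).
ℓ = DynamicPPL.LogDensityFunction(demo())
∇ℓ = LogDensityProblemsAD.ADgradient(AutoReverseDiff(; compile=true), ℓ)

LogDensityProblems.logdensity_and_gradient(∇ℓ, [0.5])  # (log-density, gradient) at x = 0.5
```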