Avoid missing_rvs warning when using RandomWalk
ricardoV94 committed Jan 20, 2023
1 parent 6007e84 commit 22b4446
Showing 2 changed files with 45 additions and 12 deletions.
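
For context, a minimal sketch (not part of the commit) of the usage this change targets: a RandomWalk built from explicit init and innovation distributions, whose logp could previously trigger a warning about random variables with no value variables. Names follow the pymc API exercised in the diff below; the snippet is illustrative, not taken from the repository.

    import numpy as np
    import pymc as pm
    from pymc.distributions.timeseries import RandomWalk

    with pm.Model() as model:
        init_dist = pm.Normal.dist(mu=0.0, sigma=100.0)
        innovation_dist = pm.Normal.dist(mu=0.0, sigma=1.0)
        # Before this commit, evaluating the model logp could warn that init_dist and
        # innovation_dist have no value variables; they are now wrapped with
        # ignore_logprob up front and reconsidered only inside random_walk_logp.
        rw = RandomWalk(
            "rw", init_dist=init_dist, innovation_dist=innovation_dist, steps=10
        )

    logp_fn = model.compile_logp()
    print(logp_fn({"rw": np.zeros(11)}))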
pymc/distributions/timeseries.py — 19 changes: 15 additions & 4 deletions
@@ -21,7 +21,7 @@
 import pytensor
 import pytensor.tensor as at
 
-from pytensor.graph.basic import Node
+from pytensor.graph.basic import Node, ancestors
 from pytensor.graph.replace import clone_replace
 from pytensor.tensor import TensorVariable
 from pytensor.tensor.random.op import RandomVariable
@@ -33,7 +33,7 @@
     _moment,
     moment,
 )
-from pymc.distributions.logprob import ignore_logprob, logp
+from pymc.distributions.logprob import ignore_logprob, logp, reconsider_logprob
 from pymc.distributions.multivariate import MvNormal, MvStudentT
 from pymc.distributions.shape_utils import (
     _change_dist_size,
@@ -106,6 +106,15 @@ def dist(cls, init_dist, innovation_dist, steps=None, **kwargs) -> at.TensorVari
                 "init_dist and innovation_dist must have the same support dimensionality"
             )
 
+        # We need to check this, because we clone the variables when we ignore their logprob next
+        if init_dist in ancestors([innovation_dist]) or innovation_dist in ancestors([init_dist]):
+            raise ValueError("init_dist and innovation_dist must be completely independent")
+
+        # PyMC should not be concerned that these variables don't have values, as they will be
+        # accounted for in the logp of RandomWalk
+        init_dist = ignore_logprob(init_dist)
+        innovation_dist = ignore_logprob(innovation_dist)
+
         steps = cls.get_steps(
             innovation_dist=innovation_dist,
             steps=steps,
@@ -225,12 +234,14 @@ def random_walk_moment(op, rv, init_dist, innovation_dist, steps):
 
 
 @_logprob.register(RandomWalkRV)
-def random_walk_logp(op, values, *inputs, **kwargs):
+def random_walk_logp(op, values, init_dist, innovation_dist, steps, **kwargs):
     # Although we can derive the logprob of random walks, it does not collapse
     # what we consider the core dimension of steps. We do it manually here.
     (value,) = values
     # Recreate RV and obtain inner graph
-    rv_node = op.make_node(*inputs)
+    rv_node = op.make_node(
+        reconsider_logprob(init_dist), reconsider_logprob(innovation_dist), steps
+    )
     rv = clone_replace(
         op.inner_outputs, replace={u: v for u, v in zip(op.inner_inputs, rv_node.inputs)}
     )[op.default_output]
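
To illustrate the new guard in RandomWalk.dist, a sketch that mirrors the test added in the second file (not code from the commit itself): an innovation_dist built on top of init_dist is rejected before the two variables are cloned by ignore_logprob.

    import pymc as pm
    from pymc.distributions.timeseries import RandomWalk

    init_dist = pm.Normal.dist()
    innovation_dist = pm.Normal.dist(init_dist)  # innovation depends on init

    try:
        RandomWalk.dist(init_dist=init_dist, innovation_dist=innovation_dist)
    except ValueError as err:
        print(err)  # "init_dist and innovation_dist must be completely independent"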
pymc/tests/distributions/test_timeseries.py — 38 changes: 30 additions & 8 deletions
@@ -47,6 +47,10 @@
 from pymc.tests.distributions.util import assert_moment_is_expected
 from pymc.tests.helpers import select_by_precision
 
+# Turn all warnings into errors for this module
+# Ignoring NumPy deprecation warning tracked in https://github.com/pymc-devs/pytensor/issues/146
+pytestmark = pytest.mark.filterwarnings("error", "ignore: NumPy will stop allowing conversion")
+
 
 class TestRandomWalk:
     def test_dists_types(self):
@@ -92,6 +96,14 @@ def test_dists_not_registered_check(self):
         ):
             RandomWalk("rw", init_dist=init_dist, innovation_dist=innovation, steps=5)
 
+    def test_dists_independent_check(self):
+        init_dist = Normal.dist()
+        innovation_dist = Normal.dist(init_dist)
+        with pytest.raises(
+            ValueError, match="init_dist and innovation_dist must be completely independent"
+        ):
+            RandomWalk.dist(init_dist=init_dist, innovation_dist=innovation_dist)
+
     @pytest.mark.parametrize(
         "init_dist, innovation_dist, steps, size, shape, "
         "init_dist_size, innovation_dist_size, rw_shape",
@@ -423,15 +435,18 @@ def test_mvgaussian_with_chol_cov_rv(self, param):
                 "chol_cov", n=3, eta=2, sd_dist=sd_dist, compute_corr=True
             )
             # pylint: enable=unpacking-non-sequence
-            if param == "chol":
-                mv = MvGaussianRandomWalk("mv", mu, chol=chol, shape=(10, 7, 3))
-            elif param == "cov":
-                mv = MvGaussianRandomWalk("mv", mu, cov=pm.math.dot(chol, chol.T), shape=(10, 7, 3))
-            else:
-                raise ValueError
+            with pytest.warns(UserWarning, match="Initial distribution not specified"):
+                if param == "chol":
+                    mv = MvGaussianRandomWalk("mv", mu, chol=chol, shape=(10, 7, 3))
+                elif param == "cov":
+                    mv = MvGaussianRandomWalk(
+                        "mv", mu, cov=pm.math.dot(chol, chol.T), shape=(10, 7, 3)
+                    )
+                else:
+                    raise ValueError
             assert draw(mv, draws=5).shape == (5, 10, 7, 3)
 
-    @pytest.mark.parametrize("param", ["cov", "chol", "tau"])
+    @pytest.mark.parametrize("param", ["scale", "chol", "tau"])
     def test_mvstudentt(self, param):
         x = MvStudentTRandomWalk.dist(
             nu=4,
Expand Down Expand Up @@ -853,7 +868,13 @@ def sde_fn(x, k, d, s):
with Model() as t0:
init_dist = pm.Normal.dist(0, 10, shape=(batch_size,))
y = EulerMaruyama(
"y", dt=0.02, sde_fn=sde_fn, sde_pars=sde_pars, init_dist=init_dist, **kwargs
"y",
dt=0.02,
sde_fn=sde_fn,
sde_pars=sde_pars,
init_dist=init_dist,
initval="prior",
**kwargs,
)

y_eval = draw(y, draws=2, random_seed=numpy_rng)
@@ -873,6 +894,7 @@ def sde_fn(x, k, d, s):
                 sde_fn=sde_fn,
                 sde_pars=sde_pars_slice,
                 init_dist=init_dist,
+                initval="prior",
                 **kwargs,
             )
 
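As a usage note, a sketch of the warning discipline the test module now enforces (hypothetical mu and chol values stand in for the LKJCholeskyCov setup of the real test): with pytestmark escalating warnings to errors, the expected "Initial distribution not specified" UserWarning has to be caught explicitly.

    import numpy as np
    import pytest
    import pymc as pm

    with pm.Model():
        mu = np.zeros(3)
        chol = np.eye(3)  # hypothetical stand-in for the Cholesky factor
        with pytest.warns(UserWarning, match="Initial distribution not specified"):
            mv = pm.MvGaussianRandomWalk("mv", mu, chol=chol, shape=(10, 7, 3))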
