Skip to content

Commit

Permalink
Fixes to PR according to comments
Browse files Browse the repository at this point in the history
  • Loading branch information
velochy committed Jan 12, 2024
1 parent 1129cf8 commit ab63678
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 15 deletions.
20 changes: 11 additions & 9 deletions pymc/distributions/multivariate.py
Original file line number Diff line number Diff line change
Expand Up @@ -1597,7 +1597,6 @@ def lkjcorr_default_transform(op, rv):
return MultivariateIntervalTransform(floatX(-1.0), floatX(1.0))


# Thin wrapper around _LKJCorr
class LKJCorr:
r"""
The LKJ (Lewandowski, Kurowicka and Joe) log-likelihood.
Expand All @@ -1619,9 +1618,9 @@ class LKJCorr:
implies a uniform distribution of the correlation matrices;
larger values put more weight on matrices with few correlations.
return_matrix : bool, default=False
If True, returns the full correllation matrix.
If True, returns the full correlation matrix.
False only returns the values of the upper triangular matrix excluding
diagonal in a single vector of length n(n-1)/2 for backwards compatibility
diagonal in a single vector of length n(n-1)/2 for memory efficiency
Notes
-----
Expand All @@ -1641,14 +1640,18 @@ class LKJCorr:
'corr', eta=4, n=10, return_matrix=True
)
# Define a new MvNormal with the given correllation matrix
# Define a new MvNormal with the given correlation matrix
vals = sds*pm.MvNormal('vals', mu=np.zeros(10), cov=corr, shape=10)
# Or transform an uncorrelated normal distribution:
vals_raw = pm.Normal('vals_raw', shape=10)
chol = pt.linalg.cholesky(corr)
vals = sds*pt.dot(chol,vals_raw)
    # The matrix is internally still sampled as an upper triangular vector
# If you want access to it in matrix form in the trace, add
pm.Deterministic('corr_mat', corr)
References
----------
Expand All @@ -1659,15 +1662,14 @@ class LKJCorr:
"""

def __new__(cls, name, n, eta, *, return_matrix=False, **kwargs):
c_vec = _LKJCorr(name, eta=eta, n=n, **kwargs)
if not return_matrix:
return _LKJCorr(name, eta=eta, n=n, **kwargs)
return c_vec
else:
c_vec = _LKJCorr(name + "_raw", eta=eta, n=n, **kwargs)
return pm.Deterministic(name, cls.vec_to_corr_mat(c_vec, n))
return cls.vec_to_corr_mat(c_vec, n)

@classmethod
def dist(cls, n, eta, *, return_matrix=True, **kwargs):
# compute Cholesky decomposition
def dist(cls, n, eta, *, return_matrix=False, **kwargs):
c_vec = _LKJCorr.dist(eta=eta, n=n, **kwargs)
if not return_matrix:
return c_vec
Expand Down
14 changes: 8 additions & 6 deletions tests/distributions/test_multivariate.py
Original file line number Diff line number Diff line change
Expand Up @@ -1474,15 +1474,17 @@ def test_with_lkjcorr_matrix(
self,
):
with pm.Model() as model:
mu = pm.Normal("mu", 0.0, 1.0, shape=3)
sd = pm.Exponential("sd", 1.0, shape=3)

corr = pm.LKJCorr("corr", n=3, eta=2, return_matrix=True)
# pylint: enable=unpacking-non-sequence
mv = pm.MvNormal("mv", mu, cov=sd * (sd * corr).T, size=4)
pm.Deterministic("corr_mat", corr)
mv = pm.MvNormal("mv", 0.0, cov=corr, size=4)
prior = pm.sample_prior_predictive(samples=10, return_inferencedata=False)

assert prior["mv"].shape == (10, 4, 3)
assert prior["corr_mat"].shape == (10, 3, 3) # square
assert (prior["corr_mat"][:, [0, 1, 2], [0, 1, 2]] == 1.0).all() # 1.0 on diagonal
assert (prior["corr_mat"] == prior["corr_mat"].transpose(0, 2, 1)).all() # symmetric
assert (
prior["corr_mat"].max() <= 1.0 and prior["corr_mat"].min() >= -1.0
) # constrained between -1 and 1

def test_issue_3758(self):
np.random.seed(42)
Expand Down

0 comments on commit ab63678

Please sign in to comment.