diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index 3488cdf49ee..583494898bb 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -10,6 +10,7 @@
 - Made `BrokenPipeError` for parallel sampling more verbose on Windows.
 - Added the `broadcast_distribution_samples` function that helps broadcasting arrays of drawn samples, taking into account the requested `size` and the inferred distribution shape. This sometimes is needed by distributions that call several `rvs` separately within their `random` method, such as the `ZeroInflatedPoisson` (Fix issue #3310).
 - The `Wald`, `Kumaraswamy`, `LogNormal`, `Pareto`, `Cauchy`, `HalfCauchy`, `Weibull` and `ExGaussian` distributions `random` method used a hidden `_random` function that was written with scalars in mind. This could potentially lead to artificial correlations between random draws. Added shape guards and broadcasting of the distribution samples to prevent this (Similar to issue #3310).
+- Added a fix to allow the imputation of single missing values of observed data, which previously would fail (Fix issue #3122).
 
 ### Deprecations
 
diff --git a/pymc3/model.py b/pymc3/model.py
index 4e0f7ec10a5..31f64aa5ba7 100644
--- a/pymc3/model.py
+++ b/pymc3/model.py
@@ -401,6 +401,8 @@ class ValueGradFunction:
     """
     def __init__(self, cost, grad_vars, extra_vars=None, dtype=None,
                  casting='no', **kwargs):
+        from .distributions import TensorType
+
         if extra_vars is None:
             extra_vars = []
 
@@ -437,6 +439,12 @@ def __init__(self, cost, grad_vars, extra_vars=None, dtype=None,
         self._extra_vars_shared = {}
         for var in extra_vars:
             shared = theano.shared(var.tag.test_value, var.name + '_shared__')
+            # test TensorType compatibility
+            if hasattr(var.tag.test_value, 'shape'):
+                testtype = TensorType(var.dtype, var.tag.test_value.shape)
+
+                if testtype != shared.type:
+                    shared.type = testtype
             self._extra_vars_shared[var.name] = shared
             givens.append((var, shared))
 
diff --git a/pymc3/tests/test_model.py b/pymc3/tests/test_model.py
index a0d9d3d228a..8bb47aeec33 100644
--- a/pymc3/tests/test_model.py
+++ b/pymc3/tests/test_model.py
@@ -288,3 +288,16 @@ def test_edge_case(self):
         assert logp.size == 1
         assert dlogp.size == 4
         npt.assert_allclose(dlogp, 0., atol=1e-5)
+
+    def test_tensor_type_conversion(self):
+        # case described in #3122
+        X = np.random.binomial(1, 0.5, 10)
+        X[0] = -1  # masked a single value
+        X = np.ma.masked_values(X, value=-1)
+        with pm.Model() as m:
+            x1 = pm.Uniform('x1', 0., 1.)
+            x2 = pm.Bernoulli('x2', x1, observed=X)
+
+        gf = m.logp_dlogp_function()
+
+        assert m['x2_missing'].type == gf._extra_vars_shared['x2_missing'].type
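
For context, the sketch below shows the user-facing scenario this patch addresses (issue #3122): observed data in which exactly one value is missing. It is a minimal illustration and not part of the patch; the variable names, the likelihood choice, and the `pm.sample` arguments are assumptions chosen to mirror the regression test above, relying only on standard PyMC3 behaviour (masked entries of `observed` data are imputed automatically as a `<name>_missing` variable).

```python
# Minimal usage sketch (not part of the patch), mirroring test_tensor_type_conversion.
# Names and sample() arguments are illustrative assumptions.
import numpy as np
import pymc3 as pm

# Binary data with exactly one entry marked as missing via a masked array.
data = np.random.binomial(1, 0.5, size=10)
data[0] = -1                                # sentinel for the missing observation
data = np.ma.masked_values(data, value=-1)  # masked array with a single masked entry

with pm.Model() as model:
    p = pm.Uniform('p', 0., 1.)
    # The masked entry becomes an automatically imputed 'y_missing' variable.
    y = pm.Bernoulli('y', p, observed=data)

    # Before this fix, building the value/gradient function failed for a single
    # missing value because the backing shared variable's TensorType mismatched.
    fn = model.logp_dlogp_function()

    # Sampling then works as usual; the imputed value appears in the trace.
    trace = pm.sample(500, tune=500, cores=1)

print(trace['y_missing'].mean(axis=0))      # posterior mean of the imputed entry
```

Masked arrays are PyMC3's existing mechanism for missing-data imputation; the patch only ensures that the shared variable created for the imputed values gets a compatible `TensorType` when just one value is masked.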