Commit
Asymmetric Laplace distribution added (#4392)
* asymmetric laplace distribution added

* unit test for random function added

* continuous.py updated

* suggested changes added

* updated

* RELEASE-NOTES.md and continuous.rst updated
chandan5362 authored Jan 5, 2021
1 parent 34b05d8 commit 044c407
Showing 6 changed files with 138 additions and 0 deletions.
1 change: 1 addition & 0 deletions RELEASE-NOTES.md
@@ -21,6 +21,7 @@ It also brings some dreadfully awaited fixes, so be sure to go through the chang
- `plot_posterior_predictive_glm` now works with `arviz.InferenceData` as well (see [#4234](https://github.com/pymc-devs/pymc3/pull/4234))
- Add `logcdf` method to all univariate discrete distributions (see [#4387](https://github.com/pymc-devs/pymc3/pull/4387)).
- Add `random` method to `MvGaussianRandomWalk` (see [#4388](https://github.com/pymc-devs/pymc3/pull/4388))
- `AsymmetricLaplace` distribution added (see [#4392](https://github.com/pymc-devs/pymc3/pull/4392)).

### Maintenance
- Fixed a bug whereby partial traces returned after a keyboard interrupt during parallel sampling had fewer draws than would've been available [#4318](https://github.com/pymc-devs/pymc3/pull/4318)
1 change: 1 addition & 0 deletions docs/source/api/distributions/continuous.rst
@@ -16,6 +16,7 @@ Continuous
Kumaraswamy
Exponential
Laplace
AsymmetricLaplace
StudentT
HalfStudentT
Cauchy
2 changes: 2 additions & 0 deletions pymc3/distributions/__init__.py
@@ -16,6 +16,7 @@
from pymc3.distributions.bart import BART
from pymc3.distributions.bound import Bound
from pymc3.distributions.continuous import (
AsymmetricLaplace,
Beta,
Cauchy,
ChiSquared,
@@ -160,6 +161,7 @@
"LKJCorr",
"AR1",
"AR",
"AsymmetricLaplace",
"GaussianRandomWalk",
"MvGaussianRandomWalk",
"MvStudentTRandomWalk",
101 changes: 101 additions & 0 deletions pymc3/distributions/continuous.py
@@ -80,6 +80,7 @@
"Interpolated",
"Rice",
"Moyal",
"AsymmetricLaplace",
]


@@ -1661,6 +1662,106 @@ def logcdf(self, value):
)


class AsymmetricLaplace(Continuous):
r"""
Asymmetric-Laplace log-likelihood.
The pdf of this distribution is

.. math::

    f(x|b, \kappa, \mu) =
        \left(\frac{b}{\kappa + 1/\kappa}\right) e^{-(x-\mu)\, b\, s\, \kappa^{s}}

where

.. math::

    s = \operatorname{sgn}(x-\mu)

========  ==================================================
Support   :math:`x \in \mathbb{R}`
Mean      :math:`\mu - \frac{\kappa - 1/\kappa}{b}`
Variance  :math:`\frac{1 + \kappa^{4}}{b^{2}\kappa^{2}}`
========  ==================================================
Parameters
----------
b: float
Scale parameter (b > 0)
kappa: float
Symmetry parameter (kappa > 0)
mu: float
Location parameter
See Also
--------
`Reference <https://en.wikipedia.org/wiki/Asymmetric_Laplace_distribution>`_
"""

def __init__(self, b, kappa, mu=0, *args, **kwargs):
self.b = tt.as_tensor_variable(floatX(b))
self.kappa = tt.as_tensor_variable(floatX(kappa))
self.mu = mu = tt.as_tensor_variable(floatX(mu))

self.mean = self.mu - (self.kappa - 1 / self.kappa) / b
self.variance = (1 + self.kappa ** 4) / (self.kappa ** 2 * self.b ** 2)

assert_negative_support(kappa, "kappa", "AsymmetricLaplace")
assert_negative_support(b, "b", "AsymmetricLaplace")

super().__init__(*args, **kwargs)

def _random(self, b, kappa, mu, size=None):
u = np.random.uniform(size=size)
switch = kappa ** 2 / (1 + kappa ** 2)
non_positive_x = mu + kappa * np.log(u * (1 / switch)) / b
positive_x = mu - np.log((1 - u) * (1 + kappa ** 2)) / (kappa * b)
draws = non_positive_x * (u <= switch) + positive_x * (u > switch)
return draws

def random(self, point=None, size=None):
"""
Draw random samples from this distribution, using the inverse CDF method.
Parameters
----------
point: dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size: int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
b, kappa, mu = draw_values([self.b, self.kappa, self.mu], point=point, size=size)
return generate_samples(self._random, b, kappa, mu, dist_shape=self.shape, size=size)

def logp(self, value):
"""
Calculate log-probability of Asymmetric-Laplace distribution at specified value.
Parameters
----------
value: numeric
Value(s) for which log-probability is calculated. If the log probabilities for multiple
values are desired, the values must be provided in a numpy array or theano tensor.
Returns
-------
TensorVariable
"""
value = value - self.mu
return bound(
tt.log(self.b / (self.kappa + (self.kappa ** -1)))
+ (-value * self.b * tt.sgn(value) * (self.kappa ** tt.sgn(value))),
0 < self.b,
0 < self.kappa,
)


class Lognormal(PositiveContinuous):
r"""
Log-normal log-likelihood.
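Below is a minimal usage sketch for the `AsymmetricLaplace` class added above (not part of this commit). The priors, the sampler settings, and the `observed_data` placeholder are illustrative assumptions, not taken from the PR.

```python
# Illustrative sketch: fit the new AsymmetricLaplace likelihood with PyMC3.
import numpy as np
import pymc3 as pm

observed_data = np.random.laplace(loc=0.0, scale=1.0, size=200)  # placeholder data

with pm.Model():
    b = pm.HalfNormal("b", sigma=2.0)          # scale parameter, b > 0
    kappa = pm.HalfNormal("kappa", sigma=2.0)  # symmetry parameter, kappa > 0
    mu = pm.Normal("mu", mu=0.0, sigma=10.0)   # location parameter
    pm.AsymmetricLaplace("y", b=b, kappa=kappa, mu=mu, observed=observed_data)
    trace = pm.sample(1000, tune=1000, return_inferencedata=True)
```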
17 changes: 17 additions & 0 deletions pymc3/tests/test_distributions.py
@@ -35,6 +35,7 @@
from pymc3.blocking import DictToVarBijection
from pymc3.distributions import (
AR1,
AsymmetricLaplace,
Bernoulli,
Beta,
BetaBinomial,
@@ -223,6 +224,14 @@ def build_model(distfam, valuedomain, vardomains, extra_args=None):
return m


def laplace_asymmetric_logpdf(value, kappa, b, mu):
kapinv = 1 / kappa
value = value - mu
lPx = value * b * np.where(value >= 0, -kappa, kapinv)
lPx += np.log(b / (kappa + kapinv))
return lPx


def integrate_nd(f, domain, shape, dtype):
if shape == () or shape == (1,):
if dtype in continuous_types:
@@ -991,6 +1000,14 @@ def test_laplace(self):
lambda value, mu, b: sp.laplace.logcdf(value, mu, b),
)

def test_laplace_asymmetric(self):
self.pymc3_matches_scipy(
AsymmetricLaplace,
R,
{"b": Rplus, "kappa": Rplus, "mu": R},
laplace_asymmetric_logpdf,
)

def test_lognormal(self):
self.pymc3_matches_scipy(
Lognormal,
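The reference density `laplace_asymmetric_logpdf` used by `test_laplace_asymmetric` above can be sanity-checked numerically. The sketch below (not part of this commit; parameter values are arbitrary) verifies that the density integrates to 1 and reproduces the mean stated in the docstring, `mu - (kappa - 1/kappa)/b`.

```python
# Illustrative sketch: numerically check the reference asymmetric-Laplace density.
import numpy as np
from scipy.integrate import quad


def laplace_asymmetric_logpdf(value, kappa, b, mu):
    kapinv = 1 / kappa
    value = value - mu
    lPx = value * b * np.where(value >= 0, -kappa, kapinv)
    lPx += np.log(b / (kappa + kapinv))
    return lPx


kappa, b, mu = 1.5, 2.0, 0.5


def pdf(x):
    return np.exp(laplace_asymmetric_logpdf(x, kappa, b, mu))


total, _ = quad(pdf, -np.inf, np.inf)
mean, _ = quad(lambda x: x * pdf(x), -np.inf, np.inf)
print(total)                               # ~1.0
print(mean, mu - (kappa - 1 / kappa) / b)  # should agree closely
```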
16 changes: 16 additions & 0 deletions pymc3/tests/test_distributions_random.py
@@ -375,6 +375,11 @@ class TestLaplace(BaseTestCases.BaseTestCase):
params = {"mu": 1.0, "b": 1.0}


class TestAsymmetricLaplace(BaseTestCases.BaseTestCase):
distribution = pm.AsymmetricLaplace
params = {"kappa": 1.0, "b": 1.0, "mu": 0.0}


class TestLognormal(BaseTestCases.BaseTestCase):
distribution = pm.Lognormal
params = {"mu": 1.0, "tau": 1.0}
@@ -626,6 +631,17 @@ def ref_rand(size, mu, b):

pymc3_random(pm.Laplace, {"mu": R, "b": Rplus}, ref_rand=ref_rand)

def test_laplace_asymmetric(self):
def ref_rand(size, kappa, b, mu):
u = np.random.uniform(size=size)
switch = kappa ** 2 / (1 + kappa ** 2)
non_positive_x = mu + kappa * np.log(u * (1 / switch)) / b
positive_x = mu - np.log((1 - u) * (1 + kappa ** 2)) / (kappa * b)
draws = non_positive_x * (u <= switch) + positive_x * (u > switch)
return draws

pymc3_random(pm.AsymmetricLaplace, {"b": Rplus, "kappa": Rplus, "mu": R}, ref_rand=ref_rand)

def test_lognormal(self):
def ref_rand(size, mu, tau):
return np.exp(mu + (tau ** -0.5) * st.norm.rvs(loc=0.0, scale=1.0, size=size))
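For context on `ref_rand` in `test_laplace_asymmetric` above: it inverts the CDF, where `switch = kappa**2 / (1 + kappa**2)` is the probability of a draw falling at or below `mu`. The sketch below (not part of this commit; parameter values are arbitrary) compares empirical moments of such draws with the analytic mean and variance given in the distribution's docstring.

```python
# Illustrative sketch: moment check for the inverse-CDF sampler.
import numpy as np


def ref_rand(size, kappa, b, mu):
    u = np.random.uniform(size=size)
    switch = kappa ** 2 / (1 + kappa ** 2)  # P(X <= mu)
    non_positive_x = mu + kappa * np.log(u * (1 / switch)) / b
    positive_x = mu - np.log((1 - u) * (1 + kappa ** 2)) / (kappa * b)
    return non_positive_x * (u <= switch) + positive_x * (u > switch)


kappa, b, mu = 2.0, 1.5, -1.0
draws = ref_rand(200_000, kappa, b, mu)

print(draws.mean(), mu - (kappa - 1 / kappa) / b)             # ≈ analytic mean
print(draws.var(), (1 + kappa ** 4) / (b ** 2 * kappa ** 2))  # ≈ analytic variance
```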
