Use sigma instead of sd, remove DeprecationWarning (#4344)
* remove sd deprecation warning

* sd -> sigma in pymc3/tests/test_models_linear.py::TestGLM

* update cls.sd in tests

* noop

* don't delete self.sd
MarcoGorelli authored Dec 26, 2020
1 parent 93b68f2 commit 3cfee77
Showing 5 changed files with 10 additions and 31 deletions.
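
For context, a minimal sketch of what this change means for callers (an assumption-laden illustration based on the alias logic visible in the diffs below, not part of the commit). The `sd=` keyword keeps working as an alias for `sigma=`; it just no longer emits a DeprecationWarning:

    import warnings

    import pymc3 as pm

    with pm.Model():
        with warnings.catch_warnings():
            warnings.simplefilter("error", DeprecationWarning)
            # Before this commit, the `sd` alias raised here under the
            # "error" filter; after it, `sd` is silently mapped to `sigma`.
            x = pm.Normal("x", mu=0, sd=1)
        y = pm.Normal("y", mu=0, sigma=1)  # preferred spelling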
14 changes: 0 additions & 14 deletions pymc3/distributions/continuous.py
@@ -478,7 +478,6 @@ class Normal(Continuous):
     def __init__(self, mu=0, sigma=None, tau=None, sd=None, **kwargs):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.sigma = self.sd = tt.as_tensor_variable(sigma)
         self.tau = tt.as_tensor_variable(tau)
@@ -640,7 +639,6 @@ def __init__(
     ):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.sigma = self.sd = tt.as_tensor_variable(sigma)
         self.tau = tt.as_tensor_variable(tau)
@@ -835,7 +833,6 @@ class HalfNormal(PositiveContinuous):
     def __init__(self, sigma=None, tau=None, sd=None, *args, **kwargs):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         super().__init__(*args, **kwargs)
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
 
@@ -1218,7 +1215,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, *
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         alpha, beta = self.get_alpha_beta(alpha, beta, mu, sigma)
         self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
         self.beta = beta = tt.as_tensor_variable(floatX(beta))
@@ -1724,7 +1720,6 @@ def __init__(self, mu=0, sigma=None, tau=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
 
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
 
@@ -1884,11 +1879,9 @@ class StudentT(Continuous):
     """
 
     def __init__(self, nu, mu=0, lam=None, sigma=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         self.nu = nu = tt.as_tensor_variable(floatX(nu))
         lam, sigma = get_tau_sigma(tau=lam, sigma=sigma)
         self.lam = lam = tt.as_tensor_variable(lam)
@@ -2397,7 +2390,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, *
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
 
         alpha, beta = self.get_alpha_beta(alpha, beta, mu, sigma)
         self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
@@ -2545,7 +2537,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, *
 
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
 
         alpha, beta = InverseGamma._get_alpha_beta(alpha, beta, mu, sigma)
         self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
@@ -2902,7 +2893,6 @@ def __init__(self, nu=1, sigma=None, lam=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
 
         self.mode = tt.as_tensor_variable(0)
         lam, sigma = get_tau_sigma(lam, sigma)
@@ -3041,7 +3031,6 @@ def __init__(self, mu=0.0, sigma=None, nu=None, sd=None, *args, **kwargs):
 
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
 
         self.mu = mu = tt.as_tensor_variable(floatX(mu))
         self.sigma = self.sd = sigma = tt.as_tensor_variable(floatX(sigma))
@@ -3317,7 +3306,6 @@ def __init__(self, mu=0.0, sigma=None, tau=None, alpha=1, sd=None, *args, **kwar
 
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
 
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.mu = mu = tt.as_tensor_variable(floatX(mu))
@@ -3721,7 +3709,6 @@ def __init__(self, nu=None, sigma=None, b=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
 
         nu, b, sigma = self.get_nu_b(nu, b, sigma)
         self.nu = nu = tt.as_tensor_variable(floatX(nu))
@@ -3994,7 +3981,6 @@ class LogitNormal(UnitContinuous):
     def __init__(self, mu=0, sigma=None, tau=None, sd=None, **kwargs):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         self.mu = mu = tt.as_tensor_variable(floatX(mu))
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.sigma = self.sd = tt.as_tensor_variable(sigma)
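
Every constructor above funnels the (possibly aliased) scale through `get_tau_sigma`, which reconciles the precision and standard-deviation parametrizations. A minimal sketch of that invariant, assuming the helper's usual behavior (tau = 1 / sigma**2):

    import numpy as np

    from pymc3.distributions.continuous import get_tau_sigma

    # Pass either parametrization; the helper derives the other one.
    tau, sigma = get_tau_sigma(sigma=0.5)
    assert np.isclose(tau, 4.0)  # precision = 1 / 0.5**2

    tau, sigma = get_tau_sigma(tau=4.0)
    assert np.isclose(sigma, 0.5)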
3 changes: 0 additions & 3 deletions pymc3/distributions/mixture.py
@@ -12,8 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import warnings
-
 from collections.abc import Iterable
 
 import numpy as np
@@ -632,7 +630,6 @@ class NormalMixture(Mixture):
     def __init__(self, w, mu, sigma=None, tau=None, sd=None, comp_shape=(), *args, **kwargs):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         _, sigma = get_tau_sigma(tau=tau, sigma=sigma)
 
         self.mu = mu = tt.as_tensor_variable(mu)
4 changes: 0 additions & 4 deletions pymc3/distributions/timeseries.py
@@ -12,8 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import warnings
-
 import numpy as np
 import theano.tensor as tt
 
@@ -116,7 +114,6 @@ def __init__(
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
 
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.sigma = self.sd = tt.as_tensor_variable(sigma)
@@ -211,7 +208,6 @@ def __init__(self, tau=None, init=Flat.dist(), sigma=None, mu=0.0, sd=None, *arg
             raise TypeError("GaussianRandomWalk must be supplied a non-zero shape argument!")
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.tau = tt.as_tensor_variable(tau)
         sigma = tt.as_tensor_variable(sigma)
8 changes: 4 additions & 4 deletions pymc3/tests/test_glm.py
@@ -46,9 +46,9 @@ def setup_class(cls):
         super().setup_class()
         cls.intercept = 1
         cls.slope = 3
-        cls.sd = 0.05
+        cls.sigma = 0.05
         x_linear, cls.y_linear = generate_data(cls.intercept, cls.slope, size=1000)
-        cls.y_linear += np.random.normal(size=1000, scale=cls.sd)
+        cls.y_linear += np.random.normal(size=1000, scale=cls.sigma)
         cls.data_linear = pd.DataFrame(dict(x=x_linear, y=cls.y_linear))
 
         x_logistic, y_logistic = generate_data(cls.intercept, cls.slope, size=3000)
@@ -73,7 +73,7 @@ def test_linear_component(self):
 
         assert round(abs(np.mean(trace["Intercept"]) - self.intercept), 1) == 0
         assert round(abs(np.mean(trace["x"]) - self.slope), 1) == 0
-        assert round(abs(np.mean(trace["sigma"]) - self.sd), 1) == 0
+        assert round(abs(np.mean(trace["sigma"]) - self.sigma), 1) == 0
 
     def test_glm(self):
         with Model() as model:
@@ -83,7 +83,7 @@ def test_glm(self):
 
         assert round(abs(np.mean(trace["Intercept"]) - self.intercept), 1) == 0
         assert round(abs(np.mean(trace["x"]) - self.slope), 1) == 0
-        assert round(abs(np.mean(trace["sd"]) - self.sd), 1) == 0
+        assert round(abs(np.mean(trace["sd"]) - self.sigma), 1) == 0
 
     def test_glm_offset(self):
         offset = 1.0
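
A side note on the assertion idiom these tests rely on: `round(abs(estimate - truth), 1) == 0` is a coarse tolerance check that passes only when the absolute error rounds to 0.0 at one decimal place, i.e. stays below roughly 0.05. A self-contained illustration:

    # An error of 0.04 rounds to 0.0 at one decimal place, so the check passes.
    assert round(abs(1.04 - 1.00), 1) == 0

    # An error of 0.10 rounds to 0.1, so the same check would fail.
    assert round(abs(1.10 - 1.00), 1) != 0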
12 changes: 6 additions & 6 deletions pymc3/tests/test_models_linear.py
@@ -33,9 +33,9 @@ def setup_class(cls):
         super().setup_class()
         cls.intercept = 1
         cls.slope = 3
-        cls.sd = 0.05
+        cls.sigma = 0.05
         x_linear, cls.y_linear = generate_data(cls.intercept, cls.slope, size=1000)
-        cls.y_linear += np.random.normal(size=1000, scale=cls.sd)
+        cls.y_linear += np.random.normal(size=1000, scale=cls.sigma)
         cls.data_linear = dict(x=x_linear, y=cls.y_linear)
 
         x_logistic, y_logistic = generate_data(cls.intercept, cls.slope, size=3000)
@@ -59,7 +59,7 @@ def test_linear_component(self):
 
         assert round(abs(np.mean(trace["lm_Intercept"]) - self.intercept), 1) == 0
         assert round(abs(np.mean(trace["lm_x0"]) - self.slope), 1) == 0
-        assert round(abs(np.mean(trace["sigma"]) - self.sd), 1) == 0
+        assert round(abs(np.mean(trace["sigma"]) - self.sigma), 1) == 0
         assert vars_to_create == set(model.named_vars.keys())
 
     def test_linear_component_from_formula(self):
@@ -75,7 +75,7 @@
 
         assert round(abs(np.mean(trace["Intercept"]) - self.intercept), 1) == 0
         assert round(abs(np.mean(trace["x"]) - self.slope), 1) == 0
-        assert round(abs(np.mean(trace["sigma"]) - self.sd), 1) == 0
+        assert round(abs(np.mean(trace["sigma"]) - self.sigma), 1) == 0
 
     def test_glm(self):
         with Model() as model:
@@ -88,7 +88,7 @@
         )
         assert round(abs(np.mean(trace["glm_Intercept"]) - self.intercept), 1) == 0
         assert round(abs(np.mean(trace["glm_x0"]) - self.slope), 1) == 0
-        assert round(abs(np.mean(trace["glm_sd"]) - self.sd), 1) == 0
+        assert round(abs(np.mean(trace["glm_sd"]) - self.sigma), 1) == 0
         assert vars_to_create == set(model.named_vars.keys())
 
     def test_glm_from_formula(self):
@@ -103,7 +103,7 @@
 
         assert round(abs(np.mean(trace["%s_Intercept" % NAME]) - self.intercept), 1) == 0
         assert round(abs(np.mean(trace["%s_x" % NAME]) - self.slope), 1) == 0
-        assert round(abs(np.mean(trace["%s_sd" % NAME]) - self.sd), 1) == 0
+        assert round(abs(np.mean(trace["%s_sd" % NAME]) - self.sigma), 1) == 0
 
     def test_strange_types(self):
         with Model():
