Skip to content

Commit 3cfee77

Browse files
authored
Use sigma instead of sd, remove DeprecationWarning (#4344)
* remove sd deprecation warning
* sd -> sigma in pymc3/tests/test_models_linear.py::TestGLM
* update cls.sd in tests
* noop
* don't delete self.sd
1 parent 93b68f2 commit 3cfee77

File tree

5 files changed

+10
-31
lines changed

5 files changed

+10
-31
lines changed

Diff for: pymc3/distributions/continuous.py

-14
Original file line numberDiff line numberDiff line change
@@ -478,7 +478,6 @@ class Normal(Continuous):
478478
def __init__(self, mu=0, sigma=None, tau=None, sd=None, **kwargs):
479479
if sd is not None:
480480
sigma = sd
481-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
482481
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
483482
self.sigma = self.sd = tt.as_tensor_variable(sigma)
484483
self.tau = tt.as_tensor_variable(tau)
@@ -640,7 +639,6 @@ def __init__(
640639
):
641640
if sd is not None:
642641
sigma = sd
643-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
644642
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
645643
self.sigma = self.sd = tt.as_tensor_variable(sigma)
646644
self.tau = tt.as_tensor_variable(tau)
@@ -835,7 +833,6 @@ class HalfNormal(PositiveContinuous):
835833
def __init__(self, sigma=None, tau=None, sd=None, *args, **kwargs):
836834
if sd is not None:
837835
sigma = sd
838-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
839836
super().__init__(*args, **kwargs)
840837
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
841838

@@ -1218,7 +1215,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, *
12181215
super().__init__(*args, **kwargs)
12191216
if sd is not None:
12201217
sigma = sd
1221-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
12221218
alpha, beta = self.get_alpha_beta(alpha, beta, mu, sigma)
12231219
self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
12241220
self.beta = beta = tt.as_tensor_variable(floatX(beta))
@@ -1724,7 +1720,6 @@ def __init__(self, mu=0, sigma=None, tau=None, sd=None, *args, **kwargs):
17241720
super().__init__(*args, **kwargs)
17251721
if sd is not None:
17261722
sigma = sd
1727-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
17281723

17291724
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
17301725

@@ -1884,11 +1879,9 @@ class StudentT(Continuous):
18841879
"""
18851880

18861881
def __init__(self, nu, mu=0, lam=None, sigma=None, sd=None, *args, **kwargs):
1887-
super().__init__(*args, **kwargs)
18881882
super().__init__(*args, **kwargs)
18891883
if sd is not None:
18901884
sigma = sd
1891-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
18921885
self.nu = nu = tt.as_tensor_variable(floatX(nu))
18931886
lam, sigma = get_tau_sigma(tau=lam, sigma=sigma)
18941887
self.lam = lam = tt.as_tensor_variable(lam)
@@ -2397,7 +2390,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, *
23972390
super().__init__(*args, **kwargs)
23982391
if sd is not None:
23992392
sigma = sd
2400-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
24012393

24022394
alpha, beta = self.get_alpha_beta(alpha, beta, mu, sigma)
24032395
self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
@@ -2545,7 +2537,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, *
25452537

25462538
if sd is not None:
25472539
sigma = sd
2548-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
25492540

25502541
alpha, beta = InverseGamma._get_alpha_beta(alpha, beta, mu, sigma)
25512542
self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
@@ -2902,7 +2893,6 @@ def __init__(self, nu=1, sigma=None, lam=None, sd=None, *args, **kwargs):
29022893
super().__init__(*args, **kwargs)
29032894
if sd is not None:
29042895
sigma = sd
2905-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
29062896

29072897
self.mode = tt.as_tensor_variable(0)
29082898
lam, sigma = get_tau_sigma(lam, sigma)
@@ -3041,7 +3031,6 @@ def __init__(self, mu=0.0, sigma=None, nu=None, sd=None, *args, **kwargs):
30413031

30423032
if sd is not None:
30433033
sigma = sd
3044-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
30453034

30463035
self.mu = mu = tt.as_tensor_variable(floatX(mu))
30473036
self.sigma = self.sd = sigma = tt.as_tensor_variable(floatX(sigma))
@@ -3317,7 +3306,6 @@ def __init__(self, mu=0.0, sigma=None, tau=None, alpha=1, sd=None, *args, **kwar
33173306

33183307
if sd is not None:
33193308
sigma = sd
3320-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
33213309

33223310
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
33233311
self.mu = mu = tt.as_tensor_variable(floatX(mu))
@@ -3721,7 +3709,6 @@ def __init__(self, nu=None, sigma=None, b=None, sd=None, *args, **kwargs):
37213709
super().__init__(*args, **kwargs)
37223710
if sd is not None:
37233711
sigma = sd
3724-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
37253712

37263713
nu, b, sigma = self.get_nu_b(nu, b, sigma)
37273714
self.nu = nu = tt.as_tensor_variable(floatX(nu))
@@ -3994,7 +3981,6 @@ class LogitNormal(UnitContinuous):
39943981
def __init__(self, mu=0, sigma=None, tau=None, sd=None, **kwargs):
39953982
if sd is not None:
39963983
sigma = sd
3997-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
39983984
self.mu = mu = tt.as_tensor_variable(floatX(mu))
39993985
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
40003986
self.sigma = self.sd = tt.as_tensor_variable(sigma)

Diff for: pymc3/distributions/mixture.py

-3
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,6 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import warnings
16-
1715
from collections.abc import Iterable
1816

1917
import numpy as np
@@ -632,7 +630,6 @@ class NormalMixture(Mixture):
632630
def __init__(self, w, mu, sigma=None, tau=None, sd=None, comp_shape=(), *args, **kwargs):
633631
if sd is not None:
634632
sigma = sd
635-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
636633
_, sigma = get_tau_sigma(tau=tau, sigma=sigma)
637634

638635
self.mu = mu = tt.as_tensor_variable(mu)

Diff for: pymc3/distributions/timeseries.py

-4
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,6 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import warnings
16-
1715
import numpy as np
1816
import theano.tensor as tt
1917

@@ -116,7 +114,6 @@ def __init__(
116114
super().__init__(*args, **kwargs)
117115
if sd is not None:
118116
sigma = sd
119-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
120117

121118
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
122119
self.sigma = self.sd = tt.as_tensor_variable(sigma)
@@ -211,7 +208,6 @@ def __init__(self, tau=None, init=Flat.dist(), sigma=None, mu=0.0, sd=None, *arg
211208
raise TypeError("GaussianRandomWalk must be supplied a non-zero shape argument!")
212209
if sd is not None:
213210
sigma = sd
214-
warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
215211
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
216212
self.tau = tt.as_tensor_variable(tau)
217213
sigma = tt.as_tensor_variable(sigma)

Diff for: pymc3/tests/test_glm.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -46,9 +46,9 @@ def setup_class(cls):
4646
super().setup_class()
4747
cls.intercept = 1
4848
cls.slope = 3
49-
cls.sd = 0.05
49+
cls.sigma = 0.05
5050
x_linear, cls.y_linear = generate_data(cls.intercept, cls.slope, size=1000)
51-
cls.y_linear += np.random.normal(size=1000, scale=cls.sd)
51+
cls.y_linear += np.random.normal(size=1000, scale=cls.sigma)
5252
cls.data_linear = pd.DataFrame(dict(x=x_linear, y=cls.y_linear))
5353

5454
x_logistic, y_logistic = generate_data(cls.intercept, cls.slope, size=3000)
@@ -73,7 +73,7 @@ def test_linear_component(self):
7373

7474
assert round(abs(np.mean(trace["Intercept"]) - self.intercept), 1) == 0
7575
assert round(abs(np.mean(trace["x"]) - self.slope), 1) == 0
76-
assert round(abs(np.mean(trace["sigma"]) - self.sd), 1) == 0
76+
assert round(abs(np.mean(trace["sigma"]) - self.sigma), 1) == 0
7777

7878
def test_glm(self):
7979
with Model() as model:
@@ -83,7 +83,7 @@ def test_glm(self):
8383

8484
assert round(abs(np.mean(trace["Intercept"]) - self.intercept), 1) == 0
8585
assert round(abs(np.mean(trace["x"]) - self.slope), 1) == 0
86-
assert round(abs(np.mean(trace["sd"]) - self.sd), 1) == 0
86+
assert round(abs(np.mean(trace["sd"]) - self.sigma), 1) == 0
8787

8888
def test_glm_offset(self):
8989
offset = 1.0

Diff for: pymc3/tests/test_models_linear.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -33,9 +33,9 @@ def setup_class(cls):
3333
super().setup_class()
3434
cls.intercept = 1
3535
cls.slope = 3
36-
cls.sd = 0.05
36+
cls.sigma = 0.05
3737
x_linear, cls.y_linear = generate_data(cls.intercept, cls.slope, size=1000)
38-
cls.y_linear += np.random.normal(size=1000, scale=cls.sd)
38+
cls.y_linear += np.random.normal(size=1000, scale=cls.sigma)
3939
cls.data_linear = dict(x=x_linear, y=cls.y_linear)
4040

4141
x_logistic, y_logistic = generate_data(cls.intercept, cls.slope, size=3000)
@@ -59,7 +59,7 @@ def test_linear_component(self):
5959

6060
assert round(abs(np.mean(trace["lm_Intercept"]) - self.intercept), 1) == 0
6161
assert round(abs(np.mean(trace["lm_x0"]) - self.slope), 1) == 0
62-
assert round(abs(np.mean(trace["sigma"]) - self.sd), 1) == 0
62+
assert round(abs(np.mean(trace["sigma"]) - self.sigma), 1) == 0
6363
assert vars_to_create == set(model.named_vars.keys())
6464

6565
def test_linear_component_from_formula(self):
@@ -75,7 +75,7 @@ def test_linear_component_from_formula(self):
7575

7676
assert round(abs(np.mean(trace["Intercept"]) - self.intercept), 1) == 0
7777
assert round(abs(np.mean(trace["x"]) - self.slope), 1) == 0
78-
assert round(abs(np.mean(trace["sigma"]) - self.sd), 1) == 0
78+
assert round(abs(np.mean(trace["sigma"]) - self.sigma), 1) == 0
7979

8080
def test_glm(self):
8181
with Model() as model:
@@ -88,7 +88,7 @@ def test_glm(self):
8888
)
8989
assert round(abs(np.mean(trace["glm_Intercept"]) - self.intercept), 1) == 0
9090
assert round(abs(np.mean(trace["glm_x0"]) - self.slope), 1) == 0
91-
assert round(abs(np.mean(trace["glm_sd"]) - self.sd), 1) == 0
91+
assert round(abs(np.mean(trace["glm_sd"]) - self.sigma), 1) == 0
9292
assert vars_to_create == set(model.named_vars.keys())
9393

9494
def test_glm_from_formula(self):
@@ -103,7 +103,7 @@ def test_glm_from_formula(self):
103103

104104
assert round(abs(np.mean(trace["%s_Intercept" % NAME]) - self.intercept), 1) == 0
105105
assert round(abs(np.mean(trace["%s_x" % NAME]) - self.slope), 1) == 0
106-
assert round(abs(np.mean(trace["%s_sd" % NAME]) - self.sd), 1) == 0
106+
assert round(abs(np.mean(trace["%s_sd" % NAME]) - self.sigma), 1) == 0
107107

108108
def test_strange_types(self):
109109
with Model():

0 commit comments

Comments
 (0)