Use sigma instead of sd, remove deprecationwarning #4344
Changes from all commits: 6169b5b, fbc9355, 392b83c, 4173b5e, b7ec8de
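For context, a minimal sketch of what the change means for callers. This assumes the diff is against PyMC3's continuous-distributions module (suggested by the `tt.`/`floatX` usage); the variable names are illustrative:

```python
import pymc3 as pm  # assumes the pre-4.0 PyMC3 API shown in this diff

with pm.Model():
    # The legacy `sd` keyword is still accepted and silently mapped to
    # `sigma`; after this PR it simply no longer emits a DeprecationWarning.
    x = pm.Normal("x", mu=0, sd=1)
    # The preferred spelling, equivalent to the line above.
    y = pm.Normal("y", mu=0, sigma=1)
```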
@@ -478,7 +478,6 @@ class Normal(Continuous):
     def __init__(self, mu=0, sigma=None, tau=None, sd=None, **kwargs):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.sigma = self.sd = tt.as_tensor_variable(sigma)
         self.tau = tt.as_tensor_variable(tau)
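Each of these constructors funnels the scale through `get_tau_sigma`. A simplified sketch of the relationship it reconciles (the real function returns tensor variables and handles more cases, so this is illustrative only):

```python
def get_tau_sigma_sketch(tau=None, sigma=None):
    # tau is the precision, the inverse of the variance: tau = 1 / sigma**2.
    if tau is not None and sigma is not None:
        raise ValueError("Pass either tau or sigma, not both.")
    if tau is None and sigma is None:
        tau, sigma = 1.0, 1.0      # default to the standard scale
    elif tau is None:
        tau = sigma ** -2.0        # derive the precision from sigma
    else:
        sigma = tau ** -0.5        # recover sigma from the precision
    return tau, sigma
```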
@@ -640,7 +639,6 @@ def __init__(
     ):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.sigma = self.sd = tt.as_tensor_variable(sigma)
         self.tau = tt.as_tensor_variable(tau)
@@ -835,7 +833,6 @@ class HalfNormal(PositiveContinuous):
     def __init__(self, sigma=None, tau=None, sd=None, *args, **kwargs):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         super().__init__(*args, **kwargs)
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)

Review comment on this hunk: Instead of deleting, could you raise an error here and advise using sigma? The reason being that …
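A minimal sketch of the reviewer's suggestion: fail loudly instead of silently aliasing the removed keyword. The class name and error message are illustrative, not the project's actual code:

```python
class HalfNormalSketch:
    def __init__(self, sigma=None, tau=None, sd=None):
        # Reject the removed keyword with an explicit error so callers
        # running old code get a clear migration hint.
        if sd is not None:
            raise TypeError("`sd` is no longer supported; use `sigma` instead.")
        self.sigma = 1.0 if sigma is None else sigma
```

With that, `HalfNormalSketch(sd=2.0)` raises immediately instead of quietly behaving like `sigma=2.0`.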
@@ -1218,7 +1215,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         alpha, beta = self.get_alpha_beta(alpha, beta, mu, sigma)
         self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
         self.beta = beta = tt.as_tensor_variable(floatX(beta))
@@ -1724,7 +1720,6 @@ def __init__(self, mu=0, sigma=None, tau=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)

         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
@@ -1884,11 +1879,9 @@ class StudentT(Continuous):
     """

     def __init__(self, nu, mu=0, lam=None, sigma=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         self.nu = nu = tt.as_tensor_variable(floatX(nu))
         lam, sigma = get_tau_sigma(tau=lam, sigma=sigma)
         self.lam = lam = tt.as_tensor_variable(lam)

Besides dropping the warning, this hunk also removes a duplicated super().__init__(*args, **kwargs) call, hence the two-line reduction in the hunk header.
@@ -2397,7 +2390,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)

         alpha, beta = self.get_alpha_beta(alpha, beta, mu, sigma)
         self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
@@ -2545,7 +2537,6 @@ def __init__(self, alpha=None, beta=None, mu=None, sigma=None, sd=None, *args, **kwargs):

         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)

         alpha, beta = InverseGamma._get_alpha_beta(alpha, beta, mu, sigma)
         self.alpha = alpha = tt.as_tensor_variable(floatX(alpha))
@@ -2902,7 +2893,6 @@ def __init__(self, nu=1, sigma=None, lam=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)

         self.mode = tt.as_tensor_variable(0)
         lam, sigma = get_tau_sigma(lam, sigma)
@@ -3041,7 +3031,6 @@ def __init__(self, mu=0.0, sigma=None, nu=None, sd=None, *args, **kwargs):

         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)

         self.mu = mu = tt.as_tensor_variable(floatX(mu))
         self.sigma = self.sd = sigma = tt.as_tensor_variable(floatX(sigma))

Review comment on the self.sigma = self.sd = ... line: can remove.
@@ -3317,7 +3306,6 @@ def __init__(self, mu=0.0, sigma=None, tau=None, alpha=1, sd=None, *args, **kwargs):

         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)

         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.mu = mu = tt.as_tensor_variable(floatX(mu))
@@ -3721,7 +3709,6 @@ def __init__(self, nu=None, sigma=None, b=None, sd=None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)

         nu, b, sigma = self.get_nu_b(nu, b, sigma)
         self.nu = nu = tt.as_tensor_variable(floatX(nu))
@@ -3994,7 +3981,6 @@ class LogitNormal(UnitContinuous):
     def __init__(self, mu=0, sigma=None, tau=None, sd=None, **kwargs):
         if sd is not None:
             sigma = sd
-            warnings.warn("sd is deprecated, use sigma instead", DeprecationWarning)
         self.mu = mu = tt.as_tensor_variable(floatX(mu))
         tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
         self.sigma = self.sd = tt.as_tensor_variable(sigma)
Review discussion on keeping the self.sd attribute:

Reviewer: I think we can leave self.sd too.

Author: Sure, I can revert this, but wouldn't it be safer to have a single source of truth for the value of sigma?

Reviewer: I agree with your sentiment, but from a user's perspective, after setting sd=x I would expect there to be a y.sd. And since we're keeping sd around anyway, which isn't the cleanest to begin with, I don't see what we gain by changing this one thing in a subtle way.

Author: Makes sense, thanks for explaining!
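For illustration, the single-source-of-truth idea the author floats could look like the sketch below: store only sigma and expose sd as a read-only alias. This is hypothetical; the PR ultimately keeps the existing self.sigma = self.sd = ... assignment:

```python
class DistSketch:
    def __init__(self, sigma=1.0):
        self.sigma = sigma  # the one stored value

    @property
    def sd(self):
        # Read-only alias: y.sd always reflects the current sigma,
        # so a user who passed sd=x still finds y.sd afterwards.
        return self.sigma
```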