From e1d9c69cf57ce6f007adfe953ff45e992ad5f12e Mon Sep 17 00:00:00 2001 From: Sayam753 Date: Sun, 30 May 2021 17:31:45 +0530 Subject: [PATCH 1/8] Make MvStudentT distribution v4 compatible --- pymc3/distributions/multivariate.py | 116 ++++++++++++++--------- pymc3/tests/test_distributions.py | 2 +- pymc3/tests/test_distributions_random.py | 1 - 3 files changed, 72 insertions(+), 47 deletions(-) diff --git a/pymc3/distributions/multivariate.py b/pymc3/distributions/multivariate.py index ad7c1df646..846102f666 100644 --- a/pymc3/distributions/multivariate.py +++ b/pymc3/distributions/multivariate.py @@ -27,6 +27,7 @@ from aesara.tensor import gammaln from aesara.tensor.nlinalg import det, eigh, matrix_inverse, trace from aesara.tensor.random.basic import MultinomialRV, dirichlet, multivariate_normal +from aesara.tensor.random.op import RandomVariable, default_shape_from_params from aesara.tensor.random.utils import broadcast_params from aesara.tensor.slinalg import ( Cholesky, @@ -248,6 +249,66 @@ def _distr_parameters_for_repr(self): return ["mu", "cov"] +def safe_multivariate_t(nu, mu, cov, size=None, rng=None): + res = np.atleast_1d( + stats.multivariate_t(loc=mu, shape=cov, df=nu, allow_singular=True).rvs( + size=size, random_state=rng + ) + ) + + if size is not None: + res = res.reshape(list(size) + [-1]) + + return res + + +class MvStudentTRV(RandomVariable): + name = "multivariate_studentt" + ndim_supp = 1 + ndims_params = [0, 1, 2] + dtype = "floatX" + _print_name = ("MvStudentT", "\\operatorname{MvStudentT}") + + def __call__(self, nu, mu=None, cov=None, size=None, **kwargs): + + dtype = aesara.config.floatX if self.dtype == "floatX" else self.dtype + + if mu is None: + mu = np.array([0.0], dtype=dtype) + if cov is None: + cov = np.array([[1.0]], dtype=dtype) + return super().__call__(nu, mu, cov, size=size, **kwargs) + + def _shape_from_params(self, dist_params, rep_param_idx=1, param_shapes=None): + return default_shape_from_params(self.ndim_supp, dist_params, rep_param_idx, param_shapes) + + @classmethod + def rng_fn(cls, rng, nu, mu, cov, size): + + if mu.ndim > 1 or cov.ndim > 2: + # Neither SciPy nor NumPy implement parameter broadcasting for + # multivariate normals (or many other multivariate distributions), + # so we have implement a quick and dirty one here + mu, cov = broadcast_params([mu, cov], cls.ndims_params[1:]) + size = tuple(size or ()) + + if size: + mu = np.broadcast_to(mu, size + mu.shape) + cov = np.broadcast_to(cov, size + cov.shape) + + res = np.empty(mu.shape) + for idx in np.ndindex(mu.shape[:-1]): + m = mu[idx] + c = cov[idx] + res[idx] = safe_multivariate_t(nu, m, c, rng=rng) + return res + else: + return safe_multivariate_t(nu, mu, cov, size=size, rng=rng) + + +mv_studentt = MvStudentTRV() + + class MvStudentT(Continuous): r""" Multivariate Student-T log-likelihood. @@ -288,55 +349,20 @@ class MvStudentT(Continuous): lower: bool, default=True Whether the cholesky fatcor is given as a lower triangular matrix. 
""" + rv_op = mv_studentt - def __init__( - self, nu, Sigma=None, mu=None, cov=None, tau=None, chol=None, lower=True, *args, **kwargs - ): + @classmethod + def dist(cls, nu, Sigma=None, mu=None, cov=None, tau=None, chol=None, lower=True, **kwargs): if Sigma is not None: if cov is not None: raise ValueError("Specify only one of cov and Sigma") cov = Sigma - super().__init__(mu=mu, cov=cov, tau=tau, chol=chol, lower=lower, *args, **kwargs) - self.nu = nu = at.as_tensor_variable(nu) - self.mean = self.median = self.mode = self.mu = self.mu - - def random(self, point=None, size=None): - """ - Draw random values from Multivariate Student's T distribution. - - Parameters - ---------- - point: dict, optional - Dict of variable values on which random values are to be - conditioned (uses default point if not specified). - size: int, optional - Desired size of random sample (returns one sample if not - specified). - - Returns - ------- - array - """ - # with _DrawValuesContext(): - # nu, mu = draw_values([self.nu, self.mu], point=point, size=size) - # if self._cov_type == "cov": - # (cov,) = draw_values([self.cov], point=point, size=size) - # dist = MvNormal.dist(mu=np.zeros_like(mu), cov=cov, shape=self.shape) - # elif self._cov_type == "tau": - # (tau,) = draw_values([self.tau], point=point, size=size) - # dist = MvNormal.dist(mu=np.zeros_like(mu), tau=tau, shape=self.shape) - # else: - # (chol,) = draw_values([self.chol_cov], point=point, size=size) - # dist = MvNormal.dist(mu=np.zeros_like(mu), chol=chol, shape=self.shape) - # - # samples = dist.random(point, size) - # - # chi2_samples = np.random.chisquare(nu, size) - # # Add distribution shape to chi2 samples - # chi2_samples = chi2_samples.reshape(chi2_samples.shape + (1,) * len(self.shape)) - # return (samples / np.sqrt(chi2_samples / nu)) + mu + nu = at.as_tensor_variable(nu) + mu = at.as_tensor_variable(mu) + cov = quaddist_matrix(cov, chol, tau, lower) + return super().dist([nu, mu, cov], **kwargs) - def logp(value, nu, cov): + def logp(value, nu, mu, cov): """ Calculate log-probability of Multivariate Student's T distribution at specified value. 
@@ -350,7 +376,7 @@ def logp(value, nu, cov): ------- TensorVariable """ - quaddist, logdet, ok = quaddist_parse(value, nu, cov) + quaddist, logdet, ok = quaddist_parse(value, mu, cov) k = floatX(value.shape[-1]) norm = gammaln((nu + k) / 2.0) - gammaln(nu / 2.0) - 0.5 * k * floatX(np.log(nu * np.pi)) @@ -358,7 +384,7 @@ def logp(value, nu, cov): return bound(norm + inner - logdet, ok) def _distr_parameters_for_repr(self): - return ["mu", "nu", "cov"] + return ["nu", "mu", "cov"] class Dirichlet(Continuous): diff --git a/pymc3/tests/test_distributions.py b/pymc3/tests/test_distributions.py index 38ae1c82d8..dd8b546ef8 100644 --- a/pymc3/tests/test_distributions.py +++ b/pymc3/tests/test_distributions.py @@ -2019,7 +2019,6 @@ def test_kroneckernormal(self, n, m, sigma): ) @pytest.mark.parametrize("n", [1, 2]) - @pytest.mark.xfail(reason="Distribution not refactored yet") def test_mvt(self, n): self.check_logp( MvStudentT, @@ -2032,6 +2031,7 @@ def test_mvt(self, n): RealMatrix(2, n), {"nu": Rplus, "Sigma": PdMatrix(n), "mu": Vector(R, n)}, mvt_logpdf, + extra_args={"size": 2}, ) @pytest.mark.parametrize("n", [2, 3, 4]) diff --git a/pymc3/tests/test_distributions_random.py b/pymc3/tests/test_distributions_random.py index a4e2905d5b..ea09b7dff5 100644 --- a/pymc3/tests/test_distributions_random.py +++ b/pymc3/tests/test_distributions_random.py @@ -1402,7 +1402,6 @@ def ref_rand_evd(size, mu, evds, sigma): model_args=evd_args, ) - @pytest.mark.xfail(reason="This distribution has not been refactored for v4") def test_mv_t(self): def ref_rand(size, nu, Sigma, mu): normal = st.multivariate_normal.rvs(cov=Sigma, size=size) From 1425508f663967466ad3973f5d971cc9c12dde75 Mon Sep 17 00:00:00 2001 From: Sayam753 Date: Sun, 30 May 2021 18:53:30 +0530 Subject: [PATCH 2/8] Enable pymc3/tests/test_distributions.py::TestBugfixes::test_issue_3051 --- pymc3/tests/test_distributions.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pymc3/tests/test_distributions.py b/pymc3/tests/test_distributions.py index dd8b546ef8..93c4b49d6e 100644 --- a/pymc3/tests/test_distributions.py +++ b/pymc3/tests/test_distributions.py @@ -2941,9 +2941,12 @@ class TestBugfixes: "dist_cls,kwargs", [(MvNormal, dict(mu=0)), (MvStudentT, dict(mu=0, nu=2))] ) @pytest.mark.parametrize("dims", [1, 2, 4]) - @pytest.mark.xfail(reason="Distribution not refactored yet") def test_issue_3051(self, dims, dist_cls, kwargs): - d = dist_cls.dist(**kwargs, cov=np.eye(dims), size=(dims,)) + mu = np.repeat(kwargs["mu"], dims) + if "nu" in kwargs: + d = dist_cls.dist(nu=kwargs["nu"], mu=mu, cov=np.eye(dims), size=(20)) + else: + d = dist_cls.dist(mu=mu, cov=np.eye(dims), size=(20)) X = np.random.normal(size=(20, dims)) actual_t = logpt(d, X) From 8eb5623dea8befeca563b636676caf4e81e861f9 Mon Sep 17 00:00:00 2001 From: Sayam753 Date: Wed, 9 Jun 2021 01:05:36 +0530 Subject: [PATCH 3/8] refactor tests --- pymc3/tests/test_distributions.py | 11 +--- pymc3/tests/test_distributions_random.py | 74 +++++++++++++++++++----- 2 files changed, 61 insertions(+), 24 deletions(-) diff --git a/pymc3/tests/test_distributions.py b/pymc3/tests/test_distributions.py index 93c4b49d6e..be4b1a80c3 100644 --- a/pymc3/tests/test_distributions.py +++ b/pymc3/tests/test_distributions.py @@ -2937,16 +2937,11 @@ def test_car_logp(size): class TestBugfixes: - @pytest.mark.parametrize( - "dist_cls,kwargs", [(MvNormal, dict(mu=0)), (MvStudentT, dict(mu=0, nu=2))] - ) + @pytest.mark.parametrize("dist_cls,kwargs", [(MvNormal, dict()), (MvStudentT, dict(nu=2))]) 
@pytest.mark.parametrize("dims", [1, 2, 4]) def test_issue_3051(self, dims, dist_cls, kwargs): - mu = np.repeat(kwargs["mu"], dims) - if "nu" in kwargs: - d = dist_cls.dist(nu=kwargs["nu"], mu=mu, cov=np.eye(dims), size=(20)) - else: - d = dist_cls.dist(mu=mu, cov=np.eye(dims), size=(20)) + mu = np.repeat(0, dims) + d = dist_cls.dist(mu=mu, cov=np.eye(dims), **kwargs, size=(20)) X = np.random.normal(size=(20, dims)) actual_t = logpt(d, X) diff --git a/pymc3/tests/test_distributions_random.py b/pymc3/tests/test_distributions_random.py index ea09b7dff5..e23c1b7717 100644 --- a/pymc3/tests/test_distributions_random.py +++ b/pymc3/tests/test_distributions_random.py @@ -843,7 +843,7 @@ class TestPoisson(BaseTestDistribution): tests_to_run = ["check_pymc_params_match_rv_op"] -class TestMvNormal(BaseTestDistribution): +class TestMvNormalCov(BaseTestDistribution): pymc_dist = pm.MvNormal pymc_dist_params = { "mu": np.array([1.0, 2.0]), @@ -893,6 +893,63 @@ class TestMvNormalTau(BaseTestDistribution): tests_to_run = ["check_pymc_params_match_rv_op"] +class TestMvStudentTCov(BaseTestDistribution): + pymc_dist = pm.MvStudentT + pymc_dist_params = { + "nu": 5, + "mu": np.array([1.0, 2.0]), + "cov": np.array([[2.0, 0.0], [0.0, 3.5]]), + } + expected_rv_op_params = { + "nu": 5, + "mu": np.array([1.0, 2.0]), + "cov": np.array([[2.0, 0.0], [0.0, 3.5]]), + } + sizes_to_check = [None, (1), (2, 3)] + sizes_expected = [(2,), (1, 2), (2, 3, 2)] + reference_dist_params = { + "df": 5, + "loc": np.array([1.0, 2.0]), + "shape": np.array([[2.0, 0.0], [0.0, 3.5]]), + } + reference_dist = seeded_scipy_distribution_builder("multivariate_t") + tests_to_run = [ + "check_pymc_params_match_rv_op", + "check_pymc_draws_match_reference", + "check_rv_size", + ] + + +class TestMvStudentTChol(BaseTestDistribution): + pymc_dist = pm.MvStudentT + pymc_dist_params = { + "nu": 5, + "mu": np.array([1.0, 2.0]), + "chol": np.array([[2.0, 0.0], [0.0, 3.5]]), + } + expected_rv_op_params = { + "nu": 5, + "mu": np.array([1.0, 2.0]), + "cov": quaddist_matrix(chol=pymc_dist_params["chol"]).eval(), + } + tests_to_run = ["check_pymc_params_match_rv_op"] + + +class TestMvStudentTTau(BaseTestDistribution): + pymc_dist = pm.MvStudentT + pymc_dist_params = { + "nu": 5, + "mu": np.array([1.0, 2.0]), + "tau": np.array([[2.0, 0.0], [0.0, 3.5]]), + } + expected_rv_op_params = { + "nu": 5, + "mu": np.array([1.0, 2.0]), + "cov": quaddist_matrix(tau=pymc_dist_params["tau"]).eval(), + } + tests_to_run = ["check_pymc_params_match_rv_op"] + + class TestDirichlet(BaseTestDistribution): pymc_dist = pm.Dirichlet pymc_dist_params = {"a": np.array([1.0, 2.0])} @@ -1402,21 +1459,6 @@ def ref_rand_evd(size, mu, evds, sigma): model_args=evd_args, ) - def test_mv_t(self): - def ref_rand(size, nu, Sigma, mu): - normal = st.multivariate_normal.rvs(cov=Sigma, size=size) - chi2 = st.chi2.rvs(df=nu, size=size)[..., None] - return mu + (normal / np.sqrt(chi2 / nu)) - - for n in [2, 3]: - pymc3_random( - pm.MvStudentT, - {"nu": Domain([5, 10, 25, 50]), "Sigma": PdMatrix(n), "mu": Vector(R, n)}, - size=100, - valuedomain=Vector(R, n), - ref_rand=ref_rand, - ) - @pytest.mark.xfail(reason="This distribution has not been refactored for v4") def test_dirichlet_multinomial(self): def ref_rand(size, a, n): From a553c558d89af32c48d628485f57ed7e3bfe8462 Mon Sep 17 00:00:00 2001 From: Sayam753 Date: Sun, 13 Jun 2021 03:42:53 +0530 Subject: [PATCH 4/8] simplify rng_fn method and refactor tests --- pymc3/distributions/multivariate.py | 46 ++++++++---------------- 
pymc3/tests/test_distributions_random.py | 15 +++++--- 2 files changed, 26 insertions(+), 35 deletions(-) diff --git a/pymc3/distributions/multivariate.py b/pymc3/distributions/multivariate.py index 846102f666..b6dc2830af 100644 --- a/pymc3/distributions/multivariate.py +++ b/pymc3/distributions/multivariate.py @@ -249,19 +249,6 @@ def _distr_parameters_for_repr(self): return ["mu", "cov"] -def safe_multivariate_t(nu, mu, cov, size=None, rng=None): - res = np.atleast_1d( - stats.multivariate_t(loc=mu, shape=cov, df=nu, allow_singular=True).rvs( - size=size, random_state=rng - ) - ) - - if size is not None: - res = res.reshape(list(size) + [-1]) - - return res - - class MvStudentTRV(RandomVariable): name = "multivariate_studentt" ndim_supp = 1 @@ -285,25 +272,22 @@ def _shape_from_params(self, dist_params, rep_param_idx=1, param_shapes=None): @classmethod def rng_fn(cls, rng, nu, mu, cov, size): - if mu.ndim > 1 or cov.ndim > 2: - # Neither SciPy nor NumPy implement parameter broadcasting for - # multivariate normals (or many other multivariate distributions), - # so we have implement a quick and dirty one here - mu, cov = broadcast_params([mu, cov], cls.ndims_params[1:]) - size = tuple(size or ()) + # Don't reassign broadcasted cov, since MvNormal expects two dimensional cov only. + mu, _ = broadcast_params([mu, cov], cls.ndims_params[1:]) - if size: - mu = np.broadcast_to(mu, size + mu.shape) - cov = np.broadcast_to(cov, size + cov.shape) - - res = np.empty(mu.shape) - for idx in np.ndindex(mu.shape[:-1]): - m = mu[idx] - c = cov[idx] - res[idx] = safe_multivariate_t(nu, m, c, rng=rng) - return res - else: - return safe_multivariate_t(nu, mu, cov, size=size, rng=rng) + chi2_samples = rng.chisquare(nu, size=size) + # Add distribution shape to chi2 samples + chi2_samples = chi2_samples.reshape(chi2_samples.shape + (1,) * len(mu.shape)) + + mv_samples = pm.MvNormal.dist( + mu=np.zeros_like(mu), cov=cov, size=size, rng=aesara.shared(rng) + ).eval() + + size = tuple(size or ()) + if size: + mu = np.broadcast_to(mu, size + mu.shape) + + return (mv_samples / np.sqrt(chi2_samples / nu)) + mu mv_studentt = MvStudentTRV() diff --git a/pymc3/tests/test_distributions_random.py b/pymc3/tests/test_distributions_random.py index e23c1b7717..f2c8e9faed 100644 --- a/pymc3/tests/test_distributions_random.py +++ b/pymc3/tests/test_distributions_random.py @@ -894,6 +894,11 @@ class TestMvNormalTau(BaseTestDistribution): class TestMvStudentTCov(BaseTestDistribution): + def mvstudentt_rng_fn(self, size, nu, mu, cov, rng): + chi2_samples = rng.chisquare(nu, size=size) + mv_samples = rng.multivariate_normal(np.zeros_like(mu), cov, size=size) + return (mv_samples / np.sqrt(chi2_samples[:, None] / nu)) + mu + pymc_dist = pm.MvStudentT pymc_dist_params = { "nu": 5, @@ -908,11 +913,13 @@ class TestMvStudentTCov(BaseTestDistribution): sizes_to_check = [None, (1), (2, 3)] sizes_expected = [(2,), (1, 2), (2, 3, 2)] reference_dist_params = { - "df": 5, - "loc": np.array([1.0, 2.0]), - "shape": np.array([[2.0, 0.0], [0.0, 3.5]]), + "nu": 5, + "mu": np.array([1.0, 2.0]), + "cov": np.array([[2.0, 0.0], [0.0, 3.5]]), } - reference_dist = seeded_scipy_distribution_builder("multivariate_t") + reference_dist = lambda self: functools.partial( + self.mvstudentt_rng_fn, rng=self.get_random_state() + ) tests_to_run = [ "check_pymc_params_match_rv_op", "check_pymc_draws_match_reference", From 91e2dd134e053e460e671aa5e147c48b664e31af Mon Sep 17 00:00:00 2001 From: Sayam753 Date: Sun, 13 Jun 2021 03:51:26 +0530 Subject: [PATCH 5/8] 
Refactor rng method to compute sqrt over chi2_samples, one step earlier --- pymc3/distributions/multivariate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pymc3/distributions/multivariate.py b/pymc3/distributions/multivariate.py index b6dc2830af..b7087adaa5 100644 --- a/pymc3/distributions/multivariate.py +++ b/pymc3/distributions/multivariate.py @@ -275,7 +275,7 @@ def rng_fn(cls, rng, nu, mu, cov, size): # Don't reassign broadcasted cov, since MvNormal expects two dimensional cov only. mu, _ = broadcast_params([mu, cov], cls.ndims_params[1:]) - chi2_samples = rng.chisquare(nu, size=size) + chi2_samples = np.sqrt(rng.chisquare(nu, size=size) / nu) # Add distribution shape to chi2 samples chi2_samples = chi2_samples.reshape(chi2_samples.shape + (1,) * len(mu.shape)) @@ -287,7 +287,7 @@ def rng_fn(cls, rng, nu, mu, cov, size): if size: mu = np.broadcast_to(mu, size + mu.shape) - return (mv_samples / np.sqrt(chi2_samples / nu)) + mu + return (mv_samples / chi2_samples) + mu mv_studentt = MvStudentTRV() From eb3901d4c1d67a3fa2574a2e0461ad9cef968b98 Mon Sep 17 00:00:00 2001 From: Sayam753 Date: Sun, 13 Jun 2021 12:46:01 +0530 Subject: [PATCH 6/8] Modify rng method to use aesara multivariate_normal.rng_fn --- pymc3/distributions/multivariate.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pymc3/distributions/multivariate.py b/pymc3/distributions/multivariate.py index b7087adaa5..ae6975a9c5 100644 --- a/pymc3/distributions/multivariate.py +++ b/pymc3/distributions/multivariate.py @@ -279,9 +279,7 @@ def rng_fn(cls, rng, nu, mu, cov, size): # Add distribution shape to chi2 samples chi2_samples = chi2_samples.reshape(chi2_samples.shape + (1,) * len(mu.shape)) - mv_samples = pm.MvNormal.dist( - mu=np.zeros_like(mu), cov=cov, size=size, rng=aesara.shared(rng) - ).eval() + mv_samples = multivariate_normal.rng_fn(rng=rng, mean=np.zeros_like(mu), cov=cov, size=size) size = tuple(size or ()) if size: From c98b214ff08099fff63733531f3420413a09609a Mon Sep 17 00:00:00 2001 From: Sayam753 Date: Sun, 13 Jun 2021 20:41:47 +0530 Subject: [PATCH 7/8] Add suggestions Co-authored-by: Ricardo --- pymc3/distributions/multivariate.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pymc3/distributions/multivariate.py b/pymc3/distributions/multivariate.py index ae6975a9c5..2ec99665f5 100644 --- a/pymc3/distributions/multivariate.py +++ b/pymc3/distributions/multivariate.py @@ -316,8 +316,8 @@ class MvStudentT(Continuous): Parameters ---------- - nu: int - Degrees of freedom. + nu: float + Degrees of freedom, should be a positive scalar. Sigma: matrix Covariance matrix. Use `cov` in new code. 
mu: array @@ -339,8 +339,8 @@ def dist(cls, nu, Sigma=None, mu=None, cov=None, tau=None, chol=None, lower=True if cov is not None: raise ValueError("Specify only one of cov and Sigma") cov = Sigma - nu = at.as_tensor_variable(nu) - mu = at.as_tensor_variable(mu) + nu = at.as_tensor_variable(floatX(nu)) + mu = at.as_tensor_variable(floatX(mu)) cov = quaddist_matrix(cov, chol, tau, lower) return super().dist([nu, mu, cov], **kwargs) @@ -361,7 +361,7 @@ def logp(value, nu, mu, cov): quaddist, logdet, ok = quaddist_parse(value, mu, cov) k = floatX(value.shape[-1]) - norm = gammaln((nu + k) / 2.0) - gammaln(nu / 2.0) - 0.5 * k * floatX(np.log(nu * np.pi)) + norm = gammaln((nu + k) / 2.0) - gammaln(nu / 2.0) - 0.5 * k * at.log(nu * np.pi) inner = -(nu + k) / 2.0 * at.log1p(quaddist / nu) return bound(norm + inner - logdet, ok) From de30b28581d1b37b6cde99bba16d5742cb62d2cc Mon Sep 17 00:00:00 2001 From: Sayam753 Date: Sun, 13 Jun 2021 20:50:04 +0530 Subject: [PATCH 8/8] Add a check against negative support of degrees of freedom Co-authored-by: Ricardo --- pymc3/distributions/multivariate.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pymc3/distributions/multivariate.py b/pymc3/distributions/multivariate.py index 2ec99665f5..b7566d59ac 100644 --- a/pymc3/distributions/multivariate.py +++ b/pymc3/distributions/multivariate.py @@ -42,7 +42,7 @@ from pymc3.aesaraf import floatX, intX from pymc3.distributions import transforms -from pymc3.distributions.continuous import ChiSquared, Normal +from pymc3.distributions.continuous import ChiSquared, Normal, assert_negative_support from pymc3.distributions.dist_math import bound, factln, logpow, multigammaln from pymc3.distributions.distribution import Continuous, Discrete from pymc3.math import kron_diag, kron_dot, kron_solve_lower, kronecker @@ -342,6 +342,7 @@ def dist(cls, nu, Sigma=None, mu=None, cov=None, tau=None, chol=None, lower=True nu = at.as_tensor_variable(floatX(nu)) mu = at.as_tensor_variable(floatX(mu)) cov = quaddist_matrix(cov, chol, tau, lower) + assert_negative_support(nu, "nu", "MvStudentT") return super().dist([nu, mu, cov], **kwargs) def logp(value, nu, mu, cov):
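
For reference, the refactored `rng_fn` above draws MvStudentT samples by dividing zero-mean multivariate normal draws by sqrt(chi2(nu) / nu) and shifting by `mu`. The standalone NumPy sketch below is illustrative only and not part of the patches (the parameter values are arbitrary); it reproduces that sampling identity and sanity-checks the resulting moments.

# Minimal NumPy sketch of the chi-square scaling identity used by MvStudentTRV.rng_fn.
import numpy as np

rng = np.random.default_rng(2021)
nu = 5.0
mu = np.array([1.0, 2.0])
cov = np.array([[2.0, 0.0], [0.0, 3.5]])

size = (200_000,)
# Zero-mean multivariate normal draws with the requested covariance.
mv_samples = rng.multivariate_normal(np.zeros_like(mu), cov, size=size)
# Per-draw scaling factor sqrt(chi2(nu) / nu), broadcast over the event dimension.
chi2_scale = np.sqrt(rng.chisquare(nu, size=size) / nu)[..., None]
samples = mv_samples / chi2_scale + mu

# Sanity checks: the empirical mean approaches mu (for nu > 1) and the empirical
# covariance approaches cov * nu / (nu - 2) (for nu > 2).
print(samples.mean(axis=0))           # roughly [1.0, 2.0]
print(np.cov(samples, rowvar=False))  # roughly [[3.33, 0.0], [0.0, 5.83]]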