
Commit 57c6a8f

ricardoV94 authored and twiecki committed
Move printing-related tests to their own test module
1 parent c758d0d commit 57c6a8f

File tree

2 files changed (+149, -134 lines)

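With the printing tests in a standalone module, they can now be run in isolation from the rest of the distribution tests. The snippet below is a minimal, hypothetical sketch (not part of the commit) of such a run; it assumes pytest drives the pymc test suite, as the pytest.raises usage in test_distributions.py suggests.

# Hypothetical sketch, not part of this commit: run only the relocated
# printing tests, assuming pytest is the project's test runner.
import sys

import pytest

if __name__ == "__main__":
    # Equivalent to `pytest pymc/tests/test_printing.py` from the repo root.
    sys.exit(pytest.main(["pymc/tests/test_printing.py"]))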

pymc/tests/test_distributions.py

Lines changed: 1 addition & 134 deletions
@@ -127,7 +127,7 @@ def polyagamma_cdf(*args, **kwargs):
 )
 from pymc.distributions.shape_utils import to_tuple
 from pymc.math import kronecker
-from pymc.model import Deterministic, Model, Point, Potential
+from pymc.model import Model, Point
 from pymc.tests.helpers import select_by_precision
 from pymc.vartypes import continuous_types, discrete_types
 
@@ -2886,139 +2886,6 @@ def test_lower_bounded_broadcasted(self):
         assert upper_interval is None
 
 
-class TestStrAndLatexRepr:
-    def setup_class(self):
-        # True parameter values
-        alpha, sigma = 1, 1
-        beta = [1, 2.5]
-
-        # Size of dataset
-        size = 100
-
-        # Predictor variable
-        X = np.random.normal(size=(size, 2)).dot(np.array([[1, 0], [0, 0.2]]))
-
-        # Simulate outcome variable
-        Y = alpha + X.dot(beta) + np.random.randn(size) * sigma
-        with Model() as self.model:
-            # TODO: some variables commented out here as they're not working properly
-            # in v4 yet (9-jul-2021), so doesn't make sense to test str/latex for them
-
-            # Priors for unknown model parameters
-            alpha = Normal("alpha", mu=0, sigma=10)
-            b = Normal("beta", mu=0, sigma=10, size=(2,), observed=beta)
-            sigma = HalfNormal("sigma", sigma=1)
-
-            # Test Cholesky parameterization
-            Z = MvNormal("Z", mu=np.zeros(2), chol=np.eye(2), size=(2,))
-
-            # NegativeBinomial representations to test issue 4186
-            # nb1 = pm.NegativeBinomial(
-            #     "nb_with_mu_alpha", mu=pm.Normal("nbmu"), alpha=pm.Gamma("nbalpha", mu=6, sigma=1)
-            # )
-            nb2 = pm.NegativeBinomial("nb_with_p_n", p=pm.Uniform("nbp"), n=10)
-
-            # Expected value of outcome
-            mu = Deterministic("mu", floatX(alpha + at.dot(X, b)))
-
-            # add a bounded variable as well
-            # bound_var = Bound(Normal, lower=1.0)("bound_var", mu=0, sigma=10)
-
-            # KroneckerNormal
-            n, m = 3, 4
-            covs = [np.eye(n), np.eye(m)]
-            kron_normal = KroneckerNormal("kron_normal", mu=np.zeros(n * m), covs=covs, size=n * m)
-
-            # MatrixNormal
-            # matrix_normal = MatrixNormal(
-            #     "mat_normal",
-            #     mu=np.random.normal(size=n),
-            #     rowcov=np.eye(n),
-            #     colchol=np.linalg.cholesky(np.eye(n)),
-            #     size=(n, n),
-            # )
-
-            # DirichletMultinomial
-            dm = DirichletMultinomial("dm", n=5, a=[1, 1, 1], size=(2, 3))
-
-            # Likelihood (sampling distribution) of observations
-            Y_obs = Normal("Y_obs", mu=mu, sigma=sigma, observed=Y)
-
-            # add a potential as well
-            pot = Potential("pot", mu**2)
-
-        self.distributions = [alpha, sigma, mu, b, Z, nb2, Y_obs, pot]
-        self.deterministics_or_potentials = [mu, pot]
-        # tuples of (formatting, include_params
-        self.formats = [("plain", True), ("plain", False), ("latex", True), ("latex", False)]
-        self.expected = {
-            ("plain", True): [
-                r"alpha ~ N(0, 10)",
-                r"sigma ~ N**+(0, 1)",
-                r"mu ~ Deterministic(f(beta, alpha))",
-                r"beta ~ N(0, 10)",
-                r"Z ~ N(f(), f())",
-                r"nb_with_p_n ~ NB(10, nbp)",
-                r"Y_obs ~ N(mu, sigma)",
-                r"pot ~ Potential(f(beta, alpha))",
-            ],
-            ("plain", False): [
-                r"alpha ~ N",
-                r"sigma ~ N**+",
-                r"mu ~ Deterministic",
-                r"beta ~ N",
-                r"Z ~ N",
-                r"nb_with_p_n ~ NB",
-                r"Y_obs ~ N",
-                r"pot ~ Potential",
-            ],
-            ("latex", True): [
-                r"$\text{alpha} \sim \operatorname{N}(0,~10)$",
-                r"$\text{sigma} \sim \operatorname{N^{+}}(0,~1)$",
-                r"$\text{mu} \sim \operatorname{Deterministic}(f(\text{beta},~\text{alpha}))$",
-                r"$\text{beta} \sim \operatorname{N}(0,~10)$",
-                r"$\text{Z} \sim \operatorname{N}(f(),~f())$",
-                r"$\text{nb_with_p_n} \sim \operatorname{NB}(10,~\text{nbp})$",
-                r"$\text{Y_obs} \sim \operatorname{N}(\text{mu},~\text{sigma})$",
-                r"$\text{pot} \sim \operatorname{Potential}(f(\text{beta},~\text{alpha}))$",
-            ],
-            ("latex", False): [
-                r"$\text{alpha} \sim \operatorname{N}$",
-                r"$\text{sigma} \sim \operatorname{N^{+}}$",
-                r"$\text{mu} \sim \operatorname{Deterministic}$",
-                r"$\text{beta} \sim \operatorname{N}$",
-                r"$\text{Z} \sim \operatorname{N}$",
-                r"$\text{nb_with_p_n} \sim \operatorname{NB}$",
-                r"$\text{Y_obs} \sim \operatorname{N}$",
-                r"$\text{pot} \sim \operatorname{Potential}$",
-            ],
-        }
-
-    def test__repr_latex_(self):
-        for distribution, tex in zip(self.distributions, self.expected[("latex", True)]):
-            assert distribution._repr_latex_() == tex
-
-        model_tex = self.model._repr_latex_()
-
-        # make sure each variable is in the model
-        for tex in self.expected[("latex", True)]:
-            for segment in tex.strip("$").split(r"\sim"):
-                assert segment in model_tex
-
-    def test_str_repr(self):
-        for str_format in self.formats:
-            for dist, text in zip(self.distributions, self.expected[str_format]):
-                assert dist.str_repr(*str_format) == text
-
-            model_text = self.model.str_repr(*str_format)
-            for text in self.expected[str_format]:
-                if str_format[0] == "latex":
-                    for segment in text.strip("$").split(r"\sim"):
-                        assert segment in model_text
-                else:
-                    assert text in model_text
-
-
 def test_discrete_trafo():
     with Model():
         with pytest.raises(ValueError) as err:

pymc/tests/test_printing.py

Lines changed: 148 additions & 0 deletions
@@ -0,0 +1,148 @@
+import numpy as np
+
+from pymc.aesaraf import floatX
+from pymc.distributions import (
+    DirichletMultinomial,
+    HalfNormal,
+    KroneckerNormal,
+    MvNormal,
+    NegativeBinomial,
+    Normal,
+    Uniform,
+)
+from pymc.math import dot
+from pymc.model import Deterministic, Model, Potential
+
+
+# TODO: This test is a bit too monolithic
+class TestStrAndLatexRepr:
+    def setup_class(self):
+        # True parameter values
+        alpha, sigma = 1, 1
+        beta = [1, 2.5]
+
+        # Size of dataset
+        size = 100
+
+        # Predictor variable
+        X = np.random.normal(size=(size, 2)).dot(np.array([[1, 0], [0, 0.2]]))
+
+        # Simulate outcome variable
+        Y = alpha + X.dot(beta) + np.random.randn(size) * sigma
+        with Model() as self.model:
+            # TODO: some variables commented out here as they're not working properly
+            # in v4 yet (9-jul-2021), so doesn't make sense to test str/latex for them
+
+            # Priors for unknown model parameters
+            alpha = Normal("alpha", mu=0, sigma=10)
+            b = Normal("beta", mu=0, sigma=10, size=(2,), observed=beta)
+            sigma = HalfNormal("sigma", sigma=1)
+
+            # Test Cholesky parameterization
+            Z = MvNormal("Z", mu=np.zeros(2), chol=np.eye(2), size=(2,))
+
+            # NegativeBinomial representations to test issue 4186
+            # nb1 = pm.NegativeBinomial(
+            #     "nb_with_mu_alpha", mu=pm.Normal("nbmu"), alpha=pm.Gamma("nbalpha", mu=6, sigma=1)
+            # )
+            nb2 = NegativeBinomial("nb_with_p_n", p=Uniform("nbp"), n=10)
+
+            # Expected value of outcome
+            mu = Deterministic("mu", floatX(alpha + dot(X, b)))
+
+            # add a bounded variable as well
+            # bound_var = Bound(Normal, lower=1.0)("bound_var", mu=0, sigma=10)
+
+            # KroneckerNormal
+            n, m = 3, 4
+            covs = [np.eye(n), np.eye(m)]
+            kron_normal = KroneckerNormal("kron_normal", mu=np.zeros(n * m), covs=covs, size=n * m)
+
+            # MatrixNormal
+            # matrix_normal = MatrixNormal(
+            #     "mat_normal",
+            #     mu=np.random.normal(size=n),
+            #     rowcov=np.eye(n),
+            #     colchol=np.linalg.cholesky(np.eye(n)),
+            #     size=(n, n),
+            # )
+
+            # DirichletMultinomial
+            dm = DirichletMultinomial("dm", n=5, a=[1, 1, 1], size=(2, 3))
+
+            # Likelihood (sampling distribution) of observations
+            Y_obs = Normal("Y_obs", mu=mu, sigma=sigma, observed=Y)
+
+            # add a potential as well
+            pot = Potential("pot", mu**2)
+
+        self.distributions = [alpha, sigma, mu, b, Z, nb2, Y_obs, pot]
+        self.deterministics_or_potentials = [mu, pot]
+        # tuples of (formatting, include_params
+        self.formats = [("plain", True), ("plain", False), ("latex", True), ("latex", False)]
+        self.expected = {
+            ("plain", True): [
+                r"alpha ~ N(0, 10)",
+                r"sigma ~ N**+(0, 1)",
+                r"mu ~ Deterministic(f(beta, alpha))",
+                r"beta ~ N(0, 10)",
+                r"Z ~ N(f(), f())",
+                r"nb_with_p_n ~ NB(10, nbp)",
+                r"Y_obs ~ N(mu, sigma)",
+                r"pot ~ Potential(f(beta, alpha))",
+            ],
+            ("plain", False): [
+                r"alpha ~ N",
+                r"sigma ~ N**+",
+                r"mu ~ Deterministic",
+                r"beta ~ N",
+                r"Z ~ N",
+                r"nb_with_p_n ~ NB",
+                r"Y_obs ~ N",
+                r"pot ~ Potential",
+            ],
+            ("latex", True): [
+                r"$\text{alpha} \sim \operatorname{N}(0,~10)$",
+                r"$\text{sigma} \sim \operatorname{N^{+}}(0,~1)$",
+                r"$\text{mu} \sim \operatorname{Deterministic}(f(\text{beta},~\text{alpha}))$",
+                r"$\text{beta} \sim \operatorname{N}(0,~10)$",
+                r"$\text{Z} \sim \operatorname{N}(f(),~f())$",
+                r"$\text{nb_with_p_n} \sim \operatorname{NB}(10,~\text{nbp})$",
+                r"$\text{Y_obs} \sim \operatorname{N}(\text{mu},~\text{sigma})$",
+                r"$\text{pot} \sim \operatorname{Potential}(f(\text{beta},~\text{alpha}))$",
+            ],
+            ("latex", False): [
+                r"$\text{alpha} \sim \operatorname{N}$",
+                r"$\text{sigma} \sim \operatorname{N^{+}}$",
+                r"$\text{mu} \sim \operatorname{Deterministic}$",
+                r"$\text{beta} \sim \operatorname{N}$",
+                r"$\text{Z} \sim \operatorname{N}$",
+                r"$\text{nb_with_p_n} \sim \operatorname{NB}$",
+                r"$\text{Y_obs} \sim \operatorname{N}$",
+                r"$\text{pot} \sim \operatorname{Potential}$",
+            ],
+        }
+
+    def test__repr_latex_(self):
+        for distribution, tex in zip(self.distributions, self.expected[("latex", True)]):
+            assert distribution._repr_latex_() == tex
+
+        model_tex = self.model._repr_latex_()
+
+        # make sure each variable is in the model
+        for tex in self.expected[("latex", True)]:
+            for segment in tex.strip("$").split(r"\sim"):
+                assert segment in model_tex
+
+    def test_str_repr(self):
+        for str_format in self.formats:
+            for dist, text in zip(self.distributions, self.expected[str_format]):
+                assert dist.str_repr(*str_format) == text
+
+            model_text = self.model.str_repr(*str_format)
+            for text in self.expected[str_format]:
+                if str_format[0] == "latex":
+                    for segment in text.strip("$").split(r"\sim"):
+                        assert segment in model_text
+                else:
+                    assert text in model_text
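For orientation, the printing API exercised by TestStrAndLatexRepr can be sketched as follows. This is an illustrative example, not part of the commit; the calls and the commented outputs are taken from the test expectations above, and the imports assume the same v4-era module layout used by the new test file.

# Illustrative sketch, not part of the commit: the str_repr/_repr_latex_ calls
# that TestStrAndLatexRepr asserts on, with outputs copied from its expected values.
from pymc.distributions import Normal
from pymc.model import Model

with Model() as model:
    alpha = Normal("alpha", mu=0, sigma=10)

# Per-variable representation, with (format, include_params) as positional arguments.
print(alpha.str_repr("plain", True))   # "alpha ~ N(0, 10)"
print(alpha.str_repr("latex", False))  # "$\text{alpha} \sim \operatorname{N}$"

# The model-level representation contains a line for every variable;
# _repr_latex_ is what notebook front ends pick up for rich display.
print(model.str_repr("plain", True))
print(model._repr_latex_())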
