---
jupyter:
  jupytext:
    formats: ipynb,md
    text_representation:
      extension: .md
      format_name: markdown
      format_version: '1.3'
      jupytext_version: 1.16.0
  kernelspec:
    display_name: Python 3 (ipykernel)
    language: python
    name: python3
---

# Code 1: Bayesian Inference

```{admonition} This is a reference notebook for the book Bayesian Modeling and Computation in Python
:class: tip, dropdown
The textbook is not needed to use or run this code, though the context and explanations are missing from this notebook.

If you'd like a copy, it's available
[from CRC Press](https://www.routledge.com/Bayesian-Modeling-and-Computation-in-Python/Martin-Kumar-Lao/p/book/9780367894368).
```

```python
%matplotlib inline
import arviz as az
import matplotlib.pyplot as plt
import numpy as np
import pymc as pm
from scipy import stats
from scipy.stats import entropy
from scipy.optimize import minimize
```

```python
az.style.use("arviz-grayscale")
plt.rcParams['figure.dpi'] = 300
np.random.seed(521)
viridish = [(0.2823529411764706, 0.11372549019607843, 0.43529411764705883, 1.0),
            (0.1450980392156863, 0.6705882352941176, 0.5098039215686274, 1.0),
            (0.6901960784313725, 0.8666666666666667, 0.1843137254901961, 1.0)]
```

## A DIY Sampler, Do Not Try This at Home

### Figure 1.1

```python
grid = np.linspace(0, 1, 5000)
prior = stats.triang.pdf(grid, 0.5)
# Likelihood of a single observation (0.2) evaluated across the grid of θ values
likelihood = stats.triang.pdf(0.2, grid)
posterior = prior * likelihood
log_prior = np.log(prior)
log_likelihood = np.log(likelihood)
log_posterior = log_prior + log_likelihood

_, ax = plt.subplots(1, 2, figsize=(10, 4))
ax[0].plot(grid, prior, label="prior", lw=2)
ax[0].plot(grid, likelihood, label="likelihood", lw=2, color="C2")
ax[0].plot(grid, posterior, label="posterior", lw=2, color="C4")
ax[0].set_xlabel("θ")
ax[0].legend()
ax[0].set_yticks([])

ax[1].plot(grid, log_prior, label="log-prior", lw=2)
ax[1].plot(grid, log_likelihood, label="log-likelihood", lw=2, color="C2")
ax[1].plot(grid, log_posterior, label="log-posterior", lw=2, color="C4")
ax[1].set_xlabel("θ")
ax[1].legend()
ax[1].set_yticks([])
plt.savefig("img/chp01/bayesian_triad.png")
```
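
The `posterior` computed above is unnormalized (prior times likelihood). As a quick sanity check, not part of the book's code, we can normalize it on the grid and confirm it integrates to one:

```python
# Normalize the grid posterior so it integrates to 1 (Riemann sum over the grid)
dθ = grid[1] - grid[0]
posterior_normalized = posterior / (posterior.sum() * dθ)
posterior_normalized.sum() * dθ  # should be ≈ 1.0
```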

### Code 1.1

```python
def post(θ, Y, α=1, β=1):
    if 0 <= θ <= 1:
        prior = stats.beta(α, β).pdf(θ)
        like = stats.bernoulli(θ).pmf(Y).prod()
        prop = like * prior
    else:
        # Values of θ outside [0, 1] get -inf so the sampler never accepts them
        prop = -np.inf
    return prop
```

### Code 1.2

```python
Y = stats.bernoulli(0.7).rvs(20)
```

### Code 1.3

```python
n_iters = 1000
can_sd = 0.05
α = β = 1
θ = 0.5
trace = {'θ': np.zeros(n_iters)}
p2 = post(θ, Y, α, β)

for iter in range(n_iters):
    # Propose a candidate θ from a Gaussian centered at the current value
    θ_can = stats.norm(θ, can_sd).rvs(1)
    p1 = post(θ_can, Y, α, β)
    # Metropolis acceptance ratio: candidate vs. current posterior density
    pa = p1 / p2

    if pa > stats.uniform(0, 1).rvs(1):
        θ = θ_can
        p2 = p1

    trace['θ'][iter] = θ
```
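
Because the Beta prior is conjugate to the Bernoulli likelihood, the exact posterior is available in closed form: Beta(α + ∑y, β + N − ∑y). A quick comparison against the Metropolis estimate (our check, not the book's code):

```python
# Closed-form conjugate posterior; its mean should be close to the trace mean
exact_post = stats.beta(α + Y.sum(), β + len(Y) - Y.sum())
trace['θ'].mean(), exact_post.mean()
```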

### Code 1.5

```python
az.summary(trace, kind='stats', round_to=2)
```

### Code 1.4 and Figure 1.2

```python
_, axes = plt.subplots(1, 2, figsize=(10, 4), constrained_layout=True, sharey=True)
axes[1].hist(trace['θ'], color='0.5', orientation="horizontal", density=True)
axes[1].set_xticks([])
axes[0].plot(trace['θ'], '0.5')
axes[0].set_ylabel('θ', rotation=0, labelpad=15)
plt.savefig("img/chp01/traceplot.png")
```

## Say Yes to Automating Inference, Say No to Automated Model Building

### Figure 1.3

```python
az.plot_posterior(trace)
plt.savefig("img/chp01/plot_posterior.png")
```

### Code 1.6

```python
# Declare a model in PyMC
with pm.Model() as model:
    # Specify the prior distribution of the unknown parameter
    θ = pm.Beta("θ", alpha=1, beta=1)

    # Specify the likelihood distribution and condition on the observed data
    y_obs = pm.Binomial("y_obs", n=1, p=θ, observed=Y)

    # Sample from the posterior distribution
    idata = pm.sample(1000)
```
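
As a quick cross-check (ours, not part of the book's code), the PyMC posterior can be summarized the same way as the DIY sampler's trace; the estimates should be similar:

```python
az.summary(idata, kind="stats", round_to=2)
```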

### Code 1.7

```python
graphviz = pm.model_to_graphviz(model)
graphviz
```

```python
graphviz.graph_attr.update(dpi="300")
graphviz.render("img/chp01/BetaBinomModelGraphViz", format="png")
```

## A Few Options to Quantify Your Prior Information

### Figure 1.5

```python
pred_dists = (pm.sample_prior_predictive(1000, model).prior_predictive["y_obs"].values,
              pm.sample_posterior_predictive(idata, model).posterior_predictive["y_obs"].values)
```

```python
fig, axes = plt.subplots(4, 1, figsize=(9, 9))

for idx, n_d, dist in zip((1, 3), ("Prior", "Posterior"), pred_dists):
    az.plot_dist(dist.sum(-1),
                 hist_kwargs={"color":"0.5", "bins":range(0, 22)},
                 ax=axes[idx])
    axes[idx].set_title(f"{n_d} predictive distribution", fontweight='bold')
    axes[idx].set_xlim(-1, 21)
    axes[idx].set_ylim(0, 0.15)
    axes[idx].set_xlabel("number of successes")

az.plot_dist(pm.draw(θ, 1000), plot_kwargs={"color":"0.5"},
             fill_kwargs={'alpha':1}, ax=axes[0])
axes[0].set_title("Prior distribution", fontweight='bold')
axes[0].set_xlim(0, 1)
axes[0].set_ylim(0, 4)
axes[0].tick_params(axis='both', pad=7)
axes[0].set_xlabel("θ")

az.plot_dist(idata.posterior["θ"], plot_kwargs={"color":"0.5"},
             fill_kwargs={'alpha':1}, ax=axes[2])
axes[2].set_title("Posterior distribution", fontweight='bold')
axes[2].set_xlim(0, 1)
axes[2].set_ylim(0, 5)
axes[2].tick_params(axis='both', pad=7)
axes[2].set_xlabel("θ")

plt.savefig("img/chp01/Bayesian_quartet_distributions.png")
```

### Figure 1.6

```python
predictions = (stats.binom(n=1, p=idata.posterior["θ"].mean()).rvs((4000, len(Y))),
               pred_dists[1])

for d, c, l in zip(predictions, ("C0", "C4"), ("posterior mean", "posterior predictive")):
    ax = az.plot_dist(d.sum(-1),
                      label=l,
                      figsize=(10, 5),
                      hist_kwargs={"alpha": 0.5, "color":c, "bins":range(0, 22)})
    ax.set_yticks([])
    ax.set_xlabel("number of successes")
plt.savefig("img/chp01/predictions_distributions.png")
```

### Code 1.8 and Figure 1.7

```python
_, axes = plt.subplots(2, 3, figsize=(12, 6), sharey=True, sharex=True,
                       constrained_layout=True)
axes = np.ravel(axes)

n_trials = [0, 1, 2, 3, 12, 180]
success = [0, 1, 1, 1, 6, 59]
data = zip(n_trials, success)

beta_params = [(0.5, 0.5), (1, 1), (10, 10)]
θ = np.linspace(0, 1, 1500)
for idx, (N, y) in enumerate(data):
    s_n = ('s' if (N > 1) else '')
    for jdx, (a_prior, b_prior) in enumerate(beta_params):
        p_theta_given_y = stats.beta.pdf(θ, a_prior + y, b_prior + N - y)

        axes[idx].plot(θ, p_theta_given_y, lw=4, color=viridish[jdx])
        axes[idx].set_yticks([])
        axes[idx].set_ylim(0, 12)
        axes[idx].plot(np.divide(y, N), 0, color='k', marker='o', ms=12)
        axes[idx].set_title(f'{N:4d} trial{s_n} {y:4d} success')

plt.savefig('img/chp01/beta_binomial_update.png')
```

### Figure 1.8

```python
θ = np.linspace(0, 1, 100)
κ = (θ / (1-θ))
y = 2
n = 7

_, axes = plt.subplots(2, 2, figsize=(10, 5),
                       sharex='col', sharey='row', constrained_layout=False)

axes[0, 0].set_title("Jeffreys' prior for Alice")
axes[0, 0].plot(θ, θ**(-0.5) * (1-θ)**(-0.5))
axes[1, 0].set_title("Jeffreys' posterior for Alice")
axes[1, 0].plot(θ, θ**(y-0.5) * (1-θ)**(n-y-0.5))
axes[1, 0].set_xlabel("θ")
axes[0, 1].set_title("Jeffreys' prior for Bob")
axes[0, 1].plot(κ, κ**(-0.5) * (1 + κ)**(-1))
axes[1, 1].set_title("Jeffreys' posterior for Bob")
axes[1, 1].plot(κ, κ**(y-0.5) * (1 + κ)**(-n-1))
axes[1, 1].set_xlim(-0.5, 10)
axes[1, 1].set_xlabel("κ")
axes[1, 1].text(-4.0, 0.030, size=18, s=r'$p(\theta \mid Y) \, \frac{d\theta}{d\kappa}$')
axes[1, 1].annotate("", xy=(-0.5, 0.025), xytext=(-4.5, 0.025),
                    arrowprops=dict(facecolor='black', shrink=0.05))
axes[1, 1].text(-4.0, 0.007, size=18, s=r'$p(\kappa \mid Y) \, \frac{d\kappa}{d\theta}$')
axes[1, 1].annotate("", xy=(-4.5, 0.015), xytext=(-0.5, 0.015),
                    arrowprops=dict(facecolor='black', shrink=0.05),
                    annotation_clip=False)

plt.subplots_adjust(wspace=0.4, hspace=0.4)
plt.tight_layout()
plt.savefig("img/chp01/Jeffrey_priors.png")
```
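
Bob's posterior is Alice's posterior pushed through the change of variable κ = θ/(1−θ): p(κ ∣ Y) = p(θ(κ) ∣ Y) dθ/dκ, with θ = κ/(1+κ) and dθ/dκ = (1+κ)⁻². A quick numeric check of that identity (ours, not part of the book's code):

```python
# Transforming Alice's (unnormalized) posterior with the Jacobian
# dθ/dκ = (1+κ)**-2 should reproduce Bob's posterior exactly
κ_ = np.linspace(0.1, 10, 50)
θ_ = κ_ / (1 + κ_)
alice_transformed = θ_**(y-0.5) * (1-θ_)**(n-y-0.5) * (1+κ_)**-2
bob = κ_**(y-0.5) * (1+κ_)**(-n-1)
np.allclose(alice_transformed, bob)  # True
```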

### Figure 1.9

```python
cons = [[{"type": "eq", "fun": lambda x: np.sum(x) - 1}],
        [{"type": "eq", "fun": lambda x: np.sum(x) - 1},
         {"type": "eq", "fun": lambda x: 1.5 - np.sum(x * np.arange(1, 7))}],
        [{"type": "eq", "fun": lambda x: np.sum(x) - 1},
         {"type": "eq", "fun": lambda x: np.sum(x[[2, 3]]) - 0.8}]]

max_ent = []
for i, c in enumerate(cons):
    val = minimize(lambda x: -entropy(x), x0=[1/6]*6, bounds=[(0., 1.)] * 6,
                   constraints=c)['x']
    max_ent.append(entropy(val))
    plt.plot(np.arange(1, 7), val, 'o--', color=viridish[i], lw=2.5)
plt.xlabel("$t$")
plt.ylabel("$p(t)$")

plt.savefig("img/chp01/max_entropy.png")
```
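
With only the normalization constraint, the maximum-entropy solution is the uniform distribution, whose entropy is log 6 ≈ 1.79 (scipy's `entropy` uses natural logarithms). A quick check of the first solution (ours, not the book's):

```python
# The unconstrained maximum entropy should match that of a fair die
max_ent[0], np.log(6)
```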

### Code 1.10

```python
ite = 100_000
entropies = np.zeros((3, ite))
for idx in range(ite):
    # Build a random probability distribution over the six faces,
    # filling the faces in a random order
    rnds = np.zeros(6)
    total = 0
    x_ = np.random.choice(np.arange(1, 7), size=6, replace=False)
    for i in x_[:-1]:
        rnd = np.random.uniform(0, 1-total)
        rnds[i-1] = rnd
        total = rnds.sum()
    rnds[-1] = 1 - rnds[:-1].sum()
    H = entropy(rnds)
    entropies[0, idx] = H
    # Record the entropy only for distributions that approximately satisfy
    # the mean-equal-to-1.5 constraint...
    if abs(1.5 - np.sum(rnds * x_)) < 0.01:
        entropies[1, idx] = H
    # ...or the constraint that faces 3 and 4 have total probability 0.8
    prob_34 = sum(rnds[np.argwhere((x_ == 3) | (x_ == 4)).ravel()])
    if abs(0.8 - prob_34) < 0.01:
        entropies[2, idx] = H
```
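
A zero entry in rows 1 and 2 of `entropies` marks a random distribution that failed that row's constraint. Counting nonzero entries (a quick diagnostic, not in the book) shows how rarely random distributions meet the stricter constraints:

```python
# Number of random distributions (out of 100,000) satisfying each constraint
(entropies != 0).sum(axis=1)
```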

### Figure 1.10

```python
_, ax = plt.subplots(1, 3, figsize=(12, 4), sharex=True, sharey=True, constrained_layout=True)

for i in range(3):
    az.plot_kde(entropies[i][np.nonzero(entropies[i])], ax=ax[i],
                plot_kwargs={"color":viridish[i], "lw":4})
    ax[i].axvline(max_ent[i], 0, 1, ls="--")
    ax[i].set_yticks([])
    ax[i].set_xlabel("entropy")
plt.savefig("img/chp01/max_entropy_vs_random_dist.png")
```

### Figure 1.11

```python
x = np.linspace(0, 1, 500)
params = [(0.5, 0.5), (1, 1), (3, 3), (100, 25)]

labels = ["Jeffreys", "MaxEnt", "Weakly Informative", "Informative"]

_, ax = plt.subplots()
for (α, β), label, c in zip(params, labels, (0, 1, 4, 2)):
    pdf = stats.beta.pdf(x, α, β)
    ax.plot(x, pdf, label=f"{label}", c=f"C{c}", lw=3)
ax.set(yticks=[], xlabel="θ", title="Priors")
ax.legend()
plt.savefig("img/chp01/prior_informativeness_spectrum.png")
```