
Commit da07736

Merge pull request #26 from CloudChaoszero/replace-pymc3-arviz-plots_part2
Replacing PyMC3 plots w/ Arviz plots & sigma Param change [Part 2]
2 parents: 3593fe9 + 05ab6b3 · commit da07736

11 files changed (+1220 additions, -675 deletions)
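The same two-part migration runs through all of the diffs below: deprecated `pm.traceplot` / `pm.summary` calls are replaced with their ArviZ equivalents (`az.plot_trace`, `az.summary`), and distribution parameters switch from the deprecated `sd` alias to `sigma`. A minimal sketch of the pattern, using a hypothetical toy model rather than code from any of the notebooks:

import arviz as az
import numpy as np
import pymc3 as pm

# Hypothetical toy data, only to illustrate the API changes made in this commit.
x = np.linspace(0, 1, 50)
y = 2.5 * x + 1.0 + np.random.normal(scale=0.5, size=50)

with pm.Model():
    m = pm.Normal("m", mu=0, sigma=10)   # previously: sd=10
    c = pm.Normal("c", mu=0, sigma=10)   # previously: sd=10
    pm.Normal("likelihood", mu=m * x + c, sigma=0.5, observed=y)  # previously: sd=0.5
    trace = pm.sample(1000, tune=1000)

# previously: pm.traceplot(trace) and pm.summary(trace)
az.plot_trace(trace)
az.summary(trace)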

examples/case_studies/BEST.ipynb

Lines changed: 89 additions & 138 deletions
Large diffs are not rendered by default.

examples/case_studies/LKJ.ipynb

Lines changed: 139 additions & 96 deletions
Large diffs are not rendered by default.

examples/case_studies/blackbox_external_likelihood.ipynb

Lines changed: 5 additions & 5 deletions
@@ -320,7 +320,7 @@
 " trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
 "\n",
 "# plot the traces\n",
-"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
+"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
 "\n",
 "# put the chains in an array (for later!)\n",
 "samples_pymc3 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -616,7 +616,7 @@
 " trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
 "\n",
 "# plot the traces\n",
-"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
+"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
 "\n",
 "# put the chains in an array (for later!)\n",
 "samples_pymc3_2 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -644,12 +644,12 @@
 " theta = tt.as_tensor_variable([m, c])\n",
 "\n",
 " # use a Normal distribution\n",
-" pm.Normal(\"likelihood\", mu=(m * x + c), sd=sigma, observed=data)\n",
+" pm.Normal(\"likelihood\", mu=(m * x + c), sigma=sigma, observed=data)\n",
 "\n",
 " trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
 "\n",
 "# plot the traces\n",
-"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
+"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
 "\n",
 "# put the chains in an array (for later!)\n",
 "samples_pymc3_3 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -832,7 +832,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.8.2"
+"version": "3.8.5"
 }
 },
 "nbformat": 4,

examples/case_studies/conditional-autoregressive-model.ipynb

Lines changed: 5 additions & 5 deletions
@@ -122,7 +122,7 @@
 "\n",
 "The classical `WinBUGS` implementation (more information [here](http://glau.ca/?p=340)):\n",
 "\n",
-"```\n",
+"```python\n",
 "model\n",
 "{\n",
 " for (i in 1 : regions) {\n",
@@ -2717,7 +2717,7 @@
 }
 ],
 "source": [
-"summary2 = pm.summary(infdata2)\n",
+"summary2 = az.summary(infdata2)\n",
 "summary2[summary2[\"r_hat\"] > 1.05]"
 ]
 },
@@ -3004,7 +3004,7 @@
 "Note that in the node $\phi \sim \mathcal{N}(0, [D_\tau (I - \alpha B)]^{-1})$, we are computing the log-likelihood for a multivariate Gaussian distribution, which might not scale well in high-dimensions. We can take advantage of the fact that the covariance matrix here $[D_\tau (I - \alpha B)]^{-1}$ is **sparse**, and there are faster ways to compute its log-likelihood. \n",
 "\n",
 "For example, a more efficient sparse representation of the CAR in `Stan`:\n",
-"```\n",
+"```python\n",
 "functions {\n",
 " /**\n",
 " * Return the log probability of a proper conditional autoregressive (CAR) prior \n",
@@ -3040,7 +3040,7 @@
 " - tau * (phit_D * phi - alpha * (phit_W * phi)));\n",
 " }\n",
 "}\n",
-"```\n",
+"```python\n",
 "with the data transformed in the model:\n",
 "```\n",
 "transformed data {\n",
@@ -3500,7 +3500,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.8.3"
+"version": "3.8.6"
 }
 },
 "nbformat": 4,

examples/case_studies/factor_analysis.ipynb

Lines changed: 81 additions & 65 deletions
Large diffs are not rendered by default.

examples/case_studies/hierarchical_partial_pooling.ipynb

Lines changed: 25 additions & 18 deletions
Large diffs are not rendered by default.

examples/case_studies/log-gaussian-cox-process.ipynb

Lines changed: 10 additions & 12 deletions
@@ -320,7 +320,7 @@
 "outputs": [],
 "source": [
 "with pm.Model() as lgcp_model:\n",
-" mu = pm.Normal(\"mu\", sd=3)\n",
+" mu = pm.Normal(\"mu\", sigma=3)\n",
 " rho = pm.Uniform(\"rho\", lower=25, upper=200)\n",
 " cov_scale = pm.Exponential(\"cov_scale\", lam=1)\n",
 "\n",
@@ -601,9 +601,7 @@
 {
 "cell_type": "code",
 "execution_count": 14,
-"metadata": {
-"scrolled": false
-},
+"metadata": {},
 "outputs": [
 {
 "data": {
@@ -773,7 +771,7 @@
 "n_centroids = centroids.shape[0]\n",
 "\n",
 "with pm.Model() as mark_model:\n",
-" mu = pm.Normal(\"mu\", sd=3)\n",
+" mu = pm.Normal(\"mu\", sigma=3)\n",
 " rho = pm.Uniform(\"rho\", lower=25, upper=200)\n",
 "\n",
 " cov_scale = pm.Exponential(\"scale\", lam=1)\n",
@@ -809,14 +807,14 @@
 "outputs": [],
 "source": [
 "with mark_model:\n",
-" alpha = pm.Normal(\"alpha\", sd=10.0)\n",
-" beta = pm.Normal(\"beta\", sd=5)\n",
+" alpha = pm.Normal(\"alpha\", sigma=10.0)\n",
+" beta = pm.Normal(\"beta\", sigma=5)\n",
 " eps_sd = pm.HalfCauchy(\"eps_sd\", beta=1.0)\n",
 "\n",
 " marks = pm.Normal(\n",
 " \"marks\",\n",
 " mu=alpha + beta * intensity[n_centroids::],\n",
-" sd=eps_sd,\n",
+" sigma=eps_sd,\n",
 " shape=n,\n",
 " observed=data[\"marks\"].values,\n",
 " )"
@@ -1023,9 +1021,9 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "Python 3",
+"display_name": "Python PyMC3 (Dev)",
 "language": "python",
-"name": "python3"
+"name": "pymc3-dev-py38"
 },
 "language_info": {
 "codemirror_mode": {
@@ -1037,7 +1035,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.7.2"
+"version": "3.8.6"
 },
 "toc": {
 "base_numbering": 1,
@@ -1054,5 +1052,5 @@
 }
 },
 "nbformat": 4,
-"nbformat_minor": 2
+"nbformat_minor": 4
 }

examples/case_studies/probabilistic_matrix_factorization.ipynb

Lines changed: 263 additions & 54 deletions
Large diffs are not rendered by default.

examples/case_studies/putting_workflow.ipynb

Lines changed: 535 additions & 222 deletions
Large diffs are not rendered by default.

examples/case_studies/rugby_analytics.ipynb

Lines changed: 3 additions & 3 deletions
@@ -1499,9 +1499,9 @@
 "metadata": {
 "anaconda-cloud": {},
 "kernelspec": {
-"display_name": "pymc-dev",
+"display_name": "Python (PyMC3 Dev)",
 "language": "python",
-"name": "pymc-dev"
+"name": "pymc3-dev"
 },
 "language_info": {
 "codemirror_mode": {
@@ -1513,7 +1513,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.8.2"
+"version": "3.8.5"
 }
 },
 "nbformat": 4,

examples/case_studies/stochastic_volatility.ipynb

Lines changed: 65 additions & 57 deletions
Large diffs are not rendered by default.
