Replacing PyMC3 plots w/ Arviz plots & sigma Param change [Part 2] #26

Merged

Changes from 2 commits
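Across the notebooks touched below, the edits follow the same two API updates: calls to pm.traceplot (and pm.summary) are replaced by their ArviZ equivalents az.plot_trace and az.summary, and the deprecated sd keyword on PyMC3 distributions is replaced by sigma. The sketch below illustrates that pattern on a minimal, made-up model; it is not taken from any of the notebooks in this PR.

import arviz as az
import numpy as np
import pymc3 as pm

# Hypothetical data, only so the sketch runs end to end
y = np.random.default_rng(8924).normal(loc=1.0, scale=2.0, size=100)

with pm.Model():
    mu = pm.Normal("mu", mu=0.0, sigma=10.0)       # previously sd=10.0
    sigma = pm.HalfNormal("sigma", sigma=5.0)      # previously sd=5.0
    pm.Normal("obs", mu=mu, sigma=sigma, observed=y)
    # return_inferencedata is available in recent PyMC3 releases
    trace = pm.sample(1000, tune=1000, return_inferencedata=True)

az.plot_trace(trace)   # previously pm.traceplot(trace)
az.summary(trace)      # previously pm.summary(trace)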
260 changes: 107 additions & 153 deletions examples/case_studies/BEST.ipynb

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion examples/case_studies/LKJ.ipynb
@@ -40,7 +40,9 @@
"outputs": [],
"source": [
"az.style.use(\"arviz-darkgrid\")\n",
"\n",
"warnings.simplefilter(action=\"ignore\", category=FutureWarning)\n",
"\n",
"RANDOM_SEED = 8924\n",
"np.random.seed(3264602) # from random.org"
]
@@ -716,7 +718,7 @@
"metadata": {
"anaconda-cloud": {},
"kernelspec": {
"display_name": "pymc3-dev",
"display_name": "Python (PyMC3 Dev)",
"language": "python",
"name": "pymc3-dev"
},
10 changes: 5 additions & 5 deletions examples/case_studies/blackbox_external_likelihood.ipynb
@@ -320,7 +320,7 @@
" trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
"\n",
"# plot the traces\n",
"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"\n",
"# put the chains in an array (for later!)\n",
"samples_pymc3 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -616,7 +616,7 @@
" trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
"\n",
"# plot the traces\n",
"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"\n",
"# put the chains in an array (for later!)\n",
"samples_pymc3_2 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -644,12 +644,12 @@
" theta = tt.as_tensor_variable([m, c])\n",
"\n",
" # use a Normal distribution\n",
" pm.Normal(\"likelihood\", mu=(m * x + c), sd=sigma, observed=data)\n",
" pm.Normal(\"likelihood\", mu=(m * x + c), sigma=sigma, observed=data)\n",
"\n",
" trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
"\n",
"# plot the traces\n",
"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"\n",
"# put the chains in an array (for later!)\n",
"samples_pymc3_3 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -832,7 +832,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.2"
"version": "3.8.5"
}
},
"nbformat": 4,
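One reviewer-facing note on the plot_trace swaps above: pm.traceplot accepted lines as a dict, while ArviZ documents the argument as a list of (var_name, selection, values) tuples. The dict form may still be accepted for backwards compatibility, but the tuple spelling below (reusing the notebook's trace, mtrue and ctrue names) is the documented one; treat it as a sketch rather than a required change.

# Documented ArviZ form of the lines argument; equivalent in intent to
# lines={"m": mtrue, "c": ctrue} as used in the notebook.
_ = az.plot_trace(
    trace,
    lines=[("m", {}, [mtrue]), ("c", {}, [ctrue])],
)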
10 changes: 5 additions & 5 deletions examples/case_studies/conditional-autoregressive-model.ipynb
@@ -122,7 +122,7 @@
"\n",
"The classical `WinBUGS` implementation (more information [here](http://glau.ca/?p=340)):\n",
"\n",
"```\n",
"```python\n",
"model\n",
"{\n",
" for (i in 1 : regions) {\n",
@@ -2717,7 +2717,7 @@
}
],
"source": [
"summary2 = pm.summary(infdata2)\n",
"summary2 = az.summary(infdata2)\n",
"summary2[summary2[\"r_hat\"] > 1.05]"
]
},
@@ -3004,7 +3004,7 @@
"Note that in the node $\\phi \\sim \\mathcal{N}(0, [D_\\tau (I - \\alpha B)]^{-1})$, we are computing the log-likelihood for a multivariate Gaussian distribution, which might not scale well in high-dimensions. We can take advantage of the fact that the covariance matrix here $[D_\\tau (I - \\alpha B)]^{-1}$ is **sparse**, and there are faster ways to compute its log-likelihood. \n",
"\n",
"For example, a more efficient sparse representation of the CAR in `Stan`:\n",
"```\n",
"```python\n",
"functions {\n",
" /**\n",
" * Return the log probability of a proper conditional autoregressive (CAR) prior \n",
@@ -3040,7 +3040,7 @@
" - tau * (phit_D * phi - alpha * (phit_W * phi)));\n",
" }\n",
"}\n",
"```\n",
"```python\n",
"with the data transformed in the model:\n",
"```\n",
"transformed data {\n",
@@ -3500,7 +3500,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.3"
"version": "3.8.5"
}
},
"nbformat": 4,
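The markdown cell edited above argues that the log-likelihood of phi ~ N(0, [D_tau (I - alpha B)]^{-1}) can be evaluated cheaply because the precision matrix tau * (D - alpha * W) is sparse, and quotes the Stan sparse_car_lpdf implementation. As a rough NumPy/SciPy translation of the same determinant-plus-quadratic-form trick, assuming W is a symmetric scipy.sparse adjacency matrix, one could write something like the sketch below; the function and its inputs are illustrative and not part of the notebook.

import numpy as np
from scipy import sparse

def sparse_car_logp(phi, W, alpha, tau):
    # Proper CAR prior: phi ~ N(0, [tau * (D - alpha * W)]^{-1}), up to an additive constant.
    # W is a symmetric scipy.sparse adjacency matrix; D holds the neighbour counts.
    n = W.shape[0]
    D = np.asarray(W.sum(axis=1)).ravel()
    # log det(tau * (D - alpha * W)) = n*log(tau) + sum(log D) + sum(log(1 - alpha*lambda)),
    # where lambda are the eigenvalues of D^{-1/2} W D^{-1/2}, as in the Stan code.
    D_inv_sqrt = sparse.diags(1.0 / np.sqrt(D))
    lam = np.linalg.eigvalsh((D_inv_sqrt @ W @ D_inv_sqrt).toarray())
    logdet = n * np.log(tau) + np.log(D).sum() + np.log1p(-alpha * lam).sum()
    # Quadratic form tau * phi' (D - alpha*W) phi, using only sparse and elementwise products
    quad = tau * (phi @ (D * phi) - alpha * (phi @ (W @ phi)))
    return 0.5 * (logdet - quad)

In practice the eigenvalues would be precomputed once, as the Stan version does in its transformed data block, since they depend on neither alpha nor tau; only the log1p sum and the quadratic form need to be re-evaluated per draw.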
169 changes: 61 additions & 108 deletions examples/case_studies/factor_analysis.ipynb

Large diffs are not rendered by default.

38 changes: 22 additions & 16 deletions examples/case_studies/hierarchical_partial_pooling.ipynb

Large diffs are not rendered by default.

18 changes: 8 additions & 10 deletions examples/case_studies/log-gaussian-cox-process.ipynb
@@ -320,7 +320,7 @@
"outputs": [],
"source": [
"with pm.Model() as lgcp_model:\n",
" mu = pm.Normal(\"mu\", sd=3)\n",
" mu = pm.Normal(\"mu\", sigma=3)\n",
" rho = pm.Uniform(\"rho\", lower=25, upper=200)\n",
" cov_scale = pm.Exponential(\"cov_scale\", lam=1)\n",
"\n",
@@ -601,9 +601,7 @@
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"scrolled": false
},
"metadata": {},
"outputs": [
{
"data": {
@@ -773,7 +771,7 @@
"n_centroids = centroids.shape[0]\n",
"\n",
"with pm.Model() as mark_model:\n",
" mu = pm.Normal(\"mu\", sd=3)\n",
" mu = pm.Normal(\"mu\", sigma=3)\n",
" rho = pm.Uniform(\"rho\", lower=25, upper=200)\n",
"\n",
" cov_scale = pm.Exponential(\"scale\", lam=1)\n",
@@ -809,14 +807,14 @@
"outputs": [],
"source": [
"with mark_model:\n",
" alpha = pm.Normal(\"alpha\", sd=10.0)\n",
" beta = pm.Normal(\"beta\", sd=5)\n",
" alpha = pm.Normal(\"alpha\", sigma=10.0)\n",
" beta = pm.Normal(\"beta\", sigma=5)\n",
" eps_sd = pm.HalfCauchy(\"eps_sd\", beta=1.0)\n",
"\n",
" marks = pm.Normal(\n",
" \"marks\",\n",
" mu=alpha + beta * intensity[n_centroids::],\n",
" sd=eps_sd,\n",
" sigma=eps_sd,\n",
" shape=n,\n",
" observed=data[\"marks\"].values,\n",
" )"
@@ -1037,7 +1035,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.2"
"version": "3.8.5"
},
"toc": {
"base_numbering": 1,
@@ -1054,5 +1052,5 @@
}
},
"nbformat": 4,
"nbformat_minor": 2
"nbformat_minor": 4
}
11 changes: 3 additions & 8 deletions examples/case_studies/multilevel_modeling.ipynb
@@ -76,7 +76,9 @@
"from theano import tensor as tt\n",
@MarcoGorelli (Contributor), Feb 5, 2021:

Did you save the notebooks after having re-run them? The watermarks still show old "last updated" dates.

@MarcoGorelli (Contributor), Feb 5, 2021:

If there's nothing to be updated in this notebook, then perhaps it could be unstaged?

git fetch upstream
git checkout upstream/main -- examples/case_studies/multilevel_modeling.ipynb
git add examples/case_studies/multilevel_modeling.ipynb
git commit -m 'unstage multilevel_modeling'
git push

Reply from the PR author (Contributor):

Acknowledged. Thanks for the snippet!

"\n",
"print(f\"Running on PyMC3 v{pm.__version__}\")\n",
"\n",
"warnings.simplefilter(action=\"ignore\", category=FutureWarning)\n",
"\n",
"RANDOM_SEED = 8924\n",
"np.random.seed(286)"
]
@@ -8531,13 +8533,6 @@
"%load_ext watermark\n",
"%watermark -n -u -v -iv -w"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
@@ -8557,7 +8552,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.9"
"version": "3.8.5"
}
},
"nbformat": 4,
@@ -1649,7 +1649,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.7"
"version": "3.8.5"
}
},
"nbformat": 4,
556 changes: 274 additions & 282 deletions examples/case_studies/putting_workflow.ipynb

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions examples/case_studies/rugby_analytics.ipynb
@@ -1499,9 +1499,9 @@
"metadata": {
"anaconda-cloud": {},
"kernelspec": {
"display_name": "pymc-dev",
"display_name": "Python (PyMC3 Dev)",
"language": "python",
"name": "pymc-dev"
"name": "pymc3-dev"
},
"language_info": {
"codemirror_mode": {
@@ -1513,7 +1513,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.2"
"version": "3.8.5"
}
},
"nbformat": 4,
185 changes: 97 additions & 88 deletions examples/case_studies/stochastic_volatility.ipynb

Large diffs are not rendered by default.