Replacing PyMC3 plots w/ Arviz plots & sigma Param change [Part 2] #26
Merged
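This PR moves the case-study notebooks from the deprecated `pm.traceplot` / `pm.summary` wrappers to `az.plot_trace` / `az.summary`, and replaces the deprecated `sd=` keyword with `sigma=` in distribution calls. A minimal sketch of the pattern applied throughout (the toy model below is illustrative only, not taken from any of the notebooks; it assumes `pymc3`, `arviz`, and `numpy` are installed):

```python
import arviz as az
import numpy as np
import pymc3 as pm

# Toy data for illustration only
y = np.random.default_rng(42).normal(loc=1.0, scale=2.0, size=100)

with pm.Model():
    # `sigma=` replaces the deprecated `sd=` keyword
    mu = pm.Normal("mu", mu=0.0, sigma=10.0)
    sigma = pm.HalfNormal("sigma", sigma=5.0)
    pm.Normal("obs", mu=mu, sigma=sigma, observed=y)

    trace = pm.sample(1000, tune=1000)

    # ArviZ calls replace the deprecated PyMC3 wrappers
    az.plot_trace(trace)      # instead of pm.traceplot(trace)
    print(az.summary(trace))  # instead of pm.summary(trace)
```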
227 changes: 89 additions & 138 deletions examples/case_studies/BEST.ipynb

Large diffs are not rendered by default.

235 changes: 139 additions & 96 deletions examples/case_studies/LKJ.ipynb

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions examples/case_studies/blackbox_external_likelihood.ipynb
@@ -320,7 +320,7 @@
" trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
"\n",
"# plot the traces\n",
"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"\n",
"# put the chains in an array (for later!)\n",
"samples_pymc3 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -616,7 +616,7 @@
" trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
"\n",
"# plot the traces\n",
"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"\n",
"# put the chains in an array (for later!)\n",
"samples_pymc3_2 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -644,12 +644,12 @@
" theta = tt.as_tensor_variable([m, c])\n",
"\n",
" # use a Normal distribution\n",
" pm.Normal(\"likelihood\", mu=(m * x + c), sd=sigma, observed=data)\n",
" pm.Normal(\"likelihood\", mu=(m * x + c), sigma=sigma, observed=data)\n",
"\n",
" trace = pm.sample(ndraws, tune=nburn, discard_tuned_samples=True)\n",
"\n",
"# plot the traces\n",
"_ = pm.traceplot(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"_ = az.plot_trace(trace, lines={\"m\": mtrue, \"c\": ctrue})\n",
"\n",
"# put the chains in an array (for later!)\n",
"samples_pymc3_3 = np.vstack((trace[\"m\"], trace[\"c\"])).T"
@@ -832,7 +832,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.2"
"version": "3.8.5"
}
},
"nbformat": 4,
10 changes: 5 additions & 5 deletions examples/case_studies/conditional-autoregressive-model.ipynb
@@ -122,7 +122,7 @@
"\n",
"The classical `WinBUGS` implementation (more information [here](http://glau.ca/?p=340)):\n",
"\n",
"```\n",
"```python\n",
"model\n",
"{\n",
" for (i in 1 : regions) {\n",
@@ -2717,7 +2717,7 @@
}
],
"source": [
"summary2 = pm.summary(infdata2)\n",
"summary2 = az.summary(infdata2)\n",
"summary2[summary2[\"r_hat\"] > 1.05]"
]
},
@@ -3004,7 +3004,7 @@
"Note that in the node $\\phi \\sim \\mathcal{N}(0, [D_\\tau (I - \\alpha B)]^{-1})$, we are computing the log-likelihood for a multivariate Gaussian distribution, which might not scale well in high-dimensions. We can take advantage of the fact that the covariance matrix here $[D_\\tau (I - \\alpha B)]^{-1}$ is **sparse**, and there are faster ways to compute its log-likelihood. \n",
"\n",
"For example, a more efficient sparse representation of the CAR in `Stan`:\n",
"```\n",
"```python\n",
"functions {\n",
" /**\n",
" * Return the log probability of a proper conditional autoregressive (CAR) prior \n",
@@ -3040,7 +3040,7 @@
" - tau * (phit_D * phi - alpha * (phit_W * phi)));\n",
" }\n",
"}\n",
"```\n",
"```python\n",
"with the data transformed in the model:\n",
"```\n",
"transformed data {\n",
@@ -3500,7 +3500,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.3"
"version": "3.8.6"
}
},
"nbformat": 4,
146 changes: 81 additions & 65 deletions examples/case_studies/factor_analysis.ipynb

Large diffs are not rendered by default.

43 changes: 25 additions & 18 deletions examples/case_studies/hierarchical_partial_pooling.ipynb

Large diffs are not rendered by default.

22 changes: 10 additions & 12 deletions examples/case_studies/log-gaussian-cox-process.ipynb
@@ -320,7 +320,7 @@
"outputs": [],
"source": [
"with pm.Model() as lgcp_model:\n",
" mu = pm.Normal(\"mu\", sd=3)\n",
" mu = pm.Normal(\"mu\", sigma=3)\n",
" rho = pm.Uniform(\"rho\", lower=25, upper=200)\n",
" cov_scale = pm.Exponential(\"cov_scale\", lam=1)\n",
"\n",
@@ -601,9 +601,7 @@
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"scrolled": false
},
"metadata": {},
"outputs": [
{
"data": {
@@ -773,7 +771,7 @@
"n_centroids = centroids.shape[0]\n",
"\n",
"with pm.Model() as mark_model:\n",
" mu = pm.Normal(\"mu\", sd=3)\n",
" mu = pm.Normal(\"mu\", sigma=3)\n",
" rho = pm.Uniform(\"rho\", lower=25, upper=200)\n",
"\n",
" cov_scale = pm.Exponential(\"scale\", lam=1)\n",
@@ -809,14 +807,14 @@
"outputs": [],
"source": [
"with mark_model:\n",
" alpha = pm.Normal(\"alpha\", sd=10.0)\n",
" beta = pm.Normal(\"beta\", sd=5)\n",
" alpha = pm.Normal(\"alpha\", sigma=10.0)\n",
" beta = pm.Normal(\"beta\", sigma=5)\n",
" eps_sd = pm.HalfCauchy(\"eps_sd\", beta=1.0)\n",
"\n",
" marks = pm.Normal(\n",
" \"marks\",\n",
" mu=alpha + beta * intensity[n_centroids::],\n",
" sd=eps_sd,\n",
" sigma=eps_sd,\n",
" shape=n,\n",
" observed=data[\"marks\"].values,\n",
" )"
@@ -1023,9 +1021,9 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python PyMC3 (Dev)",
"language": "python",
"name": "python3"
"name": "pymc3-dev-py38"
},
"language_info": {
"codemirror_mode": {
@@ -1037,7 +1035,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.2"
"version": "3.8.6"
},
"toc": {
"base_numbering": 1,
@@ -1054,5 +1052,5 @@
}
},
"nbformat": 4,
"nbformat_minor": 2
"nbformat_minor": 4
}
317 changes: 263 additions & 54 deletions examples/case_studies/probabilistic_matrix_factorization.ipynb

Large diffs are not rendered by default.

757 changes: 535 additions & 222 deletions examples/case_studies/putting_workflow.ipynb

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions examples/case_studies/rugby_analytics.ipynb
@@ -1499,9 +1499,9 @@
"metadata": {
"anaconda-cloud": {},
"kernelspec": {
"display_name": "pymc-dev",
"display_name": "Python (PyMC3 Dev)",
"language": "python",
"name": "pymc-dev"
"name": "pymc3-dev"
},
"language_info": {
"codemirror_mode": {
@@ -1513,7 +1513,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.2"
"version": "3.8.5"
}
},
"nbformat": 4,
122 changes: 65 additions & 57 deletions examples/case_studies/stochastic_volatility.ipynb

Large diffs are not rendered by default.