
Commit ffc2094

formatting, release notes, comments in notebook, etc
1 parent fa71926

File tree

4 files changed: +71 -81 lines changed


RELEASE-NOTES.md

Lines changed: 1 addition & 0 deletions
@@ -9,6 +9,7 @@
 - Added `Matern12` covariance function for Gaussian processes. This is the Matern kernel with nu=1/2.
 - Progressbar reports number of divergences in real time, when available [#3547](https://github.com/pymc-devs/pymc3/pull/3547).
 - Sampling from variational approximation now allows for alternative trace backends [#3550].
+- Add capabilities to do inference on parameters in a differential equation with `DifferentialEquation`.
 
 ### Maintenance
 - Moved math operations out of `Rice`, `TruncatedNormal`, `Triangular` and `ZeroInflatedNegativeBinomial` `random` methods. Math operations on values returned by `draw_values` might not broadcast well, and all the `size` aware broadcasting is left to `generate_samples`. Fixes [#3481](https://github.com/pymc-devs/pymc3/issues/3481) and [#3508](https://github.com/pymc-devs/pymc3/issues/3508)
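For context, the notebook touched by this commit exercises the new `DifferentialEquation` op roughly as follows. This is a minimal sketch assembled from the cells visible in the diff below, not a verbatim cell; the import paths (`pymc3.ode.DifferentialEquation`, `scipy.integrate.odeint`) are assumptions, since the notebook's import cell is not part of this diff.

```python
# Minimal sketch of the DifferentialEquation workflow shown in the notebook diff below.
# Assumed imports (the notebook's import cell is not included in this commit's diff):
import numpy as np
import pymc3 as pm
from scipy.integrate import odeint
from pymc3.ode import DifferentialEquation

def freefall(y, t, p):
    # Toy ODE from the notebook: dy/dt = 2*g - gamma*y, with p = [gamma, g]
    return 2.0 * p[1] - p[0] * y[0]

# Simulate noisy observations of the true trajectory
times = np.arange(0, 10, 0.5)
gamma_true, g_true, y0_true = 0.4, 9.8, -2
y = odeint(freefall, y0=[y0_true], t=times, args=([gamma_true, g_true],))
yobs = np.random.normal(y, 2)

# Wrap the ODE so its solution can be used inside a PyMC3 model
ode_model = DifferentialEquation(func=freefall, t0=0, times=times,
                                 n_states=1, n_odeparams=2)

with pm.Model():
    sigma = pm.HalfCauchy('sigma', 1)
    gamma = pm.Lognormal('gamma', 0, 1)
    # Known parameters (here g = 9.8) can be passed as plain values
    ode_solution = ode_model(odeparams=[gamma, 9.8], y0=[0]).reshape(yobs.shape)
    pm.Normal('Y', mu=ode_solution, sd=sigma, observed=yobs)
    trace = pm.sample(2000, tune=1000)
```

The later cells in the diff extend this same pattern by placing priors on `g` and on the initial condition `y0`, and by fitting a two-state SIR model.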

docs/source/notebooks/ODE_API_parameter_estimation.ipynb

Lines changed: 55 additions & 70 deletions
@@ -97,22 +97,22 @@
 }
 ],
 "source": [
-"#For reproducibility\n",
+"# For reproducibility\n",
 "np.random.seed(19920908)\n",
 "\n",
-"def freefall(y,t,p):\n",
+"def freefall(y, t, p):\n",
 " \n",
 " return 2.0*p[1] - p[0]*y[0]\n",
 "\n",
-"#Times for observation\n",
+"# Times for observation\n",
 "times = np.arange(0,10,0.5)\n",
 "gamma,g, y0, sigma = 0.4, 9.8, -2, 2\n",
-"y = odeint(freefall, t = times, y0 = y0, args = tuple([[gamma,g]]))\n",
+"y = odeint(freefall, t=times, y0=y, args=tuple([[gamma,g]]))\n",
 "yobs = np.random.normal(y,2)\n",
 "\n",
-"fig, ax = plt.subplots(dpi = 120)\n",
-"plt.plot(times,yobs, label = 'observed speed', linestyle = 'dashed', marker = 'o', color='red')\n",
-"plt.plot(times,y, label = 'True speed', color ='k', alpha = 0.5)\n",
+"fig, ax = plt.subplots(dpi=120)\n",
+"plt.plot(times,yobs, label='observed speed', linestyle='dashed', marker='o', color='red')\n",
+"plt.plot(times,y, label='True speed', color='k', alpha=0.5)\n",
 "plt.legend()\n",
 "plt.xlabel('Time (Seconds)')\n",
 "plt.ylabel(r'$y(t)$');\n",
@@ -159,31 +159,29 @@
 }
 ],
 "source": [
-"ode_model = DifferentialEquation(func = freefall,\n",
-" t0 = 0,\n",
-" times = times,\n",
+"ode_model = DifferentialEquation(func=freefall,\n",
+" t0=0,\n",
+" times=times,\n",
 " n_odeparams=2, \n",
-" n_states = 1)\n",
+" n_states=1)\n",
 "\n",
 "with pm.Model() as model:\n",
 " \n",
-" sigma= pm.HalfCauchy('sigma',1)\n",
+" sigma = pm.HalfCauchy('sigma',1)\n",
 " \n",
 " gamma = pm.Lognormal('gamma',0,1)\n",
 " \n",
-" #If we know one of the parameter values, we can simply pass the value.\n",
-" #No need to specify a prior.\n",
-" ode_solution = ode_model(odeparams = [gamma, 9.8], y0 = [0]).reshape(yobs.shape)\n",
+" # If we know one of the parameter values, we can simply pass the value.\n",
+" # No need to specify a prior.\n",
+" ode_solution = ode_model(odeparams=[gamma, 9.8], y0=[0]).reshape(yobs.shape)\n",
 " \n",
-" Y = pm.Normal('Y', mu = ode_solution, sd = sigma, observed = yobs)\n",
+" Y = pm.Normal('Y', mu=ode_solution, sd=sigma, observed=yobs)\n",
 " \n",
-" trace = pm.sample(2000,tune = 1000)\n",
+" trace = pm.sample(2000,tune=1000)\n",
 " prior = pm.sample_prior_predictive()\n",
 " posterior_predictive = pm.sample_posterior_predictive(trace)\n",
 " \n",
-" data = az.from_pymc3(trace = trace,\n",
-" prior = prior,\n",
-" posterior_predictive = posterior_predictive)"
+" data = az.from_pymc3(trace=trace, prior=prior, posterior_predictive=posterior_predictive)"
 ]
 },
 {
@@ -238,24 +236,22 @@
 "source": [
 "with pm.Model() as model2:\n",
 " \n",
-" sigma= pm.HalfCauchy('sigma',1)\n",
+" sigma = pm.HalfCauchy('sigma',1)\n",
 " gamma = pm.Lognormal('gamma',0,1)\n",
-" #A prior on the acceleration due to gravity\n",
+" # A prior on the acceleration due to gravity\n",
 " g = pm.Lognormal('g',pm.math.log(10),2)\n",
 " \n",
-" #Notice now I have passed g to the odeparams argument\n",
-" ode_solution = ode_model(odeparams = [gamma, g], y0 = [0]).reshape(yobs.shape)\n",
+" # Notice now I have passed g to the odeparams argument\n",
+" ode_solution = ode_model(odeparams=[gamma, g], y0=[0]).reshape(yobs.shape)\n",
 " \n",
-" Y = pm.Normal('Y', mu = ode_solution, sd = sigma, observed = yobs)\n",
+" Y = pm.Normal('Y', mu=ode_solution, sd=sigma, observed=yobs)\n",
 "\n",
 " \n",
-" trace = pm.sample(2000,tune = 1000, target_accept = 0.9)\n",
+" trace = pm.sample(2000, tune=1000, target_accept=0.9)\n",
 " prior = pm.sample_prior_predictive()\n",
 " posterior_predictive = pm.sample_posterior_predictive(trace)\n",
 " \n",
-" data = az.from_pymc3(trace = trace,\n",
-" prior = prior,\n",
-" posterior_predictive = posterior_predictive)"
+" data = az.from_pymc3(trace=trace, prior=prior, posterior_predictive=posterior_predictive)"
 ]
 },
 {
@@ -316,24 +312,22 @@
 "source": [
 "with pm.Model() as model3:\n",
 " \n",
-" sigma= pm.HalfCauchy('sigma',1)\n",
+" sigma = pm.HalfCauchy('sigma',1)\n",
 " gamma = pm.Lognormal('gamma',0,1)\n",
 " g = pm.Lognormal('g',pm.math.log(10),2)\n",
-" #Initial condition prior. We think it is at rest, but will allow for perturbations in initial velocity.\n",
+" # Initial condition prior. We think it is at rest, but will allow for perturbations in initial velocity.\n",
 " y0 = pm.Normal('y0', 0, 2)\n",
 " \n",
-" ode_solution = ode_model(odeparams = [gamma, g], y0 = [y0]).reshape(yobs.shape)\n",
+" ode_solution = ode_model(odeparams=[gamma, g], y0=[y0]).reshape(yobs.shape)\n",
 " \n",
-" Y = pm.Normal('Y', mu = ode_solution, sd = sigma, observed = yobs)\n",
+" Y = pm.Normal('Y', mu=ode_solution, sd=sigma, observed=yobs)\n",
 "\n",
 " \n",
-" trace = pm.sample(2000,tune = 1000, target_accept = 0.9)\n",
+" trace = pm.sample(2000,tune=1000, target_accept=0.9)\n",
 " prior = pm.sample_prior_predictive()\n",
 " posterior_predictive = pm.sample_posterior_predictive(trace)\n",
 " \n",
-" data = az.from_pymc3(trace = trace,\n",
-" prior = prior,\n",
-" posterior_predictive = posterior_predictive)"
+" data = az.from_pymc3(trace=trace, prior=prior, posterior_predictive=posterior_predictive)"
 ]
 },
 {
@@ -355,7 +349,7 @@
 }
 ],
 "source": [
-"az.plot_posterior(data, figsize = (13,3));"
+"az.plot_posterior(data, figsize=(13,3));"
 ]
 },
 {
@@ -432,25 +426,25 @@
 }
 ],
 "source": [
-"def SIR(y,t,p):\n",
+"def SIR(y, t, p):\n",
 " \n",
 " ds = -p[0]*y[0]*y[1]\n",
 " di = p[0]*y[0]*y[1] - p[1]*y[1]\n",
 " \n",
-" return [ds,di]\n",
+" return [ds, di]\n",
 "\n",
 "times = np.arange(0,5,0.25)\n",
 "\n",
 "beta,gamma = 4,1.0\n",
-"#Create true curves\n",
-"y = odeint(SIR, t = times, y0 = [0.99, 0.01], args = tuple([[beta,gamma]]), rtol=1e-8 )\n",
-"#Observational model. Lognormal likelihood isn't appropriate, but we'll do it anyway\n",
-"yobs = np.random.lognormal(mean = np.log(y[1::]), sigma = [0.2, 0.3])\n",
+"# Create true curves\n",
+"y = odeint(SIR, t=times, y0=[0.99, 0.01], args=tuple([[beta,gamma]]), rtol=1e-8 )\n",
+"# Observational model. Lognormal likelihood isn't appropriate, but we'll do it anyway\n",
+"yobs = np.random.lognormal(mean=np.log(y[1::]), sigma=[0.2, 0.3])\n",
 "\n",
 "\n",
-"plt.plot(times[1::],yobs, marker = 'o', linestyle = 'none')\n",
-"plt.plot(times, y[:,0], color = 'C0', alpha = 0.5, label = f'$S(t)$')\n",
-"plt.plot(times, y[:,1], color = 'C1', alpha = 0.5, label = f'$I(t)$')\n",
+"plt.plot(times[1::],yobs, marker='o', linestyle='none')\n",
+"plt.plot(times, y[:,0], color='C0', alpha=0.5, label=f'$S(t)$')\n",
+"plt.plot(times, y[:,1], color ='C1', alpha=0.5, label=f'$I(t)$')\n",
 "plt.legend()"
 ]
 },
@@ -474,42 +468,33 @@
 ],
 "source": [
 "\n",
-"sir_model = DifferentialEquation(func = SIR,\n",
-" times = np.arange(0.25, 5, 0.25), \n",
-" t0 = 0,\n",
-" n_states = 2,\n",
-" n_odeparams=2)\n",
+"sir_model = DifferentialEquation(func=SIR, \n",
+" times=np.arange(0.25, 5, 0.25), \n",
+" t0=0,\n",
+" n_states=2,\n",
+" n_odeparams=2)\n",
 "\n",
 "with pm.Model() as model4:\n",
 " \n",
-" sigma = pm.HalfCauchy('sigma',1, shape = 2)\n",
+" sigma = pm.HalfCauchy('sigma',1, shape=2)\n",
 " \n",
-" #R0 is bounded below by 1 because we see an epidemic has occured\n",
-" R0 = pm.Bound(pm.Normal, lower = 1)('R0', 2,3)\n",
+" # R0 is bounded below by 1 because we see an epidemic has occured\n",
+" R0 = pm.Bound(pm.Normal, lower=1)('R0', 2,3)\n",
 " lam = pm.Lognormal('lambda',pm.math.log(2),2)\n",
 " beta = pm.Deterministic('beta', lam*R0)\n",
 "\n",
 " \n",
-" sir_curves = sir_model(odeparams = [beta, lam], y0 = [0.99, 0.01]).reshape(yobs.shape)\n",
+" sir_curves = sir_model(odeparams=[beta, lam], y0=[0.99, 0.01]).reshape(yobs.shape)\n",
 " \n",
-" Y = pm.Lognormal('Y', mu = pm.math.log(sir_curves), sd = sigma, observed = yobs)\n",
-" trace = pm.sample(2000,tune = 1000, target_accept = 0.9)\n",
+" Y = pm.Lognormal('Y', mu=pm.math.log(sir_curves), sd=sigma, observed=yobs)\n",
+" trace = pm.sample(2000,tune=1000, target_accept=0.9)\n",
 " prior = pm.sample_prior_predictive()\n",
 " posterior_predictive = pm.sample_posterior_predictive(trace)\n",
+" \n",
+" data = az.from_pymc3(trace=trace, prior = prior, posterior_predictive = posterior_predictive)\n",
 " "
 ]
 },
-{
-"cell_type": "code",
-"execution_count": 11,
-"metadata": {},
-"outputs": [],
-"source": [
-"data = az.from_pymc3(trace = trace,\n",
-" prior = prior,\n",
-" posterior_predictive = posterior_predictive)"
-]
-},
 {
 "cell_type": "code",
 "execution_count": 12,
@@ -529,7 +514,7 @@
 }
 ],
 "source": [
-"az.plot_posterior(data,round_to = 2, credible_interval=0.95);"
+"az.plot_posterior(data,round_to=2, credible_interval=0.95);"
 ]
 },
 {

docs/source/notebooks/table_of_contents_examples.js

Lines changed: 1 addition & 1 deletion
@@ -53,5 +53,5 @@ Gallery.contents = {
 "normalizing_flows_overview": "Variational Inference",
 "gaussian-mixture-model-advi": "Variational Inference",
 "GLM-hierarchical-advi-minibatch": "Variational Inference",
-"ODE_parameter_estimation": "Inference in ODE models"
+"ODE_API_parameter_estimation": "Inference in ODE models"
 }

pymc3/ode/ode.py

Lines changed: 14 additions & 10 deletions
@@ -70,16 +70,21 @@ def __init__(self, func, times, n_states, n_odeparams, t0=0):
         self._grad_op = ODEGradop(self._numpy_vsp)
 
     def _make_sens_ic(self):
-        # The sensitivity matrix will always have consistent form.
-        # If the first n_odeparams entries of the parameters vector in the simulate call
-        # correspond to ode paramaters, then the first n_odeparams columns in
-        # the sensitivity matrix will be 0
+        """The sensitivity matrix will always have consistent form.
+        If the first n_odeparams entries of the parameters vector in the simulate call
+        correspond to ode paramaters, then the first n_odeparams columns in
+        the sensitivity matrix will be 0
+
+        If the last n_states entries of the paramters vector in the simulate call
+        correspond to initial conditions of the system,
+        then the last n_states columns of the sensitivity matrix should form
+        an identity matrix
+        """
+
+        # Initialize the sensitivity matrix to be 0 everywhere
         sens_matrix = np.zeros((self._n, self._m))
 
-        # If the last n_states entrues of the paramters vector in the simulate call
-        # correspond to initial conditions of the system,
-        # then the last n_states columns of the sensitivity matrix should form
-        # an identity matrix
+        # Slip in the identity matrix in the appropirate place
         sens_matrix[:, -self.n_states :] = np.eye(self.n_states)
 
         # We need the sensitivity matrix to be a vector (see augmented_function)
@@ -89,8 +94,7 @@ def _make_sens_ic(self):
         return dydp
 
     def _system(self, Y, t, p):
-        """
-        This is the function that will be passed to odeint.
+        """This is the function that will be passed to odeint.
         Solves both ODE and sensitivities
         Args:
             Y (vector): current state and current gradient state
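As a side note, the docstring added above describes a fixed structure for the sensitivity initial condition. A standalone sketch of that construction is below; it assumes the matrix has shape `(n_states, n_odeparams + n_states)`, since the `self._n` and `self._m` attributes are set elsewhere in the class and are not shown in this hunk.

```python
import numpy as np

# Hypothetical standalone illustration of the sensitivity initial condition
# described in the docstring above (not part of the diff). Assumes the matrix
# has shape (n_states, n_odeparams + n_states).
n_states, n_odeparams = 2, 3

sens_matrix = np.zeros((n_states, n_odeparams + n_states))
# Columns for the ODE parameters stay zero; the last n_states columns
# form an identity block for the initial conditions.
sens_matrix[:, -n_states:] = np.eye(n_states)

print(sens_matrix)
# [[0. 0. 0. 1. 0.]
#  [0. 0. 0. 0. 1.]]

# Flattened to a vector, as the augmented system expects
dydp = sens_matrix.flatten()
```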
