@@ -6,11 +6,20 @@ jupytext:
     format_version: 0.13
     jupytext_version: 1.13.7
 kernelspec:
-  display_name: Python 3
+  display_name: Python 3 (ipykernel)
   language: python
   name: python3
 ---
 
+(notebook_name)=
+# Mean and Covariance Functions
+
+:::{post} Aug 31, 2021
+:tags: GP
+:category: intermediate
+:author: Bill Engels
+:::
+
 ```{code-cell} ipython3
 ---
 papermill:
@@ -44,13 +53,14 @@ papermill:
 tags: []
 ---
 RANDOM_SEED = 8927
+FIGSIZE = (10, 4)
 np.random.seed(RANDOM_SEED)
 az.style.use("arviz-darkgrid")
 ```
 
 +++ {"papermill": {"duration": 0.037844, "end_time": "2020-12-22T18:36:31.751886", "exception": false, "start_time": "2020-12-22T18:36:31.714042", "status": "completed"}, "tags": []}
 
-# Mean and Covariance Functions
+#
 
 A large set of mean and covariance functions are available in PyMC3. It is relatively easy to define custom mean and covariance functions. Since PyMC3 uses Theano, their gradients do not need to be defined by the user.
 
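As a quick illustration of the custom-kernel point in the paragraph above: a custom covariance function subclasses `pm.gp.cov.Covariance` and implements `full` and `diag` with Theano ops, and Theano supplies the gradients. The sketch below is not part of the notebook or this diff; the class name `ExpQuadToy` is invented here, and it simply re-derives the built-in `pm.gp.cov.ExpQuad` kernel to show the pattern.

```python
import numpy as np
import pymc3 as pm
import theano.tensor as tt


class ExpQuadToy(pm.gp.cov.Covariance):
    """Toy exponentiated-quadratic kernel, written by hand for illustration.

    PyMC3 already ships pm.gp.cov.ExpQuad; this only demonstrates the
    pattern: subclass Covariance, implement `diag` and `full` with Theano
    ops, and gradients come for free.
    """

    def __init__(self, input_dim, ls, active_dims=None):
        super().__init__(input_dim, active_dims)
        self.ls = tt.as_tensor_variable(ls)

    def diag(self, X):
        # k(x, x) = 1 for every input point
        X = tt.as_tensor_variable(X)
        return tt.alloc(1.0, X.shape[0])

    def full(self, X, Xs=None):
        X = tt.as_tensor_variable(X)
        Xs = X if Xs is None else tt.as_tensor_variable(Xs)
        # squared Euclidean distance between every pair of rows
        d2 = tt.sum(tt.square(X[:, None, :] - Xs[None, :, :]), axis=-1)
        return tt.exp(-0.5 * d2 / tt.square(self.ls))


# sanity check against the built-in kernel
X = np.linspace(0, 2, 5)[:, None]
K_toy = ExpQuadToy(1, ls=0.5)(X).eval()
K_builtin = pm.gp.cov.ExpQuad(1, ls=0.5)(X).eval()
assert np.allclose(K_toy, K_builtin)
```
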
@@ -204,7 +214,7 @@ cov += pm.gp.cov.WhiteNoise(1e-6)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=K.shape[0]).random(size=3).T)
 plt.title("Samples from the GP prior")
@@ -313,7 +323,7 @@ cov = pm.gp.cov.WhiteNoise(sigma)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -346,7 +356,7 @@ cov += pm.gp.cov.WhiteNoise(1e-6)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -379,7 +389,7 @@ cov = tau * pm.gp.cov.RatQuad(1, ls, alpha)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -410,7 +420,7 @@ cov = pm.gp.cov.Exponential(1, ls_inv=inverse_lengthscale)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -444,7 +454,7 @@ cov = tau * pm.gp.cov.Matern52(1, ls)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -477,7 +487,7 @@ cov = tau * pm.gp.cov.Matern32(1, ls)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -507,7 +517,7 @@ cov = tau * pm.gp.cov.Matern12(1, ls)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -540,7 +550,7 @@ cov += pm.gp.cov.WhiteNoise(1e-4)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -574,7 +584,7 @@ cov += pm.gp.cov.WhiteNoise(1e-6)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -610,7 +620,7 @@ cov += pm.gp.cov.WhiteNoise(1e-6)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -644,7 +654,7 @@ cov = pm.gp.cov.Matern32(1, 0.5) * K_cos
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -685,14 +695,14 @@ cov += pm.gp.cov.WhiteNoise(1e-6)
 X = np.linspace(0, 2, 400)[:, None]
 wf = warp_func(X.flatten(), a, b, c).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, wf)
 plt.xlabel("X")
 plt.ylabel("warp_func(X)")
 plt.title("The warping function used")
 
 K = cov(X).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -742,7 +752,7 @@ cov = pm.gp.cov.WarpedInput(1, cov_func=cov_exp, warp_func=mapping, args=(T,))
 cov += pm.gp.cov.WhiteNoise(1e-6)
 
 K = cov(X).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -772,7 +782,7 @@ cov = pm.gp.cov.Periodic(1, period=period, ls=ls)
 cov += pm.gp.cov.WhiteNoise(1e-6)
 
 K = cov(X).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 for p in np.arange(0, 2, period):
     plt.axvline(p, color="black")
@@ -822,7 +832,7 @@ tau = 4
 cov = pm.gp.cov.Circular(1, period=period, tau=tau)
 
 K = cov(X).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 for p in np.arange(0, 2, period):
     plt.axvline(p, color="black")
@@ -851,7 +861,7 @@ tau = 40
 cov = pm.gp.cov.Circular(1, period=period, tau=tau)
 
 K = cov(X).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 for p in np.arange(0, 2, period):
     plt.axvline(p, color="black")
@@ -896,14 +906,14 @@ cov = pm.gp.cov.Gibbs(1, tanh_func, args=(ls1, ls2, w, x0))
 cov += pm.gp.cov.WhiteNoise(1e-6)
 
 wf = tanh_func(X, ls1, ls2, w, x0).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, wf)
 plt.ylabel("lengthscale")
 plt.xlabel("X")
 plt.title("Lengthscale as a function of X")
 
 K = cov(X).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -950,14 +960,14 @@ cov += pm.gp.cov.WhiteNoise(1e-5)
 X = np.linspace(0, 10, 400)[:, None]
 lfunc = logistic(X.flatten(), a, b, c, d).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, lfunc)
 plt.xlabel("X")
 plt.ylabel(r"$\phi(x)$")
 plt.title("The scaling function")
 
 K = cov(X).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -1008,7 +1018,7 @@ cov = cov1 + cov2
 cov += pm.gp.cov.WhiteNoise(1e-5)
 
 X = np.linspace(0, 10, 400)
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.fill_between(
     X,
     np.zeros(400),
@@ -1026,7 +1036,7 @@ plt.ylabel(r"$\phi(x)$")
 plt.title("The two scaling functions")
 
 K = cov(X[:, None]).eval()
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -1073,7 +1083,7 @@ cov += pm.gp.cov.WhiteNoise(1e-6)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -1108,7 +1118,7 @@ cov += pm.gp.cov.WhiteNoise(1e-6)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")
@@ -1141,7 +1151,7 @@ cov += pm.gp.cov.WhiteNoise(1e-6)
 X = np.linspace(0, 2, 200)[:, None]
 K = cov(X).eval()
 
-plt.figure(figsize=(14, 4))
+plt.figure(figsize=FIGSIZE)
 plt.plot(X, pm.MvNormal.dist(mu=np.zeros(len(K)), cov=K, shape=len(K)).random(size=3).T)
 plt.title("Samples from the GP prior")
 plt.ylabel("y")