@@ -204,7 +204,7 @@ def test_return_inferencedata(self):
             assert len(result._groups_warmup) > 0

             # inferencedata without tuning, with idata_kwargs
-            prior = pm.sample_prior_predictive()
+            prior = pm.sample_prior_predictive(return_inferencedata=False)
             result = pm.sample(
                 **kwargs,
                 return_inferencedata=True,
@@ -472,7 +472,6 @@ def test_normal_scalar(self):
             trace = pm.sample(
                 draws=ndraws,
                 chains=nchains,
-                return_inferencedata=False,
             )

         with model:
@@ -486,43 +485,23 @@ def test_normal_scalar(self):

             # test keep_size parameter
             ppc = pm.sample_posterior_predictive(trace, keep_size=True)
-            assert ppc["a"].shape == (nchains, ndraws)
+            assert ppc.posterior_predictive["a"].shape == (1, nchains, ndraws)

             # test default case
             ppc = pm.sample_posterior_predictive(trace, var_names=["a"])
-            assert "a" in ppc
-            assert ppc["a"].shape == (nchains * ndraws,)
+            assert "a" in ppc.posterior_predictive.data_vars
+            assert ppc.posterior_predictive["a"].shape == (1, nchains * ndraws)
             # mu's standard deviation may have changed thanks to a's observed
-            _, pval = stats.kstest(ppc["a"] - trace["mu"], stats.norm(loc=0, scale=1).cdf)
+            _, pval = stats.kstest(
+                ppc.posterior_predictive["a"] - trace.posterior["mu"],
+                stats.norm(loc=0, scale=1).cdf,
+            )
             assert pval > 0.001

         # size argument not introduced to fast version [2019/08/20:rpg]
         with model:
             ppc = pm.sample_posterior_predictive(trace, size=5, var_names=["a"])
-            assert ppc["a"].shape == (nchains * ndraws, 5)
-
-    def test_normal_scalar_idata(self):
-        nchains = 2
-        ndraws = 500
-        with pm.Model() as model:
-            mu = pm.Normal("mu", 0.0, 1.0)
-            a = pm.Normal("a", mu=mu, sigma=1, observed=0.0)
-            trace = pm.sample(
-                draws=ndraws,
-                chains=nchains,
-                return_inferencedata=False,
-                discard_tuned_samples=False,
-            )
-
-        assert not isinstance(trace, InferenceData)
-
-        with model:
-            # test keep_size parameter and idata input
-            idata = pm.to_inference_data(trace)
-            assert isinstance(idata, InferenceData)
-
-            ppc = pm.sample_posterior_predictive(idata, keep_size=True)
-            assert ppc["a"].shape == (nchains, ndraws)
+            assert ppc.posterior_predictive["a"].shape == (1, nchains * ndraws, 5)

     def test_normal_vector(self, caplog):
         with pm.Model() as model:
@@ -532,25 +511,35 @@ def test_normal_vector(self, caplog):

         with model:
             # test list input
-            ppc0 = pm.sample_posterior_predictive([model.initial_point], samples=10)
-            ppc = pm.sample_posterior_predictive(trace, samples=12, var_names=[])
+            ppc0 = pm.sample_posterior_predictive(
+                [model.initial_point], return_inferencedata=False, samples=10
+            )
+            ppc = pm.sample_posterior_predictive(
+                trace, return_inferencedata=False, samples=12, var_names=[]
+            )
             assert len(ppc) == 0

             # test keep_size parameter
-            ppc = pm.sample_posterior_predictive(trace, keep_size=True)
+            ppc = pm.sample_posterior_predictive(trace, return_inferencedata=False, keep_size=True)
             assert ppc["a"].shape == (trace.nchains, len(trace), 2)
             with pytest.warns(UserWarning):
-                ppc = pm.sample_posterior_predictive(trace, samples=12, var_names=["a"])
+                ppc = pm.sample_posterior_predictive(
+                    trace, return_inferencedata=False, samples=12, var_names=["a"]
+                )
             assert "a" in ppc
             assert ppc["a"].shape == (12, 2)

             with pytest.warns(UserWarning):
-                ppc = pm.sample_posterior_predictive(trace, samples=12, var_names=["a"])
+                ppc = pm.sample_posterior_predictive(
+                    trace, return_inferencedata=False, samples=12, var_names=["a"]
+                )
             assert "a" in ppc
             assert ppc["a"].shape == (12, 2)

             # size unsupported by fast_ version argument. [2019/08/19:rpg]
-            ppc = pm.sample_posterior_predictive(trace, samples=10, var_names=["a"], size=4)
+            ppc = pm.sample_posterior_predictive(
+                trace, return_inferencedata=False, samples=10, var_names=["a"], size=4
+            )
             assert "a" in ppc
             assert ppc["a"].shape == (10, 4, 2)

@@ -567,7 +556,7 @@ def test_normal_vector_idata(self, caplog):
             idata = pm.to_inference_data(trace)
             assert isinstance(idata, InferenceData)

-            ppc = pm.sample_posterior_predictive(idata, keep_size=True)
+            ppc = pm.sample_posterior_predictive(idata, return_inferencedata=False, keep_size=True)
             assert ppc["a"].shape == (trace.nchains, len(trace), 2)

     def test_exceptions(self, caplog):
@@ -600,11 +589,15 @@ def test_vector_observed(self):
             # TODO: Assert something about the output
             # ppc = pm.sample_posterior_predictive(idata, samples=12, var_names=[])
             # assert len(ppc) == 0
-            ppc = pm.sample_posterior_predictive(idata, samples=12, var_names=["a"])
+            ppc = pm.sample_posterior_predictive(
+                idata, return_inferencedata=False, samples=12, var_names=["a"]
+            )
             assert "a" in ppc
             assert ppc["a"].shape == (12, 2)

-            ppc = pm.sample_posterior_predictive(idata, samples=10, var_names=["a"], size=4)
+            ppc = pm.sample_posterior_predictive(
+                idata, return_inferencedata=False, samples=10, var_names=["a"], size=4
+            )
             assert "a" in ppc
             assert ppc["a"].shape == (10, 4, 2)

@@ -616,9 +609,13 @@ def test_sum_normal(self):

         with model:
             # test list input
-            ppc0 = pm.sample_posterior_predictive([model.initial_point], samples=10)
+            ppc0 = pm.sample_posterior_predictive(
+                [model.initial_point], return_inferencedata=False, samples=10
+            )
             assert ppc0 == {}
-            ppc = pm.sample_posterior_predictive(idata, samples=1000, var_names=["b"])
+            ppc = pm.sample_posterior_predictive(
+                idata, return_inferencedata=False, samples=1000, var_names=["b"]
+            )
             assert len(ppc) == 1
             assert ppc["b"].shape == (1000,)
             scale = np.sqrt(1 + 0.2 ** 2)
@@ -637,7 +634,7 @@ def test_model_not_drawable_prior(self):
             with pytest.raises(NotImplementedError) as excinfo:
                 pm.sample_prior_predictive(50)
             assert "Cannot sample" in str(excinfo.value)
-            samples = pm.sample_posterior_predictive(idata, 40)
+            samples = pm.sample_posterior_predictive(idata, 40, return_inferencedata=False)
             assert samples["foo"].shape == (40, 200)

     def test_model_shared_variable(self):
@@ -660,7 +657,7 @@ def test_model_shared_variable(self):
         samples = 100
         with model:
             post_pred = pm.sample_posterior_predictive(
-                trace, samples=samples, var_names=["p", "obs"]
+                trace, return_inferencedata=False, samples=samples, var_names=["p", "obs"]
             )

         expected_p = np.array([logistic.eval({coeff: val}) for val in trace["x"][:samples]])
@@ -694,6 +691,7 @@ def test_deterministic_of_observed(self):
         rtol = 1e-5 if aesara.config.floatX == "float64" else 1e-4

         ppc = pm.sample_posterior_predictive(
+            return_inferencedata=False,
             model=model,
             trace=trace,
             samples=len(trace) * nchains,
@@ -728,6 +726,7 @@ def test_deterministic_of_observed_modified_interface(self):
             trace, varnames=[n for n in trace.varnames if n != "out"]
         ).to_dict("records")
         ppc = pm.sample_posterior_predictive(
+            return_inferencedata=False,
             model=model,
             trace=ppc_trace,
             samples=len(ppc_trace),
@@ -745,7 +744,7 @@ def test_variable_type(self):
             trace = pm.sample(compute_convergence_checks=False, return_inferencedata=False)

         with model:
-            ppc = pm.sample_posterior_predictive(trace, samples=1)
+            ppc = pm.sample_posterior_predictive(trace, return_inferencedata=False, samples=1)
             assert ppc["a"].dtype.kind == "f"
             assert ppc["b"].dtype.kind == "i"

@@ -918,7 +917,7 @@ def test_ignores_observed(self):
             positive_mu = pm.Deterministic("positive_mu", np.abs(mu))
             z = -1 - positive_mu
             pm.Normal("x_obs", mu=z, sigma=1, observed=observed_data)
-            prior = pm.sample_prior_predictive()
+            prior = pm.sample_prior_predictive(return_inferencedata=False)

         assert "observed_data" not in prior
         assert (prior["mu"] < -90).all()
@@ -932,8 +931,12 @@ def test_respects_shape(self):
         with pm.Model():
             mu = pm.Gamma("mu", 3, 1, size=1)
             goals = pm.Poisson("goals", mu, size=shape)
-            trace1 = pm.sample_prior_predictive(10, var_names=["mu", "mu", "goals"])
-            trace2 = pm.sample_prior_predictive(10, var_names=["mu", "goals"])
+            trace1 = pm.sample_prior_predictive(
+                10, return_inferencedata=False, var_names=["mu", "mu", "goals"]
+            )
+            trace2 = pm.sample_prior_predictive(
+                10, return_inferencedata=False, var_names=["mu", "goals"]
+            )
         if shape == 2:  # want to test shape as an int
             shape = (2,)
         assert trace1["goals"].shape == (10,) + shape
@@ -944,7 +947,7 @@ def test_multivariate(self):
             m = pm.Multinomial("m", n=5, p=np.array([0.25, 0.25, 0.25, 0.25]))
             trace = pm.sample_prior_predictive(10)

-        assert trace["m"].shape == (10, 4)
+        assert trace.prior["m"].shape == (1, 10, 4)

     def test_multivariate2(self):
         # Added test for issue #3271
@@ -955,8 +958,12 @@ def test_multivariate2(self):
             burned_trace = pm.sample(
                 20, tune=10, cores=1, return_inferencedata=False, compute_convergence_checks=False
             )
-        sim_priors = pm.sample_prior_predictive(samples=20, model=dm_model)
-        sim_ppc = pm.sample_posterior_predictive(burned_trace, samples=20, model=dm_model)
+        sim_priors = pm.sample_prior_predictive(
+            return_inferencedata=False, samples=20, model=dm_model
+        )
+        sim_ppc = pm.sample_posterior_predictive(
+            burned_trace, return_inferencedata=False, samples=20, model=dm_model
+        )
         assert sim_priors["probs"].shape == (20, 6)
         assert sim_priors["obs"].shape == (20,) + mn_data.shape
         assert sim_ppc["obs"].shape == (20,) + mn_data.shape
@@ -987,9 +994,9 @@ def test_transformed(self):
             y = pm.Binomial("y", n=at_bats, p=thetas, observed=hits)
             gen = pm.sample_prior_predictive(draws)

-        assert gen["phi"].shape == (draws,)
-        assert gen["y"].shape == (draws, n)
-        assert "thetas" in gen
+        assert gen.prior["phi"].shape == (1, draws)
+        assert gen.prior_predictive["y"].shape == (1, draws, n)
+        assert "thetas" in gen.prior.data_vars

     def test_shared(self):
         n1 = 10
@@ -1002,16 +1009,16 @@ def test_shared(self):
             o = pm.Deterministic("o", obs)
             gen1 = pm.sample_prior_predictive(draws)

-        assert gen1["y"].shape == (draws, n1)
-        assert gen1["o"].shape == (draws, n1)
+        assert gen1.prior["y"].shape == (1, draws, n1)
+        assert gen1.prior["o"].shape == (1, draws, n1)

         n2 = 20
         obs.set_value(np.random.rand(n2) < 0.5)
         with m:
             gen2 = pm.sample_prior_predictive(draws)

-        assert gen2["y"].shape == (draws, n2)
-        assert gen2["o"].shape == (draws, n2)
+        assert gen2.prior["y"].shape == (1, draws, n2)
+        assert gen2.prior["o"].shape == (1, draws, n2)

     def test_density_dist(self):
         obs = np.random.normal(-1, 0.1, size=10)
@@ -1025,7 +1032,7 @@ def test_density_dist(self):
                 random=lambda mu, sd, rng=None, size=None: rng.normal(loc=mu, scale=sd, size=size),
                 observed=obs,
             )
-            prior = pm.sample_prior_predictive()
+            prior = pm.sample_prior_predictive(return_inferencedata=False)

         npt.assert_almost_equal(prior["a"].mean(), 0, decimal=1)

@@ -1035,17 +1042,17 @@ def test_shape_edgecase(self):
             sd = pm.Uniform("sd", lower=2, upper=3)
             x = pm.Normal("x", mu=mu, sigma=sd, size=5)
             prior = pm.sample_prior_predictive(10)
-        assert prior["mu"].shape == (10, 5)
+        assert prior.prior["mu"].shape == (1, 10, 5)

     def test_zeroinflatedpoisson(self):
         with pm.Model():
             theta = pm.Beta("theta", alpha=1, beta=1)
             psi = pm.HalfNormal("psi", sd=1)
             pm.ZeroInflatedPoisson("suppliers", psi=psi, theta=theta, size=20)
             gen_data = pm.sample_prior_predictive(samples=5000)
-            assert gen_data["theta"].shape == (5000,)
-            assert gen_data["psi"].shape == (5000,)
-            assert gen_data["suppliers"].shape == (5000, 20)
+            assert gen_data.prior["theta"].shape == (1, 5000)
+            assert gen_data.prior["psi"].shape == (1, 5000)
+            assert gen_data.prior["suppliers"].shape == (1, 5000, 20)

     def test_potentials_warning(self):
         warning_msg = "The effect of Potentials on other parameters is ignored during"
@@ -1075,10 +1082,10 @@ def ub_interval_forward(x, ub):
             )

         # Check values are correct
-        assert np.allclose(prior["ub_log__"], np.log(prior["ub"]))
+        assert np.allclose(prior.prior["ub_log__"].data, np.log(prior.prior["ub"].data))
         assert np.allclose(
-            prior["x_interval__"],
-            ub_interval_forward(prior["x"], prior["ub"]),
+            prior.prior["x_interval__"].data,
+            ub_interval_forward(prior.prior["x"].data, prior.prior["ub"].data),
         )

         # Check that it works when the original RVs are not mentioned in var_names
@@ -1090,9 +1097,16 @@ def ub_interval_forward(x, ub):
                 var_names=["ub_log__", "x_interval__"],
                 samples=10,
             )
-        assert "ub" not in prior_transformed_only and "x" not in prior_transformed_only
-        assert np.allclose(prior["ub_log__"], prior_transformed_only["ub_log__"])
-        assert np.allclose(prior["x_interval__"], prior_transformed_only["x_interval__"])
+        assert (
+            "ub" not in prior_transformed_only.prior.data_vars
+            and "x" not in prior_transformed_only.prior.data_vars
+        )
+        assert np.allclose(
+            prior.prior["ub_log__"].data, prior_transformed_only.prior["ub_log__"].data
+        )
+        assert np.allclose(
+            prior.prior["x_interval__"], prior_transformed_only.prior["x_interval__"].data
+        )

     def test_issue_4490(self):
         # Test that samples do not depend on var_name order or, more fundamentally,
@@ -1112,27 +1126,34 @@ def test_issue_4490(self):
             d = pm.Normal("d")
             prior2 = pm.sample_prior_predictive(samples=1, var_names=["b", "a", "d", "c"])

-        assert prior1["a"] == prior2["a"]
-        assert prior1["b"] == prior2["b"]
-        assert prior1["c"] == prior2["c"]
-        assert prior1["d"] == prior2["d"]
+        assert prior1.prior["a"] == prior2.prior["a"]
+        assert prior1.prior["b"] == prior2.prior["b"]
+        assert prior1.prior["c"] == prior2.prior["c"]
+        assert prior1.prior["d"] == prior2.prior["d"]


 class TestSamplePosteriorPredictive:
     def test_point_list_arg_bug_spp(self, point_list_arg_bug_fixture):
         pmodel, trace = point_list_arg_bug_fixture
         with pmodel:
-            pp = pm.sample_posterior_predictive([trace[15]], var_names=["d"])
+            pp = pm.sample_posterior_predictive(
+                [trace[15]], return_inferencedata=False, var_names=["d"]
+            )

     def test_sample_from_xarray_prior(self, point_list_arg_bug_fixture):
         pmodel, trace = point_list_arg_bug_fixture

         with pmodel:
-            prior = pm.sample_prior_predictive(samples=20)
+            prior = pm.sample_prior_predictive(
+                samples=20,
+                return_inferencedata=False,
+            )
             idat = pm.to_inference_data(trace, prior=prior)

         with pmodel:
-            pp = pm.sample_posterior_predictive(idat.prior, var_names=["d"])
+            pp = pm.sample_posterior_predictive(
+                idat.prior, return_inferencedata=False, var_names=["d"]
+            )

     def test_sample_from_xarray_posterior(self, point_list_arg_bug_fixture):
         pmodel, trace = point_list_arg_bug_fixture