@@ -302,15 +302,15 @@ def posterior_to_xarray(self):
                 coords=self.coords,
                 dims=self.dims,
                 attrs=self.attrs,
-                index_origin=self.index_origin,
+                # index_origin=self.index_origin,
             ),
             dict_to_dataset(
                 data_warmup,
                 library=pymc3,
                 coords=self.coords,
                 dims=self.dims,
                 attrs=self.attrs,
-                index_origin=self.index_origin,
+                # index_origin=self.index_origin,
             ),
         )

@@ -344,15 +344,15 @@ def sample_stats_to_xarray(self):
                 dims=None,
                 coords=self.coords,
                 attrs=self.attrs,
-                index_origin=self.index_origin,
+                # index_origin=self.index_origin,
             ),
             dict_to_dataset(
                 data_warmup,
                 library=pymc3,
                 dims=None,
                 coords=self.coords,
                 attrs=self.attrs,
-                index_origin=self.index_origin,
+                # index_origin=self.index_origin,
             ),
         )

@@ -385,15 +385,15 @@ def log_likelihood_to_xarray(self):
                 dims=self.dims,
                 coords=self.coords,
                 skip_event_dims=True,
-                index_origin=self.index_origin,
+                # index_origin=self.index_origin,
             ),
             dict_to_dataset(
                 data_warmup,
                 library=pymc3,
                 dims=self.dims,
                 coords=self.coords,
                 skip_event_dims=True,
-                index_origin=self.index_origin,
+                # index_origin=self.index_origin,
             ),
         )

@@ -415,7 +415,11 @@ def translate_posterior_predictive_dict_to_xarray(self, dct) -> xr.Dataset:
                     k,
                 )
         return dict_to_dataset(
-            data, library=pymc3, coords=self.coords, dims=self.dims, index_origin=self.index_origin
+            data,
+            library=pymc3,
+            coords=self.coords,
+            # dims=self.dims,
+            # index_origin=self.index_origin
         )

     @requires(["posterior_predictive"])
@@ -450,8 +454,8 @@ def priors_to_xarray(self):
                     {k: np.expand_dims(self.prior[k], 0) for k in var_names},
                     library=pymc3,
                     coords=self.coords,
-                    dims=self.dims,
-                    index_origin=self.index_origin,
+                    # dims=self.dims,
+                    # index_origin=self.index_origin,
                 )
             )
         return priors_dict
@@ -466,9 +470,9 @@ def observed_data_to_xarray(self):
             {**self.observations, **self.multi_observations},
             library=pymc3,
             coords=self.coords,
-            dims=self.dims,
-            default_dims=[],
-            index_origin=self.index_origin,
+            # dims=self.dims,
+            # default_dims=[],
+            # index_origin=self.index_origin,
         )

     @requires(["trace", "predictions"])
@@ -513,9 +517,9 @@ def is_data(name, var) -> bool:
             constant_data,
             library=pymc3,
             coords=self.coords,
-            dims=self.dims,
-            default_dims=[],
-            index_origin=self.index_origin,
+            # dims=self.dims,
+            # default_dims=[],
+            # index_origin=self.index_origin,
         )

     def to_inference_data(self):
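The keywords commented out above (index_origin, dims, default_dims) appear to be arguments that the installed dict_to_dataset does not accept. As a minimal sketch of an alternative (not part of this commit; safe_dict_to_dataset is a hypothetical helper), the converter could filter keyword arguments against the signature of the installed ArviZ function instead of commenting them out at every call site:

# Sketch only: forward just the keywords the installed dict_to_dataset accepts.
# Assumes dict_to_dataset declares its parameters explicitly (no **kwargs).
import inspect

from arviz.data.base import dict_to_dataset


def safe_dict_to_dataset(data, **kwargs):
    """Call dict_to_dataset, dropping keywords its signature does not declare."""
    accepted = inspect.signature(dict_to_dataset).parameters
    return dict_to_dataset(data, **{k: v for k, v in kwargs.items() if k in accepted})

# Usage inside the converter would mirror the original calls, e.g.:
# safe_dict_to_dataset(data, library=pymc3, coords=self.coords,
#                      dims=self.dims, index_origin=self.index_origin)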