@@ -5,7 +5,7 @@
 import theano
 from ..memoize import memoize
 from ..model import (
-    Model, modelcontext, FreeRV, ObservedRV, MultiObservedRV,
+    Model, modelcontext, FreeRV, ObservedRV,
     not_shared_or_constant_variable, DependenceDAG
 )
 from ..vartypes import string_types
@@ -35,12 +35,14 @@ def __new__(cls, name, *args, **kwargs):
         if isinstance(name, string_types):
             data = kwargs.pop('observed', None)
             if isinstance(data, ObservedRV) or isinstance(data, FreeRV):
-                raise TypeError("observed needs to be data but got: {}".format(type(data)))
+                raise TypeError("observed needs to be data but got: {}".
+                                format(type(data)))
             total_size = kwargs.pop('total_size', None)
             dist = cls.dist(*args, **kwargs)
             return model.Var(name, dist, data, total_size)
         else:
-            raise TypeError("Name needs to be a string but got: {}".format(name))
+            raise TypeError("Name needs to be a string but got: {}".
+                            format(name))

     def __getnewargs__(self):
         return _Unpickling,
@@ -64,12 +66,14 @@ def __init__(self, shape, dtype, testval=None, defaults=(),
         self.conditional_on = None

     def default(self):
-        return np.asarray(self.get_test_val(self.testval, self.defaults), self.dtype)
+        return np.asarray(self.get_test_val(self.testval, self.defaults),
+                          self.dtype)

     def get_test_val(self, val, defaults):
         if val is None:
             for v in defaults:
-                if hasattr(self, v) and np.all(np.isfinite(self.getattr_value(v))):
+                if (hasattr(self, v) and
+                        np.all(np.isfinite(self.getattr_value(v)))):
                     return self.getattr_value(v)
         else:
             return self.getattr_value(val)
@@ -132,7 +136,8 @@ class NoDistribution(Distribution):
     def __init__(self, shape, dtype, testval=None, defaults=(),
                  transform=None, parent_dist=None, *args, **kwargs):
         super(NoDistribution, self).__init__(shape=shape, dtype=dtype,
-                                             testval=testval, defaults=defaults,
+                                             testval=testval,
+                                             defaults=defaults,
                                              *args, **kwargs)
         self.parent_dist = parent_dist

@@ -161,7 +166,8 @@ def __init__(self, shape=(), dtype=None, defaults=('mode',),
         else:
             dtype = 'int64'
         if dtype != 'int16' and dtype != 'int64':
-            raise TypeError('Discrete classes expect dtype to be int16 or int64.')
+            raise TypeError('Discrete classes expect dtype to be int16 or '
+                            'int64.')

         if kwargs.get('transform', None) is not None:
             raise ValueError("Transformations for discrete distributions "
@@ -174,7 +180,8 @@ def __init__(self, shape=(), dtype=None, defaults=('mode',),
 class Continuous(Distribution):
     """Base class for continuous distributions"""

-    def __init__(self, shape=(), dtype=None, defaults=('median', 'mean', 'mode'),
+    def __init__(self, shape=(), dtype=None,
+                 defaults=('median', 'mean', 'mode'),
                  *args, **kwargs):
         if dtype is None:
             dtype = theano.config.floatX
@@ -195,12 +202,15 @@ class DensityDist(Distribution):
             with pm.Model():
                 mu = pm.Normal('mu',0,1)
                 normal_dist = pm.Normal.dist(mu, 1)
-                pm.DensityDist('density_dist', normal_dist.logp, observed=np.random.randn(100), random=normal_dist.random)
+                pm.DensityDist('density_dist', normal_dist.logp,
+                               observed=np.random.randn(100),
+                               random=normal_dist.random)
                 trace = pm.sample(100)

     """

-    def __init__(self, logp, shape=(), dtype=None, testval=0, random=None, *args, **kwargs):
+    def __init__(self, logp, shape=(), dtype=None, testval=0, random=None,
+                 *args, **kwargs):
         if dtype is None:
             dtype = theano.config.floatX
         super(DensityDist, self).__init__(
@@ -213,7 +223,8 @@ def random(self, *args, **kwargs):
             return self.rand(*args, **kwargs)
         else:
             raise ValueError("Distribution was not passed any random method "
-                             "Define a custom random method and pass it as kwarg random")
+                             "Define a custom random method and pass it as "
+                             "kwarg random")


 def draw_values(params, point=None, size=None, model=None):
@@ -462,6 +473,7 @@ def to_tuple(shape):
         shape = tuple(shape)
     return shape

+
 def _is_one_d(dist_shape):
     if hasattr(dist_shape, 'dshape') and dist_shape.dshape in ((), (0,), (1,)):
         return True
@@ -471,6 +483,7 @@ def _is_one_d(dist_shape):
         return True
     return False

+
 def generate_samples(generator, *args, **kwargs):
     """Generate samples from the distribution of a random variable.
