Skip to content

Commit 69c6ffe

Browse files
initial distribution shape refactoring
1 parent 6b47c64 commit 69c6ffe

File tree

5 files changed

+163
-111
lines changed

5 files changed

+163
-111
lines changed

pymc3/distributions/continuous.py

Lines changed: 84 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -28,15 +28,17 @@ class PositiveUnivariateContinuous(UnivariateContinuous):
2828

2929
def __init__(self, *args, **kwargs):
3030
transform = kwargs.get('transform', transforms.log)
31-
super(PositiveUnivariateContinuous, self).__init__(transform=transform, *args, **kwargs)
31+
super(PositiveUnivariateContinuous, self).__init__(transform=transform,
32+
*args, **kwargs)
3233

3334

3435
class UnitUnivariateContinuous(UnivariateContinuous):
3536
"""Base class for univariate continuous distributions in [0,1]"""
3637

3738
def __init__(self, *args, **kwargs):
3839
transform = kwargs.get('transform', transforms.logodds)
39-
super(UnitUnivariateContinuous, self).__init__(transform=transform, *args, **kwargs)
40+
super(UnitUnivariateContinuous, self).__init__(transform=transform,
41+
*args, **kwargs)
4042

4143
def assert_negative_support(var, label, distname, value=-1e-6):
4244
# Checks for evidence of positive support for a variable
@@ -123,7 +125,8 @@ class Uniform(UnivariateContinuous):
123125
Upper limit.
124126
"""
125127

126-
def __init__(self, lower=0, upper=1, transform='interval', size=None, ndim=None, dtype=None, *args, **kwargs):
128+
def __init__(self, lower=0, upper=1, transform='interval', size=None,
129+
ndim=None, dtype=None, *args, **kwargs):
127130

128131
lower = tt.as_tensor_variable(lower)
129132
upper = tt.as_tensor_variable(upper)
@@ -133,7 +136,8 @@ def __init__(self, lower=0, upper=1, transform='interval', size=None, ndim=None,
133136
self.mean = (upper + lower) / 2.
134137
self.median = self.mean
135138

136-
super(Uniform, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
139+
super(Uniform, self).__init__(self.dist_params, ndim, size, dtype,
140+
*args, **kwargs)
137141

138142
if transform == 'interval':
139143
self.transform = transforms.interval(lower, upper)
@@ -164,7 +168,8 @@ def __init__(self, ndim=None, size=None, dtype=None, *args, **kwargs):
164168
self.median = tt.as_tensor_variable(0.)
165169
self.dist_params = (self.median,)
166170

167-
super(Flat, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
171+
super(Flat, self).__init__(self.dist_params, ndim, size, dtype, *args,
172+
**kwargs)
168173

169174
def random(self, point=None, size=None, repeat=None):
170175
raise ValueError('Cannot sample from Flat distribution')
@@ -228,7 +233,8 @@ def __init__(self, *args, **kwargs):
228233
sd = kwargs.pop('sd', None)
229234
tau = kwargs.pop('tau', None)
230235

231-
def __init__(self, mu=0.0, tau=None, sd=None, ndim=None, size=None, dtype=None, *args, **kwargs):
236+
def __init__(self, mu=0.0, tau=None, sd=None, ndim=None, size=None,
237+
dtype=None, *args, **kwargs):
232238

233239
mu = tt.as_tensor_variable(mu)
234240
self.mean = self.median = self.mode = self.mu = mu
@@ -240,7 +246,8 @@ def __init__(self, mu=0.0, tau=None, sd=None, ndim=None, size=None, dtype=None,
240246

241247
self.dist_params = (self.mu, self.tau)
242248

243-
super(Normal, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
249+
super(Normal, self).__init__(self.dist_params, ndim, size, dtype,
250+
*args, **kwargs)
244251

245252
def random(self, point=None, size=None, repeat=None):
246253
mu, tau, sd = draw_values([self.mu, self.tau, self.sd],
@@ -280,7 +287,9 @@ class HalfNormal(PositiveUnivariateContinuous):
280287
tau : float
281288
Precision (tau > 0).
282289
"""
283-
def __init__(self, tau=None, sd=None, ndim=None, size=None, dtype=None, *args, **kwargs):
290+
def __init__(self, tau=None, sd=None, ndim=None, size=None, dtype=None,
291+
*args, **kwargs):
292+
284293
self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)
285294
self.mean = tt.sqrt(2 / (np.pi * self.tau))
286295
self.variance = (1. - 2 / np.pi) / self.tau
@@ -290,7 +299,8 @@ def __init__(self, tau=None, sd=None, ndim=None, size=None, dtype=None, *args, *
290299

291300
self.dist_params = (self.tau,)
292301

293-
super(HalfNormal, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
302+
super(HalfNormal, self).__init__(self.dist_params, ndim, size, dtype,
303+
*args, **kwargs)
294304

295305
def random(self, point=None, size=None, repeat=None):
296306
sd = draw_values([self.sd], point=point)
@@ -362,7 +372,8 @@ class Wald(PositiveUnivariateContinuous):
362372
The American Statistician, Vol. 30, No. 2, pp. 88-90
363373
"""
364374

365-
def __init__(self, mu=None, lam=None, phi=None, alpha=0., ndim=None, size=None, dtype=None, *args, **kwargs):
375+
def __init__(self, mu=None, lam=None, phi=None, alpha=0., ndim=None,
376+
size=None, dtype=None, *args, **kwargs):
366377

367378
self.mu, self.lam, self.phi = self.get_mu_lam_phi(mu, lam, phi)
368379
self.alpha = alpha
@@ -377,7 +388,8 @@ def __init__(self, mu=None, lam=None, phi=None, alpha=0., ndim=None, size=None,
377388

378389
self.dist_params = (self.mu, self.lam, self.alpha)
379390

380-
super(Wald, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
391+
super(Wald, self).__init__(self.dist_params, ndim, size, dtype, *args,
392+
**kwargs)
381393

382394
def get_mu_lam_phi(self, mu, lam, phi):
383395
res = None
@@ -421,13 +433,12 @@ def logp(self, value):
421433
lam = self.lam
422434
alpha = self.alpha
423435
# value *must* be iid. Otherwise this is wrong.
424-
return bound(logpow(lam / (2. * np.pi), 0.5)
425-
- logpow(value - alpha, 1.5)
426-
- (0.5 * lam / (value - alpha)
427-
* ((value - alpha - mu) / mu)**2),
436+
return bound(logpow(lam / (2. * np.pi), 0.5) -
437+
logpow(value - alpha, 1.5) -
438+
(0.5 * lam / (value - alpha) * ((value - alpha - mu) /
439+
mu)**2),
428440
# XXX these two are redundant. Please, check.
429-
value > 0, value - alpha > 0,
430-
mu > 0, lam > 0, alpha >= 0)
441+
value > 0, value - alpha > 0, mu > 0, lam > 0, alpha >= 0)
431442

432443

433444
class Beta(UnitUnivariateContinuous):
@@ -473,7 +484,8 @@ class Beta(UnitUnivariateContinuous):
473484
the binomial distribution.
474485
"""
475486

476-
def __init__(self, alpha=None, beta=None, mu=None, sd=None, ndim=None, size=None, dtype=None, *args, **kwargs):
487+
def __init__(self, alpha=None, beta=None, mu=None, sd=None, ndim=None,
488+
size=None, dtype=None, *args, **kwargs):
477489
alpha, beta = self.get_alpha_beta(alpha, beta, mu, sd)
478490
self.alpha = alpha
479491
self.beta = beta
@@ -486,7 +498,8 @@ def __init__(self, alpha=None, beta=None, mu=None, sd=None, ndim=None, size=None
486498

487499
self.dist_params = (self.alpha, self.beta)
488500

489-
super(Beta, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
501+
super(Beta, self).__init__(self.dist_params, ndim, size, dtype, *args,
502+
**kwargs)
490503

491504
def get_alpha_beta(self, alpha=None, beta=None, mu=None, sd=None):
492505
if (alpha is not None) and (beta is not None):
@@ -512,9 +525,8 @@ def logp(self, value):
512525
alpha = self.alpha
513526
beta = self.beta
514527

515-
return bound(logpow(value, alpha - 1) + logpow(1 - value, beta - 1)
516-
- betaln(alpha, beta),
517-
value >= 0, value <= 1,
528+
return bound(logpow(value, alpha - 1) + logpow(1 - value, beta - 1) -
529+
betaln(alpha, beta), value >= 0, value <= 1,
518530
alpha > 0, beta > 0)
519531

520532

@@ -548,8 +560,6 @@ def __init__(self, lam, ndim=None, size=None, dtype=None, *args, **kwargs):
548560

549561
self.variance = lam**-2
550562

551-
assert_negative_support(lam, 'lam', 'Exponential')
552-
553563
super(Exponential, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
554564

555565
def random(self, point=None, size=None, repeat=None):
@@ -596,7 +606,8 @@ def __init__(self, mu, b, ndim=None, size=None, dtype=None, *args, **kwargs):
596606

597607
self.dist_params = (self.b, self.mu)
598608

599-
super(Laplace, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
609+
super(Laplace, self).__init__(self.dist_params, ndim, size, dtype,
610+
*args, **kwargs)
600611

601612
def random(self, point=None, size=None, repeat=None):
602613
mu, b = draw_values([self.mu, self.b], point=point)
@@ -639,21 +650,22 @@ class Lognormal(PositiveUnivariateContinuous):
639650
tau : float
640651
Scale parameter (tau > 0).
641652
"""
642-
def __init__(self, mu=0, tau=1, ndim=None, size=None, dtype=None, *args, **kwargs):
643-
self.mu = mu
644-
self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)
645-
646-
self.mean = tt.exp(mu + 1. / (2 * self.tau))
647-
self.median = tt.exp(mu)
648-
self.mode = tt.exp(mu - 1. / self.tau)
649-
self.variance = (tt.exp(1. / self.tau) - 1) * tt.exp(2 * mu + 1. / self.tau)
653+
def __init__(self, mu=0, tau=1, ndim=None, size=None, dtype=None, *args,
654+
**kwargs):
655+
self.mu = tt.as_tensor_variable(mu)
656+
self.tau = tt.as_tensor_variable(tau)
657+
self.mean = tt.exp(self.mu + 1. / (2. * self.tau))
658+
self.median = tt.exp(self.mu)
659+
self.mode = tt.exp(self.mu - 1. / self.tau)
660+
self.variance = (tt.exp(1. / self.tau) - 1.) * tt.exp(2 * self.mu + 1. / self.tau)
650661

651662
assert_negative_support(tau, 'tau', 'Lognormal')
652663
assert_negative_support(sd, 'sd', 'Lognormal')
653664

654665
self.dist_params = (self.mu, self.tau)
655666

656-
super(Lognormal, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
667+
super(Lognormal, self).__init__(self.dist_params, ndim, size, dtype,
668+
*args, **kwargs)
657669

658670
def _random(self, mu, tau, size=None):
659671
samples = np.random.normal(size=size)
@@ -702,9 +714,8 @@ class StudentT(UnivariateContinuous):
702714
lam : float
703715
Scale parameter (lam > 0).
704716
"""
705-
706-
def __init__(self, nu, mu=0, lam=None, sd=None, ndim=None,
707-
size=None, dtype=None, *args, **kwargs):
717+
def __init__(self, nu, mu=0, lam=None, sd=None, ndim=None, size=None,
718+
dtype=None, *args, **kwargs):
708719
self.nu = nu = tt.as_tensor_variable(nu)
709720
self.lam, self.sd = get_tau_sd(tau=lam, sd=sd)
710721
self.mean = self.median = self.mode = self.mu = mu
@@ -767,19 +778,20 @@ class Pareto(PositiveUnivariateContinuous):
767778
def __init__(self, alpha, m, ndim=None, size=None, dtype=None, *args, **kwargs):
768779
self.alpha = tt.as_tensor_variable(alpha)
769780
self.m = tt.as_tensor_variable(m)
770-
self.mean = tt.switch(tt.gt(alpha, 1), alpha * m / (alpha - 1.), np.inf)
771-
self.median = m * 2.**(1. / alpha)
781+
self.mean = tt.switch(tt.gt(self.alpha, 1), self.alpha * self.m / (self.alpha - 1.), np.inf)
782+
self.median = self.m * 2.**(1. / self.alpha)
772783
self.variance = tt.switch(
773-
tt.gt(alpha, 2),
774-
(alpha * m**2) / ((alpha - 2.) * (alpha - 1.)**2),
784+
tt.gt(self.alpha, 2),
785+
(self.alpha * self.m**2) / ((self.alpha - 2.) * (self.alpha - 1.)**2),
775786
np.inf)
776787

777788
assert_negative_support(alpha, 'alpha', 'Pareto')
778789
assert_negative_support(m, 'm', 'Pareto')
779790

780791
self.dist_params = (self.alpha, self.m)
781792

782-
super(Pareto, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
793+
super(Pareto, self).__init__(self.dist_params, ndim, size, dtype,
794+
*args, **kwargs)
783795

784796
def _random(self, alpha, m, size=None):
785797
u = np.random.uniform(size=size)
@@ -832,7 +844,8 @@ def __init__(self, alpha, beta, ndim=None, size=None, dtype=None, *args, **kwarg
832844

833845
self.dist_params = (self.alpha, self.beta)
834846

835-
super(Cauchy, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
847+
super(Cauchy, self).__init__(self.dist_params, ndim, size, dtype,
848+
*args, **kwargs)
836849

837850
assert_negative_support(beta, 'beta', 'Cauchy')
838851

@@ -883,7 +896,8 @@ def __init__(self, beta, ndim=None, size=None, dtype=None, *args, **kwargs):
883896

884897
self.dist_params = (self.beta,)
885898

886-
super(HalfCauchy, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
899+
super(HalfCauchy, self).__init__(self.dist_params, ndim, size, dtype,
900+
*args, **kwargs)
887901

888902
assert_negative_support(beta, 'beta', 'HalfCauchy')
889903

@@ -943,20 +957,22 @@ class Gamma(PositiveUnivariateContinuous):
943957
Alternative scale parameter (sd > 0).
944958
"""
945959

946-
def __init__(self, alpha=None, beta=None, mu=None, sd=None, ndim=None, size=None, dtype=None, *args, **kwargs):
960+
def __init__(self, alpha=None, beta=None, mu=None, sd=None, ndim=None,
961+
size=None, dtype=None, *args, **kwargs):
947962
alpha, beta = self.get_alpha_beta(alpha, beta, mu, sd)
948963
self.alpha = alpha
949964
self.beta = beta
950-
self.mean = alpha / beta
951-
self.mode = tt.maximum((alpha - 1) / beta, 0)
952-
self.variance = alpha / beta**2
965+
self.mean = self.alpha / self.beta
966+
self.mode = tt.maximum((self.alpha - 1) / self.beta, 0)
967+
self.variance = self.alpha / self.beta**2
953968

954969
assert_negative_support(alpha, 'alpha', 'Gamma')
955970
assert_negative_support(beta, 'beta', 'Gamma')
956971

957972
self.dist_params = (self.alpha, self.beta)
958973

959-
super(Gamma, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
974+
super(Gamma, self).__init__(self.dist_params, ndim, size, dtype, *args,
975+
**kwargs)
960976

961977
def get_alpha_beta(self, alpha=None, beta=None, mu=None, sd=None):
962978
if (alpha is not None) and (beta is not None):
@@ -1025,6 +1041,8 @@ def __init__(self, alpha, beta=1., ndim=None, size=None, dtype=None, *args, **kw
10251041
assert_negative_support(alpha, 'alpha', 'InverseGamma')
10261042
assert_negative_support(beta, 'beta', 'InverseGamma')
10271043

1044+
self.dist_params = (self.alpha, self.beta)
1045+
10281046
super(InverseGamma, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
10291047

10301048
def _calculate_mean(self):
@@ -1035,9 +1053,6 @@ def _calculate_mean(self):
10351053
m[self.alpha <= 1] = np.inf
10361054
return m
10371055

1038-
self.dist_params = (self.alpha, self.beta)
1039-
1040-
10411056
def random(self, point=None, size=None, repeat=None):
10421057
alpha, beta = draw_values([self.alpha, self.beta],
10431058
point=point)
@@ -1048,8 +1063,8 @@ def random(self, point=None, size=None, repeat=None):
10481063
def logp(self, value):
10491064
alpha = self.alpha
10501065
beta = self.beta
1051-
return bound(logpow(beta, alpha) - gammaln(alpha) - beta / value
1052-
+ logpow(value, -alpha - 1),
1066+
return bound(logpow(beta, alpha) - gammaln(alpha) - beta / value +
1067+
logpow(value, -alpha - 1),
10531068
value > 0, alpha > 0, beta > 0)
10541069

10551070

@@ -1075,7 +1090,9 @@ class ChiSquared(Gamma):
10751090

10761091
def __init__(self, nu, *args, **kwargs):
10771092
self.nu = tt.as_tensor_variable(nu)
1078-
super(ChiSquared, self).__init__(alpha=self.nu / 2., beta=tt.as_tensor_variable(0.5), *args, **kwargs)
1093+
super(ChiSquared, self).__init__(alpha=self.nu / 2.,
1094+
beta=tt.as_tensor_variable(0.5),
1095+
*args, **kwargs)
10791096

10801097

10811098
class Weibull(PositiveUnivariateContinuous):
@@ -1114,7 +1131,8 @@ def __init__(self, alpha, beta, ndim=None, size=None, dtype=None, *args, **kwarg
11141131

11151132
self.dist_params = (self.alpha, self.beta)
11161133

1117-
super(Weibull, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
1134+
super(Weibull, self).__init__(self.dist_params, ndim, size, dtype,
1135+
*args, **kwargs)
11181136

11191137
def random(self, point=None, size=None, repeat=None):
11201138
alpha, beta = draw_values([self.alpha, self.beta],
@@ -1301,15 +1319,16 @@ def __init__(self, mu, sigma, nu, ndim=None, size=None, dtype=None, *args, **kwa
13011319
self.mu = tt.as_tensor_variable(mu)
13021320
self.sigma = tt.as_tensor_variable(sigma)
13031321
self.nu = tt.as_tensor_variable(nu)
1304-
self.mean = mu + nu
1305-
self.variance = (sigma**2) + (nu**2)
1322+
self.mean = self.mu + self.nu
1323+
self.variance = self.sigma**2 + self.nu**2
13061324

13071325
assert_negative_support(sigma, 'sigma', 'ExGaussian')
13081326
assert_negative_support(nu, 'nu', 'ExGaussian')
13091327

13101328
self.dist_params = (self.mu, self.sigma, self.nu)
13111329

1312-
super(ExGaussian, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
1330+
super(ExGaussian, self).__init__(self.dist_params, ndim, size, dtype,
1331+
*args, **kwargs)
13131332

13141333
def random(self, point=None, size=None, repeat=None):
13151334
mu, sigma, nu = draw_values([self.mu, self.sigma, self.nu],
@@ -1361,15 +1380,16 @@ class VonMises(UnivariateContinuous):
13611380
Concentration (\frac{1}{kappa} is analogous to \sigma^2).
13621381
"""
13631382

1364-
def __init__(self, mu=0.0, kappa=None, transform='circular', ndim=None, size=None, dtype=None, *args, **kwargs):
1365-
self.mean = self.median = self.mode = self.mu = tt.as_tensor_variable(
1366-
mu)
1383+
def __init__(self, mu=0.0, kappa=None, transform='circular', ndim=None,
1384+
size=None, dtype=None, *args, **kwargs):
1385+
self.mean = self.median = self.mode = self.mu = tt.as_tensor_variable(mu)
13671386
self.kappa = tt.as_tensor_variable(kappa)
1368-
self.variance = 1. - i1(kappa) / i0(kappa)
1387+
self.variance = 1. - i1(self.kappa) / i0(self.kappa)
13691388

13701389
self.dist_params = (self.mu, self.kappa)
13711390

1372-
super(VonMises, self).__init__(self.dist_params, ndim, size, dtype, *args, **kwargs)
1391+
super(VonMises, self).__init__(self.dist_params, ndim, size, dtype,
1392+
*args, **kwargs)
13731393

13741394
if transform == 'circular':
13751395
self.transform = transforms.Circular()

0 commit comments

Comments (0)