diff --git a/pymc/distributions/continuous.py b/pymc/distributions/continuous.py
index fc287045b0..0e172cc0b3 100644
--- a/pymc/distributions/continuous.py
+++ b/pymc/distributions/continuous.py
@@ -35,22 +35,21 @@
 from aesara.tensor.math import tanh
 from aesara.tensor.random.basic import (
     BetaRV,
-    CauchyRV,
-    HalfCauchyRV,
-    HalfNormalRV,
-    LogNormalRV,
-    NormalRV,
-    UniformRV,
+    cauchy,
     chisquare,
     exponential,
     gamma,
     gumbel,
+    halfcauchy,
+    halfnormal,
     invgamma,
     laplace,
     logistic,
+    lognormal,
     normal,
     pareto,
     triangular,
+    uniform,
     vonmises,
 )
 from aesara.tensor.random.op import RandomVariable
@@ -253,13 +252,6 @@ def get_tau_sigma(tau=None, sigma=None):
     return floatX(tau), floatX(sigma)
 
 
-class PyMCUniformRV(UniformRV):
-    _print_name = ("Uniform", "\\operatorname{Uniform}")
-
-
-pymc_uniform = PyMCUniformRV()
-
-
 class Uniform(BoundedContinuous):
     r"""
     Continuous uniform log-likelihood.
@@ -303,8 +295,7 @@ class Uniform(BoundedContinuous):
     upper : tensor_like of float, default 1
         Upper limit.
     """
-    rv_op = pymc_uniform
-    rv_type = UniformRV
+    rv_op = uniform
     bound_args_indices = (3, 4)  # Lower, Upper
 
     @classmethod
@@ -488,13 +479,6 @@ def logcdf(value):
         return at.switch(at.lt(value, np.inf), -np.inf, at.switch(at.eq(value, np.inf), 0, -np.inf))
 
 
-class PyMCNormalRV(NormalRV):
-    _print_name = ("Normal", "\\operatorname{Normal}")
-
-
-pymc_normal = PyMCNormalRV()
-
-
 class Normal(Continuous):
     r"""
     Univariate normal log-likelihood.
@@ -560,8 +544,7 @@ class Normal(Continuous):
         with pm.Model():
             x = pm.Normal('x', mu=0, tau=1/23)
     """
-    rv_op = pymc_normal
-    rv_type = NormalRV
+    rv_op = normal
 
     @classmethod
     def dist(cls, mu=0, sigma=None, tau=None, **kwargs):
@@ -818,13 +801,6 @@ def truncated_normal_default_transform(op, rv):
     return bounded_cont_transform(op, rv, TruncatedNormal.bound_args_indices)
 
 
-class PyMCHalfNormalRV(HalfNormalRV):
-    _print_name = ("HalfNormal", "\\operatorname{HalfNormal}")
-
-
-pymc_halfnormal = PyMCHalfNormalRV()
-
-
 class HalfNormal(PositiveContinuous):
     r"""
     Half-normal log-likelihood.
@@ -891,8 +867,7 @@ class HalfNormal(PositiveContinuous):
         with pm.Model():
            x = pm.HalfNormal('x', tau=1/15)
     """
-    rv_op = pymc_halfnormal
-    rv_type = HalfNormalRV
+    rv_op = halfnormal
 
     @classmethod
     def dist(cls, sigma=None, tau=None, *args, **kwargs):
@@ -1715,13 +1690,6 @@ def logp(value, b, kappa, mu):
     return check_parameters(res, 0 < b, 0 < kappa, msg="b > 0, kappa > 0")
 
 
-class PyMCLogNormalRV(LogNormalRV):
-    _print_name = ("LogNormal", "\\operatorname{LogNormal}")
-
-
-pymc_lognormal = PyMCLogNormalRV()
-
-
 class LogNormal(PositiveContinuous):
     r"""
     Log-normal log-likelihood.
@@ -1790,8 +1758,7 @@ class LogNormal(PositiveContinuous):
             x = pm.LogNormal('x', mu=2, tau=1/100)
 
     """
-    rv_op = pymc_lognormal
-    rv_type = LogNormalRV
+    rv_op = lognormal
 
     @classmethod
     def dist(cls, mu=0, sigma=None, tau=None, *args, **kwargs):
@@ -2082,13 +2049,6 @@ def pareto_default_transform(op, rv):
     return bounded_cont_transform(op, rv, Pareto.bound_args_indices)
 
 
-class PyMCCauchyRV(CauchyRV):
-    _print_name = ("Cauchy", "\\operatorname{Cauchy}")
-
-
-pymc_cauchy = PyMCCauchyRV()
-
-
 class Cauchy(Continuous):
     r"""
     Cauchy log-likelihood.
@@ -2135,8 +2095,7 @@ class Cauchy(Continuous):
     beta : tensor_like of float
         Scale parameter > 0.
     """
-    rv_op = pymc_cauchy
-    rv_type = CauchyRV
+    rv_op = cauchy
 
     @classmethod
     def dist(cls, alpha, beta, *args, **kwargs):
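
Context for this revert: every `PyMC*RV` class deleted in this file follows the same one-attribute pattern, subclassing the corresponding Aesara `RandomVariable` only to swap the abbreviated `_print_name` for the spelled-out one. A minimal sketch of the removed pattern (names as in the deleted `Normal` code above):

```python
from aesara.tensor.random.basic import NormalRV


class PyMCNormalRV(NormalRV):
    # Replace Aesara's abbreviated print name with the full one.
    _print_name = ("Normal", "\\operatorname{Normal}")


# A module-level singleton instance was then assigned to rv_op.
pymc_normal = PyMCNormalRV()
```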
""" - rv_op = pymc_cauchy - rv_type = CauchyRV + rv_op = cauchy @classmethod def dist(cls, alpha, beta, *args, **kwargs): @@ -2174,13 +2133,6 @@ def logcdf(value, alpha, beta): ) -class PyMCHalfCauchyRV(HalfCauchyRV): - _print_name = ("HalfCauchy", "\\operatorname{HalfCauchy}") - - -pymc_halfcauchy = PyMCHalfCauchyRV() - - class HalfCauchy(PositiveContinuous): r""" Half-Cauchy log-likelihood. @@ -2220,8 +2172,7 @@ class HalfCauchy(PositiveContinuous): beta : tensor_like of float Scale parameter (beta > 0). """ - rv_op = pymc_halfcauchy - rv_type = HalfCauchyRV + rv_op = halfcauchy @classmethod def dist(cls, beta, *args, **kwargs): @@ -3991,7 +3942,7 @@ class PolyaGammaRV(RandomVariable): ndim_supp = 0 ndims_params = [0, 0] dtype = "floatX" - _print_name = ("PolyaGamma", "\\operatorname{PolyaGamma}") + _print_name = ("PG", "\\operatorname{PG}") def __call__(self, h=1.0, z=0.0, size=None, **kwargs): return super().__call__(h, z, size=size, **kwargs) diff --git a/pymc/distributions/discrete.py b/pymc/distributions/discrete.py index 28bc69ce6d..b21e0fe7ce 100644 --- a/pymc/distributions/discrete.py +++ b/pymc/distributions/discrete.py @@ -17,16 +17,16 @@ import numpy as np from aesara.tensor.random.basic import ( - GeometricRV, - HyperGeometricRV, - NegBinomialRV, - PoissonRV, RandomVariable, ScipyRandomVariable, bernoulli, betabinom, binomial, categorical, + geometric, + hypergeometric, + nbinom, + poisson, ) from scipy import stats @@ -560,13 +560,6 @@ def logcdf(value, q, beta): return check_parameters(res, 0 < q, q < 1, 0 < beta, msg="0 < q < 1, beta > 0") -class PyMCPoissonRV(PoissonRV): - _print_name = ("Poisson", "\\operatorname{Poisson}") - - -pymc_poisson = PyMCPoissonRV() - - class Poisson(Discrete): R""" Poisson log-likelihood. @@ -612,8 +605,7 @@ class Poisson(Discrete): The Poisson distribution can be derived as a limiting case of the binomial distribution. """ - rv_op = pymc_poisson - rv_type = PoissonRV + rv_op = poisson @classmethod def dist(cls, mu, *args, **kwargs): @@ -682,13 +674,6 @@ def logcdf(value, mu): return check_parameters(res, 0 <= mu, msg="mu >= 0") -class PyMCNegativeBinomialRV(NegBinomialRV): - _print_name = ("NegBinom", "\\operatorname{NegBinom}") - - -pymc_nbinom = PyMCNegativeBinomialRV() - - class NegativeBinomial(Discrete): R""" Negative binomial log-likelihood. @@ -761,8 +746,7 @@ def NegBinom(a, m, x): n : tensor_like of float Alternative number of target success trials (n > 0) """ - rv_op = pymc_nbinom - rv_type = NegBinomialRV + rv_op = nbinom @classmethod def dist(cls, mu=None, alpha=None, p=None, n=None, *args, **kwargs): @@ -863,13 +847,6 @@ def logcdf(value, n, p): ) -class PyMCGeometricRV(GeometricRV): - _print_name = ("Geometric", "\\operatorname{Geometric}") - - -pymc_geometric = PyMCGeometricRV() - - class Geometric(Discrete): R""" Geometric log-likelihood. @@ -909,8 +886,7 @@ class Geometric(Discrete): Probability of success on an individual trial (0 < p <= 1). """ - rv_op = pymc_geometric - rv_type = GeometricRV + rv_op = geometric @classmethod def dist(cls, p, *args, **kwargs): @@ -980,13 +956,6 @@ def logcdf(value, p): ) -class PyMCHyperGeometricRV(HyperGeometricRV): - _print_name = ("HyperGeometric", "\\operatorname{HyperGeometric}") - - -pymc_hypergeometric = PyMCHyperGeometricRV() - - class HyperGeometric(Discrete): R""" Discrete hypergeometric distribution. 
diff --git a/pymc/distributions/discrete.py b/pymc/distributions/discrete.py
index 28bc69ce6d..b21e0fe7ce 100644
--- a/pymc/distributions/discrete.py
+++ b/pymc/distributions/discrete.py
@@ -17,16 +17,16 @@
 import numpy as np
 
 from aesara.tensor.random.basic import (
-    GeometricRV,
-    HyperGeometricRV,
-    NegBinomialRV,
-    PoissonRV,
     RandomVariable,
     ScipyRandomVariable,
     bernoulli,
     betabinom,
     binomial,
     categorical,
+    geometric,
+    hypergeometric,
+    nbinom,
+    poisson,
 )
 from scipy import stats
@@ -560,13 +560,6 @@ def logcdf(value, q, beta):
     return check_parameters(res, 0 < q, q < 1, 0 < beta, msg="0 < q < 1, beta > 0")
 
 
-class PyMCPoissonRV(PoissonRV):
-    _print_name = ("Poisson", "\\operatorname{Poisson}")
-
-
-pymc_poisson = PyMCPoissonRV()
-
-
 class Poisson(Discrete):
     R"""
     Poisson log-likelihood.
@@ -612,8 +605,7 @@ class Poisson(Discrete):
     The Poisson distribution can be derived as a limiting case of the
     binomial distribution.
     """
-    rv_op = pymc_poisson
-    rv_type = PoissonRV
+    rv_op = poisson
 
     @classmethod
     def dist(cls, mu, *args, **kwargs):
@@ -682,13 +674,6 @@ def logcdf(value, mu):
     return check_parameters(res, 0 <= mu, msg="mu >= 0")
 
 
-class PyMCNegativeBinomialRV(NegBinomialRV):
-    _print_name = ("NegBinom", "\\operatorname{NegBinom}")
-
-
-pymc_nbinom = PyMCNegativeBinomialRV()
-
-
 class NegativeBinomial(Discrete):
     R"""
     Negative binomial log-likelihood.
@@ -761,8 +746,7 @@ def NegBinom(a, m, x):
     n : tensor_like of float
         Alternative number of target success trials (n > 0)
     """
-    rv_op = pymc_nbinom
-    rv_type = NegBinomialRV
+    rv_op = nbinom
 
     @classmethod
     def dist(cls, mu=None, alpha=None, p=None, n=None, *args, **kwargs):
@@ -863,13 +847,6 @@ def logcdf(value, n, p):
         )
 
 
-class PyMCGeometricRV(GeometricRV):
-    _print_name = ("Geometric", "\\operatorname{Geometric}")
-
-
-pymc_geometric = PyMCGeometricRV()
-
-
 class Geometric(Discrete):
     R"""
     Geometric log-likelihood.
@@ -909,8 +886,7 @@ class Geometric(Discrete):
     p : tensor_like of float
         Probability of success on an individual trial (0 < p <= 1).
     """
-    rv_op = pymc_geometric
-    rv_type = GeometricRV
+    rv_op = geometric
 
     @classmethod
     def dist(cls, p, *args, **kwargs):
@@ -980,13 +956,6 @@ def logcdf(value, p):
         )
 
 
-class PyMCHyperGeometricRV(HyperGeometricRV):
-    _print_name = ("HyperGeometric", "\\operatorname{HyperGeometric}")
-
-
-pymc_hypergeometric = PyMCHyperGeometricRV()
-
-
 class HyperGeometric(Discrete):
     R"""
     Discrete hypergeometric distribution.
@@ -1035,8 +1004,7 @@ class HyperGeometric(Discrete):
         Number of samples drawn from the population (0 <= n <= N)
 
     """
-    rv_op = pymc_hypergeometric
-    rv_type = HyperGeometricRV
+    rv_op = hypergeometric
 
     @classmethod
     def dist(cls, N, k, n, *args, **kwargs):
diff --git a/pymc/distributions/distribution.py b/pymc/distributions/distribution.py
index 4ea762dd12..b75bcaaa74 100644
--- a/pymc/distributions/distribution.py
+++ b/pymc/distributions/distribution.py
@@ -102,9 +102,9 @@ def _random(*args, **kwargs):
             clsdict["random"] = _random
 
         rv_op = clsdict.setdefault("rv_op", None)
-        rv_type = clsdict.setdefault("rv_type", None)
+        rv_type = None
 
-        if rv_type is None and isinstance(rv_op, RandomVariable):
+        if isinstance(rv_op, RandomVariable):
             rv_type = type(rv_op)
             clsdict["rv_type"] = rv_type
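
The `distribution.py` hunk makes `rv_type` purely derived: a `Distribution` subclass now declares only `rv_op`, and the metaclass recovers the type from the Op instance. A simplified sketch of just the two changed lines (not the full `DistributionMeta.__new__`):

```python
from aesara.tensor.random.basic import normal
from aesara.tensor.random.op import RandomVariable

rv_op = normal  # what a Distribution subclass declares
rv_type = None

# rv_type is now always inferred from the op instance:
if isinstance(rv_op, RandomVariable):
    rv_type = type(rv_op)

assert rv_type.__name__ == "NormalRV"
```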
""" - rv_op = pymc_dirichlet - rv_type = DirichletRV + rv_op = dirichlet @classmethod def dist(cls, a, **kwargs): diff --git a/pymc/tests/distributions/test_logprob.py b/pymc/tests/distributions/test_logprob.py index 881bf27358..4212b4baa7 100644 --- a/pymc/tests/distributions/test_logprob.py +++ b/pymc/tests/distributions/test_logprob.py @@ -320,7 +320,7 @@ def test_ignore_logprob_basic(): new_x = ignore_logprob(x) assert new_x is not x assert isinstance(new_x.owner.op, Normal) - assert type(new_x.owner.op).__name__ == "UnmeasurablePyMCNormalRV" + assert type(new_x.owner.op).__name__ == "UnmeasurableNormalRV" # Confirm that it does not have measurable output assert get_measurable_outputs(new_x.owner.op, new_x.owner) is None diff --git a/pymc/tests/test_aesaraf.py b/pymc/tests/test_aesaraf.py index 418b802150..f579df7c69 100644 --- a/pymc/tests/test_aesaraf.py +++ b/pymc/tests/test_aesaraf.py @@ -25,7 +25,7 @@ from aeppl.logprob import ParameterValueError from aesara.compile.builders import OpFromGraph from aesara.graph.basic import Variable, equal_computations -from aesara.tensor.random.basic import NormalRV, normal, uniform +from aesara.tensor.random.basic import normal, uniform from aesara.tensor.random.op import RandomVariable from aesara.tensor.random.var import RandomStateSharedVariable from aesara.tensor.subtensor import AdvancedIncSubtensor, AdvancedIncSubtensor1 @@ -405,7 +405,7 @@ def test_rvs_to_value_vars_unvalued_rv(): res_y = res.owner.inputs[1] # Graph should have be cloned, and therefore y and res_y should have different ids assert res_y is not y - assert isinstance(res_y.owner.op, NormalRV) + assert res_y.owner.op == at.random.normal assert res_y.owner.inputs[3] is x_value diff --git a/pymc/tests/test_printing.py b/pymc/tests/test_printing.py index 50197679f7..5966a33a13 100644 --- a/pymc/tests/test_printing.py +++ b/pymc/tests/test_printing.py @@ -94,63 +94,63 @@ def setup_class(self): self.formats = [("plain", True), ("plain", False), ("latex", True), ("latex", False)] self.expected = { ("plain", True): [ - r"alpha ~ Normal(0, 10)", - r"sigma ~ HalfNormal(0, 1)", + r"alpha ~ N(0, 10)", + r"sigma ~ N**+(0, 1)", r"mu ~ Deterministic(f(beta, alpha))", - r"beta ~ Normal(0, 10)", - r"Z ~ MvNormal(f(), f())", - r"nb_with_p_n ~ NegBinom(10, nbp)", - r"zip ~ MarginalMixture(f(), DiracDelta(0), Poisson(5))", - r"w ~ Dirichlet()", + r"beta ~ N(0, 10)", + r"Z ~ N(f(), f())", + r"nb_with_p_n ~ NB(10, nbp)", + r"zip ~ MarginalMixture(f(), DiracDelta(0), Pois(5))", + r"w ~ Dir()", ( r"nested_mix ~ MarginalMixture(w, " - r"MarginalMixture(f(), DiracDelta(0), Poisson(5)), " + r"MarginalMixture(f(), DiracDelta(0), Pois(5)), " r"Censored(Bern(0.5), -1, 1))" ), - r"Y_obs ~ Normal(mu, sigma)", + r"Y_obs ~ N(mu, sigma)", r"pot ~ Potential(f(beta, alpha))", ], ("plain", False): [ - r"alpha ~ Normal", - r"sigma ~ HalfNormal", + r"alpha ~ N", + r"sigma ~ N**+", r"mu ~ Deterministic", - r"beta ~ Normal", - r"Z ~ MvNormal", - r"nb_with_p_n ~ NegBinom", + r"beta ~ N", + r"Z ~ N", + r"nb_with_p_n ~ NB", r"zip ~ MarginalMixture", - r"w ~ Dirichlet", + r"w ~ Dir", r"nested_mix ~ MarginalMixture", - r"Y_obs ~ Normal", + r"Y_obs ~ N", r"pot ~ Potential", ], ("latex", True): [ - r"$\text{alpha} \sim \operatorname{Normal}(0,~10)$", - r"$\text{sigma} \sim \operatorname{HalfNormal}(0,~1)$", + r"$\text{alpha} \sim \operatorname{N}(0,~10)$", + r"$\text{sigma} \sim \operatorname{N^{+}}(0,~1)$", r"$\text{mu} \sim \operatorname{Deterministic}(f(\text{beta},~\text{alpha}))$", - r"$\text{beta} \sim 
diff --git a/pymc/tests/test_printing.py b/pymc/tests/test_printing.py
index 50197679f7..5966a33a13 100644
--- a/pymc/tests/test_printing.py
+++ b/pymc/tests/test_printing.py
@@ -94,63 +94,63 @@ def setup_class(self):
         self.formats = [("plain", True), ("plain", False), ("latex", True), ("latex", False)]
         self.expected = {
             ("plain", True): [
-                r"alpha ~ Normal(0, 10)",
-                r"sigma ~ HalfNormal(0, 1)",
+                r"alpha ~ N(0, 10)",
+                r"sigma ~ N**+(0, 1)",
                 r"mu ~ Deterministic(f(beta, alpha))",
-                r"beta ~ Normal(0, 10)",
-                r"Z ~ MvNormal(f(), f())",
-                r"nb_with_p_n ~ NegBinom(10, nbp)",
-                r"zip ~ MarginalMixture(f(), DiracDelta(0), Poisson(5))",
-                r"w ~ Dirichlet(<constant>)",
+                r"beta ~ N(0, 10)",
+                r"Z ~ N(f(), f())",
+                r"nb_with_p_n ~ NB(10, nbp)",
+                r"zip ~ MarginalMixture(f(), DiracDelta(0), Pois(5))",
+                r"w ~ Dir(<constant>)",
                 (
                     r"nested_mix ~ MarginalMixture(w, "
-                    r"MarginalMixture(f(), DiracDelta(0), Poisson(5)), "
+                    r"MarginalMixture(f(), DiracDelta(0), Pois(5)), "
                     r"Censored(Bern(0.5), -1, 1))"
                 ),
-                r"Y_obs ~ Normal(mu, sigma)",
+                r"Y_obs ~ N(mu, sigma)",
                 r"pot ~ Potential(f(beta, alpha))",
             ],
             ("plain", False): [
-                r"alpha ~ Normal",
-                r"sigma ~ HalfNormal",
+                r"alpha ~ N",
+                r"sigma ~ N**+",
                 r"mu ~ Deterministic",
-                r"beta ~ Normal",
-                r"Z ~ MvNormal",
-                r"nb_with_p_n ~ NegBinom",
+                r"beta ~ N",
+                r"Z ~ N",
+                r"nb_with_p_n ~ NB",
                 r"zip ~ MarginalMixture",
-                r"w ~ Dirichlet",
+                r"w ~ Dir",
                 r"nested_mix ~ MarginalMixture",
-                r"Y_obs ~ Normal",
+                r"Y_obs ~ N",
                 r"pot ~ Potential",
             ],
             ("latex", True): [
-                r"$\text{alpha} \sim \operatorname{Normal}(0,~10)$",
-                r"$\text{sigma} \sim \operatorname{HalfNormal}(0,~1)$",
+                r"$\text{alpha} \sim \operatorname{N}(0,~10)$",
+                r"$\text{sigma} \sim \operatorname{N^{+}}(0,~1)$",
                 r"$\text{mu} \sim \operatorname{Deterministic}(f(\text{beta},~\text{alpha}))$",
-                r"$\text{beta} \sim \operatorname{Normal}(0,~10)$",
-                r"$\text{Z} \sim \operatorname{MvNormal}(f(),~f())$",
-                r"$\text{nb_with_p_n} \sim \operatorname{NegBinom}(10,~\text{nbp})$",
-                r"$\text{zip} \sim \operatorname{MarginalMixture}(f(),~\text{\$\operatorname{DiracDelta}(0)\$},~\text{\$\operatorname{Poisson}(5)\$})$",
-                r"$\text{w} \sim \operatorname{Dirichlet}(\text{<constant>})$",
+                r"$\text{beta} \sim \operatorname{N}(0,~10)$",
+                r"$\text{Z} \sim \operatorname{N}(f(),~f())$",
+                r"$\text{nb_with_p_n} \sim \operatorname{NB}(10,~\text{nbp})$",
+                r"$\text{zip} \sim \operatorname{MarginalMixture}(f(),~\text{\$\operatorname{DiracDelta}(0)\$},~\text{\$\operatorname{Pois}(5)\$})$",
+                r"$\text{w} \sim \operatorname{Dir}(\text{<constant>})$",
                 (
                     r"$\text{nested_mix} \sim \operatorname{MarginalMixture}(\text{w},"
-                    r"~\text{\$\operatorname{MarginalMixture}(f(),~\text{\\$\operatorname{DiracDelta}(0)\\$},~\text{\\$\operatorname{Poisson}(5)\\$})\$},"
+                    r"~\text{\$\operatorname{MarginalMixture}(f(),~\text{\\$\operatorname{DiracDelta}(0)\\$},~\text{\\$\operatorname{Pois}(5)\\$})\$},"
                     r"~\text{\$\operatorname{Censored}(\text{\\$\operatorname{Bern}(0.5)\\$},~-1,~1)\$})$"
                 ),
-                r"$\text{Y_obs} \sim \operatorname{Normal}(\text{mu},~\text{sigma})$",
+                r"$\text{Y_obs} \sim \operatorname{N}(\text{mu},~\text{sigma})$",
                 r"$\text{pot} \sim \operatorname{Potential}(f(\text{beta},~\text{alpha}))$",
             ],
             ("latex", False): [
-                r"$\text{alpha} \sim \operatorname{Normal}$",
-                r"$\text{sigma} \sim \operatorname{HalfNormal}$",
+                r"$\text{alpha} \sim \operatorname{N}$",
+                r"$\text{sigma} \sim \operatorname{N^{+}}$",
                 r"$\text{mu} \sim \operatorname{Deterministic}$",
-                r"$\text{beta} \sim \operatorname{Normal}$",
-                r"$\text{Z} \sim \operatorname{MvNormal}$",
-                r"$\text{nb_with_p_n} \sim \operatorname{NegBinom}$",
+                r"$\text{beta} \sim \operatorname{N}$",
+                r"$\text{Z} \sim \operatorname{N}$",
+                r"$\text{nb_with_p_n} \sim \operatorname{NB}$",
                 r"$\text{zip} \sim \operatorname{MarginalMixture}$",
-                r"$\text{w} \sim \operatorname{Dirichlet}$",
+                r"$\text{w} \sim \operatorname{Dir}$",
                 r"$\text{nested_mix} \sim \operatorname{MarginalMixture}$",
-                r"$\text{Y_obs} \sim \operatorname{Normal}$",
+                r"$\text{Y_obs} \sim \operatorname{N}$",
                 r"$\text{pot} \sim \operatorname{Potential}$",
             ],
         }
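
The updated expectations simply mirror the `_print_name` tuples Aesara's built-in Ops ship with. A quick sanity check (hypothetical REPL; the commented tuples are inferred from the expected strings above):

```python
from aesara.tensor.random.basic import dirichlet, halfnormal, nbinom, normal

print(normal._print_name)      # ("N", "\\operatorname{N}")
print(halfnormal._print_name)  # ("N**+", "\\operatorname{N^{+}}")
print(nbinom._print_name)      # ("NB", "\\operatorname{NB}")
print(dirichlet._print_name)   # ("Dir", "\\operatorname{Dir}")
```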