import pytensor.tensor as pt

from pymc.distributions import Gamma, Normal
from pymc.model.fgraph import ModelObservedRV, model_observed_rv
from pytensor.graph.fg import FunctionGraph
from pytensor.graph.rewriting.basic import node_rewriter

from pymc_experimental.sampling.optimizations.optimize import posterior_optimization_db


@node_rewriter(tracks=[ModelObservedRV])
def summary_stats_normal(fgraph: FunctionGraph, node):
    """Replace an observed Normal RV by its sufficient statistics (sample mean and variance).

    This applies the equivalence (up to a normalizing constant) described in:
    https://mc-stan.org/docs/stan-users-guide/efficiency-tuning.html#exploiting-sufficient-statistics
    """
    [observed_rv] = node.outputs
    [rv, data] = node.inputs

    if not isinstance(rv.owner.op, Normal):
        return None

    # Check that the normal RV is not just a scalar
    # (the rewrite targets a vector of multiple iid observations)
    if all(rv.type.broadcastable):
        return None

    # Check that the observed RV is not used anywhere else (like a Potential or Deterministic)
    # There should be only one use: as an "output" of the fgraph
    if len(fgraph.clients[observed_rv]) > 1:
        return None

    mu, sigma = rv.owner.op.dist_params(rv.owner)

    # Check that mu and sigma are scalar RVs; the summary statistics are only
    # sufficient when all observations share the same parameters
    if not all(mu.type.broadcastable) or not all(sigma.type.broadcastable):
        return None

    # Check that mu and sigma are not used anywhere else
    # Note: This is too restrictive; it's fine if they're used in Deterministics!
    # There should only be two uses: as an "output" and as the param of the `rv`
    if len(fgraph.clients[mu]) > 2 or len(fgraph.clients[sigma]) > 2:
        return None

    # Remove any degenerate dims (e.g., from expand_dims) so mu and sigma are true scalars
    mu = mu.squeeze()
    sigma = sigma.squeeze()

    # Apply the rewrite: replace the observed vector by its sample mean and variance
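    # For iid observations y_1..y_N with shared scalar mu and sigma:
    #   mean(y) ~ Normal(mu, sigma / sqrt(N))
    #   (N - 1) * var(y, ddof=1) / sigma**2 ~ ChiSquared(N - 1),
    #     i.e. var(y, ddof=1) ~ Gamma(alpha=(N - 1) / 2, beta=(N - 1) / (2 * sigma**2))
    # The product of these two densities matches the original Normal likelihood
    # up to a constant that does not depend on mu or sigma.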
    mean_data = pt.mean(data)
    mean_data.name = None
    var_data = pt.var(data, ddof=1)
    var_data.name = None
    N = data.size
    sqrt_N = pt.sqrt(N)
    nm1_over2 = (N - 1) / 2

    observed_mean = model_observed_rv(
        Normal.dist(mu=mu, sigma=sigma / sqrt_N),
        mean_data,
    )
    observed_mean.name = f"{rv.name}_mean"

    observed_var = model_observed_rv(
        Gamma.dist(alpha=nm1_over2, beta=nm1_over2 / (sigma**2)),
        var_data,
    )
    observed_var.name = f"{rv.name}_var"

    fgraph.add_output(observed_mean, import_missing=True)
    fgraph.add_output(observed_var, import_missing=True)
    fgraph.remove_node(node)
    # Return a dummy replacement just so the rewrite shows up in the profile
    # when verbose=True; it has no effect because the node was already removed
    # from the fgraph
    return [node.out.copy()]


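# Register the rewrite in the posterior optimization database under the
# "default" and "summary_stats" tags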
posterior_optimization_db.register(
    summary_stats_normal.__name__, summary_stats_normal, "default", "summary_stats"
)
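

# Usage sketch (illustrative, not part of the module): assuming the experimental
# `opt_sample` entry point applies the rewrites registered in
# `posterior_optimization_db` before sampling, a model like the one below should
# have its observed "y" replaced by "y_mean" and "y_var" observed RVs:
#
#     import numpy as np
#     import pymc as pm
#     import pymc_experimental as pmx
#
#     data = np.random.normal(loc=1.0, scale=2.0, size=1_000)
#
#     with pm.Model():
#         mu = pm.Normal("mu")
#         sigma = pm.HalfNormal("sigma")
#         pm.Normal("y", mu=mu, sigma=sigma, observed=data)
#
#         idata = pmx.opt_sample(verbose=True)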