Skip to content

Commit 75ea232

Browse files
Add docstrings to various logp related methods
1 parent a1e30eb commit 75ea232

File tree

2 files changed

+61
-4
lines changed

2 files changed

+61
-4
lines changed

pymc/model.py

Lines changed: 61 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -651,23 +651,53 @@ def compile_logp(
651651
jacobian: bool = True,
652652
sum: bool = True,
653653
):
654-
"""Compiled log probability density function"""
654+
"""Compiled log probability density function.
655+
656+
Parameters
657+
----------
658+
vars: list of random variables or potential terms, optional
659+
Compute the log probability of those variables. If None, use all
660+
free and observed random variables, as well as potential terms in model.
661+
jacobian:
662+
Whether to include jacobian terms in logprob graph. Defaults to True.
663+
sum:
664+
Whether to sum all logp terms or return elemwise logp for each variable.
665+
Defaults to True.
666+
"""
655667
return self.model.compile_fn(self.logpt(vars=vars, jacobian=jacobian, sum=sum))
656668

657669
def compile_dlogp(
658670
self,
659671
vars: Optional[Union[Variable, Sequence[Variable]]] = None,
660672
jacobian: bool = True,
661673
):
662-
"""Compiled log probability density gradient function"""
674+
"""Compiled log probability density gradient function.
675+
676+
Parameters
677+
----------
678+
vars: list of random variables or potential terms, optional
679+
Compute the gradient with respect to those variables. If None, use all
680+
free and observed random variables, as well as potential terms in model.
681+
jacobian:
682+
Whether to include jacobian terms in logprob graph. Defaults to True.
683+
"""
663684
return self.model.compile_fn(self.dlogpt(vars=vars, jacobian=jacobian))
664685

665686
def compile_d2logp(
666687
self,
667688
vars: Optional[Union[Variable, Sequence[Variable]]] = None,
668689
jacobian: bool = True,
669690
):
670-
"""Compiled log probability density hessian function"""
691+
"""Compiled log probability density hessian function.
692+
693+
Parameters
694+
----------
695+
vars: list of random variables or potential terms, optional
696+
Compute the hessian with respect to those variables. If None, use all
697+
free and observed random variables, as well as potential terms in model.
698+
jacobian:
699+
Whether to include jacobian terms in logprob graph. Defaults to True.
700+
"""
671701
return self.model.compile_fn(self.d2logpt(vars=vars, jacobian=jacobian))
672702

673703
def logpt(
@@ -747,6 +777,20 @@ def dlogpt(
747777
vars: Optional[Union[Variable, Sequence[Variable]]] = None,
748778
jacobian: bool = True,
749779
) -> Variable:
780+
"""Gradient of the model's log-probability w.r.t. ``vars``.
781+
782+
Parameters
783+
----------
784+
vars: list of random variables or potential terms, optional
785+
Compute the gradient with respect to those variables. If None, use all
786+
free and observed random variables, as well as potential terms in model.
787+
jacobian:
788+
Whether to include jacobian terms in logprob graph. Defaults to True.
789+
790+
Returns
791+
-------
792+
dlogp graph
793+
"""
750794
if vars is None:
751795
value_vars = None
752796
else:
@@ -771,6 +815,20 @@ def d2logpt(
771815
vars: Optional[Union[Variable, Sequence[Variable]]] = None,
772816
jacobian: bool = True,
773817
) -> Variable:
818+
"""Hessian of the model's log-probability w.r.t. ``vars``.
819+
820+
Parameters
821+
----------
822+
vars: list of random variables or potential terms, optional
823+
Compute the hessian with respect to those variables. If None, use all
824+
free and observed random variables, as well as potential terms in model.
825+
jacobian:
826+
Whether to include jacobian terms in logprob graph. Defaults to True.
827+
828+
Returns
829+
-------
830+
d²logp graph
831+
"""
774832
if vars is None:
775833
value_vars = None
776834
else:

pymc/tests/test_shared.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,6 @@ def test_deterministic(self):
2727
data_values = np.array([0.5, 0.4, 5, 2])
2828
X = aesara.shared(np.asarray(data_values, dtype=aesara.config.floatX), borrow=True)
2929
pm.Normal("y", 0, 1, observed=X)
30-
# TODO: This should assert something
3130
assert np.all(
3231
np.isclose(model.compile_logp(sum=False)({}), st.norm().logpdf(data_values))
3332
)

0 commit comments

Comments
 (0)