Skip to content

Commit 6af086a

Browse files
committed
Remove duplicate doc for ASVGD
1 parent 1453b85 commit 6af086a

File tree

2 files changed

+3
-23
lines changed

2 files changed

+3
-23
lines changed

pymc3/variational/inference.py

Lines changed: 1 addition & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -580,28 +580,7 @@ def __init__(self, approx=None, estimator=KSD, kernel=test_functions.rbf, **kwar
580580
)
581581

582582
def fit(self, n=10000, score=None, callbacks=None, progressbar=True,
583-
obj_n_mc=300, **kwargs):
584-
"""Perform Amortized Stein Variational Gradient Descent
585-
586-
Parameters
587-
----------
588-
n : int
589-
number of iterations
590-
score : bool
591-
evaluate loss on each iteration or not
592-
callbacks : list[function : (Approximation, losses, i) -> None]
593-
calls provided functions after each iteration step
594-
progressbar : bool
595-
whether to show progressbar or not
596-
obj_n_mc : int
597-
sample `n` particles for Stein gradient
598-
kwargs : kwargs
599-
additional kwargs for :func:`ObjectiveFunction.step_function`
600-
601-
Returns
602-
-------
603-
Approximation
604-
"""
583+
obj_n_mc=500, **kwargs):
605584
return super(ASVGD, self).fit(
606585
n=n, score=score, callbacks=callbacks,
607586
progressbar=progressbar, obj_n_mc=obj_n_mc, **kwargs)

pymc3/variational/operators.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,8 @@ class KSD(Operator):
6969
.. math::
7070
7171
x_i^{l+1} \leftarrow \epsilon_l \hat{\phi}^{*}(x_i^l) \\
72-
\hat{\phi}^{*}(x) = \frac{1}{n}\sum^{n}_{j=1}[k(x^l_j,x) \nabla_{x^l_j} logp(x^l_j)+ \nabla_{x^l_j} k(x^l_j,x)]
72+
\hat{\phi}^{*}(x) = \frac{1}{n}\sum^{n}_{j=1}[k(x^l_j,x) \nabla_{x^l_j} \log p(x^l_j)/temp +
73+
\nabla_{x^l_j} k(x^l_j,x)]
7374
7475
Parameters
7576
----------

0 commit comments

Comments
 (0)