
Commit eef0646

Prevent Elemwise docstring from being appended to wrapped functions

1 parent 8b62e81 · commit eef0646

File tree

- pytensor/tensor/basic.py
- pytensor/tensor/elemwise.py
- pytensor/tensor/extra_ops.py
- pytensor/tensor/math.py
- pytensor/tensor/special.py

5 files changed (+1, −78 lines)

5 files changed

+1
-78
lines changed

pytensor/tensor/basic.py

Lines changed: 0 additions & 12 deletions
@@ -772,18 +772,6 @@ def switch(cond, ift, iff):
     iff : TensorVariable
         Values selected at `False` elements of `cond`.
 
-    Examples
-    --------
-    This example demonstrates how `switch` can be used in PyMC to model a
-    categorical variable.
-
-    .. code:: python
-
-        import pymc as pm
-
-        with pm.Model():
-            x = pm.Categorical('x', np.array([0.1, 0.9]))
-            y = pm.Bernoulli('y', p=pm.math.switch(x, 0.9, 0.1), shape=10)
     """
 
 
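This hunk only drops the PyMC-flavoured example from the `switch` docstring; the operator is unchanged. For reference, a minimal PyTensor-only sketch of the same function (the variable names and test values below are illustrative, not from the original docstring):

    import numpy as np
    import pytensor
    import pytensor.tensor as pt

    cond = pt.vector("cond")
    # Elementwise select: 1.0 where cond > 0, otherwise -1.0
    out = pt.switch(cond > 0, 1.0, -1.0)
    f = pytensor.function([cond], out)
    print(f(np.array([-2.0, 0.5, 3.0])))  # -> [-1.  1.  1.]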

pytensor/tensor/elemwise.py

Lines changed: 1 addition & 1 deletion
@@ -1704,7 +1704,7 @@ def construct(symbol):
         rval = Elemwise(scalar_op, nfunc_spec=(nfunc and (nfunc, nin, nout)))
 
         if getattr(symbol, "__doc__"):
-            rval.__doc__ = symbol.__doc__ + "\n\n    " + rval.__doc__
+            rval.__doc__ = symbol.__doc__
 
         # for the meaning of this see the ./epydoc script
         # it makes epydoc display rval as if it were a function, not an object
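The hunk above is the substantive change: the `construct` wrapper now copies the wrapped scalar symbol's docstring verbatim instead of concatenating the generic `Elemwise` docstring onto it. A minimal, self-contained sketch of that behavior (the class and function names below are hypothetical stand-ins, not PyTensor's actual code):

    class FakeElemwise:
        """Generic Elemwise boilerplate that used to get appended."""

    def construct(symbol):
        rval = FakeElemwise()
        if getattr(symbol, "__doc__"):
            # Old behavior: rval.__doc__ = symbol.__doc__ + "\n\n    " + rval.__doc__
            # New behavior: keep only the wrapped symbol's own docstring.
            rval.__doc__ = symbol.__doc__
        return rval

    def add(x, y):
        """Elementwise addition of two tensors."""

    print(construct(add).__doc__)  # -> Elementwise addition of two tensors.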

pytensor/tensor/extra_ops.py

Lines changed: 0 additions & 29 deletions
@@ -410,19 +410,6 @@ def cumsum(x, axis=None):
     axis
         The axis along which the cumulative sum is computed.
         The default (None) is to compute the cumsum over the flattened array.
-    # noqa W293
-    Example
-    -------
-    Usage in PyMC:
-
-    .. code-block:: python
-
-        with pm.Model() as model:
-            x0 = pm.Normal('x0')
-            x = pm.Normal('x', mu=0, sd=1, shape=10)
-            # Gaussian random walk
-            grw = pm.Deterministic('grw', x0 + pm.math.cumsum(x))
-
     """
     return CumOp(axis=axis, mode="add")(x)
 

@@ -439,22 +426,6 @@ def cumprod(x, axis=None):
     axis
         The axis along which the cumulative product is computed.
         The default (None) is to compute the `cumprod` over the flattened array.
-    # noqa W293
-    Example
-    -------
-    Usage in PyMC:
-
-    .. code-block:: python
-
-        import pymc as pm
-
-        with pm.Model() as model:
-            x = pm.Normal('x', shape=(10, 3))
-            # Product of x
-            prod_x = pm.Deterministic('prod_x', pm.math.cumprod(x, axis=0))
-
-    .. versionadded:: 0.7
-
     """
     return CumOp(axis=axis, mode="mul")(x)
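Both hunks above only remove PyMC usage examples from the docstrings; the operators themselves are unchanged. For reference, a minimal PyTensor-only sketch of `cumsum` and `cumprod` (illustrative values, assuming a standard pytensor/numpy install):

    import numpy as np
    import pytensor
    import pytensor.tensor as pt

    x = pt.vector("x")
    f = pytensor.function([x], [pt.cumsum(x), pt.cumprod(x)])
    sums, prods = f(np.array([1.0, 2.0, 3.0]))
    print(sums)   # -> [1. 3. 6.]
    print(prods)  # -> [1. 2. 6.]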

pytensor/tensor/math.py

Lines changed: 0 additions & 22 deletions
@@ -1945,17 +1945,6 @@ def dot(l, r):
     """Return a symbolic dot product.
 
     This is designed to work with both sparse and dense tensors types.
-
-    Example usage with PyMC:
-
-    .. code:: python
-
-        import pymc as pm
-
-        with pm.Model() as model:
-            x = pm.Normal('x', mu=0, sd=1, shape=2)
-            y = pm.Normal('y', mu=0, sd=1, shape=2)
-            z = pt.math.dot(x, y)
     """
 
 
@@ -2675,17 +2664,6 @@ def prod(
         If this is set to True, the axes which are reduced are left in
         the result as dimensions with size one. With this option, the result
         will broadcast correctly against the original tensor.
-    # noqa W293
-    Example
-    -------
-    .. code-block:: python
-
-        import pymc as pm
-
-        with pm.Model() as model:
-            n = pm.Poisson('n', 1, shape=(2, 3))
-            prod_n = pm.Deterministic('prod_n', pm.math.prod(n, axis=0))
-
     """
 
     out = Prod(
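Again, only docstring examples are removed in these two hunks. A minimal PyTensor-only sketch of `dot` and `prod` (illustrative values, not from the original docstrings):

    import numpy as np
    import pytensor
    import pytensor.tensor as pt

    x = pt.vector("x")
    y = pt.vector("y")
    f = pytensor.function([x, y], [pt.dot(x, y), pt.prod(x)])
    d, p = f(np.array([1.0, 2.0]), np.array([3.0, 4.0]))
    print(d)  # -> 11.0  (1*3 + 2*4)
    print(p)  # -> 2.0   (1*2)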

pytensor/tensor/special.py

Lines changed: 0 additions & 14 deletions
@@ -499,20 +499,6 @@ def softmax(c, axis=None):
     -------
     TensorVariable
         The softmax of the input tensor along the specified axis.
-
-    Examples
-    --------
-    In PyMC, you can use this function to compute a softmax over a vector of
-    probabilities representing the likelihood of each class in a multiclass
-    classification problem. Here is an example::
-
-        import pymc as pm
-
-        with pm.Model() as model:
-            weights = pm.Gamma('weights', 1, 1, shape=3)
-            softmax_prob = pm.math.softmax(weights)
-            outcome = pm.Categorical('outcome', p=softmax_prob)
-
     """
     c = as_tensor_variable(c)
     return Softmax(axis=axis)(c)
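As above, the softmax operator itself is untouched; only the PyMC example leaves the docstring. A minimal PyTensor-only sketch (illustrative values, assuming a standard pytensor/numpy install):

    import numpy as np
    import pytensor
    import pytensor.tensor as pt
    from pytensor.tensor.special import softmax

    logits = pt.matrix("logits")
    probs = softmax(logits, axis=-1)
    f = pytensor.function([logits], probs)
    print(f(np.array([[0.0, 1.0], [2.0, 2.0]])))  # each row sums to 1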
