
Commit 3fe07f3

Remove deprecation warning on softmax functions
1 parent: df4183d

4 files changed: +6 −37 lines changed


pytensor/tensor/special.py

Lines changed: 2 additions & 33 deletions
@@ -483,25 +483,8 @@ def c_code_cache_version():
         return (4,)
 
 
-UNSET_AXIS = object()
-
-
-def softmax(c, axis=UNSET_AXIS):
-    if axis is UNSET_AXIS:
-        warnings.warn(
-            "Softmax now accepts an axis argument. For backwards-compatibility it defaults to -1 when not specified, "
-            "but in the future the default will be `None`.\nTo suppress this warning specify axis explicitly.",
-            FutureWarning,
-        )
-        axis = -1
-
+def softmax(c, axis=None):
     c = as_tensor_variable(c)
-    if c.ndim == 1:
-        # TODO: Create Specific warning type that can be suppressed?
-        warnings.warn(
-            "Softmax no longer converts a vector to a row matrix.",
-            UserWarning,
-        )
     return Softmax(axis=axis)(c)
 
 
@@ -749,22 +732,8 @@ def c_code_cache_version():
         return (1,)
 
 
-def log_softmax(c, axis=UNSET_AXIS):
-    if axis is UNSET_AXIS:
-        warnings.warn(
-            "logsoftmax now accepts an axis argument. For backwards-compatibility it defaults to -1 when not specified, "
-            "but in the future the default will be `None`.\nTo suppress this warning specify axis explicitly.",
-            FutureWarning,
-        )
-        axis = -1
-
+def log_softmax(c, axis=None):
     c = as_tensor_variable(c)
-    if c.ndim == 1:
-        # TODO: Create Specific warning type that can be suppressed?
-        warnings.warn(
-            "Softmax no longer converts a vector to a row matrix.",
-            UserWarning,
-        )
     return LogSoftmax(axis=axis)(c)
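A minimal caller-side sketch of what the simplified signatures mean in practice (assuming a PyTensor build that includes this change; the variable names below are illustrative, not from the repository): with no axis argument, softmax now normalizes over all elements instead of silently falling back to axis=-1 with a FutureWarning.

# Sketch under the assumption that this commit is in the installed PyTensor.
import numpy as np
import pytensor
import pytensor.tensor as pt
from pytensor.tensor.special import softmax

x = pt.matrix("x")

y_all = softmax(x)           # new default axis=None: one softmax over every element
y_row = softmax(x, axis=-1)  # the old implicit default, now spelled out explicitly

f = pytensor.function([x], [y_all, y_row])
data = np.random.default_rng(0).normal(size=(3, 4)).astype(pytensor.config.floatX)
vals_all, vals_row = f(data)

print(vals_all.sum())         # ~1.0: a single distribution over all 12 entries
print(vals_row.sum(axis=-1))  # ~[1. 1. 1.]: one distribution per row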
tests/d3viz/models.py

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ def __init__(self, nfeatures=100, noutputs=10, nhiddens=50, rng=None):
 
         wy = shared(self.rng.normal(0, 1, (nhiddens, noutputs)))
         by = shared(np.zeros(noutputs), borrow=True)
-        y = softmax(at.dot(h, wy) + by)
+        y = softmax(at.dot(h, wy) + by, axis=-1)
         self.inputs = [x]
         self.outputs = [y]
tests/tensor/rewriting/test_special.py

Lines changed: 2 additions & 2 deletions
@@ -72,7 +72,7 @@ def test_logsoftmax_grad_true_div_elemwise(self):
         """
 
         x = matrix("x")
-        y = log(softmax(x))
+        y = log(softmax(x, axis=-1))
         g = pytensor.tensor.grad(y.sum(), x)
 
         softmax_grad_node = g.owner
@@ -96,7 +96,7 @@ def test_log_softmax_stabilization():
     mode = mode.including("local_log_softmax", "specialize")
 
     x = matrix()
-    y = softmax(x)
+    y = softmax(x, axis=-1)
     z = log(y)
 
     fgraph = FunctionGraph([x], [z])

tests/test_rop.py

Lines changed: 1 addition & 1 deletion
@@ -272,7 +272,7 @@ def test_sum(self):
         self.check_mat_rop_lop(self.mx.sum(axis=1), (self.mat_in_shape[0],))
 
     def test_softmax(self):
-        self.check_rop_lop(pytensor.tensor.special.softmax(self.x), self.in_shape)
+        self.check_rop_lop(pytensor.tensor.special.softmax(self.x, axis=-1), self.in_shape)
 
     def test_alloc(self):
         # Alloc of the sum of x into a vector

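The test updates above all pin axis=-1 to keep the behavior the tests were written against. As a quick sanity check, not part of the repository's test suite, the explicit axis=-1 call can be compared against a plain NumPy row-wise softmax:

# Hypothetical standalone check; np_softmax_lastaxis is a local helper,
# not a PyTensor API.
import numpy as np
import pytensor
import pytensor.tensor as pt
from pytensor.tensor.special import softmax

def np_softmax_lastaxis(a):
    # Numerically stable row-wise (last-axis) softmax in plain NumPy.
    e = np.exp(a - a.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)

x = pt.matrix("x")
f = pytensor.function([x], softmax(x, axis=-1))

a = np.random.default_rng(1).normal(size=(2, 5)).astype(pytensor.config.floatX)
assert np.allclose(f(a), np_softmax_lastaxis(a))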
0 commit comments
