Commit 39bda72a — authored by Ricardo Vieira, committed by Ricardo Vieira

Remove deprecation warning on softmax functions

Parent: 60bc3688
import warnings
from textwrap import dedent from textwrap import dedent
import numpy as np import numpy as np
...@@ -483,25 +482,8 @@ class Softmax(COp): ...@@ -483,25 +482,8 @@ class Softmax(COp):
return (4,) return (4,)
def softmax(c, axis=None):
    """Build the symbolic softmax of `c` along `axis`.

    Parameters
    ----------
    c : tensor-like
        Input; converted to a symbolic tensor via ``as_tensor_variable``.
        Vector (1-d) inputs are passed through unchanged — this function no
        longer promotes a vector to a row matrix, and no longer warns about it.
    axis : int or None, optional
        Axis along which the softmax is computed. ``None`` is the default and
        is forwarded verbatim to the ``Softmax`` Op — presumably normalizing
        over all elements; confirm against the ``Softmax`` Op definition.

    Returns
    -------
    TensorVariable
        The symbolic result of applying ``Softmax(axis=axis)`` to `c`.
    """
    # The former UNSET_AXIS sentinel / FutureWarning dance (axis defaulting
    # to -1 with a deprecation notice) has been removed: axis now simply
    # defaults to None.
    c = as_tensor_variable(c)
    return Softmax(axis=axis)(c)
...@@ -749,22 +731,8 @@ class LogSoftmax(COp): ...@@ -749,22 +731,8 @@ class LogSoftmax(COp):
return (1,) return (1,)
def log_softmax(c, axis=None):
    """Build the symbolic log-softmax of `c` along `axis`.

    Parameters
    ----------
    c : tensor-like
        Input; converted to a symbolic tensor via ``as_tensor_variable``.
        Vector (1-d) inputs are passed through unchanged — this function no
        longer promotes a vector to a row matrix, and no longer warns about it.
    axis : int or None, optional
        Axis along which the log-softmax is computed. ``None`` is the default
        and is forwarded verbatim to the ``LogSoftmax`` Op — presumably
        normalizing over all elements; confirm against the Op definition.

    Returns
    -------
    TensorVariable
        The symbolic result of applying ``LogSoftmax(axis=axis)`` to `c`.
    """
    # Mirrors `softmax` above: the UNSET_AXIS sentinel and the
    # FutureWarning/UserWarning emissions were removed in this commit.
    c = as_tensor_variable(c)
    return LogSoftmax(axis=axis)(c)
......
...@@ -25,7 +25,7 @@ class Mlp: ...@@ -25,7 +25,7 @@ class Mlp:
wy = shared(self.rng.normal(0, 1, (nhiddens, noutputs))) wy = shared(self.rng.normal(0, 1, (nhiddens, noutputs)))
by = shared(np.zeros(noutputs), borrow=True) by = shared(np.zeros(noutputs), borrow=True)
y = softmax(at.dot(h, wy) + by) y = softmax(at.dot(h, wy) + by, axis=-1)
self.inputs = [x] self.inputs = [x]
self.outputs = [y] self.outputs = [y]
......
...@@ -72,7 +72,7 @@ class TestLogSoftmaxRewrites: ...@@ -72,7 +72,7 @@ class TestLogSoftmaxRewrites:
""" """
x = matrix("x") x = matrix("x")
y = log(softmax(x)) y = log(softmax(x, axis=-1))
g = pytensor.tensor.grad(y.sum(), x) g = pytensor.tensor.grad(y.sum(), x)
softmax_grad_node = g.owner softmax_grad_node = g.owner
...@@ -96,7 +96,7 @@ def test_log_softmax_stabilization(): ...@@ -96,7 +96,7 @@ def test_log_softmax_stabilization():
mode = mode.including("local_log_softmax", "specialize") mode = mode.including("local_log_softmax", "specialize")
x = matrix() x = matrix()
y = softmax(x) y = softmax(x, axis=-1)
z = log(y) z = log(y)
fgraph = FunctionGraph([x], [z]) fgraph = FunctionGraph([x], [z])
......
...@@ -272,7 +272,9 @@ class TestRopLop(RopLopChecker): ...@@ -272,7 +272,9 @@ class TestRopLop(RopLopChecker):
self.check_mat_rop_lop(self.mx.sum(axis=1), (self.mat_in_shape[0],)) self.check_mat_rop_lop(self.mx.sum(axis=1), (self.mat_in_shape[0],))
def test_softmax(self):
    # Rop/Lop check for softmax; `axis=-1` is now passed explicitly since
    # the argument no longer has a warning-backed implicit default.
    self.check_rop_lop(
        pytensor.tensor.special.softmax(self.x, axis=-1), self.in_shape
    )
def test_alloc(self): def test_alloc(self):
# Alloc of the sum of x into a vector # Alloc of the sum of x into a vector
......
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Register or sign in to comment