提交 bca9a381 authored 作者: Ricardo's avatar Ricardo 提交者: Brandon T. Willard

Move test helper softmax_graph to test module

上级 4a1010ef
......@@ -35,7 +35,6 @@ from aesara.tensor.nnet.basic import (
sigmoid_binary_crossentropy,
softmax,
softmax_grad_legacy,
softmax_graph,
softmax_legacy,
softmax_simplifier,
softmax_with_bias,
......
......@@ -1121,10 +1121,6 @@ def local_logsoftmax_grad(fgraph, node):
return [ret]
def softmax_graph(c):
    """Build the reference softmax expression over the last axis of *c*.

    Deliberately written as the raw ``exp(c) / sum(exp(c))`` pattern (no
    max-subtraction stabilization) so that graph rewrites which recognize
    this exact expression can be exercised against it.
    """
    numerator = exp(c)
    denominator = exp(c).sum(axis=-1, keepdims=True)
    return numerator / denominator
UNSET_AXIS = object()
......
......@@ -56,7 +56,7 @@ from aesara.tensor.math import all as aet_all
from aesara.tensor.math import dot, mean, sigmoid
from aesara.tensor.math import sum as aet_sum
from aesara.tensor.math import tanh
from aesara.tensor.nnet import categorical_crossentropy, softmax_graph
from aesara.tensor.nnet import categorical_crossentropy
from aesara.tensor.random.utils import RandomStream
from aesara.tensor.shape import Shape_i, reshape, shape, specify_shape
from aesara.tensor.sharedvar import SharedVariable
......@@ -81,6 +81,7 @@ from aesara.tensor.type import (
vector,
)
from tests import unittest_tools as utt
from tests.tensor.nnet.test_basic import softmax_graph
if config.mode == "FAST_COMPILE":
......
......@@ -52,7 +52,6 @@ from aesara.tensor.nnet.basic import (
sigmoid_binary_crossentropy,
softmax,
softmax_grad_legacy,
softmax_graph,
softmax_legacy,
softmax_with_bias,
softsign,
......@@ -83,6 +82,10 @@ from tests.tensor.utils import (
)
def softmax_graph(c):
    """Reference softmax over the last axis of *c*, as a raw expression.

    Kept as the unstabilized ``exp(c) / sum(exp(c))`` form on purpose:
    tests use it as the canonical pattern that the softmax graph
    rewrites are expected to detect and replace.
    """
    numerator = exp(c)
    denominator = exp(c).sum(axis=-1, keepdims=True)
    return numerator / denominator
def valid_axis_tester(Op):
with pytest.raises(TypeError):
Op(1.5)
......
Markdown 格式
0%
您已将 0 人添加到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论