提交 f9eb767b authored 作者: fvisin's avatar fvisin 提交者: Francesco Visin

Add test for the optimization of the fprop of log(softmax(x))

上级 65898f85
......@@ -223,6 +223,18 @@ class T_LogSoftmax(utt.InferShapeTester):
def f(a):
return logsoftmax_op(a)
def test_local_softmax_optimization(self):
    """Test the LogSoftmax substitution.

    Check that ``log(softmax(x))`` is substituted with the fused
    ``LogSoftmax(x)`` op by the graph optimizer. Note that only the
    forward pass is checked (i.e., the gradient is not checked).
    """
    # Only one input is needed; the original code built an unused second
    # matrix ``y`` via tensor.matrices('xy'), which has been dropped.
    x = tensor.matrix('x')
    sm = tensor.nnet.softmax(x)
    logsm = tensor.log(sm)
    f = theano.function([x], logsm)
    # After compilation, the optimizer should have replaced the
    # log(softmax(...)) subgraph with a single LogSoftmax node at the
    # output of the function graph.
    assert isinstance(f.maker.fgraph.outputs[0].owner.op,
                      theano.tensor.nnet.nnet.LogSoftmax)
class T_SoftmaxGrad(utt.InferShapeTester):
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论