提交 7dc4ad86 authored 作者: James Bergstra's avatar James Bergstra

made test of crossentropy optimizations more robust

上级 de39392d
...@@ -217,10 +217,24 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase):
# the function has 9 ops because the dimshuffle and elemwise{second} aren't getting
# cleaned up as well as we'd like.
has_cx1hot = False
has_cx1hotdx = False
has_softmax = False
has_softmaxdx = False
for node in env.toposort():
if node.op == crossentropy_softmax_argmax_1hot_with_bias:
has_cx1hot = True
if node.op == crossentropy_softmax_1hot_with_bias_dx :
has_cx1hotdx = True
if node.op == softmax:
has_softmax = True
if node.op == softmax_grad:
has_softmaxdx = True
assert has_cx1hot
assert has_cx1hotdx
assert not has_softmax
assert not has_softmaxdx
def test_argmax_pushdown():
x = tensor.dmatrix()
......
Markdown 格式
0%
您将添加 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论