提交 c3b833d2 authored 作者: Frederic's avatar Frederic

Move the new tests to the right file.

上级 48bfaf25
...@@ -1185,7 +1185,25 @@ class Test_softmax_opt: ...@@ -1185,7 +1185,25 @@ class Test_softmax_opt:
# REPEAT 3 CASES in presence of log(softmax) with the advanced indexing # REPEAT 3 CASES in presence of log(softmax) with the advanced indexing
# etc. # etc.
def test_stabilize_log_softmax():
    """Check that log(softmax(x)) is stabilized: after the
    'local_log_softmax' rewrite, no node with the original softmax's
    op class may remain in the compiled graph."""
    opt_mode = theano.compile.mode.get_default_mode().including(
        'local_log_softmax', 'specialize')

    inp = matrix()
    sm = theano.tensor.nnet.softmax(inp)
    log_sm = theano.tensor.log(sm)

    fn = function([inp], log_sm, mode=opt_mode)

    # The softmax op must have been optimized out of the graph.
    softmax_op_class = sm.owner.op.__class__
    assert all(not isinstance(apply_node.op, softmax_op_class)
               for apply_node in fn.maker.fgraph.toposort())

    # Execute once so DebugMode can compare the optimized graph's
    # output against the unoptimized one.
    rng = numpy.random.RandomState([2012, 8, 22])
    fn(numpy.cast[config.floatX](rng.randn(2, 3)))
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()
...@@ -132,26 +132,6 @@ class test_dimshuffle_lift(unittest.TestCase): ...@@ -132,26 +132,6 @@ class test_dimshuffle_lift(unittest.TestCase):
"{x,0,1}(y)), z)]"), str(g)) "{x,0,1}(y)), z)]"), str(g))
def test_stabilize_log_softmax():
    """Verify the log-softmax stabilization rewrite: compiling
    log(softmax(x)) under 'local_log_softmax' + 'specialize' must
    remove every node whose op class matches the softmax's."""
    base_mode = theano.compile.mode.get_default_mode()
    test_mode = base_mode.including('local_log_softmax', 'specialize')

    a = matrix()
    probs = theano.tensor.nnet.softmax(a)
    log_probs = theano.tensor.log(probs)
    compiled = function([a], log_probs, mode=test_mode)

    # Confirm the softmax op was optimized away.
    banned_op_class = probs.owner.op.__class__
    for apply_node in compiled.maker.fgraph.toposort():
        assert not isinstance(apply_node.op, banned_op_class)

    # Run the function once so that DebugMode can verify the
    # optimized and unoptimized versions agree.
    rng = numpy.random.RandomState([2012, 8, 22])
    compiled(numpy.cast[config.floatX](rng.randn(2, 3)))
def test_add_canonizer_problem0(): def test_add_canonizer_problem0():
n_segments = 10 n_segments = 10
label = lscalar('label') label = lscalar('label')
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论