提交 c40c1ed3 authored 作者: carriepl's avatar carriepl

Add test for opt to convert LogSoftmax to DnnLogSoftmax in cuda backend

上级 bdcb8d26
...@@ -612,6 +612,35 @@ class test_DnnSoftMax(test_nnet.test_SoftMax): ...@@ -612,6 +612,35 @@ class test_DnnSoftMax(test_nnet.test_SoftMax):
utt.assert_allclose(out, expected_out) utt.assert_allclose(out, expected_out)
def test_log_softmax2(self):
    """Check that the log-softmax optimization produces a GpuDnnSoftmax.

    Compiles two graphs — one built directly from ``LogSoftmax`` and one
    from ``log(Softmax(x))`` — and asserts that each is rewritten to a
    single ``GpuDnnSoftmax`` node running with the ``'log'`` algorithm.
    """
    # This is a test for an optimization that depends on CuDNN v3 or
    # more recent. Don't test if the CuDNN version is too old.
    if cuda.dnn.version() < (3000, 3000):
        raise SkipTest("Log-softmax is only in cudnn v3+")

    # Build the first graph (direct LogSoftmax op) and ensure that the
    # optimization is applied: exactly one GpuDnnSoftmax in 'log' mode.
    x = T.fmatrix()
    log_softmax_out = T.nnet.LogSoftmax()(x)
    f = theano.function([x], log_softmax_out, mode=mode_with_gpu)

    dnn_softmax_nodes = [n for n in f.maker.fgraph.toposort() if
                         isinstance(n.op, cuda.dnn.GpuDnnSoftmax)]
    assert len(dnn_softmax_nodes) == 1
    assert dnn_softmax_nodes[0].op.algo == "log"

    # Build the second graph (log applied to a Softmax op) and ensure
    # that the same optimization is applied.
    x = T.fmatrix()
    log_softmax_out = T.log(T.nnet.Softmax()(x))
    f = theano.function([x], log_softmax_out, mode=mode_with_gpu)

    dnn_softmax_nodes = [n for n in f.maker.fgraph.toposort() if
                         isinstance(n.op, cuda.dnn.GpuDnnSoftmax)]
    assert len(dnn_softmax_nodes) == 1
    assert dnn_softmax_nodes[0].op.algo == "log"
def test_dnn_tag(): def test_dnn_tag():
""" """
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论