Commit ced656fa authored by carriepl

Test the result of the theano function against a CPU reference

Parent f8bbacbd
@@ -616,13 +616,12 @@ class test_DnnSoftMax(test_nnet.test_SoftMax):
         # Test that the op LogSoftmax is correctly replaced by the op
         # DnnSoftmax with the 'log' mode.
-        # This is a test for an optimization that depends on CuDNN v3 or
-        # more recent. Don't test if the CuDNN version is too old.
-        if cuda.dnn.version() < (3000, 3000):
-            raise SkipTest("Log-softmax is only in cudnn v3+")
+        # Compile a reference function, on the CPU, to be used to validate the
+        # results of the other function.
+        x = T.fmatrix()
+        f_ref = theano.function([x], T.nnet.LogSoftmax()(x))

         # Build the first graph and ensure that the optimization is applied
-        x = T.fmatrix()
         log_softmax_out = T.nnet.LogSoftmax()(x)
         f = theano.function([x], log_softmax_out, mode=mode_with_gpu)
@@ -631,8 +630,11 @@ class test_DnnSoftMax(test_nnet.test_SoftMax):
         assert len(dnn_softmax_nodes) == 1
         assert dnn_softmax_nodes[0].op.algo == "log"

+        # Compare the output of the function with the reference function
+        inp = numpy.random.normal(0, 1, (5, 6)).astype("float32")
+        utt.assert_allclose(f(inp), f_ref(inp))
+
         # Build the first graph and ensure that the optimization is applied
-        x = T.fmatrix()
         log_softmax_out = T.log(T.nnet.Softmax()(x))
         f = theano.function([x], log_softmax_out, mode=mode_with_gpu)
@@ -641,6 +643,10 @@ class test_DnnSoftMax(test_nnet.test_SoftMax):
         assert len(dnn_softmax_nodes) == 1
         assert dnn_softmax_nodes[0].op.algo == "log"

+        # Compare the output of the function with the reference function
+        inp = numpy.random.normal(0, 1, (5, 6)).astype("float32")
+        utt.assert_allclose(f(inp), f_ref(inp))
+

 def test_dnn_tag():
     """
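The change applied in both test classes of this commit follows the same pattern: compile a plain CPU function as the numerical reference, compile the same graph with the GPU mode so the LogSoftmax-to-GpuDnnSoftmax('log') optimization can fire, and then check that both functions agree on random input. The sketch below is only an approximation of that pattern, not the test file itself: it assumes a working CUDA/cuDNN setup, and it builds mode_with_gpu from the default mode instead of importing the helper the test modules actually define.

import numpy
import theano
import theano.tensor as T
from theano.tests import unittest_tools as utt

# Assumption: mode_with_gpu is derived here from the default mode; the real
# test modules define their own equivalent helper.
mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')

x = T.fmatrix()

# Reference function, compiled for the CPU with the default mode.
f_ref = theano.function([x], T.nnet.LogSoftmax()(x))

# Same graph compiled with the GPU mode; the optimization is expected to
# replace LogSoftmax by GpuDnnSoftmax with algo='log'.
f = theano.function([x], T.nnet.LogSoftmax()(x), mode=mode_with_gpu)

# Compare the two functions on random float32 input, as the new test code does.
inp = numpy.random.normal(0, 1, (5, 6)).astype('float32')
utt.assert_allclose(f(inp), f_ref(inp))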
@@ -847,7 +847,7 @@ class test_SoftMax(test_nnet.test_SoftMax):
                                 numpy.exp(input_val).sum(1)[:, None, :, :])
         utt.assert_allclose(out, expected_out)

     def test_log_softmax2(self):
         # Test that the op LogSoftmax is correctly replaced by the op
         # DnnSoftmax with the 'log' mode.
@@ -857,8 +857,12 @@ class test_SoftMax(test_nnet.test_SoftMax):
         if dnn.version() < 3000:
             raise SkipTest("Log-softmax is only in cudnn v3+")

-        # Build the first graph and ensure that the optimization is applied
+        # Compile a reference function, on the CPU, to be used to validate the
+        # results of the other function.
         x = T.fmatrix()
+        f_ref = theano.function([x], T.nnet.LogSoftmax()(x))
+
+        # Build the first graph and ensure that the optimization is applied
         log_softmax_out = T.nnet.LogSoftmax()(x)
         f = theano.function([x], log_softmax_out, mode=mode_with_gpu)
@@ -867,8 +871,11 @@ class test_SoftMax(test_nnet.test_SoftMax):
         assert len(dnn_softmax_nodes) == 1
         assert dnn_softmax_nodes[0].op.algo == "log"

+        # Compare the output of the function with the reference function
+        inp = numpy.random.normal(0, 1, (5, 6)).astype("float32")
+        utt.assert_allclose(f(inp), f_ref(inp))
+
         # Build the first graph and ensure that the optimization is applied
-        x = T.fmatrix()
         log_softmax_out = T.log(T.nnet.Softmax()(x))
         f = theano.function([x], log_softmax_out, mode=mode_with_gpu)
@@ -876,3 +883,7 @@ class test_SoftMax(test_nnet.test_SoftMax):
                              isinstance(n.op, dnn.GpuDnnSoftmax)]
         assert len(dnn_softmax_nodes) == 1
         assert dnn_softmax_nodes[0].op.algo == "log"
+
+        # Compare the output of the function with the reference function
+        inp = numpy.random.normal(0, 1, (5, 6)).astype("float32")
+        utt.assert_allclose(f(inp), f_ref(inp))
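For context, the assertions surrounding each new comparison verify that the optimization actually fired by inspecting the compiled graph. A hedged reconstruction of that check, for the explicit log(softmax(x)) form used in the second graph, is sketched below under the same assumptions as the earlier sketch (working cuDNN, mode_with_gpu approximated from the default mode).

import theano
import theano.tensor as T
from theano.sandbox.cuda import dnn

# Assumption: same approximation of mode_with_gpu as in the previous sketch.
mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')

# The real tests skip first when cuDNN is too old, e.g.:
#     if dnn.version() < 3000:
#         raise SkipTest("Log-softmax is only in cudnn v3+")

x = T.fmatrix()
# The optimization should also catch the explicit log(softmax(x)) form.
f = theano.function([x], T.log(T.nnet.Softmax()(x)), mode=mode_with_gpu)

# Scan the optimized graph: exactly one GpuDnnSoftmax node is expected,
# running in its 'log' mode.
dnn_softmax_nodes = [n for n in f.maker.fgraph.toposort()
                     if isinstance(n.op, dnn.GpuDnnSoftmax)]
assert len(dnn_softmax_nodes) == 1
assert dnn_softmax_nodes[0].op.algo == "log"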