提交 8b437c23 authored 作者: Frederic's avatar Frederic

Fix cudnn tests

上级 4df4c004
...@@ -121,7 +121,7 @@ def test_dnn_tag(): ...@@ -121,7 +121,7 @@ def test_dnn_tag():
[x], [x],
max_pool_2d(x, ds=(2, 2)), max_pool_2d(x, ds=(2, 2)),
mode=mode_with_gpu.including("cudnn")) mode=mode_with_gpu.including("cudnn"))
except RuntimeError, e: except (AssertionError, RuntimeError), e:
assert not cuda.dnn.dnn_available() assert not cuda.dnn.dnn_available()
raised = True raised = True
finally: finally:
......
...@@ -301,6 +301,12 @@ class test_SoftMax(unittest.TestCase): ...@@ -301,6 +301,12 @@ class test_SoftMax(unittest.TestCase):
self._cmp(2 << 15, 5, f, f_gpu) self._cmp(2 << 15, 5, f, f_gpu)
self._cmp(0, 10, f, f_gpu) self._cmp(0, 10, f, f_gpu)
def test_softmax_cudnn(self):
if not cuda.dnn.dnn_available():
raise SkipTest(cuda.dnn.dnn_available.msg)
x = T.fmatrix('x')
z = T.nnet.softmax
def check_types_with_cudnn(graph, graph_gpu): def check_types_with_cudnn(graph, graph_gpu):
self._check_types( self._check_types(
graph, graph,
...@@ -320,7 +326,7 @@ class test_SoftMax(unittest.TestCase): ...@@ -320,7 +326,7 @@ class test_SoftMax(unittest.TestCase):
check_types_with_cudnn check_types_with_cudnn
) )
def test_cudnn_softmax(self): def test_cudnn_softmax_grad(self):
if not cuda.dnn.dnn_available(): if not cuda.dnn.dnn_available():
raise SkipTest(cuda.dnn.dnn_available.msg) raise SkipTest(cuda.dnn.dnn_available.msg)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论