提交 1f77bb9d authored 作者: Dustin Webb's avatar Dustin Webb

Added test to ensure SoftmaxGrad to DnnSoftmaxGrad is not applied when cudnn is…

Added test to ensure SoftmaxGrad to DnnSoftmaxGrad is not applied when cudnn is excluded from optimizations.
上级 b9d546a9
......@@ -383,7 +383,8 @@ class test_SoftMax(unittest.TestCase):
mode_w_cudnn, check_types_opt
)
# Verify that the SoftmaxGrad -> GpuDnnSoftmaxGrad optimization
# Verify that the SoftmaxGrad -> GpuDnnSoftmaxGrad optimization is
# applied when cudnn is required
y = T.vector('y')
f = theano.function(
[y],
......@@ -403,3 +404,26 @@ class test_SoftMax(unittest.TestCase):
i.op,
theano.tensor.nnet.SoftmaxGrad
)]) == 0)
# Make sure the SoftmaxGrad -> GpuDnnSoftmaxGrad substitution does NOT
# happen once cudnn has been excluded (or is simply unavailable).
mode_wo_cudnn = mode_with_gpu.excluding("cudnn")
y = T.vector('y')
f = theano.function(
    [y],
    T.grad(T.nnet.softmax(y).mean(), y),
    mode=mode_wo_cudnn
)
topo = f.maker.fgraph.toposort()
# The compiled graph must contain no cudnn softmax-gradient node...
num_dnn_grad = sum(
    isinstance(node.op, theano.sandbox.cuda.dnn.GpuDnnSoftmaxGrad)
    for node in topo
)
assert num_dnn_grad == 0
# ...and exactly one plain (non-cudnn) SoftmaxGrad node instead.
num_plain_grad = sum(
    isinstance(node.op, theano.tensor.nnet.SoftmaxGrad)
    for node in topo
)
assert num_plain_grad == 1
Markdown 格式
0%
您将要添加 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论