Commit 1b5d4fc4 authored by Frédéric Bastien, committed by GitHub

Merge pull request #5070 from JesseLivezey/softmax_dnn_grad_opt_fix

fix softmaxgrad dnn opt error fixes #5056
...@@ -3110,6 +3110,7 @@ if True: ...@@ -3110,6 +3110,7 @@ if True:
return return
ins = [] ins = []
for n in node.inputs: for n in node.inputs:
if n.owner is not None:
if isinstance(n.owner.op, HostFromGpu): if isinstance(n.owner.op, HostFromGpu):
n = n.owner.inputs[0] n = n.owner.inputs[0]
if n.ndim != 2: if n.ndim != 2:
......
...@@ -582,6 +582,19 @@ class test_DnnSoftMax(test_nnet.test_SoftMax): ...@@ -582,6 +582,19 @@ class test_DnnSoftMax(test_nnet.test_SoftMax):
if cuda.dnn.version() != (3000, 3000): if cuda.dnn.version() != (3000, 3000):
utt.verify_grad(softmax_op, [x_val2], mode=mode_with_gpu) utt.verify_grad(softmax_op, [x_val2], mode=mode_with_gpu)
def test_local_softmax_dnn_grad(self):
    """
    Regression check: compiling the gradient of a summed softmax
    applied to a reshaped (fixed-shape) tensor must not trigger an
    optimization error, and the graph must still contain a
    GpuDnnSoftmaxGrad node after optimization.
    """
    vec = T.fvector('x')
    # Reshaping gives the intermediate a fixed (5, 5) shape before the
    # softmax; this shape information is what previously broke the
    # SoftmaxGrad -> GpuDnnSoftmaxGrad optimization.
    cost = T.nnet.softmax(vec.reshape((5, 5)).flatten()).sum()
    grad = T.grad(cost, vec)
    fn = theano.function(inputs=[vec], outputs=grad, mode=self.mode)
    nodes = fn.maker.fgraph.toposort()
    assert any(isinstance(node.op, dnn.GpuDnnSoftmaxGrad)
               for node in nodes)
def test_cudnn_softmax_grad_opt(self): def test_cudnn_softmax_grad_opt(self):
# Verify that the SoftmaxGrad -> GpuDnnSoftmaxGrad optimization is # Verify that the SoftmaxGrad -> GpuDnnSoftmaxGrad optimization is
# applied when cudnn is required # applied when cudnn is required
......
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Register or sign in to comment