提交 b1ff3235 authored 作者: Frederic Bastien's avatar Frederic Bastien

Fix the grad of abs so that relu does not return nan in fast_compile

上级 5e5e5cc5
...@@ -2150,6 +2150,8 @@ class Abs(UnaryScalarOp): ...@@ -2150,6 +2150,8 @@ class Abs(UnaryScalarOp):
else: else:
return [x.zeros_like()] return [x.zeros_like()]
if x.dtype.startswith('float'):
return gz * sgn(x),
return gz * x / abs(x), # formula works for complex and real return gz * x / abs(x), # formula works for complex and real
def c_code(self, node, name, inputs, outputs, sub): def c_code(self, node, name, inputs, outputs, sub):
......
...@@ -453,6 +453,16 @@ def test_grad_inrange(): ...@@ -453,6 +453,16 @@ def test_grad_inrange():
utt.assert_allclose(f(7, 1, 5), [0, 0, 0]) utt.assert_allclose(f(7, 1, 5), [0, 0, 0])
def test_grad_abs():
    """Gradient of relu (built on abs) at 0 must be finite, not NaN."""
    x = theano.tensor.fscalar("a")
    activation = theano.tensor.nnet.relu(x)
    grad_expr = theano.grad(activation, x)
    grad_fn = theano.function([x], grad_expr,
                              mode=theano.Mode(optimizer=None))
    # Theano currently returns 0.5 at exactly 0, but that value is not
    # guaranteed to stay the same in the future.
    result = grad_fn(0.)
    assert result == 0.5, result
# Testing of Composite is done in tensor/tests/test_opt.py # Testing of Composite is done in tensor/tests/test_opt.py
# in test_fusion, TestCompositeCodegen # in test_fusion, TestCompositeCodegen
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论