Commit cadda5db authored by Frédéric Bastien

Merge pull request #1601 from joschu/master

mod derivative
@@ -1655,8 +1655,14 @@ class Mod(BinaryScalarOp):
         """) % locals()
 
     def grad(self, (x, y), (gz, )):
-        return [x.zeros_like(dtype=theano.config.floatX),
-                y.zeros_like(dtype=theano.config.floatX)]
+        z = self(x, y)
+        if z.type.dtype in discrete_types:
+            # The gradient does not flow in if the output is discrete
+            return [x.zeros_like(dtype=theano.config.floatX),
+                    y.zeros_like(dtype=theano.config.floatX)]
+        return [gz,
+                -(x // y) * gz]
 
 mod = Mod(upcast_out, name='mod')
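
The new gradient follows from the identity x % y == x - y * (x // y): away from points where x / y is an integer, the floor term x // y is locally constant, so dz/dx = 1 and dz/dy = -(x // y), which is exactly what grad now returns, scaled by gz. A minimal NumPy sketch, independent of the patch, checking these partials by central finite differences (the point 5.3, 2.1 is an arbitrary choice away from the discontinuity):

import numpy as np

x, y, eps = 5.3, 2.1, 1e-6  # sample point where x / y is not an integer

# Central finite-difference estimates of the partials of z = x % y.
dz_dx = ((x + eps) % y - (x - eps) % y) / (2 * eps)
dz_dy = (x % (y + eps) - x % (y - eps)) / (2 * eps)

assert np.isclose(dz_dx, 1.0, atol=1e-4)        # matches the gz term
assert np.isclose(dz_dy, -(x // y), atol=1e-4)  # matches -(x // y) * gz

At points where x / y is an integer the derivative is undefined (x % y jumps by y there), so finite-difference gradient tests must avoid such inputs.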
@@ -883,6 +883,7 @@ ModTester = makeBroadcastTester(
         x % y, dtype=theano.scalar.basic.upcast(x.dtype, y.dtype)),
     good=copymod(_good_broadcast_div_mod_normal_float,
                  ['complex1', 'complex2']),
+    grad=_grad_broadcast_div_mod_normal,
     )
@@ -892,6 +893,7 @@ ModInplaceTester = makeBroadcastTester(
         x % y, dtype=theano.scalar.basic.upcast(x.dtype, y.dtype)),
     good=copymod(_good_broadcast_div_mod_normal_float_inplace,
                  ["complex1", "complex2"]),
+    grad=_grad_broadcast_div_mod_normal,
     inplace=True)
 
 _good_broadcast_pow_normal_float = dict(same_shapes = (rand_ranged(1, 5, (2, 3)), rand_ranged(-3, 3, (2, 3))),
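
For illustration, a hedged usage sketch (not part of the patch, and assuming a Theano installation from this era): after the change, theano.grad differentiates through % on float inputs instead of returning zeros.

import theano
import theano.tensor as T

x = T.dscalar('x')
y = T.dscalar('y')
z = x % y  # builds a graph using the scalar Mod op patched above

gx, gy = theano.grad(z, [x, y])
f = theano.function([x, y], [gx, gy])

print(f(5.3, 2.1))  # expected: [1.0, -2.0], i.e. [1, -(x // y)]

For integer inputs the output dtype is discrete, so grad still returns zeros via the discrete_types branch above.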