Commit 0f29e4ab authored by joschu

Merge pull request #1 from lamblin/mod_derivative

Few changes to mod derivative
@@ -1655,8 +1655,14 @@ class Mod(BinaryScalarOp):
        """) % locals()
def grad(self, inputs, output_gradients):
    """Return the gradients of ``x % y`` with respect to ``(x, y)``.

    Parameters are received positionally from Theano's gradient machinery:
    ``inputs`` is the pair ``(x, y)`` and ``output_gradients`` holds the
    single gradient ``gz`` flowing into the output.  (The original Python 2
    tuple-parameter signature ``def grad(self, (x, y), (gz, )):`` is invalid
    syntax in Python 3; unpacking inside the body is equivalent.)
    """
    x, y = inputs
    (gz,) = output_gradients
    z = self(x, y)
    if z.type.dtype in discrete_types:
        # The gradient does not flow in if the output is discrete:
        # return zero gradients, cast to the configured float dtype.
        return [x.zeros_like(dtype=theano.config.floatX),
                y.zeros_like(dtype=theano.config.floatX)]
    # x % y == x - (x // y) * y, and x // y is treated as locally constant,
    # so d/dx = 1 and d/dy = -(x // y).
    return [gz,
            -(x // y) * gz]
mod = Mod(upcast_out, name='mod')
...
@@ -876,6 +876,7 @@ ModTester = makeBroadcastTester(
        x % y, dtype=theano.scalar.basic.upcast(x.dtype, y.dtype)),
    good=copymod(_good_broadcast_div_mod_normal_float,
                 ['complex1', 'complex2']),
    grad=_grad_broadcast_div_mod_normal,
    )
@@ -885,6 +886,7 @@ ModInplaceTester = makeBroadcastTester(
        x % y, dtype=theano.scalar.basic.upcast(x.dtype, y.dtype)),
    good=copymod(_good_broadcast_div_mod_normal_float_inplace,
                 ["complex1", "complex2"]),
    grad=_grad_broadcast_div_mod_normal,
    inplace=True)
_good_broadcast_pow_normal_float = dict(same_shapes = (rand_ranged(1, 5, (2, 3)), rand_ranged(-3, 3, (2, 3))),
...
Markdown formatting is supported
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to comment