提交 162306f8 authored 作者: sebastien-j

Minor changes

上级 33f5b715
......@@ -1123,6 +1123,8 @@ class DownsampleFactorMaxGradGrad(Op):
@register_canonicalize
@gof.local_optimizer([MaxPoolGrad])
def local_average_pool_grad(node):
# To assure backward compatibility with
# DownsampleFactorMaxGrad
if (not isinstance(node.op, MaxPoolGrad) or node.op.mode not in
['sum','average_exc_pad', 'average_inc_pad']):
return False
......
......@@ -803,9 +803,9 @@ class TestDownsampleFactorMax(utt.InferShapeTester):
warn=False)
def test_opt_max_to_average(self):
im = theano.tensor.ftensor4()
maxout = theano.tensor.ftensor4()
grad = theano.tensor.ftensor4()
im = theano.tensor.tensor4()
maxout = theano.tensor.tensor4()
grad = theano.tensor.tensor4()
for mode in ['max', 'sum', 'average_inc_pad', 'average_exc_pad']:
f = theano.function([im, maxout, grad],
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论