提交 a9255855 authored 作者: Sina Honari's avatar Sina Honari

updating the return method for the case where ds != st

上级 b94dc4a5
...@@ -236,7 +236,7 @@ class DownsampleFactorMax(Op): ...@@ -236,7 +236,7 @@ class DownsampleFactorMax(Op):
gz, = grads gz, = grads
maxout = self(x) maxout = self(x)
if self.st != self.ds: if self.st != self.ds:
return theano.gradient.grad_not_implemented() return [theano.gradient.grad_not_implemented(self, 0, x)]
return [DownsampleFactorMaxGrad(self.ds, return [DownsampleFactorMaxGrad(self.ds,
ignore_border=self.ignore_border, ignore_border=self.ignore_border,
st=self.st)( st=self.st)(
...@@ -387,7 +387,9 @@ class DownsampleFactorMaxGrad(Op): ...@@ -387,7 +387,9 @@ class DownsampleFactorMaxGrad(Op):
x, maxout, gz = inp x, maxout, gz = inp
ggx, = grads ggx, = grads
if self.st != self.ds: if self.st != self.ds:
return theano.gradient.grad_not_implemented() return [theano.gradient.grad_not_implemented(self, 0, x),
                                                                                          theano.gradient.grad_not_implemented(self, 1, maxout),
theano.gradient.grad_not_implemented(self, 2, gz)]
return [theano.tensor.zeros_like(x), return [theano.tensor.zeros_like(x),
theano.tensor.zeros_like(maxout), theano.tensor.zeros_like(maxout),
DownsampleFactorMaxGradGrad( DownsampleFactorMaxGradGrad(
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论