Commit fa790f36 authored by João Victor Risso

Remove L_op methods from spatial transformer gradient Ops

Parent 8467591f
@@ -3021,16 +3021,6 @@ class GpuDnnTransformerGradI(DnnBase):
        return Apply(self, inputs, outputs)
def L_op(self, inputs, outputs, grads):
    """Symbolic gradient of GpuDnnTransformerGradI w.r.t. its inputs.

    NOTE(review): this method is the one being deleted by this commit
    ("Remove L_op methods from spatial transformer gradient Ops"); the
    math below looks incorrect — see the inline notes.
    """
    # Inputs of this Op: image, sampling grid, incoming gradient dy,
    # and the transformer descriptor (disconnected, per
    # connection_pattern).
    img, grid, dy, desc = inputs
    # Outputs of this Op: gradient w.r.t. the image and w.r.t. the grid.
    dimg_out, dgrid = outputs
    grad_cost = grads[0]
    # NOTE(review): multiplying the Op's forward output by the incoming
    # cost gradient is not a valid second-order gradient in general —
    # presumably the reason this method was removed upstream.
    dimg = dimg_out * grad_cost
    # Gradient w.r.t. dy (input index 2) is explicitly not implemented.
    d_dy = grad_not_implemented(self, dy, 2)
    # NOTE(review): dgrid is returned unscaled by grad_cost, which is
    # inconsistent with dimg above; desc gets a DisconnectedType grad,
    # matching the [0, 0] column in connection_pattern.
    return [dimg, dgrid, d_dy, DisconnectedType()()]
    def connection_pattern(self, node):
        # not connected to desc
        return [[1, 1], [1, 1], [1, 1], [0, 0]]
@@ -3063,12 +3053,6 @@ class GpuDnnTransformerGradT(DnnBase):
        return Apply(self, inputs, outputs)
def L_op(self, inputs, outputs, grads):
    """Symbolic gradient of GpuDnnTransformerGradT w.r.t. its inputs.

    Parameters follow Theano's ``L_op`` protocol: ``inputs`` and
    ``outputs`` are the lists of the Apply node's input/output
    variables, ``grads`` the gradients of the cost w.r.t. each output.

    Returns one gradient term per input: a scaled term for ``dgrid``
    and a DisconnectedType instance for the descriptor (matching
    connection_pattern's [[1], [0]]).

    NOTE(review): this method was removed upstream by this very commit
    ("Remove L_op methods from spatial transformer gradient Ops"), so
    even the fixed form below is of doubtful mathematical validity.
    """
    dgrid, desc = inputs
    grad_cost = grads[0]
    # BUG FIX: the original wrote `outputs * grad_cost`, but `outputs`
    # is a *list* of variables in the L_op protocol, so that expression
    # is list repetition / a type error rather than scaling the single
    # output. Index the sole output first, mirroring the per-output
    # handling in GpuDnnTransformerGradI.L_op.
    dtheta = outputs[0] * grad_cost
    # Descriptor input is disconnected from the cost.
    return [dtheta, DisconnectedType()()]
    def connection_pattern(self, node):
        # not connected to desc
        return [[1], [0]]
......
Markdown format
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment