Commit 60d25721 authored by Frédéric Bastien, committed by GitHub

Merge pull request #5399 from gokul-uf/add_grad_scale

Complete Add grad_scale Op
......@@ -151,6 +151,8 @@ class DisconnectedType(theano.gof.type.Type):
def __str__(self):
    """Return the human-readable name of this type (used in printouts)."""
    return 'DisconnectedType'
# Module-level singleton instance; presumably shared wherever a
# DisconnectedType is needed (the type carries no state) — TODO confirm.
disconnected_type = DisconnectedType()
......@@ -1749,6 +1751,7 @@ Exception args: %s""" % (self.err_pos, self.arg,
self.rel_err, self.rel_tol,
args_msg)
# Expose the error class as an attribute of verify_grad so callers can
# catch it as ``verify_grad.E_grad`` without importing GradientError.
verify_grad.E_grad = GradientError
......@@ -1994,6 +1997,7 @@ class DisconnectedGrad(ViewOp):
def connection_pattern(self, node):
    """Report that the single output is disconnected from the single input.

    The one-element-per-input, one-element-per-output matrix ``[[False]]``
    tells the gradient machinery not to propagate any gradient through
    this op.
    """
    return [[False]]
# Module-level instance of the op; presumably reused by the public
# disconnected_grad() helper since the op holds no state — TODO confirm.
disconnected_grad_ = DisconnectedGrad()
......@@ -2062,3 +2066,35 @@ def grad_clip(x, lower_bound, upper_bound):
"""
return GradClip(lower_bound, upper_bound)(x)
class GradScale(ViewOp):
    """Identity op on the forward pass that rescales gradients on the
    backward pass.

    As a ``ViewOp`` it returns its input unchanged when evaluated; only
    the gradient flowing through it is multiplied by ``multiplier``.
    """

    def __init__(self, multiplier):
        # Constant factor applied to every output gradient in grad().
        self.multiplier = multiplier

    def grad(self, args, g_outs):
        """Return each incoming output gradient scaled by the multiplier."""
        factor = self.multiplier
        return [factor * grad_output for grad_output in g_outs]
def grad_scale(x, multiplier):
    """Scale (or negate) the gradient of ``x`` during backpropagation.

    The forward value of ``x`` is unchanged; only the gradient that flows
    back through the returned variable is multiplied by ``multiplier``.

    :param x: the variable whose incoming gradient should be scaled
    :param multiplier: constant scale factor applied to the gradient
    :examples:
        x = theano.tensor.fscalar()
        fx = theano.tensor.sin(x)
        fp = theano.tensor.grad(fx, wrt=x)
        fprime = theano.function([x], fp)
        print(fprime(2))  # -0.416
        f_inverse = grad_scale(fx, -1.)
        fpp = theano.tensor.grad(f_inverse, wrt=x)
        fpprime = theano.function([x], fpp)
        print(fpprime(2))  # 0.416
    """
    scale_op = GradScale(multiplier)
    return scale_op(x)
Markdown format
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment