提交 c98a1668 authored 作者: Gokul's avatar Gokul

flake8 fixes

上级 e550df89
...@@ -151,6 +151,8 @@ class DisconnectedType(theano.gof.type.Type): ...@@ -151,6 +151,8 @@ class DisconnectedType(theano.gof.type.Type):
def __str__(self): def __str__(self):
return 'DisconnectedType' return 'DisconnectedType'
disconnected_type = DisconnectedType() disconnected_type = DisconnectedType()
...@@ -1749,6 +1751,7 @@ Exception args: %s""" % (self.err_pos, self.arg, ...@@ -1749,6 +1751,7 @@ Exception args: %s""" % (self.err_pos, self.arg,
self.rel_err, self.rel_tol, self.rel_err, self.rel_tol,
args_msg) args_msg)
verify_grad.E_grad = GradientError verify_grad.E_grad = GradientError
...@@ -1994,6 +1997,7 @@ class DisconnectedGrad(ViewOp): ...@@ -1994,6 +1997,7 @@ class DisconnectedGrad(ViewOp):
def connection_pattern(self, node): def connection_pattern(self, node):
return [[False]] return [[False]]
disconnected_grad_ = DisconnectedGrad() disconnected_grad_ = DisconnectedGrad()
...@@ -2065,14 +2069,14 @@ def grad_clip(x, lower_bound, upper_bound): ...@@ -2065,14 +2069,14 @@ def grad_clip(x, lower_bound, upper_bound):
class GradScale(ViewOp):
    """View op that rescales gradients during backpropagation.

    The forward pass is the identity (inherited from ViewOp); only the
    backward pass is affected, with every output gradient multiplied by
    the fixed ``multiplier``.
    """

    def __init__(self, multiplier):
        # Factor applied to each output gradient in grad().
        self.multiplier = multiplier

    def grad(self, args, g_outs):
        # Identity forward, scaled backward: multiply every incoming
        # output gradient by the stored factor.
        scaled = [self.multiplier * g for g in g_outs]
        return scaled
def grad_scale(x, multiplier):
    """Scale (or invert) the gradient of ``x`` during backpropagation.

    The forward value of ``x`` is unchanged; only the gradient flowing
    back through the returned variable is multiplied by ``multiplier``
    (a negative multiplier inverts the gradient).

    :param x: symbolic variable whose gradient should be rescaled.
    :param multiplier: scale factor applied to the gradient.
    :return: a variable equal to ``x`` whose gradient is rescaled.

    :Examples:

        x = theano.tensor.fscalar()
        fp = theano.tensor.exp(x)
        fprime = theano.tensor.grad(grad_scale(fp, 0.5), wrt=x)
        fpprime = theano.function([x], fprime)
        print(fpprime(2))#0.416
    """
    scaling_op = GradScale(multiplier)
    return scaling_op(x)
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论