提交 9b75b7ae authored 作者: khaotik's avatar khaotik 提交者: khaotik

Added whitespace around operators for flake8

上级 f6ddf8d8
......@@ -20,7 +20,7 @@ class OpFromGraphBase(gof.Op):
def __init__(self, inputs, outputs, grad_overrides=None, **kwargs):
if not isinstance(outputs, list):
raise TypeError('outputs must be list', outputs)
for i in inputs+outputs:
for i in inputs + outputs:
if not isinstance(i, gof.Variable):
raise TypeError(
'inputs and outputs must be Variable instances', i)
......@@ -75,7 +75,7 @@ class OpFromGraphBase(gof.Op):
if isinstance(grad_ops_l, list):
assert len(grad_ops_l) <= len(self.internal_inputs)
if len(grad_ops_l) < len(self.internal_inputs):
grad_ops_l += [None]*(
grad_ops_l += [None] * (
len(self.internal_inputs) - len(grad_ops_l))
# It is normal if some inputs are not needed in order
# to compute the gradient, so we ignore them.
......@@ -94,7 +94,7 @@ class OpFromGraphBase(gof.Op):
# additional filtering is needed
def grad_ops(inps, grds):
# nonlocal gs, grad_ops_l
return [(go(inps, grds) if ov else go(*(inps+grds)))
return [(go(inps, grds) if ov else go(*(inps + grds)))
for go, ov in izip(gs, grad_ops_l)]
else:
grad_ops = grad_ops_l
......@@ -115,7 +115,7 @@ class OpFromGraphBase(gof.Op):
def grad_ops(inps, grds):
# nonlocal grad_ops_l
return [go(*(inps+grds)) for go in grad_ops_l]
return [go(*(inps + grds)) for go in grad_ops_l]
self.grad_ops = grad_ops
self.cached_grad_ops = True
return grad_ops(inputs, output_grads)
......@@ -194,7 +194,7 @@ class OpFromGraphInline(OpFromGraphBase):
"""
def perform(self, node, inputs, outputs):
raise RuntimeError(
type(self).__name__+' is not supposed to be executed at runtime')
type(self).__name__ + ' is not supposed to be executed at runtime')
@gof.local_optimizer([OpFromGraphInline])
......
......@@ -126,11 +126,11 @@ class T_OpFromGraph(unittest_tools.InferShapeTester):
def go(inps, gs):
x, y = inps
g = gs[0]
return [g*y*2, g*x*1.5]
return [g * y * 2, g * x * 1.5]
# no override case is covered in "grad" test
# single override case
op_mul = cls_ofg([x, y], [x*y], grad_overrides=go)
op_mul = cls_ofg([x, y], [x * y], grad_overrides=go)
xx, yy = T.vector('xx'), T.vector('yy')
zz = T.sum(op_mul(xx, yy))
dx, dy = T.grad(zz, [xx, yy])
......@@ -138,23 +138,23 @@ class T_OpFromGraph(unittest_tools.InferShapeTester):
xv = numpy.random.rand(16).astype(config.floatX)
yv = numpy.random.rand(16).astype(config.floatX)
dxv, dyv = fn(xv, yv)
assert numpy.allclose(yv*2, dxv)
assert numpy.allclose(xv*1.5, dyv)
assert numpy.allclose(yv * 2, dxv)
assert numpy.allclose(xv * 1.5, dyv)
# list override case
def go1(inps, gs):
x, w, b = inps
g = gs[0]
return g*w*2
return g * w * 2
def go2(inps, gs):
x, w, b = inps
g = gs[0]
return g*x*1.5
return g * x * 1.5
w, b = T.vectors('wb')
# we make the 3rd gradient default (no override)
op_linear = cls_ofg([x, w, b], [x*w+b], grad_overrides=[go1, go2])
op_linear = cls_ofg([x, w, b], [x * w + b], grad_overrides=[go1, go2])
xx, ww, bb = T.vector('xx'), T.vector('yy'), T.vector('bb')
zz = T.sum(op_linear(xx, ww, bb))
dx, dw, db = T.grad(zz, [xx, ww, bb])
......@@ -163,16 +163,16 @@ class T_OpFromGraph(unittest_tools.InferShapeTester):
wv = numpy.random.rand(16).astype(config.floatX)
bv = numpy.random.rand(16).astype(config.floatX)
dxv, dwv, dbv = fn(xv, wv, bv)
assert numpy.allclose(wv*2, dxv)
assert numpy.allclose(xv*1.5, dwv)
assert numpy.allclose(wv * 2, dxv)
assert numpy.allclose(xv * 1.5, dwv)
assert numpy.allclose(numpy.ones(16, dtype=config.floatX), dbv)
@test_params
def test_nested(self, cls_ofg):
x, y = T.vectors('xy')
u, v = x+y, x-y
u, v = x + y, x - y
op_ft = cls_ofg([x, y], [u, v])
op_ift = cls_ofg([x, y], [u/2, v/2])
op_ift = cls_ofg([x, y], [u / 2, v / 2])
xx, yy = T.vector('xx'), T.vector('yy')
xx2, yy2 = op_ift(*op_ft(xx, yy))
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论