提交 80a73ecc authored 作者: Frederic Bastien's avatar Frederic Bastien

flake8

上级 deee4794
...@@ -817,13 +817,13 @@ def test_maximum_minimum_grad(): ...@@ -817,13 +817,13 @@ def test_maximum_minimum_grad():
# Test the discontinuity point. # Test the discontinuity point.
# We decided that we only pass the gradient to the first input in that case. # We decided that we only pass the gradient to the first input in that case.
x, y = tensor.vectors('xy') x, y = tensor.vectors('xy')
for operator in [tensor.maximum, tensor.minimum]: for op in [tensor.maximum, tensor.minimum]:
o = operator(x, y) o = op(x, y)
g = theano.grad(o.sum(), [x, y]) g = theano.grad(o.sum(), [x, y])
theano.printing.debugprint(g) theano.printing.debugprint(g)
f = theano.function([x, y], g) f = theano.function([x, y], g)
theano.printing.debugprint(f, print_type=True) theano.printing.debugprint(f, print_type=True)
assert np.allclose(f([1], [1]), [[1],[0]]) assert np.allclose(f([1], [1]), [[1], [0]])
print() print()
MinimumTester = makeBroadcastTester( MinimumTester = makeBroadcastTester(
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论