Commit 1872e918 authored by Frederic

pep8

Parent 89718faa
@@ -1124,10 +1124,10 @@ class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
 def test_argmax_pushdown():
     x = tensor.matrix()
-    for softmax in [softmax_graph, softmax_op]:
+    for sm in [softmax_graph, softmax_op]:
         # test that the max_and_argmax is pushed down if the max is not used
         out = tensor.max_and_argmax(
-            softmax(tensor.exp(tensor.tanh(sigmoid(x)))),
+            sm(tensor.exp(tensor.tanh(sigmoid(x)))),
             axis=-1)[1]
         fgraph = gof.FunctionGraph(
             [x],
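The pushdown under test relies on softmax being strictly monotonic along the reduced axis: it rescales a row but never reorders it, so the argmax of sm(z) equals the argmax of z, and when only output [1] (the indices) is used, the optimizer can drop the softmax from the graph. A minimal standalone NumPy check of that identity (not part of the diff; names are illustrative):

    import numpy

    def softmax(z):
        # numerically stable softmax along the last axis
        e = numpy.exp(z - z.max(axis=-1, keepdims=True))
        return e / e.sum(axis=-1, keepdims=True)

    z = numpy.random.RandomState(0).randn(5, 7)
    # softmax is strictly increasing per row, so argmax positions are unchanged
    assert (softmax(z).argmax(axis=-1) == z.argmax(axis=-1)).all()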
@@ -1144,7 +1144,7 @@ def test_argmax_pushdown():
     x = tensor.matrix()
     # test that the max_and_argmax is not pushed down if the max is used
     out = tensor.max_and_argmax(
-        softmax(tensor.exp(tensor.tanh(sigmoid(x)))),
+        sm(tensor.exp(tensor.tanh(sigmoid(x)))),
         axis=-1)[0]
     fgraph = gof.FunctionGraph(
         [x],
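Here output [0] is the max itself, i.e. an actual softmax value, and max(sm(z)) differs from max(z) in general, so the rewrite must keep the softmax in the graph. A standalone NumPy illustration of why the pushdown would be wrong for the max output:

    import numpy

    z = numpy.array([[1.0, 3.0, 2.0]])
    e = numpy.exp(z - z.max(axis=-1, keepdims=True))
    p = e / e.sum(axis=-1, keepdims=True)
    # the argmax position agrees, but the max values do not
    assert p.argmax(axis=-1) == z.argmax(axis=-1)
    assert not numpy.allclose(p.max(axis=-1), z.max(axis=-1))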
@@ -1420,12 +1420,12 @@ def test_relu():
     X = rng.randn(20, 30).astype(config.floatX)
     # test the base case, without custom alpha value
-    y = theano.tensor.nnet.relu(x).eval({x: X})
+    y = relu(x).eval({x: X})
     assert numpy.allclose(y, numpy.maximum(X, 0))
     # test for different constant alpha values (also outside of [0, 1])
     for alpha in 0, 0.3, 1, 2, -0.3, -1, -2:
-        y = theano.tensor.nnet.relu(x, alpha).eval({x: X})
+        y = relu(x, alpha).eval({x: X})
         assert numpy.allclose(y, numpy.where(X > 0, X, alpha * X))
     # test for variable alpha (scalar, vector and matrix)
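For reference, the behaviour these assertions pin down is the leaky rectifier: relu(x, alpha) is x where x > 0 and alpha * x elsewhere, with alpha = 0 giving the plain rectifier. One standard elementwise formulation (a sketch of the semantics, not necessarily the op's internals) can be checked against the same oracle the test uses:

    import numpy

    def leaky_relu(x, alpha=0.0):
        # equal to numpy.where(x > 0, x, alpha * x), written via abs()
        # as a single elementwise expression
        return 0.5 * (1 + alpha) * x + 0.5 * (1 - alpha) * numpy.abs(x)

    X = numpy.random.RandomState(42).randn(20, 30)
    for alpha in (0, 0.3, 1, 2, -0.3, -1, -2):
        assert numpy.allclose(leaky_relu(X, alpha),
                              numpy.where(X > 0, X, alpha * X))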
@@ -1433,7 +1433,7 @@ def test_relu():
         # create value for alpha (correct ndim and broadcastable against X)
         A = numpy.array(rng.randn(*X.shape[::-1][:alpha.ndim][::-1]),
                         dtype=config.floatX)
-        y = theano.tensor.nnet.relu(x, alpha).eval({x: X, alpha: A})
+        y = relu(x, alpha).eval({x: X, alpha: A})
         assert numpy.allclose(y, numpy.where(X > 0, X, A * X), rtol=3e-5)
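The shape expression X.shape[::-1][:alpha.ndim][::-1] keeps the last alpha.ndim entries of X.shape, which under NumPy broadcasting rules (trailing axes must match) is exactly a shape that broadcasts against X. A standalone demonstration:

    import numpy

    X = numpy.zeros((20, 30))
    for ndim in (0, 1, 2):
        shape = X.shape[::-1][:ndim][::-1]  # (), (30,), (20, 30)
        A = numpy.ones(shape)
        # trailing dimensions match, so A * X keeps X's shape
        assert (A * X).shape == X.shape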