提交 23adf69e authored 作者: Frederic

some pep8

上级 64590a18
......@@ -1495,11 +1495,11 @@ def test_log1p():
f = function([x], T.log(1 + (x)), mode=m)
assert [node.op for node in f.maker.fgraph.toposort()] == [T.log1p]
f = function([x], T.log(1 + (-x)), mode=m)
assert [node.op for node in f.maker.fgraph.toposort()] == [T.neg,
inplace.log1p_inplace]
assert [node.op for node in f.maker.fgraph.toposort()] == [
T.neg, inplace.log1p_inplace]
f = function([x], -T.log(1 + (-x)), mode=m)
assert [node.op for node in f.maker.fgraph.toposort()] == [T.neg,
inplace.log1p_inplace, inplace.neg_inplace]
assert [node.op for node in f.maker.fgraph.toposort()] == [
T.neg, inplace.log1p_inplace, inplace.neg_inplace]
# check trickier cases (and use different dtype)
y = fmatrix()
......@@ -1507,12 +1507,12 @@ def test_log1p():
print f.maker.fgraph.toposort()
# the first three ops are Shape_i, Shape_i, and Dimshuffle
theano.printing.debugprint(f)
assert [node.op for node in f.maker.fgraph.toposort()][3:] \
== [T.log1p, tensor.alloc]
assert [node.op for node in f.maker.fgraph.toposort()][3:] == [
T.log1p, tensor.alloc]
f = function([x, y], T.log(0 + (x) + tensor.fill(y, 1.0)), mode=m)
theano.printing.debugprint(f)
assert [node.op for node in f.maker.fgraph.toposort()][3:] \
== [T.log1p, tensor.alloc]
assert [node.op for node in f.maker.fgraph.toposort()][3:] == [
T.log1p, tensor.alloc]
f = function([x, y], T.log(2 + (x) - tensor.fill(y, 1.0)), mode=m)
theano.printing.debugprint(f)
assert [node.op for node in f.maker.fgraph.toposort()][3:] \
......@@ -2333,7 +2333,8 @@ class Test_alloc_zero(unittest.TestCase):
def setUp(self):
    """Build the compile mode used by the Test_alloc_zero tests.

    Takes the default mode and enables the three local optimizations
    that rewrite (inc/set)subtensor-of-alloc and 0-times-dot graphs.
    """
    # NOTE(review): the scraped diff showed both the pre- and post-commit
    # continuation lines of this call; this is the post-commit (PEP8,
    # one-argument-per-line) form, with the duplicated old line removed.
    mode = theano.compile.mode.get_default_mode()
    self.mode = mode.including("local_incsubtensor_of_allocs",
                               "local_setsubtensor_of_allocs",
                               "local_0_dot_x")
def test_setsubtensor_allocs0(self):
x = tensor.matrix()
......@@ -2427,7 +2428,7 @@ class Test_alloc_zero(unittest.TestCase):
f(_e1[1], _e2[1])
f(_e1[2], _e2[2])
assert numpy.all([not isinstance(x.op, tensor.Dot) for x in
f.maker.fgraph.toposort() ])
f.maker.fgraph.toposort()])
#test that we don't remove shape errors
self.assertRaises((ValueError, AssertionError), f,
......@@ -2809,8 +2810,8 @@ class test_assert(utt.InferShapeTester):
x = T.scalar()
y = T.scalar()
f = theano.function([x, y], theano.tensor.opt.assert_(x, y,
1), mode=mode)
f = theano.function([x, y], theano.tensor.opt.assert_(x, y, 1),
mode=mode)
assert f(1, 1) == 1
assert f(5, 1) == 5
topo = f.maker.fgraph.toposort()
......@@ -2827,8 +2828,8 @@ class test_assert(utt.InferShapeTester):
x = T.scalar()
y = T.scalar()
f = theano.function([x, y], theano.tensor.opt.assert_(x, y,
0), mode=mode)
f = theano.function([x, y], theano.tensor.opt.assert_(x, y, 0),
mode=mode)
self.assertRaises(AssertionError, f, 1, 0)
topo = f.maker.fgraph.toposort()
assert len(topo) == 2
......@@ -3177,8 +3178,9 @@ def test_constant_get_stabilized():
class T_local_switch_sink(unittest.TestCase):
def setUp(self):
# condition values
self.condm = numpy.asarray([[0.1, 0, 1, -1], [0., 0., 0.,
0.], [1, 1, 1, 1]])
self.condm = numpy.asarray([[0.1, 0, 1, -1],
[0., 0., 0., 0.],
[1, 1, 1, 1]])
self.condv = numpy.asarray([0.1, 0, 1, -1])
self.conds = [0.1, 0, 1, -1]
......@@ -3256,14 +3258,14 @@ class T_local_erf(unittest.TestCase):
f = theano.function([x], 1 + T.erf(x), mode=self.mode)
print f.maker.fgraph.toposort()
assert [n.op for n in f.maker.fgraph.toposort()] == [T.mul, T.
erfc], f.maker.fgraph.toposort()
assert [n.op for n in f.maker.fgraph.toposort()] == [
T.mul, T.erfc], f.maker.fgraph.toposort()
f(val)
f = theano.function([x], T.erf(x) + 1, mode=self.mode)
print f.maker.fgraph.toposort()
assert [n.op for n in f.maker.fgraph.toposort()] == [T.mul, T.
erfc], f.maker.fgraph.toposort()
assert [n.op for n in f.maker.fgraph.toposort()] == [
T.mul, T.erfc], f.maker.fgraph.toposort()
f(val)
f = theano.function([x], T.erf(x) + 2, mode=self.mode)
......@@ -3305,7 +3307,7 @@ class T_local_erf(unittest.TestCase):
assert topo[0].op == T.erf, f.maker.fgraph.toposort()
assert isinstance(topo[1].op, T.Elemwise), f.maker.fgraph.toposort()
assert isinstance(topo[1].op.scalar_op, scal.Add)\
or isinstance(topo[1].op.scalar_op,scal.Sub), f.maker.fgraph.toposort()
or isinstance(topo[1].op.scalar_op, scal.Sub), f.maker.fgraph.toposort()
print f(val)
def test_local_erf_minus_one(self):
......@@ -3345,7 +3347,8 @@ class T_local_erfc(unittest.TestCase):
'canonicalize').including('fast_run').excluding('gpu')
self.mode = self.mode_fusion.excluding('fusion')
self.mode._optimizer.position_cutoff = 1.50001
if theano.config.cxx == '' and not theano.scalar.basic_scipy.imported_scipy_special:
if (theano.config.cxx == '' and
not theano.scalar.basic_scipy.imported_scipy_special):
raise SkipTest("erfc need a c++ compiler or scipy")
def test_local_one_minus_erfc(self):
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论