提交 b4c10047 authored 作者: Olivier Delalleau's avatar Olivier Delalleau

Safer setting of config warn options to be 100% sure they do not persist outside of tests

上级 df6a535e
......@@ -727,7 +727,13 @@ class T_CrossentropyCategorical1Hot(unittest.TestCase):
theano.printing.debugprint(f)
raise
g = theano.function([x,b,y], T.grad(expr, x), mode=mode)
backup = config.warn.sum_div_dimshuffle_bug
config.warn.sum_div_dimshuffle_bug = False
try:
g = theano.function([x,b,y], T.grad(expr, x), mode=mode)
finally:
config.warn.sum_div_dimshuffle_bug = backup
print_graph(g)
try:
ops = [node.op for node in g.maker.env.toposort()]
......@@ -879,9 +885,13 @@ def test_argmax_pushdown():
[x],
[out])
backup = config.warn.argmax_pushdown_bug
config.warn.argmax_pushdown_bug = False
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(env)
try:
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(env)
finally:
config.warn.argmax_pushdown_bug = backup
#print 'AFTER'
#for node in env.toposort():
......@@ -923,9 +933,13 @@ def test_argmax_pushdown_bias():
[x,b],
[out])
backup = config.warn.argmax_pushdown_bug
config.warn.argmax_pushdown_bug = False
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(env)
try:
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(env)
finally:
config.warn.argmax_pushdown_bug = backup
#print 'AFTER'
#for node in env.toposort():
......@@ -1020,7 +1034,12 @@ class Test_softmax_opt:
# test that function contains softmax and softmaxgrad
w = T.matrix()
g = theano.function([c,w],T.grad((p_y*w).sum(), c))
backup = config.warn.sum_div_dimshuffle_bug
config.warn.sum_div_dimshuffle_bug = False
try:
g = theano.function([c,w],T.grad((p_y*w).sum(), c))
finally:
config.warn.sum_div_dimshuffle_bug = backup
g_ops = [n.op for n in g.maker.env.toposort()]
print '--- g ='
printing.debugprint(g)
......@@ -1042,7 +1061,12 @@ class Test_softmax_opt:
printing.debugprint(f)
# test that function contains softmax and no div.
g = theano.function([c],T.grad(p_y.sum(), c))
backup = config.warn.sum_div_dimshuffle_bug
config.warn.sum_div_dimshuffle_bug = False
try:
g = theano.function([c],T.grad(p_y.sum(), c))
finally:
config.warn.sum_div_dimshuffle_bug = backup
printing.debugprint(g)
raise SkipTest('Optimization not enabled for the moment')
......@@ -1056,7 +1080,12 @@ class Test_softmax_opt:
printing.debugprint(f)
# test that function contains softmax and no div.
g = theano.function([c], T.grad(p_y.sum(), c))
backup = config.warn.sum_div_dimshuffle_bug
config.warn.sum_div_dimshuffle_bug = False
try:
g = theano.function([c], T.grad(p_y.sum(), c))
finally:
config.warn.sum_div_dimshuffle_bug = backup
printing.debugprint(g)
raise SkipTest('Optimization not enabled for the moment')
......
......@@ -2511,33 +2511,41 @@ class T_local_sum(unittest.TestCase):
assert numpy.allclose(f(input),input.sum())
backup = config.warn.sum_sum_bug
config.warn.sum_sum_bug = False
f = theano.function([a],a.sum(0).sum(0).sum(0),mode=self.mode)
assert len(f.maker.env.nodes)==1
assert numpy.allclose(f(input),input.sum())
try:
f = theano.function([a],a.sum(0).sum(0).sum(0),mode=self.mode)
assert len(f.maker.env.nodes)==1
assert numpy.allclose(f(input),input.sum())
finally:
config.warn.sum_sum_bug = backup
def test_local_sum_sum(self):
    """Check that chained Sum ops are collapsed into a single Sum node.

    Covers sum-of-sum over explicit axis pairs, a third chained sum,
    sum over an axis followed by a full sum, and a full sum followed
    by another sum.  In every case the optimized graph must contain
    exactly one node and still agree numerically with numpy.
    """
    a = T.tensor3()
    input = numpy.arange(3 * 3 * 3, dtype=config.floatX).reshape(3, 3, 3)
    dims = [(0, 0), (1, 0), (2, 0), (0, 1), (1, 1), (2, 1)]

    # Disable the sum_sum_bug warning only for the duration of this test,
    # and restore it in `finally` so a failing assert cannot leave the
    # flag flipped for tests that run afterwards.
    backup = config.warn.sum_sum_bug
    config.warn.sum_sum_bug = False
    try:
        for d, dd in dims:
            f = theano.function([a], a.sum(d).sum(dd), mode=self.mode)
            assert numpy.allclose(f(input), input.sum(d).sum(dd))
            assert len(f.maker.env.nodes) == 1
        for d, dd in dims:
            f = theano.function([a], a.sum(d).sum(dd).sum(0), mode=self.mode)
            assert numpy.allclose(f(input), input.sum(d).sum(dd).sum(0))
            assert len(f.maker.env.nodes) == 1
        for d in [0, 1, 2]:
            f = theano.function([a], a.sum(d).sum(None), mode=self.mode)
            assert numpy.allclose(f(input), input.sum(d).sum())
            assert len(f.maker.env.nodes) == 1
        for d in [0, 1, 2]:
            f = theano.function([a], a.sum(None).sum(), mode=self.mode)
            assert numpy.allclose(f(input), input.sum())
            assert len(f.maker.env.nodes) == 1
    finally:
        config.warn.sum_sum_bug = backup
def test_local_sum_alloc(self):
a=T.dtensor3()
......@@ -2567,13 +2575,17 @@ class T_local_sum(unittest.TestCase):
assert len(f.maker.env.nodes)==nb_nodes[2]
assert f.maker.env.toposort()[-1].op==T.alloc
backup = config.warn.sum_sum_bug
config.warn.sum_sum_bug = False
for d, dd in [(0,0),(1,0),(2,0),(0,1),(1,1),(2,1)]:
f = theano.function([a],t_like(a).sum(d).sum(dd),mode=mode)
print f.maker.env.toposort()
assert numpy.allclose(f(input),n_like(input).sum(d).sum(dd))
assert len(f.maker.env.nodes)==nb_nodes[3]
assert f.maker.env.toposort()[-1].op==T.alloc
try:
for d, dd in [(0,0),(1,0),(2,0),(0,1),(1,1),(2,1)]:
f = theano.function([a],t_like(a).sum(d).sum(dd),mode=mode)
print f.maker.env.toposort()
assert numpy.allclose(f(input),n_like(input).sum(d).sum(dd))
assert len(f.maker.env.nodes)==nb_nodes[3]
assert f.maker.env.toposort()[-1].op==T.alloc
finally:
config.warn.sum_sum_bug = backup
class T_local_sum_dimshuffle(unittest.TestCase):
def setUp(self):
......@@ -2627,16 +2639,20 @@ class T_local_sum_dimshuffle(unittest.TestCase):
c_val = rng.randn(2,2,2).astype(config.floatX)
d_val = numpy.asarray(rng.randn(), config.floatX)
backup = config.warn.sum_sum_bug, config.warn.sum_div_dimshuffle_bug
config.warn.sum_sum_bug = False
config.warn.sum_div_dimshuffle_bug = False
for i,s in enumerate(sums):
print i
f = theano.function([a,b,c,d], s, mode=self.mode)
theano.printing.debugprint(f)
g = f.maker.env.toposort()
#print 'g =', g
assert isinstance(g[-1].op.scalar_op, theano.scalar.basic.TrueDiv)
f(a_val, b_val, c_val, d_val)
try:
for i,s in enumerate(sums):
print i
f = theano.function([a,b,c,d], s, mode=self.mode)
theano.printing.debugprint(f)
g = f.maker.env.toposort()
#print 'g =', g
assert isinstance(g[-1].op.scalar_op, theano.scalar.basic.TrueDiv)
f(a_val, b_val, c_val, d_val)
finally:
config.warn.sum_sum_bug, config.warn.sum_div_dimshuffle_bug = backup
# TODO:
# test_local_sum_prod_dimshuffle (a * b * c)
......
Markdown 格式
0%
您将 0 人添加到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论