提交 fa9a870d · 作者：Pascal Lamblin · 提交者：GitHub

Merge pull request #4659 from julianser/master

Merged previous work implementing stack trace copy over and tests for…
......@@ -91,7 +91,7 @@ Optimization FAST_RUN FAST_COMPILE
* ``f(fill(a,b), c) -> f(b, c)``
* ``f(fill(a, b), fill(c, d), e) -> fill(a, fill(c, f(b, d, e)))``
See :func:`opt.local_fill_cut`, :func:`opt.local_fill_sink`
See :func:`opt.local_fill_sink`
inc_subtensor serialization
Incrementing a small subregion of a large tensor can be done quickly
......
......@@ -53,7 +53,7 @@ class MissingInputError(Exception):
# The call to list is needed for Python 3
assert list(kwargs.keys()) == ["variable"]
tr = getattr(list(kwargs.values())[0].tag, 'trace', [])
if type(tr) is list and len(tr) > 0:
if isinstance(tr, list) and len(tr) > 0:
sio = StringIO()
print("\nBacktrace when the variable is created:", file=sio)
for subtr in list(kwargs.values())[0].tag.trace:
......
......@@ -179,7 +179,7 @@ def raise_with_op(node, thunk=None, exc_info=None, storage_map=None):
# Print node backtraces
tr = getattr(node.outputs[0].tag, 'trace', [])
if type(tr) is list and len(tr) > 0:
if isinstance(tr, list) and len(tr) > 0:
detailed_err_msg += "\nBacktrace when the node is created(use Theano flag traceback.limit=N to make it longer):\n"
# Print separate message for each element in the list of batcktraces
......
差异被折叠。
......@@ -3451,7 +3451,7 @@ def test_local_subtensor_of_alloc():
def test_local_fill_useless():
# Test opt local_fill_cut
# Test opt local_fill_useless
x = dvector()
y = dvector()
z = lvector()
......@@ -3500,6 +3500,67 @@ def test_local_fill_useless():
f(m_, x_)
def test_local_elemwise_sub_zeros():
    """Test the ``local_elemwise_sub_zeros`` optimization: ``x - x`` is
    rewritten to a fill of zeros.

    For scalar, vector and matrix inputs, check that:
      * the optimized graph's first node is an Elemwise whose scalar op
        is ``theano.scalar.Second`` (i.e. a fill) with a constant input,
      * evaluating the function returns all zeros,
      * the stack trace was copied over to the node created by the opt.
    """
    rng = numpy.random.RandomState(seed=utt.fetch_seed())

    # Exclude the opts that would rewrite the fill/Second node away
    # (or change its shape handling) before we can inspect it.
    mode = theano.compile.get_default_mode()\
        .excluding('canonicalize', 'uncanonicalize',
                   'ShapeOpt', 'local_fill_to_alloc',
                   'local_elemwise_alloc')\
        .including('local_elemwise_sub_zeros')

    # (symbolic input, concrete test value): scalar, vector, matrix.
    cases = [
        (T.scalar(), rng.rand(1).astype(config.floatX)[0]),
        (T.vector(), rng.rand(5).astype(config.floatX)),
        (T.matrix(), rng.rand(3, 2).astype(config.floatX)),
    ]

    for var, val in cases:
        f = function([var], var - var, mode=mode)
        node = f.maker.fgraph.toposort()[0]
        # Check the optimized graph is a single fill(x, 0) node.
        assert isinstance(node.op, T.Elemwise)
        assert isinstance(node.op.scalar_op, theano.scalar.Second)
        # Fix: the original asserted the *same* isinstance condition
        # twice joined with `or`; one check is equivalent.
        assert isinstance(node.inputs[1], T.TensorConstant)
        # x - x must evaluate to zeros of the input's shape.
        utt.assert_allclose(f(val), numpy.zeros(numpy.shape(val)))
        # Check stack trace is copied over.
        assert check_stack_trace(f, ops_to_check='all')
class Test_local_useless_elemwise_comparison(unittest.TestCase):
def setUp(self):
self.rng = numpy.random.RandomState(utt.fetch_seed())
......@@ -3743,6 +3804,17 @@ class Test_local_useless_elemwise_comparison(unittest.TestCase):
f = theano.function([x], T.xor(x, x), mode=mode)
self.assert_eqs_const(f, 0)
def test_stacktrace(self):
    """Check that local_useless_elemwise_comparison copies the stack
    trace onto the node it introduces, for both gt(x, x) and le(x, x).
    """
    mode = theano.compile.get_default_mode().including(
        'local_useless_elemwise_comparison')
    x = T.vector('x', dtype=config.floatX)
    # Comparing a variable with itself triggers the optimization; the
    # last node of each compiled graph must carry a stack trace.
    for comparison in (T.gt, T.le):
        fn = theano.function([x], comparison(x, x), mode=mode)
        self.assertTrue(check_stack_trace(fn, ops_to_check='last'))
class Test_local_canonicalize_alloc(unittest.TestCase):
def setUp(self):
......@@ -5604,6 +5676,35 @@ class T_local_sum_prod(unittest.TestCase):
finally:
config.on_opt_error = backup
def test_local_sum_prod_mul_by_scalar_stack_trace(self):
    """Verify that local_sum_prod_mul_by_scalar copies stack traces
    onto the nodes it creates, for Sum and Prod of scaled and negated
    vectors and matrices.
    """
    mode = theano.compile.get_default_mode()\
        .excluding('inplace_elemwise_opt')\
        .including('canonicalize', 'specialize')

    vect = T.dvector()
    mat = T.dmatrix()
    scalar = T.dscalar()

    # (function inputs, output expression, ops_to_check argument)
    cases = [
        ([vect, scalar], T.sum(vect * scalar), 'all'),
        ([vect], T.sum(-vect), [T.Sum]),
        ([vect, scalar], T.elemwise.Prod()(vect * scalar),
         [T.elemwise.Prod]),
        ([vect], T.elemwise.Prod()(-vect), [T.elemwise.Prod]),
        ([mat, scalar], T.sum(mat * scalar), 'all'),
        ([mat], T.sum(-mat), [T.Sum]),
    ]
    for inputs, output, to_check in cases:
        fn = theano.function(inputs, output, mode=mode)
        assert check_stack_trace(fn, ops_to_check=to_check)
class T_local_opt_alloc(unittest.TestCase):
def test_sum_upcast(self):
......@@ -6287,6 +6388,9 @@ class Test_Reshape(unittest.TestCase):
topo = f.maker.fgraph.toposort()
assert sum(isinstance(node.op, self.op) for node in topo) == 1
# Check stack trace
self.assertTrue(check_stack_trace(f, ops_to_check=[self.op]))
class Test_local_useless_reshape(unittest.TestCase):
def setUp(self):
......@@ -6316,6 +6420,9 @@ class Test_local_useless_reshape(unittest.TestCase):
topo = f2.maker.fgraph.toposort()
assert not any(isinstance(n.op, tensor.basic.Reshape) for n in topo)
# We do not need tests checking that stack traces are copied over,
# because local_useless_reshape only removes nodes from the graph
def test_2(self):
x = theano.tensor.matrix('x')
r = x.reshape([Shape_i(i)(x) for i in xrange(x.ndim)])
......@@ -6361,7 +6468,7 @@ class Test_local_reshape_to_dimshuffle(unittest.TestCase):
"TensorConstant{[5 6]}))]")
# Check stacktrace was copied over correctly after opt was applied
check_stack_trace(g, ops_to_check=(T.DimShuffle, T.Reshape))
assert check_stack_trace(g, ops_to_check=(T.DimShuffle, T.Reshape))
def test_local_reshape_lift():
......@@ -6375,6 +6482,8 @@ def test_local_reshape_lift():
topo = f.maker.fgraph.toposort()
assert isinstance(topo[-2].op, tensor.Reshape)
assert isinstance(topo[-1].op, tensor.Elemwise)
# Check stacktrace was copied over correctly after opt was applied
assert check_stack_trace(f, ops_to_check='last')
class Test_lift_transpose_through_dot(unittest.TestCase):
......
Markdown 格式
0%
您将添加 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论