提交 2ebc24ff authored 作者: Iulian Vlad Serban's avatar Iulian Vlad Serban

Continued work on #3018. Fixed stack trace copy over for additional…

Continued work on #3018. Fixed stack trace copy over for additional optimizations, fixed errors in previous commit and included additional tests.
上级 daf7ea76
...@@ -77,6 +77,7 @@ def add_tag_trace(thing, user_line=1): ...@@ -77,6 +77,7 @@ def add_tag_trace(thing, user_line=1):
if limit == -1: if limit == -1:
limit = None limit = None
tr = simple_extract_stack(limit=limit)[:-1] tr = simple_extract_stack(limit=limit)[:-1]
# Different python versions use different semantics for # Different python versions use different semantics for
# limit. python 2.7 includes the call to extract_stack. The -1 gets # limit. python 2.7 includes the call to extract_stack. The -1 gets
# rid of it. # rid of it.
...@@ -93,7 +94,11 @@ def add_tag_trace(thing, user_line=1): ...@@ -93,7 +94,11 @@ def add_tag_trace(thing, user_line=1):
"theano/sparse/", "theano\\sparse\\", "theano/sparse/", "theano\\sparse\\",
"theano/typed_list/", "theano\\typed_list\\", "theano/typed_list/", "theano\\typed_list\\",
]: ]:
if p in file_path: # Julian: I added the 'tests' exception together with Arnaud.
# Otherwise, we'd lose the stack trace in our test cases # Otherwise, we'd lose the stack trace in our test cases
# (e.g. in test_opt.py). We're not sure this is the right way to
# do it though.
if p in file_path and 'tests' not in file_path:
tr = tr[:-1] tr = tr[:-1]
rm = True rm = True
break break
......
...@@ -91,8 +91,8 @@ def copy_stack_trace(from_var, to_var): ...@@ -91,8 +91,8 @@ def copy_stack_trace(from_var, to_var):
tr += getattr(v.tag, 'trace', []) tr += getattr(v.tag, 'trace', [])
else: else:
# If from_var is not a list, it must be a single tensor # If from_var is not a list, it must be a single tensor variable,
# variable, so just store that particular stack trace # so just store that particular stack trace
tr = getattr(from_var.tag, 'trace', []) tr = getattr(from_var.tag, 'trace', [])
# Copy over stack traces to to_var # Copy over stack traces to to_var
...@@ -2565,7 +2565,7 @@ def local_subtensor_lift(node): ...@@ -2565,7 +2565,7 @@ def local_subtensor_lift(node):
ret = u.owner.op(x_idx) ret = u.owner.op(x_idx)
# Copy over previous output stacktrace # Copy over previous output stacktrace
# and stacktrace from previous unary operation # and stacktrace from previous unary operation
copy_stack_trace([node.outputs, node.inputs[0]], ret) copy_stack_trace([node.outputs[0], node.inputs[0]], ret)
return [ret] return [ret]
if isinstance(u.owner.op, T.Elemwise): if isinstance(u.owner.op, T.Elemwise):
...@@ -2574,7 +2574,14 @@ def local_subtensor_lift(node): ...@@ -2574,7 +2574,14 @@ def local_subtensor_lift(node):
# There is no broadcastable in the inputs # There is no broadcastable in the inputs
idx = node.inputs[1:] idx = node.inputs[1:]
new_inputs = [node.op(i, *idx) for i in u.owner.inputs] new_inputs = [node.op(i, *idx) for i in u.owner.inputs]
return [u.owner.op(*new_inputs)] # Copy over previous output stacktrace
copy_stack_trace(node.outputs[0], new_inputs)
ret = u.owner.op(*new_inputs)
# Copy over previous output stacktrace
# and stacktrace from previous unary operation
copy_stack_trace([node.outputs[0], node.inputs[0]], ret)
return [ret]
elif all([sum(i.type.broadcastable) in [i.ndim, 0] elif all([sum(i.type.broadcastable) in [i.ndim, 0]
for i in u.owner.inputs]): for i in u.owner.inputs]):
# There is no broadcastable in the inputs or it is scalar # There is no broadcastable in the inputs or it is scalar
...@@ -2591,7 +2598,15 @@ def local_subtensor_lift(node): ...@@ -2591,7 +2598,15 @@ def local_subtensor_lift(node):
else: else:
new_inputs.append( new_inputs.append(
i.dimshuffle(['x'] * node.outputs[0].ndim)) i.dimshuffle(['x'] * node.outputs[0].ndim))
return [u.owner.op(*new_inputs)]
# Copy over previous output stacktrace
copy_stack_trace(node.outputs[0], new_inputs)
ret = u.owner.op(*new_inputs)
# Copy over previous output stacktrace
# and stacktrace from previous unary operation
copy_stack_trace([node.outputs[0], node.inputs[0]], ret)
return [ret]
if isinstance(u.owner.op, T.Rebroadcast): if isinstance(u.owner.op, T.Rebroadcast):
# make sure that Rebroadcast has only 1 input # make sure that Rebroadcast has only 1 input
...@@ -2617,7 +2632,13 @@ def local_subtensor_lift(node): ...@@ -2617,7 +2632,13 @@ def local_subtensor_lift(node):
j += 1 j += 1
subt_x = node.op(u.owner.inputs[0], *node.inputs[1:]) subt_x = node.op(u.owner.inputs[0], *node.inputs[1:])
# Copy over previous output stacktrace
copy_stack_trace(node.outputs[0], subt_x)
rbcast_subt_x = T.Rebroadcast(*new_axis)(subt_x) rbcast_subt_x = T.Rebroadcast(*new_axis)(subt_x)
# Copy over previous output stacktrace
# and stacktrace from previous unary operation
copy_stack_trace([node.outputs[0], node.inputs[0]], rbcast_subt_x)
return [rbcast_subt_x] return [rbcast_subt_x]
...@@ -2809,11 +2830,18 @@ def local_subtensor_merge(node): ...@@ -2809,11 +2830,18 @@ def local_subtensor_merge(node):
merged_slices = make_constant(merged_slices) merged_slices = make_constant(merged_slices)
subtens = Subtensor(merged_slices) subtens = Subtensor(merged_slices)
sl_ins = Subtensor.collapse( sl_ins = Subtensor.collapse(
merged_slices, merged_slices,
lambda x: isinstance(x, T.Variable)) lambda x: isinstance(x, T.Variable))
# Do not call make_node for test_value # Do not call make_node for test_value
out = subtens(x, *sl_ins) out = subtens(x, *sl_ins)
# Copy over previous output stacktrace
# and stacktrace from previous slicing operation.
# Why? Because, the merged slicing operation could have failed
# because of either of the two original slicing operations
copy_stack_trace([node.outputs[0], node.inputs[0]], out)
return [out] return [out]
...@@ -2821,6 +2849,7 @@ def local_subtensor_merge(node): ...@@ -2821,6 +2849,7 @@ def local_subtensor_merge(node):
@register_specialize @register_specialize
@gof.local_optimizer([Subtensor]) @gof.local_optimizer([Subtensor])
def local_subtensor_of_alloc(node): def local_subtensor_of_alloc(node):
#TODO Julian: Document this better!
"""alloc[x:y] -> alloc""" """alloc[x:y] -> alloc"""
if not isinstance(node.op, Subtensor): if not isinstance(node.op, Subtensor):
return False return False
......
...@@ -1864,11 +1864,18 @@ class test_local_subtensor_make_vector(unittest.TestCase): ...@@ -1864,11 +1864,18 @@ class test_local_subtensor_make_vector(unittest.TestCase):
#self.assertTrue(hasattr(f.outputs[0].tag, 'trace')) #self.assertTrue(hasattr(f.outputs[0].tag, 'trace'))
class test_local_subtensor_lift(unittest.TestCase): class test_local_subtensor_lift(unittest.TestCase):
def _verify_stack_trace(self, f):
for output in f.outputs:
# Check stacktrace was copied over correctly after opt was applied
self.assertTrue(hasattr(output.variable.tag, 'trace'))
def test0(self): def test0(self):
# basic test that the Op works # basic test that the Op works
x = tensor.matrix('x') x = tensor.matrix('x')
f = function([x], tensor.exp(x)[0], mode=mode_opt) f = function([x], tensor.exp(x)[0], mode=mode_opt)
self._verify_stack_trace(f)
prog = f.maker.fgraph.toposort() prog = f.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.Subtensor) # first subtensor assert isinstance(prog[0].op, tensor.Subtensor) # first subtensor
assert prog[1].op == tensor.exp assert prog[1].op == tensor.exp
...@@ -1881,6 +1888,8 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -1881,6 +1888,8 @@ class test_local_subtensor_lift(unittest.TestCase):
x = tensor.matrix('x') x = tensor.matrix('x')
f = function([x], [tensor.exp(x)[0], tensor.exp(x)], mode=mode_opt) f = function([x], [tensor.exp(x)[0], tensor.exp(x)], mode=mode_opt)
self._verify_stack_trace(f)
prog = f.maker.fgraph.toposort() prog = f.maker.fgraph.toposort()
assert prog[0].op == tensor.exp assert prog[0].op == tensor.exp
assert isinstance(prog[1].op, tensor.Subtensor) # first subtensor assert isinstance(prog[1].op, tensor.Subtensor) # first subtensor
...@@ -1895,6 +1904,8 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -1895,6 +1904,8 @@ class test_local_subtensor_lift(unittest.TestCase):
z = tensor.matrix('z') z = tensor.matrix('z')
f = function([x, y, z], tensor.exp(x + y + z)[0], mode=mode_opt) f = function([x, y, z], tensor.exp(x + y + z)[0], mode=mode_opt)
self._verify_stack_trace(f)
prog = f.maker.fgraph.toposort() prog = f.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.DimShuffle) assert isinstance(prog[0].op, tensor.DimShuffle)
assert isinstance(prog[1].op, tensor.Subtensor) # first subtensor assert isinstance(prog[1].op, tensor.Subtensor) # first subtensor
...@@ -1912,6 +1923,8 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -1912,6 +1923,8 @@ class test_local_subtensor_lift(unittest.TestCase):
z = tensor.matrix('z') z = tensor.matrix('z')
f = function([x, y, z], tensor.exp(x + y + z)[0:2], mode=mode_opt) f = function([x, y, z], tensor.exp(x + y + z)[0:2], mode=mode_opt)
self._verify_stack_trace(f)
prog = f.maker.fgraph.toposort() prog = f.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.DimShuffle) assert isinstance(prog[0].op, tensor.DimShuffle)
assert isinstance(prog[1].op, tensor.Subtensor) # first subtensor assert isinstance(prog[1].op, tensor.Subtensor) # first subtensor
...@@ -1928,6 +1941,8 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -1928,6 +1941,8 @@ class test_local_subtensor_lift(unittest.TestCase):
y = tensor.vector('y') y = tensor.vector('y')
f = function([y], tensor.exp(y.dimshuffle(0, 'x'))[0], mode=mode_opt) f = function([y], tensor.exp(y.dimshuffle(0, 'x'))[0], mode=mode_opt)
self._verify_stack_trace(f)
prog = f.maker.fgraph.toposort() prog = f.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.DimShuffle) assert isinstance(prog[0].op, tensor.DimShuffle)
assert isinstance(prog[1].op, tensor.Subtensor) assert isinstance(prog[1].op, tensor.Subtensor)
...@@ -1943,6 +1958,8 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -1943,6 +1958,8 @@ class test_local_subtensor_lift(unittest.TestCase):
y = tensor.vector('y') y = tensor.vector('y')
f = function([x, y], tensor.exp(x + y)[0], mode=mode_opt) f = function([x, y], tensor.exp(x + y)[0], mode=mode_opt)
self._verify_stack_trace(f)
prog = f.maker.fgraph.toposort() prog = f.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.DimShuffle) assert isinstance(prog[0].op, tensor.DimShuffle)
assert prog[1].op == tensor.add assert prog[1].op == tensor.add
...@@ -1959,6 +1976,8 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -1959,6 +1976,8 @@ class test_local_subtensor_lift(unittest.TestCase):
f = function([x, y], [tensor.exp(x + y)[0], tensor.exp(x + y) + x], f = function([x, y], [tensor.exp(x + y)[0], tensor.exp(x + y) + x],
mode=mode_opt) mode=mode_opt)
self._verify_stack_trace(f)
prog = f.maker.fgraph.toposort() prog = f.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.DimShuffle) assert isinstance(prog[0].op, tensor.DimShuffle)
assert isinstance(prog[1].op.scalar_op, theano.scalar. assert isinstance(prog[1].op.scalar_op, theano.scalar.
...@@ -1977,6 +1996,8 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -1977,6 +1996,8 @@ class test_local_subtensor_lift(unittest.TestCase):
y = tensor.scalar('y') y = tensor.scalar('y')
f = function([x, y], tensor.exp(x + y)[0], mode=mode_opt) f = function([x, y], tensor.exp(x + y)[0], mode=mode_opt)
self._verify_stack_trace(f)
prog = f.maker.fgraph.toposort() prog = f.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.Subtensor) assert isinstance(prog[0].op, tensor.Subtensor)
# Composite{add,exp} # Composite{add,exp}
...@@ -1996,6 +2017,7 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -1996,6 +2017,7 @@ class test_local_subtensor_lift(unittest.TestCase):
assert newx.broadcastable == (True, False) assert newx.broadcastable == (True, False)
f1 = function([x], newx[:2, :5], mode=mode_opt) f1 = function([x], newx[:2, :5], mode=mode_opt)
self._verify_stack_trace(f1)
prog = f1.maker.fgraph.toposort() prog = f1.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.Subtensor) assert isinstance(prog[0].op, tensor.Subtensor)
assert isinstance(prog[1].op, tensor.Rebroadcast) assert isinstance(prog[1].op, tensor.Rebroadcast)
...@@ -2009,6 +2031,7 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -2009,6 +2031,7 @@ class test_local_subtensor_lift(unittest.TestCase):
assert newy.broadcastable == (True, False, True, False) assert newy.broadcastable == (True, False, True, False)
f2 = function([y], newy[:, 3, 0, :], mode=mode_opt) f2 = function([y], newy[:, 3, 0, :], mode=mode_opt)
self._verify_stack_trace(f2)
prog = f2.maker.fgraph.toposort() prog = f2.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.Subtensor) assert isinstance(prog[0].op, tensor.Subtensor)
assert isinstance(prog[1].op, tensor.Rebroadcast) assert isinstance(prog[1].op, tensor.Rebroadcast)
...@@ -2016,6 +2039,7 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -2016,6 +2039,7 @@ class test_local_subtensor_lift(unittest.TestCase):
# corner case 2: subtensor idx_list is shorter than resulting broadcast pattern # corner case 2: subtensor idx_list is shorter than resulting broadcast pattern
f3 = function([y], newy[:, 3, 0], mode=mode_opt) f3 = function([y], newy[:, 3, 0], mode=mode_opt)
self._verify_stack_trace(f3)
prog = f3.maker.fgraph.toposort() prog = f3.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.Subtensor) assert isinstance(prog[0].op, tensor.Subtensor)
assert isinstance(prog[1].op, tensor.Rebroadcast) assert isinstance(prog[1].op, tensor.Rebroadcast)
...@@ -2030,6 +2054,7 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -2030,6 +2054,7 @@ class test_local_subtensor_lift(unittest.TestCase):
out = newz[:, 3, 0] out = newz[:, 3, 0]
f4 = function([z], newz[:, 3, 0], mode=mode_opt) f4 = function([z], newz[:, 3, 0], mode=mode_opt)
self._verify_stack_trace(f4)
prog = f4.maker.fgraph.toposort() prog = f4.maker.fgraph.toposort()
assert isinstance(prog[0].op, tensor.Subtensor) assert isinstance(prog[0].op, tensor.Subtensor)
assert isinstance(prog[1].op, tensor.Rebroadcast) assert isinstance(prog[1].op, tensor.Rebroadcast)
...@@ -2037,6 +2062,11 @@ class test_local_subtensor_lift(unittest.TestCase): ...@@ -2037,6 +2062,11 @@ class test_local_subtensor_lift(unittest.TestCase):
class test_local_subtensor_merge(unittest.TestCase): class test_local_subtensor_merge(unittest.TestCase):
def _verify_stack_trace(self, f):
for output in f.outputs:
# Check stacktrace was copied over correctly after opt was applied
self.assertTrue(hasattr(output.variable.tag, 'trace'))
def setUp(self): def setUp(self):
utt.seed_rng() utt.seed_rng()
self.x_shapes = [(2, 2), (5, 3), (4, 1), (1, 2), self.x_shapes = [(2, 2), (5, 3), (4, 1), (1, 2),
...@@ -2051,6 +2081,8 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2051,6 +2081,8 @@ class test_local_subtensor_merge(unittest.TestCase):
g = function([x], x[idx::][-1], mode=mode_opt.excluding( g = function([x], x[idx::][-1], mode=mode_opt.excluding(
'local_subtensor_merge')) 'local_subtensor_merge'))
self._verify_stack_trace(f)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
assert len([t for t in topo assert len([t for t in topo
if isinstance(t.op, tensor.Subtensor)]) == 1 if isinstance(t.op, tensor.Subtensor)]) == 1
...@@ -2077,6 +2109,8 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2077,6 +2109,8 @@ class test_local_subtensor_merge(unittest.TestCase):
mode=mode_opt.excluding('local_subtensor_merge')) mode=mode_opt.excluding('local_subtensor_merge'))
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
self._verify_stack_trace(f)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
# print [t for t in topo if isinstance(t.op, tensor.Subtensor)] # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
assert len([t for t in topo assert len([t for t in topo
...@@ -2104,6 +2138,8 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2104,6 +2138,8 @@ class test_local_subtensor_merge(unittest.TestCase):
g = function([x], x[::-1][idx], g = function([x], x[::-1][idx],
mode=mode_opt.excluding('local_subtensor_merge')) mode=mode_opt.excluding('local_subtensor_merge'))
self._verify_stack_trace(f)
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
# print [t for t in topo if isinstance(t.op, tensor.Subtensor)] # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
...@@ -2132,6 +2168,8 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2132,6 +2168,8 @@ class test_local_subtensor_merge(unittest.TestCase):
mode=mode_opt.excluding('local_subtensor_merge')) mode=mode_opt.excluding('local_subtensor_merge'))
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
self._verify_stack_trace(f)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
# print [t for t in topo if isinstance(t.op, tensor.Subtensor)] # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
assert len([t for t in topo assert len([t for t in topo
...@@ -2154,6 +2192,8 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2154,6 +2192,8 @@ class test_local_subtensor_merge(unittest.TestCase):
for idx in xrange(-9, 8): for idx in xrange(-9, 8):
f = function([x], x[::-1][:idx], mode=mode_opt) f = function([x], x[::-1][:idx], mode=mode_opt)
self._verify_stack_trace(f)
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
# print [t for t in topo if isinstance(t.op, tensor.Subtensor)] # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
...@@ -2171,6 +2211,9 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2171,6 +2211,9 @@ class test_local_subtensor_merge(unittest.TestCase):
x = tensor.matrix('x') x = tensor.matrix('x')
y = tensor.iscalar('y') y = tensor.iscalar('y')
f = function([x, y], x[::-1][:y], mode=mode_opt) f = function([x, y], x[::-1][:y], mode=mode_opt)
self._verify_stack_trace(f)
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
...@@ -2192,6 +2235,8 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2192,6 +2235,8 @@ class test_local_subtensor_merge(unittest.TestCase):
for idx2 in xrange(-7, 7): for idx2 in xrange(-7, 7):
f = function([x], x[idx1:][:idx2], mode=mode_opt) f = function([x], x[idx1:][:idx2], mode=mode_opt)
self._verify_stack_trace(f)
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
# print [t for t in topo if isinstance(t.op, tensor.Subtensor)] # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
...@@ -2210,6 +2255,9 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2210,6 +2255,9 @@ class test_local_subtensor_merge(unittest.TestCase):
y = tensor.iscalar('y') y = tensor.iscalar('y')
z = tensor.iscalar('y') z = tensor.iscalar('y')
f = function([x, y, z], x[y:][:z], mode=mode_opt) f = function([x, y, z], x[y:][:z], mode=mode_opt)
self._verify_stack_trace(f)
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
...@@ -2239,6 +2287,9 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2239,6 +2287,9 @@ class test_local_subtensor_merge(unittest.TestCase):
z = x[slice(*sl1)][slice(*sl2)] z = x[slice(*sl1)][slice(*sl2)]
f = function([x], z, mode=mode_opt) f = function([x], z, mode=mode_opt)
self._verify_stack_trace(f)
x_val = self.rng.uniform(size=shape).astype(config.floatX) x_val = self.rng.uniform(size=shape).astype(config.floatX)
f(x_val) f(x_val)
...@@ -2254,6 +2305,9 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2254,6 +2305,9 @@ class test_local_subtensor_merge(unittest.TestCase):
s2 = tensor.iscalar('s2') s2 = tensor.iscalar('s2')
f = function([x, b1, e1, s1, b2, e2, s2], x[b1:e1:s1][b2:e2:s2], f = function([x, b1, e1, s1, b2, e2, s2], x[b1:e1:s1][b2:e2:s2],
mode=mode_opt) mode=mode_opt)
self._verify_stack_trace(f)
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
...@@ -2292,6 +2346,7 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2292,6 +2346,7 @@ class test_local_subtensor_merge(unittest.TestCase):
t = theano.shared(numpy.int64(0)) t = theano.shared(numpy.int64(0))
fun = theano.function([x], y[t]) fun = theano.function([x], y[t])
val = fun(data) val = fun(data)
assert val == data[7:1:-1][0] assert val == data[7:1:-1][0]
...@@ -2337,6 +2392,9 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2337,6 +2392,9 @@ class test_local_subtensor_merge(unittest.TestCase):
s = tensor.iscalar('s') s = tensor.iscalar('s')
i = tensor.iscalar('i') i = tensor.iscalar('i')
f = function([x, b, e, s, i], x[b:e:s][i], mode=mode_opt) f = function([x, b, e, s, i], x[b:e:s][i], mode=mode_opt)
self._verify_stack_trace(f)
#theano.printing.debugprint(f, print_type=True) #theano.printing.debugprint(f, print_type=True)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
...@@ -2428,6 +2486,9 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2428,6 +2486,9 @@ class test_local_subtensor_merge(unittest.TestCase):
sub_x = x[slice1][slice2] sub_x = x[slice1][slice2]
f = theano.function([x] + input_vars, sub_x, mode=mode_opt) f = theano.function([x] + input_vars, sub_x, mode=mode_opt)
self._verify_stack_trace(f)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
# print [t for t in topo if isinstance(t.op, tensor.Subtensor)] # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
assert len([t for t in topo if isinstance(t.op, assert len([t for t in topo if isinstance(t.op,
...@@ -2485,6 +2546,8 @@ class test_local_subtensor_merge(unittest.TestCase): ...@@ -2485,6 +2546,8 @@ class test_local_subtensor_merge(unittest.TestCase):
sub_x = x[symbol_slice][i] sub_x = x[symbol_slice][i]
f = theano.function([x] + input_vars, sub_x, mode=mode_opt) f = theano.function([x] + input_vars, sub_x, mode=mode_opt)
self._verify_stack_trace(f)
topo = f.maker.fgraph.toposort() topo = f.maker.fgraph.toposort()
# print [t for t in topo if isinstance(t.op, tensor.Subtensor)] # print [t for t in topo if isinstance(t.op, tensor.Subtensor)]
assert len([t for t in topo if isinstance(t.op, assert len([t for t in topo if isinstance(t.op,
...@@ -2911,7 +2974,7 @@ class Test_local_elemwise_alloc(unittest.TestCase): ...@@ -2911,7 +2974,7 @@ class Test_local_elemwise_alloc(unittest.TestCase):
def _verify_stack_trace(self, f): def _verify_stack_trace(self, f):
for output in f.outputs: for output in f.outputs:
# Check stacktrace was copied over correctly after opt was applied # Check stacktrace was copied over correctly after opt was applied
self.assertTrue(hasattr(output.tag, 'trace')) self.assertTrue(hasattr(output.variable.tag, 'trace'))
def test_remove_alloc_wo_dimshuffle(self): def test_remove_alloc_wo_dimshuffle(self):
# No optimization on alloc # No optimization on alloc
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论