提交 c69c657b 作者: Vincent Michalski

test_local_adv_sub1_adv_inc_sub1.test_stacktrace: refactored and fixed unit test…

test_local_adv_sub1_adv_inc_sub1.test_stacktrace: refactored and fixed unit test and removed code that was never executed, added missing copy_stack_trace call in the opt.
上级 a0478c3e
...@@ -3281,20 +3281,21 @@ def local_adv_sub1_adv_inc_sub1(node): ...@@ -3281,20 +3281,21 @@ def local_adv_sub1_adv_inc_sub1(node):
cond = [T.all(T.and_(T.lt(idx, x.shape[0]), T.ge(idx, -x.shape[0])))] cond = [T.all(T.and_(T.lt(idx, x.shape[0]), T.ge(idx, -x.shape[0])))]
if not node.fgraph.shape_feature.same_shape(idx, y, 0, 0): if not node.fgraph.shape_feature.same_shape(idx, y, 0, 0):
cond.append(T.eq(idx.shape[0], y.shape[0])) cond.append(T.eq(idx.shape[0], y.shape[0]))
y = Assert("Bad indexing or shapes in a AdvancedIncSubtensor1 " r = Assert("Bad indexing or shapes in a AdvancedIncSubtensor1 "
"that was optimized away")(y, *cond) "that was optimized away")(y, *cond)
copy_stack_trace(y, r)
if y.dtype == node.outputs[0].dtype: if r.dtype == node.outputs[0].dtype:
return [y] return [r]
# It is possible that y is upcast or downcast to x.dtype. # It is possible that y is upcast or downcast to x.dtype.
# In all case, as we set or add with 0, we can just cast y. # In all case, as we set or add with 0, we can just cast y.
r = T.cast(y, node.outputs[0].dtype) r2 = T.cast(r, node.outputs[0].dtype)
# Copy over stacktrace from before casting, since # Copy over stacktrace from before casting, since
# we don't expect problems in the casting operation, # we don't expect problems in the casting operation,
# and any problems in the indexing would have been spotted above. # and any problems in the indexing would have been spotted above.
copy_stack_trace(y, r) copy_stack_trace(r, r2)
return [r] return [r2]
@register_specialize @register_specialize
......
...@@ -2801,32 +2801,23 @@ class test_local_adv_sub1_adv_inc_sub1(unittest.TestCase): ...@@ -2801,32 +2801,23 @@ class test_local_adv_sub1_adv_inc_sub1(unittest.TestCase):
self.assertRaises((AssertionError, ValueError), self.assertRaises((AssertionError, ValueError),
f, dx, dy, [1]) f, dx, dy, [1])
def test_stacktrace(self): def test_stack_trace(self):
x = tensor.matrix("x") x = tensor.matrix("x")
y = tensor.matrix("y") # test cases with y.dtype
# - equal to x.dtype
# - different from x.dtype (to trigger the cast in
# local_adv_sub1_adv_inc_sub1)
ys = [tensor.matrix("y"), tensor.dmatrix("y")]
idx = tensor.ivector() idx = tensor.ivector()
dx = numpy.random.rand(4, 5).astype(config.floatX) # set_subtensor and then subtensor with both ys
dy = numpy.random.rand(2, 5).astype(config.floatX) incs = [tensor.set_subtensor(x[idx], y) for y in ys]
didx = numpy.asarray([1, 3], "int32") outs = [inc[idx] for inc in incs]
# set_subtensor for y, out in zip(ys, outs):
inc = tensor.set_subtensor(x[idx], y) f = theano.function([x, y, idx], out, self.mode)
o = inc[idx] self.assertTrue(check_stack_trace(
# Compile function using only the 'local_subtensor_make_vector' optimization, f, ops_to_check=(Assert, scal.Cast)))
# which requires us to add the 'canonicalize' phase.
mode = theano.compile.mode.Mode(optimizer=None).including('canonicalize').including("local_adv_sub1_adv_inc_sub1")
f = theano.function([x, y, idx], o, self.mode)
# The opt only removes nodes in this case, no check_stack_trace needed
# Compile function using all optimizations in fast_compile mode,
# including the 'local_subtensor_make_vector' optimization
mode = theano.compile.mode.get_mode('FAST_COMPILE').including("local_adv_sub1_adv_inc_sub1")
f = theano.function([x, y, idx], o, self.mode)
# The opt only removes nodes in this case, no check_stack_trace needed
# See if there are use cases which add nodes and need check_stack_trace
# See issue #4421
class Test_alloc_zero(unittest.TestCase): class Test_alloc_zero(unittest.TestCase):
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论