Commit c69c657b authored by Vincent Michalski

test_local_adv_sub1_adv_inc_sub1.test_stacktrace: refactored and fixed unit test…

test_local_adv_sub1_adv_inc_sub1.test_stacktrace: refactored and fixed unit test and removed code that was never executed, added missing copy_stack_trace call in the opt.
Parent: a0478c3e
......@@ -3281,20 +3281,21 @@ def local_adv_sub1_adv_inc_sub1(node):
cond = [T.all(T.and_(T.lt(idx, x.shape[0]), T.ge(idx, -x.shape[0])))]
if not node.fgraph.shape_feature.same_shape(idx, y, 0, 0):
cond.append(T.eq(idx.shape[0], y.shape[0]))
y = Assert("Bad indexing or shapes in a AdvancedIncSubtensor1 "
r = Assert("Bad indexing or shapes in a AdvancedIncSubtensor1 "
"that was optimized away")(y, *cond)
copy_stack_trace(y, r)
if y.dtype == node.outputs[0].dtype:
return [y]
if r.dtype == node.outputs[0].dtype:
return [r]
# It is possible that y is upcast or downcast to x.dtype.
# In all case, as we set or add with 0, we can just cast y.
r = T.cast(y, node.outputs[0].dtype)
r2 = T.cast(r, node.outputs[0].dtype)
# Copy over stacktrace from before casting, since
# we don't expect problems in the casting operation,
# and any problems in the indexing would have been spotted above.
copy_stack_trace(y, r)
return [r]
copy_stack_trace(r, r2)
return [r2]
@register_specialize
......
......@@ -2801,32 +2801,23 @@ class test_local_adv_sub1_adv_inc_sub1(unittest.TestCase):
self.assertRaises((AssertionError, ValueError),
f, dx, dy, [1])
def test_stacktrace(self):
def test_stack_trace(self):
x = tensor.matrix("x")
y = tensor.matrix("y")
# test cases with y.dtype
# - equal to x.dtype
# - different from x.dtype (to trigger the cast in
# local_adv_sub1_adv_inc_sub1)
ys = [tensor.matrix("y"), tensor.dmatrix("y")]
idx = tensor.ivector()
dx = numpy.random.rand(4, 5).astype(config.floatX)
dy = numpy.random.rand(2, 5).astype(config.floatX)
didx = numpy.asarray([1, 3], "int32")
# set_subtensor
inc = tensor.set_subtensor(x[idx], y)
o = inc[idx]
# Compile function using only the 'local_subtensor_make_vector' optimization,
# which requires us to add the 'canonicalize' phase.
mode = theano.compile.mode.Mode(optimizer=None).including('canonicalize').including("local_adv_sub1_adv_inc_sub1")
f = theano.function([x, y, idx], o, self.mode)
# The opt only removes nodes in this case, no check_stack_trace needed
# Compile function using all optimizations in fast_compile mode,
# including the 'local_subtensor_make_vector' optimization
mode = theano.compile.mode.get_mode('FAST_COMPILE').including("local_adv_sub1_adv_inc_sub1")
f = theano.function([x, y, idx], o, self.mode)
# The opt only removes nodes in this case, no check_stack_trace needed
# See if there are use cases which add nodes and need check_stack_trace
# See issue #4421
# set_subtensor and then subtensor with both ys
incs = [tensor.set_subtensor(x[idx], y) for y in ys]
outs = [inc[idx] for inc in incs]
for y, out in zip(ys, outs):
f = theano.function([x, y, idx], out, self.mode)
self.assertTrue(check_stack_trace(
f, ops_to_check=(Assert, scal.Cast)))
class Test_alloc_zero(unittest.TestCase):
......
Markdown is supported
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment