提交 9aac06d5 authored 作者: Pascal Lamblin's avatar Pascal Lamblin

Always introduce an Alloc node when calling alloc()

That way, DebugMode can catch shape errors.
上级 de717450
......@@ -2883,22 +2883,6 @@ class Alloc(gof.Op):
gx = gz.sum(axis=range(n_axes_to_sum))
return [gx] + [None for i in inputs[1:]]
def __call__(self, val, *shapes):
    """
    Build the graph output for allocating `val` broadcast to `shapes`.

    If the alloc would be useless (the resulting variable would have
    exactly the same type as `val`), this function returns `val`
    itself instead of inserting an Alloc node in the graph.
    If you always want an Alloc node, call make_node.

    NOTE(review): skipping the Alloc node here means DebugMode cannot
    catch a shape mismatch between `val` and `shapes` — presumably why
    this shortcut is being removed in this commit; confirm against the
    commit message.
    """
    # Delegate actual node construction to the parent Op.
    ret = super(Alloc, self).__call__(val, *shapes)
    try:
        # It makes optimization difficult when useless allocs are thrown
        # into the graph at every stage of optimization. This little logic
        # tries to help at least in some cases.
        if val.type == ret.type:
            return val
    except AttributeError:
        # `val` may not have a `.type` attribute (e.g. a raw constant);
        # in that case fall through and return the Alloc output.
        pass
    return ret
def R_op(self, inputs, eval_points):
if eval_points[0] is None:
return [None]
......
......@@ -1073,6 +1073,7 @@ def local_fill_to_alloc(node):
@register_specialize
@register_stabilize
@register_canonicalize
@gof.local_optimizer([T.alloc])
def local_useless_alloc(node):
......
......@@ -2194,6 +2194,28 @@ def test_local_fill_useless():
f(m_, x_)
class Test_local_useless_alloc(unittest.TestCase):
    """Check that a useless Alloc is present in the raw graph but removed
    by the optimizer, and that DebugMode still catches shape mismatches."""

    def setUp(self):
        seed = utt.fetch_seed()
        self.rng = numpy.random.RandomState(seed)

    def test0(self):
        val = shared(self.rng.randn(3, 7))
        out = tensor.alloc(val, 6, 7)
        # It is a bad idea to have tensor.alloc return val directly,
        # because the shape mismatch cannot be caught.
        assert out.owner is not None
        assert isinstance(out.owner.op, tensor.Alloc)
        fn = function([], out)
        # The optimization should then be applied, and remove Alloc.
        topo_ops = [node.op for node in fn.maker.env.toposort()]
        assert topo_ops == [compile.deep_copy_op]
        # In DebugMode, the shape mismatch should be detected.
        if isinstance(mode_opt, compile.DebugMode):
            self.assertRaises(ValueError, fn)
class test_shapeoptimizer(unittest.TestCase):
def setUp(self):
utt.seed_rng()
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论