提交 aa6a4960 authored 作者: Pascal Lamblin's avatar Pascal Lamblin

Always create Alloc node if not during opt.

Also use correct mode in test.
上级 9aac06d5
......@@ -2883,6 +2883,28 @@ class Alloc(gof.Op):
gx = gz.sum(axis=range(n_axes_to_sum))
return [gx] + [None for i in inputs[1:]]
def __call__(self, val, *shapes):
    """
    Build the Alloc, but return `val` itself when the Alloc would be
    a no-op.

    When invoked outside of a graph-optimization context (for
    instance, by a user constructing a graph by hand), an Alloc node
    is always returned so that DebugMode can still check for size
    mismatches.  Call ``make_node`` directly if an Alloc node is
    unconditionally wanted.
    """
    alloc_out = super(Alloc, self).__call__(val, *shapes)
    try:
        # Useless Allocs sprinkled through the graph at every stage
        # make optimization harder.  If `val` is already attached to
        # an env and its type matches the Alloc output exactly, the
        # Alloc adds nothing -- hand back `val` unchanged.
        if hasattr(val, 'env') and (val.type == alloc_out.type):
            return val
    except AttributeError:
        pass
    return alloc_out
def R_op(self, inputs, eval_points):
if eval_points[0] is None:
return [None]
......
......@@ -2206,7 +2206,7 @@ class Test_local_useless_alloc(unittest.TestCase):
# because the shape mismatch cannot be caught.
assert a.owner and isinstance(a.owner.op, tensor.Alloc)
f = function([], a)
f = function([], a, mode=mode_opt)
# The optimization should then be applied, and remove Alloc
assert ([node.op for node in f.maker.env.toposort()]
== [compile.deep_copy_op])
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论