Commit 555af254, authored by lamblin

Merge pull request #791 from nouiz/fix_test3

fix crash and test related to local_sum_broadcastable
......@@ -426,8 +426,11 @@ def dimshuffle_as_view(node):
new_op = DimShuffle(op.input_broadcastable, op.new_order, inplace=True)
return [new_op(*node.inputs)]
# Register dimshuffle_as_view under the 'inplace' tag at the
# specialization stage.
register_specialize(dimshuffle_as_view, 'inplace')
# Step 60 is the inplace optimization stage.
compile.optdb.register('dimshuffle_as_view',
TopoOptimizer(dimshuffle_as_view,
# Warn (instead of raising) when the inplace replacement fails.
failure_callback=TopoOptimizer.warn_inplace), 60,
'fast_run', 'inplace')
# local_dimshuffle_lift is applied both at canonicalization and at
# specialization time.
register_canonicalize(local_dimshuffle_lift)
register_specialize(local_dimshuffle_lift)
......@@ -3142,9 +3145,14 @@ def local_cut_useless_reduce(node):
return [summed]
@register_canonicalize
# Enabling this optimization at the canonicalization step breaks this test:
# theano/tensor/tests/test_opt.py:T_local_reduce.test_local_reduce_broadcast_some_0
# See issue gh-790.
#
#@register_canonicalize
@register_specialize
@gof.local_optimizer([])
def local_sum_broadcastable(node):
def local_reduce_broadcastable(node):
"""Remove reduction over broadcastable dimensions"""
if isinstance(node.op, T.CAReduce):
reduced, = node.inputs
......@@ -3169,12 +3177,18 @@ def local_sum_broadcastable(node):
ii += 1
new_reduced = reduced.dimshuffle(*pattern)
if new_axis:
new_op = node.op.__class__(axis=new_axis)
if type(node.op) == theano.tensor.elemwise.CAReduce:
# This happen for tensor.max(), tensor.min()
new_op = node.op.__class__(node.op.scalar_op,
axis=new_axis)
else:
new_op = node.op.__class__(axis=new_axis)
return [new_op(new_reduced)]
else:
# -- in this case we can remove the reduction completely
return [new_reduced.astype(odtype)]
@register_specialize
@gof.local_optimizer([])
def local_sum_alloc(node):
......
......@@ -3188,7 +3188,8 @@ class test_local_remove_switch_const_cond(unittest.TestCase):
class T_local_sum(unittest.TestCase):
def setUp(self):
self.mode = theano.compile.get_default_mode().including('canonicalize')
self.mode = theano.compile.get_default_mode().including('canonicalize',
'specialize')
def test_local_sum_all_to_none(self):
a = T.tensor3()
......@@ -3312,54 +3313,59 @@ class T_local_sum(unittest.TestCase):
finally:
config.on_opt_error = backup
def test_local_sum_broadcast_all_0(self):
    """A sum over a fully-broadcastable 3d input must be optimized away."""
    opt = optdb.query(self.mode._optimizer)
    x = T.TensorType('int64', (True, True, True))()
    fgraph = FunctionGraph([x], [x.sum()])
    opt.optimize(fgraph)
    # No reduction node should survive the optimization.
    assert all(not isinstance(n.op, T.CAReduce) for n in fgraph.toposort())
def test_local_sum_broadcast_all_1(self):
    """An explicit sum over every (broadcastable) axis is optimized away."""
    opt = optdb.query(self.mode._optimizer)
    x = T.TensorType('int64', (True, True))()
    fgraph = FunctionGraph([x], [x.sum(axis=[0, 1])])
    opt.optimize(fgraph)
    # The optimized graph must not contain any reduction node.
    remaining = [n for n in fgraph.toposort()
                 if isinstance(n.op, T.CAReduce)]
    assert not remaining
def test_local_sum_broadcast_some_0(self):
# Sum over axes [0, 1] of a (True, False, True)-broadcastable input:
# axis 0 is broadcastable (length 1), so only the reduction over the
# non-broadcastable axis 1 should remain after optimization.
optimizer = optdb.query(self.mode._optimizer)
x = T.TensorType('int64', (True, False, True))()
g = FunctionGraph([x], [x.sum(axis=[0, 1])])
optimizer.optimize(g)
order = g.toposort()
# Exactly one reduction node must survive.
assert 1 == sum([isinstance(node.op, T.CAReduce) for node in order])
# NOTE(review): the position of the surviving CAReduce in the toposort
# appears to depend on the compilation mode -- confirm both branches.
if config.mode == 'FAST_COMPILE':
node = order[-1]
else:
node = order[-2]
op = node.op
assert isinstance(op, T.CAReduce)
# -- the leading broadcastable dimension has been dropped
# by the local_sum_broadcastable optimization
# now summation is over the original x's dimension 1.
assert node.inputs[0].ndim == 2, node
assert op.axis == (0,), op.axis
def test_local_sum_broadcast_some_1(self):
    """Summing only over broadcastable axes leaves no reduction node."""
    opt = optdb.query(self.mode._optimizer)
    x = T.TensorType('int64', (True, False, True))()
    fgraph = FunctionGraph([x], [x.sum(axis=[0, 2])])
    opt.optimize(fgraph)
    reductions = [n for n in fgraph.toposort()
                  if isinstance(n.op, T.CAReduce)]
    assert len(reductions) == 0
class T_local_reduce(unittest.TestCase):
    """Tests for the local_reduce_broadcastable optimization across all
    CAReduce variants (sum, all, any, prod, max, min)."""

    def setUp(self):
        self.mode = theano.compile.get_default_mode().including(
            'canonicalize', 'specialize')

    def test_local_reduce_broadcast_all_0(self):
        # Reducing a fully-broadcastable 3d input: the whole reduction
        # should disappear from the compiled graph.
        for reduce_fct in [tensor.sum, tensor.all, tensor.any, tensor.prod,
                           tensor.max, tensor.min]:
            x = T.TensorType('int64', (True, True, True))()
            f = theano.function([x], [reduce_fct(x)], mode=self.mode)
            topo = f.maker.fgraph.toposort()
            assert all(not isinstance(n.op, T.CAReduce) for n in topo)

    def test_local_reduce_broadcast_all_1(self):
        # Same, with explicit axes covering every (broadcastable) dimension
        # of a 2d input.
        for reduce_fct in [tensor.sum, tensor.all, tensor.any, tensor.prod,
                           tensor.max, tensor.min]:
            x = T.TensorType('int64', (True, True))()
            f = theano.function([x], [reduce_fct(x, axis=[0, 1])],
                                mode=self.mode)
            topo = f.maker.fgraph.toposort()
            assert all(not isinstance(n.op, T.CAReduce) for n in topo)

    def test_local_reduce_broadcast_some_0(self):
        # Axis 1 is not broadcastable, so exactly one reduction node must
        # survive, now reducing over the remaining dimension 0.
        for reduce_fct in [tensor.sum, tensor.all, tensor.any, tensor.prod,
                           tensor.max, tensor.min]:
            x = T.TensorType('int64', (True, False, True))()
            f = theano.function([x], [reduce_fct(x, axis=[0, 1])],
                                mode=self.mode)
            topo = f.maker.fgraph.toposort()
            reduce_nodes = [n for n in topo
                            if isinstance(n.op, tensor.CAReduce)]
            assert len(reduce_nodes) == 1
            node = reduce_nodes[0]
            op = node.op
            assert isinstance(op, T.CAReduce)
            # -- the leading broadcastable dimension has been dropped
            # by the local_reduce_broadcastable optimization
            # now summation is over the original x's dimension 1.
            assert node.inputs[0].ndim == 2, node
            assert op.axis == (0,), op.axis

    def test_local_reduce_broadcast_some_1(self):
        # Every reduced axis is broadcastable: no reduction node remains.
        for reduce_fct in [tensor.sum, tensor.all, tensor.any, tensor.prod,
                           tensor.max, tensor.min]:
            x = T.TensorType('int64', (True, True, True))()
            f = theano.function([x], [reduce_fct(x, axis=[0, 2])],
                                mode=self.mode)
            topo = f.maker.fgraph.toposort()
            assert all(not isinstance(n.op, T.CAReduce) for n in topo)
class T_local_sum_dimshuffle(unittest.TestCase):
......
Markdown format
0%
You are adding 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment