提交 ac601d00 authored 作者: Iulian Vlad Serban's avatar Iulian Vlad Serban

Removed local_fill_cut optimization. Fixed flake8 error.

上级 3db7e767
...@@ -91,7 +91,7 @@ Optimization FAST_RUN FAST_COMPILE ...@@ -91,7 +91,7 @@ Optimization FAST_RUN FAST_COMPILE
* ``f(fill(a,b), c) -> f(b, c)`` * ``f(fill(a,b), c) -> f(b, c)``
* ``f(fill(a, b), fill(c, d), e) -> fill(a, fill(c, f(b, d, e)))`` * ``f(fill(a, b), fill(c, d), e) -> fill(a, fill(c, f(b, d, e)))``
See :func:`opt.local_fill_cut`, :func:`opt.local_fill_sink` See :func:`opt.local_fill_sink`
inc_subtensor serialization inc_subtensor serialization
Incrementing a small subregion of a large tensor can be done quickly Incrementing a small subregion of a large tensor can be done quickly
......
...@@ -4500,75 +4500,6 @@ if 0: ...@@ -4500,75 +4500,6 @@ if 0:
# Middleman cuts # # Middleman cuts #
################## ##################
@gof.local_optimizer([T.Elemwise])
def local_fill_cut(node):
    """
    Graph rewrite: f(fill(a, b), c) -> f(b, c), provided c.type == a.type.

    Drops a ``fill`` feeding an Elemwise when another input already carries
    the output's type (and hence its shape/broadcast pattern), since Elemwise
    broadcasting makes the explicit fill redundant.

    Returns a list of replacement outputs on success, or False when the
    rewrite does not apply (per the local-optimizer protocol).
    """
    # This optimization is basically for getting broadcasting to
    # replace fill. This is always possible when using a Compound
    # Elemwise operation, but it is not always possible without one
    # (consider filling a large matrix with a scalar, and then adding
    # another scalar). The only numbers that count are the two
    # scalars, but we can't ignore the large matrix because it gives
    # the shape of the result.
    # Julian: I've fixed the if line below to check if it's an instance.
    # Is this correct?
    # Also, I doubt this optimization is being applied anywhere.
    # See my comment below.
    # (previous check, wrong because node.op is an Elemwise *instance*:)
    # if node.op != T.Elemwise:
    #     return False
    if (not isinstance(node.op, T.Elemwise)):
        return False
    output = node.outputs[0]
    try:
        # reference is some input with the same type as the output but
        # that is not produced by a fill; its broadcast pattern already
        # determines the result shape, so fills can be dropped safely.
        reference = [input
                     for input in node.inputs
                     if input.type == output.type and
                     (not input.owner or input.owner.op != T.fill)][0]
    except IndexError:
        # No fill-free input of the right type: rewrite does not apply.
        return False
    new_inputs = []
    new = False
    for input in node.inputs:
        # Julian: no matter what kind of function I create,
        # it seems that input.owner.op == T.fill is never true.
        # Somehow the fill ops are replaced by other ops
        # (e.g. Elemwise{second,no_inplace}).
        # If that's true, I don't think we have any tests for this opt.
        # NOTE(review): this comparison presumably expects ``T.fill`` to
        # appear literally as the owner op — confirm against how fill is
        # constructed in this Theano version.
        if input.owner and input.owner.op == T.fill:
            # fill(model, filling): keep only ``filling`` when the
            # reference input's broadcast pattern subsumes it.
            model, filling = input.owner.inputs
            if encompasses_broadcastable(reference.type.broadcastable,
                                         filling.type.broadcastable):
                new_inputs.append(filling)
                new = True
                continue
        new_inputs.append(input)
    if not new:
        # Nothing was replaced; signal "no change" to the optimizer driver.
        return False
    rval = node.op(*new_inputs)
    # Copy over stacktrace from previous elementwise op output.
    # Since we are certain that an error in the cg can never come
    # from the removed fill op, it must come from the elemntwise op.
    copy_stack_trace(node.outputs, rval)
    # node.op(...) may return a single Variable or a list; normalize to
    # the full output list of the new apply node in both cases.
    if isinstance(rval, gof.Variable):
        return rval.owner.outputs
    else:
        return rval[0].owner.outputs


register_canonicalize(local_fill_cut)
register_canonicalize(gof.OpRemove(T.tensor_copy), name='remove_tensor_copy') register_canonicalize(gof.OpRemove(T.tensor_copy), name='remove_tensor_copy')
################ ################
...@@ -6136,7 +6067,7 @@ def local_add_specialize(node): ...@@ -6136,7 +6067,7 @@ def local_add_specialize(node):
return False return False
register_specialize(local_add_specialize) register_specialize(local_add_specialize)
mul_canonizer = in2out(gof.LocalOptGroup(local_mul_canonizer, local_fill_cut, mul_canonizer = in2out(gof.LocalOptGroup(local_mul_canonizer,
local_fill_sink, apply_all_opts=True), local_fill_sink, apply_all_opts=True),
name='mul_canonizer_groups') name='mul_canonizer_groups')
...@@ -6342,7 +6273,7 @@ def add_calculate(num, denum, aslist=False, out_type=None): ...@@ -6342,7 +6273,7 @@ def add_calculate(num, denum, aslist=False, out_type=None):
local_add_canonizer = Canonizer(T.add, T.sub, T.neg, add_calculate) local_add_canonizer = Canonizer(T.add, T.sub, T.neg, add_calculate)
add_canonizer = in2out(gof.LocalOptGroup(local_add_canonizer, local_fill_cut, add_canonizer = in2out(gof.LocalOptGroup(local_add_canonizer,
local_fill_sink, apply_all_opts=True), local_fill_sink, apply_all_opts=True),
name='add_canonizer_group') name='add_canonizer_group')
......
...@@ -3431,7 +3431,7 @@ def test_local_subtensor_of_alloc(): ...@@ -3431,7 +3431,7 @@ def test_local_subtensor_of_alloc():
def test_local_fill_useless(): def test_local_fill_useless():
# Test opt local_fill_cut # Test opt local_fill_useless
x = dvector() x = dvector()
y = dvector() y = dvector()
z = lvector() z = lvector()
...@@ -3446,8 +3446,6 @@ def test_local_fill_useless(): ...@@ -3446,8 +3446,6 @@ def test_local_fill_useless():
f = function([x], T.fill(x, x) * 2, mode=mode_opt) f = function([x], T.fill(x, x) * 2, mode=mode_opt)
assert [node.op for node in f.maker.fgraph.toposort()] == [T.mul] assert [node.op for node in f.maker.fgraph.toposort()] == [T.mul]
f(x_) f(x_)
# Julian: This doesn't work. See comments inside local_fill_cut.
# assert check_stack_trace(f, ops_to_check='all')
# basic case # basic case
f = function([x, y], T.second(y, x) * 2, mode=mode_opt) f = function([x, y], T.second(y, x) * 2, mode=mode_opt)
...@@ -5659,8 +5657,8 @@ class T_local_sum_prod(unittest.TestCase): ...@@ -5659,8 +5657,8 @@ class T_local_sum_prod(unittest.TestCase):
Test that stack trace is copied over correctly. Test that stack trace is copied over correctly.
""" """
m0 = theano.compile.get_default_mode()\ m0 = theano.compile.get_default_mode()\
.excluding('inplace_elemwise_opt')\ .excluding('inplace_elemwise_opt')\
.including('canonicalize', 'specialize') .including('canonicalize', 'specialize')
vect = T.dvector() vect = T.dvector()
mat = T.dmatrix() mat = T.dmatrix()
......
Markdown 格式
0%
您提及了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论