Commit 60f3ccfe authored by Brandon T. Willard

Remove premature optimizations involving in-place graph changes

This commit removes uses of `pre_greedy_local_optimizer` and `pre_constant_merge` that needlessly introduce complexity into the optimization process through in-place graph changes that occur completely outside of the optimization/`FunctionGraph` framework. Closes #190.
Parent 033c51c1
......@@ -63,11 +63,7 @@ from theano.compile import optdb
from theano.compile.function.types import deep_copy_op
from theano.gof import DestroyHandler, InconsistencyError, toolbox
from theano.gof.graph import equal_computations
from theano.gof.opt import (
GlobalOptimizer,
pre_constant_merge,
pre_greedy_local_optimizer,
)
from theano.gof.opt import GlobalOptimizer
from theano.scan.op import Scan
from theano.scan.utils import (
clone,
......@@ -1413,22 +1409,6 @@ class ScanSaveMem(gof.GlobalOptimizer):
if store_steps[i] != -1:
pval = select_max(pval, store_steps[i])
# TODO: Simplify the number of steps needed.
# FB: This need good testing, left to later.
# call get_scalar_constant_value()? it can
# return python/numpy scalar or np.ndarray
# currently.
# pval = pre_greedy_local_optimizer(list_opt_slice,
# pval)
# pval = pre_constant_merge(fgraph, [pval])[0]
# if (isinstance(pval, theano.tensor.TensorConstant)
# and
# pval.dtype.startswith('int')):
# try:
# pval = int(pval.data)
# except Exception:
# pass
store_steps[i] = pval
flag_store = True
......@@ -1486,20 +1466,11 @@ class ScanSaveMem(gof.GlobalOptimizer):
tmp_idx = tensor.switch(
cval < initl, cval + initl, cval - initl
)
tmp = pre_greedy_local_optimizer(
fgraph, list_opt_slice, tmp_idx
)
tmp = pre_constant_merge(fgraph, [tmp])[0]
nw_input = expand_empty(_nw_input, tmp)
nw_input = expand_empty(_nw_input, tmp_idx)
else:
tmp = tensor.as_tensor_variable(val)
initl = tensor.as_tensor_variable(init_l[i])
tmp = tensor.maximum(tmp, initl)
tmp = pre_greedy_local_optimizer(
fgraph, list_opt_slice, tmp
)
tmp = pre_constant_merge(fgraph, [tmp])[0]
nw_input = nw_inputs[offset + idx][:tmp]
nw_inputs[offset + idx] = nw_input
......@@ -1565,10 +1536,6 @@ class ScanSaveMem(gof.GlobalOptimizer):
for k, v in compress_map.items():
inv_compress_map[v] = k
node_ins = [
pre_greedy_local_optimizer(fgraph, list_opt_slice, x) for x in node_ins
]
node_ins = pre_constant_merge(fgraph, node_ins)
# 3.6 Compose the new scan
# TODO: currently we don't support scan with 0 step. So
# don't create one.
......
......@@ -32,14 +32,7 @@ from theano.gof import (
toolbox,
)
from theano.gof.op import Op
from theano.gof.opt import (
GlobalOptimizer,
copy_stack_trace,
in2out,
local_optimizer,
pre_constant_merge,
pre_greedy_local_optimizer,
)
from theano.gof.opt import GlobalOptimizer, copy_stack_trace, in2out, local_optimizer
from theano.gof.utils import MethodNotDefined, TestValueError
from theano.gradient import DisconnectedType
......@@ -3201,13 +3194,6 @@ def merge_two_slices(fgraph, slice1, len1, slice2, len2):
``len1`` is the length of the tensor **before** applying the first slice,
while ``len2`` is the length **after** applying the first slice.
"""
list_opt = [
local_abs_merge,
local_mul_switch_sink,
local_upcast_elemwise_constant_inputs,
local_useless_switch,
constant_folding,
]
if not isinstance(slice1, slice):
raise ValueError(
......@@ -3240,7 +3226,6 @@ def merge_two_slices(fgraph, slice1, len1, slice2, len2):
val = tt.switch(tt.lt(sl2, 0), -len1 - 1, val)
if sl1.step:
val = tt.switch(tt.eq(sl1.step, 0), len1 + 1, val)
val = pre_greedy_local_optimizer(fgraph, list_opt, val)
return val
else:
# We are in the more complex case when we do not actually know
......@@ -3266,7 +3251,6 @@ def merge_two_slices(fgraph, slice1, len1, slice2, len2):
val = tt.switch(tt.lt(sl2, 0), -len1 - 1, val)
if sl1.step:
val = tt.switch(tt.eq(sl1.step, 0), len1 + 1, val)
val = pre_greedy_local_optimizer(fgraph, list_opt, val)
return val
else:
# We are dealing with two slices that need to be put together
......@@ -3317,20 +3301,6 @@ def merge_two_slices(fgraph, slice1, len1, slice2, len2):
start = tt.switch(tt.le(flen, 0), 0, start)
stop = tt.switch(tt.le(flen, 0), 0, stop)
# The canonical form of the slice is pretty complicated
# and is not simplified. We simplify it in advance here
# as otherwise this creates too many useless optimizations that
# DebugMode must check.
start = pre_greedy_local_optimizer(fgraph, list_opt, start)
stop = pre_greedy_local_optimizer(fgraph, list_opt, stop)
step = pre_greedy_local_optimizer(fgraph, list_opt, step)
start = pre_greedy_local_optimizer(fgraph, list_opt, start)
stop = pre_greedy_local_optimizer(fgraph, list_opt, stop)
step = pre_greedy_local_optimizer(fgraph, list_opt, step)
# Pre merge constant for the same reason.
start, stop, step = pre_constant_merge(fgraph, [start, stop, step])
return slice(start, stop, step)
......
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment