Commit 07b499b1 authored by Frederic Bastien

simplified code a little bit.

Parent 3b9fbee8
@@ -1230,7 +1230,7 @@ def local_subtensor_lift(node):
         return [u.owner.op(*new_inputs)]
 
-def greedy_local_optimizer(list_optimizations, out, no_opt):
+def greedy_local_optimizer(list_optimizations, out):
     '''
     This function traverses the computation graph described by all
     ``node`` in the graph before the variable out but that are not in the env.
@@ -1240,13 +1240,17 @@ def greedy_local_optimizer(list_optimizations, out, no_opt):
     the graph of the indices of a subtensor.
 
     We should not apply optimizations on node that are in env.
-    So we don't optimize node in no_opt.
+    So we don't optimize node that have an attribute env.
+
+    :note: This don't do an equilibrium... So if there is optimization
+           in the list, there can be case when calling this function
+           multiple time do think at each call.
     '''
     def local_recursive_function( list_opt, out, optimized_vars, depth):
         if not out.owner :
             return [out], optimized_vars
         node = out.owner
-        if node in no_opt:
+        if hasattr(node, 'env'):
            return node.outputs, optimized_vars
        for idx, inp in enumerate(node.inputs):
            if inp in optimized_vars:
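
The hunk above replaces the explicit no_opt set with a hasattr(node, 'env') check: any node that already belongs to an env is simply skipped during the greedy traversal, and each local optimization is tried once per node with no equilibrium pass. Below is a minimal, self-contained sketch of that idea; the toy Variable/Node classes and the fold_add optimization are illustrative stand-ins, not Theano's actual classes or API.

class Variable(object):
    def __init__(self, owner=None, value=None):
        self.owner = owner            # Node that produced this Variable, or None
        self.value = value            # constant value, if any

class Node(object):
    def __init__(self, op, inputs):
        self.op = op
        self.inputs = inputs
        self.outputs = [Variable(owner=self)]

def greedy_local_sketch(list_optimizations, out):
    # Greedy: each optimization is tried once per node; no equilibrium is
    # sought, so a second call may still find more work (as the new :note: says).
    node = out.owner
    if node is None:
        return out
    if hasattr(node, 'env'):          # node already managed by an env: skip it
        return out
    node.inputs = [greedy_local_sketch(list_optimizations, i)
                   for i in node.inputs]
    for opt in list_optimizations:
        new_out = opt(node)           # a local opt returns a Variable or None
        if new_out is not None:
            return new_out
    return out

# Hypothetical local optimization: fold an addition of two constants.
def fold_add(node):
    if node.op == 'add' and all(i.value is not None for i in node.inputs):
        return Variable(value=sum(i.value for i in node.inputs))
    return None

a, b = Variable(value=1), Variable(value=2)
out = Node('add', [a, b]).outputs[0]
print(greedy_local_sketch([fold_add], out).value)   # -> 3
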
@@ -1399,24 +1403,16 @@ def merge_two_slices(slice1, len1, slice2, len2):
         start = T.switch(T.le(flen,0), 0, start)
         stop = T.switch(T.le(flen,0), 0, stop)
 
-        # Find the list of nodes in the env.
-        # We should not optimize them here!
-        list_no_opt = set()
-        for sl in [slice1, slice2]:
-            if isinstance(sl, slice):
-                for idx in [sl.start, sl.stop, sl.step]:
-                    if isinstance(idx, Variable):
-                        list_no_opt.update(idx.env.nodes)
-            if isinstance(sl, Variable):
-                list_no_opt.update(sl.env.nodes)
         # The canonical form of the slice is pretty complicated
         # and is not simplified. We simplify it in advance here
         # as otherwise this create too many useless optimization that
         # DebugMode must check.
-        start = greedy_local_optimizer( list_opt, start, list_no_opt)
-        stop = greedy_local_optimizer( list_opt, stop, list_no_opt)
-        step = greedy_local_optimizer( list_opt, step, list_no_opt)
+        start = greedy_local_optimizer( list_opt, start)
+        stop = greedy_local_optimizer( list_opt, stop)
+        step = greedy_local_optimizer( list_opt, step)
        #start = theano.printing.Print('start')(start)
        #stop = theano.printing.Print('stop')(stop)
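
The context lines at the top of this hunk clamp the merged slice with T.switch: when the computed length flen is not positive, both start and stop collapse to 0 so the result stays an empty slice. A plain-Python mirror of that behaviour, given as an illustration of the intent rather than the symbolic graph Theano builds:

def clamp_empty(flen, start, stop):
    if flen <= 0:          # merged slice would be empty
        return 0, 0        # collapse both bounds so it stays empty
    return start, stop

print(clamp_empty(-2, 5, 3))   # -> (0, 0)
print(clamp_empty(4, 1, 5))    # -> (1, 5), left unchanged
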