提交 8c7511bf authored 作者: AdeB's avatar AdeB

Move copy_stack_trace to gof.opt

上级 c10aa585
...@@ -2568,3 +2568,48 @@ def pre_greedy_local_optimizer(list_optimizations, out): ...@@ -2568,3 +2568,48 @@ def pre_greedy_local_optimizer(list_optimizations, out):
final_outs, optimized_nodes = local_recursive_function( final_outs, optimized_nodes = local_recursive_function(
list_optimizations, out, {}, 0) list_optimizations, out, {}, 0)
return final_outs[out_index] return final_outs[out_index]
def copy_stack_trace(from_var, to_var):
    """
    Copies the stack trace from one or more tensor variables to
    one or more tensor variables.

    Parameters
    ----------
    from_var
        Tensor variable or list of tensor variables to copy stack traces from.
    to_var
        Tensor variable or list of tensor variables to copy stack traces to.

    Notes
    -----
    The stacktrace is assumed to be of the form of a list of lists
    of tuples. Each tuple contains the filename, line number, function name
    and so on. Each list of tuples contains the tuples belonging to a
    particular variable.
    """
    # Collect the stack traces found on from_var.
    tr = []
    if isinstance(from_var, (list, tuple)):
        # Several source variables: concatenate all of their traces.
        for v in from_var:
            tr += getattr(v.tag, 'trace', [])
    else:
        # Single source variable: take its trace directly
        # (an empty list if it has none recorded).
        tr = getattr(from_var.tag, 'trace', [])

    # Append the collected traces to each destination variable,
    # preserving whatever trace the destination already had.
    if isinstance(to_var, (list, tuple)):
        for v in to_var:
            v.tag.trace = getattr(v.tag, 'trace', []) + tr
    else:
        to_var.tag.trace = getattr(to_var.tag, 'trace', []) + tr
...@@ -2,9 +2,9 @@ from __future__ import absolute_import, print_function, division ...@@ -2,9 +2,9 @@ from __future__ import absolute_import, print_function, division
import theano import theano
from theano.gradient import DisconnectedType from theano.gradient import DisconnectedType
from theano.gof import Op, Apply, TopoOptimizer from theano.gof import Op, Apply, TopoOptimizer
from theano.gof.opt import copy_stack_trace
from theano import tensor from theano import tensor
import theano.sandbox.cuda as cuda import theano.sandbox.cuda as cuda
from theano.tensor.opt import copy_stack_trace
def get_diagonal_subtensor_view(x, i0, i1): def get_diagonal_subtensor_view(x, i0, i1):
......
...@@ -20,10 +20,10 @@ from six.moves import xrange ...@@ -20,10 +20,10 @@ from six.moves import xrange
import theano import theano
from theano import gof from theano import gof
from theano import scalar from theano import scalar
from theano.gof.opt import copy_stack_trace
from theano.tensor import basic as tensor, subtensor, opt, elemwise from theano.tensor import basic as tensor, subtensor, opt, elemwise
from theano.tensor.type import (values_eq_approx_remove_inf, from theano.tensor.type import (values_eq_approx_remove_inf,
values_eq_approx_remove_nan) values_eq_approx_remove_nan)
from theano.tensor.opt import copy_stack_trace
from theano.compile import optdb from theano.compile import optdb
from theano.gof import Apply from theano.gof import Apply
......
...@@ -6,6 +6,7 @@ import theano ...@@ -6,6 +6,7 @@ import theano
from theano import compile, gof from theano import compile, gof
from theano.compile import optdb from theano.compile import optdb
from theano.gof import local_optimizer from theano.gof import local_optimizer
from theano.gof.opt import copy_stack_trace
from theano.tensor.nnet.corr import ( from theano.tensor.nnet.corr import (
CorrMM, CorrMM_gradInputs, CorrMM_gradWeights) CorrMM, CorrMM_gradInputs, CorrMM_gradWeights)
...@@ -18,8 +19,7 @@ from theano.tensor.nnet.abstract_conv import (AbstractConv2d, ...@@ -18,8 +19,7 @@ from theano.tensor.nnet.abstract_conv import (AbstractConv2d,
AbstractConv2d_gradWeights, AbstractConv2d_gradWeights,
AbstractConv2d_gradInputs) AbstractConv2d_gradInputs)
from theano.tensor.nnet.abstract_conv import get_conv_output_shape from theano.tensor.nnet.abstract_conv import get_conv_output_shape
from theano.tensor.opt import (copy_stack_trace, from theano.tensor.opt import register_specialize_device
register_specialize_device)
from theano.tensor import TensorType from theano.tensor import TensorType
from theano.tensor import opt from theano.tensor import opt
......
...@@ -18,7 +18,7 @@ from theano.printing import pprint ...@@ -18,7 +18,7 @@ from theano.printing import pprint
from theano.tensor import basic as tensor from theano.tensor import basic as tensor
from theano.tensor import elemwise, opt, NotScalarConstantError from theano.tensor import elemwise, opt, NotScalarConstantError
from theano.tensor.type import values_eq_approx_remove_inf from theano.tensor.type import values_eq_approx_remove_inf
from theano.tensor.opt import copy_stack_trace from theano.gof.opt import copy_stack_trace
############ ############
# #
......
...@@ -22,6 +22,7 @@ from theano import gof ...@@ -22,6 +22,7 @@ from theano import gof
from theano.compat import izip from theano.compat import izip
from theano.gof import opt, InconsistencyError, TopoOptimizer, graph from theano.gof import opt, InconsistencyError, TopoOptimizer, graph
from theano.gof import Variable, Constant from theano.gof import Variable, Constant
from theano.gof.opt import copy_stack_trace
from theano.gof.utils import MethodNotDefined from theano.gof.utils import MethodNotDefined
from theano.gradient import DisconnectedType from theano.gradient import DisconnectedType
from theano.configparser import config from theano.configparser import config
...@@ -54,51 +55,6 @@ _logger = logging.getLogger('theano.tensor.opt') ...@@ -54,51 +55,6 @@ _logger = logging.getLogger('theano.tensor.opt')
# Utilities # Utilities
def copy_stack_trace(from_var, to_var):
    """
    Copies the stack trace from one or more tensor variables to
    one or more tensor variables.

    Parameters
    ----------
    from_var
        Tensor variable or list of tensor variables to copy stack traces from.
    to_var
        Tensor variable or list of tensor variables to copy stack traces to.

    Notes
    -----
    The stacktrace is assumed to be of the form of a list of lists
    of tuples. Each tuple contains the filename, line number, function name
    and so on. Each list of tuples contains the tuples belonging to a
    particular variable.
    """
    # Collect the stack traces found on from_var.
    tr = []
    if type(from_var) is list:
        # If from_var is a list, store concatenated stack traces
        for v in from_var:
            tr += getattr(v.tag, 'trace', [])
    else:
        # If from_var is not a list, it must be a single tensor variable,
        # so just store that particular stack trace (empty list if absent).
        tr = getattr(from_var.tag, 'trace', [])
    # Append the collected traces to each destination variable,
    # preserving whatever trace the destination already had.
    if type(to_var) is list:
        # Copy over stack traces from from_var to each variable in
        # to_var, including the stack_trace of the to_var before
        for v in to_var:
            v.tag.trace = getattr(v.tag, 'trace', []) + tr
    else:
        # Copy over stack traces from from_var to the single variable
        # to_var, including the stack_trace of the to_var before
        to_var.tag.trace = getattr(to_var.tag, 'trace', []) + tr
def out2in(*local_opts, **kwargs): def out2in(*local_opts, **kwargs):
"""WRITEME """ """WRITEME """
name = (kwargs and kwargs.pop('name', None)) name = (kwargs and kwargs.pop('name', None))
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论