Commit daf7ea76 authored by Iulian Vlad Serban

Fixed bugs and restructured code. Implemented Fred's comments.

Parent 72ad300a
...@@ -450,7 +450,7 @@ class FunctionGraph(utils.object2): ...@@ -450,7 +450,7 @@ class FunctionGraph(utils.object2):
assert path is not None assert path is not None
tr = getattr(r.tag, 'trace', []) tr = getattr(r.tag, 'trace', [])
detailed_err_msg = "" detailed_err_msg = ""
if len(tr) > 0: if type(tr) is list and len(tr) > 0:
detailed_err_msg += "\nBacktrace when the variable is created:\n" detailed_err_msg += "\nBacktrace when the variable is created:\n"
# Print separate message for each element in # Print separate message for each element in
......
...@@ -168,7 +168,7 @@ def raise_with_op(node, thunk=None, exc_info=None, storage_map=None): ...@@ -168,7 +168,7 @@ def raise_with_op(node, thunk=None, exc_info=None, storage_map=None):
# Print node backtraces # Print node backtraces
tr = getattr(node.outputs[0].tag, 'trace', []) tr = getattr(node.outputs[0].tag, 'trace', [])
if len(tr) > 0: if type(tr) is list and len(tr) > 0:
detailed_err_msg += "\nBacktrace when the node is created:\n" detailed_err_msg += "\nBacktrace when the node is created:\n"
# Print separate message for each element in the list of backtraces # Print separate message for each element in the list of backtraces
......
...@@ -542,7 +542,7 @@ class PureOp(object): ...@@ -542,7 +542,7 @@ class PureOp(object):
"For compute_test_value, one input test value does not" "For compute_test_value, one input test value does not"
" have the requested type.\n") " have the requested type.\n")
tr = getattr(v.tag, 'trace', []) tr = getattr(v.tag, 'trace', [])
if len(tr) > 0: if type(tr) is list and len(tr) > 0:
detailed_err_msg += ( detailed_err_msg += (
" \nBacktrace when that variable is created:\n") " \nBacktrace when that variable is created:\n")
# Print separate message for each element in the list # Print separate message for each element in the list
......
...@@ -87,15 +87,13 @@ def copy_stack_trace(from_var, to_var): ...@@ -87,15 +87,13 @@ def copy_stack_trace(from_var, to_var):
tr = [] tr = []
if type(from_var) is list: if type(from_var) is list:
# If from_var is a list, store concatenated stack traces # If from_var is a list, store concatenated stack traces
if len(from_var) > 0:
for v in from_var: for v in from_var:
if hasattr(v.tag, 'trace'): tr += getattr(v.tag, 'trace', [])
tr = tr + v.tag.trace
else: else:
# If from_var is not a list, it must be a single tensor # If from_var is not a list, it must be a single tensor
# variable, so just store that particular stack trace # variable, so just store that particular stack trace
if hasattr(from_var.tag, 'trace'): tr = getattr(from_var.tag, 'trace', [])
tr = from_var.tag.trace
# Copy over stack traces to to_var # Copy over stack traces to to_var
if type(to_var) is list: if type(to_var) is list:
...@@ -1872,7 +1870,10 @@ def local_subtensor_make_vector(node): ...@@ -1872,7 +1870,10 @@ def local_subtensor_make_vector(node):
try: try:
const_slice = node.op.get_constant_idx(node.inputs, const_slice = node.op.get_constant_idx(node.inputs,
allow_partial=False)[0] allow_partial=False)[0]
return [make_vector(*x.owner.inputs[const_slice])] ret = make_vector(*x.owner.inputs[const_slice])
# Copy over stack trace from previous outputs to new output
copy_stack_trace(node.outputs, ret)
return [ret]
except NotScalarConstantError: except NotScalarConstantError:
pass pass
else: else:
...@@ -2001,7 +2002,7 @@ def local_alloc_unary(node): ...@@ -2001,7 +2002,7 @@ def local_alloc_unary(node):
# Is it really necessary to copy over stack trace here? # Is it really necessary to copy over stack trace here?
# after all, T.alloc and T.cast should preserve the stack trace from x, # after all, T.alloc and T.cast should preserve the stack trace from x,
# but perhaps the trace is lost in "v = node.op(x)"? # but perhaps the trace is lost in "v = node.op(x)"?
copy_stack_trace(node.outputs[0], ret) copy_stack_trace([node.outputs[0], a], ret)
return [ret] return [ret]
...@@ -2560,12 +2561,12 @@ def local_subtensor_lift(node): ...@@ -2560,12 +2561,12 @@ def local_subtensor_lift(node):
idx = node.inputs[1:] idx = node.inputs[1:]
x_idx = node.op(u.owner.inputs[0], *idx) x_idx = node.op(u.owner.inputs[0], *idx)
# Copy over previous output stacktrace # Copy over previous output stacktrace
# Julian: Would it make more sense to copy stacktrace before opt is applied, i.e. from u.owner.inputs[0]?
copy_stack_trace(node.outputs, x_idx) copy_stack_trace(node.outputs, x_idx)
ret = u.owner.op(x_idx) ret = u.owner.op(x_idx)
# Copy over previous output stacktrace # Copy over previous output stacktrace
copy_stack_trace(node.outputs, ret) # and stacktrace from previous unary operation
return [] copy_stack_trace([node.outputs, node.inputs[0]], ret)
return [ret]
if isinstance(u.owner.op, T.Elemwise): if isinstance(u.owner.op, T.Elemwise):
new_inputs = [] new_inputs = []
......
Markdown format
0%
You are about to add 0 people to the discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment