提交 ace7e3b9 authored 作者: Frederic's avatar Frederic

Review fixes and correct check for var.owner

上级 2865e546
...@@ -282,8 +282,8 @@ class NanGuardMode(Mode): ...@@ -282,8 +282,8 @@ class NanGuardMode(Mode):
" output of a node in this variable:", file=sio) " output of a node in this variable:", file=sio)
print(theano.printing.debugprint(nd, file='str'), file=sio) print(theano.printing.debugprint(nd, file='str'), file=sio)
else: else:
print("NanGuardMode found an error in the" print("NanGuardMode found an error in an"
" input %d of this node.", file=sio) " input of this node." , file=sio)
print('Node:', file=sio) print('Node:', file=sio)
print(nd, file=sio) print(nd, file=sio)
print("The input variable that cause problem:", file=sio) print("The input variable that cause problem:", file=sio)
...@@ -318,7 +318,7 @@ class NanGuardMode(Mode): ...@@ -318,7 +318,7 @@ class NanGuardMode(Mode):
# If the input is the result of computation, then we # If the input is the result of computation, then we
# don't need to check it. It is already done after the # don't need to check it. It is already done after the
# computation. # computation.
if not var.owner: if var.owner is not None:
do_check_on(x[0], node, fn, True) do_check_on(x[0], node, fn, True)
fn() fn()
outputs = fn.outputs outputs = fn.outputs
......
...@@ -306,7 +306,7 @@ def raise_with_op(node, thunk=None, exc_info=None, storage_map=None): ...@@ -306,7 +306,7 @@ def raise_with_op(node, thunk=None, exc_info=None, storage_map=None):
exc_value = exc_type(str(exc_value) + detailed_err_msg + exc_value = exc_type(str(exc_value) + detailed_err_msg +
'\n' + '\n'.join(hints)) '\n' + '\n'.join(hints))
except TypeError: except TypeError:
print("WARNING: %s error do not allow us to add extra error message" % print("WARNING: %s error does not allow us to add extra error message" %
str(exc_type)) str(exc_type))
# Some exception need extra parameter in inputs. So forget the # Some exception need extra parameter in inputs. So forget the
# extra long error message in that case. # extra long error message in that case.
......
...@@ -2389,7 +2389,6 @@ if True: ...@@ -2389,7 +2389,6 @@ if True:
(node.inputs[1].owner and (node.inputs[1].owner and
isinstance(node.inputs[1].owner.op, HostFromGpu)))): isinstance(node.inputs[1].owner.op, HostFromGpu)))):
if not dnn_available(): if not dnn_available():
# Softmax grad is broken in v3 rc1 for this case
return return
ins = [] ins = []
for n in node.inputs: for n in node.inputs:
......
...@@ -797,7 +797,7 @@ def local_gpu_careduce(node): ...@@ -797,7 +797,7 @@ def local_gpu_careduce(node):
replace = True replace = True
# If this is a useless reduce, remove it as # If this is a useless reduce, remove it as
# local_cut_useless_reduce. This is needed as the code # local_cut_useless_reduce. This is needed as the code
# bellow do not support when x.ndim == 0. # below do not support when x.ndim == 0.
if x.type == node.outputs[0].type: if x.type == node.outputs[0].type:
return [x] return [x]
elif (all([c != "output" and isinstance(c.op, GpuFromHost) elif (all([c != "output" and isinstance(c.op, GpuFromHost)
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论