提交 ace7e3b9 authored 作者: Frederic's avatar Frederic

Review fixes and correct check for var.owner

上级 2865e546
......@@ -282,8 +282,8 @@ class NanGuardMode(Mode):
" output of a node in this variable:", file=sio)
print(theano.printing.debugprint(nd, file='str'), file=sio)
else:
print("NanGuardMode found an error in the"
" input %d of this node.", file=sio)
print("NanGuardMode found an error in an"
" input of this node." , file=sio)
print('Node:', file=sio)
print(nd, file=sio)
print("The input variable that cause problem:", file=sio)
......@@ -318,7 +318,7 @@ class NanGuardMode(Mode):
# If the input is the result of computation, then we
# don't need to check it. It is already done after the
# computation.
if not var.owner:
if var.owner is not None:
do_check_on(x[0], node, fn, True)
fn()
outputs = fn.outputs
......
......@@ -306,7 +306,7 @@ def raise_with_op(node, thunk=None, exc_info=None, storage_map=None):
exc_value = exc_type(str(exc_value) + detailed_err_msg +
'\n' + '\n'.join(hints))
except TypeError:
print("WARNING: %s error do not allow us to add extra error message" %
print("WARNING: %s error does not allow us to add extra error message" %
str(exc_type))
# Some exception need extra parameter in inputs. So forget the
# extra long error message in that case.
......
......@@ -2389,7 +2389,6 @@ if True:
(node.inputs[1].owner and
isinstance(node.inputs[1].owner.op, HostFromGpu)))):
if not dnn_available():
# Softmax grad is broken in v3 rc1 for this case
return
ins = []
for n in node.inputs:
......
......@@ -797,7 +797,7 @@ def local_gpu_careduce(node):
replace = True
# If this is a useless reduce, remove it as
# local_cut_useless_reduce. This is needed as the code
# bellow do not support when x.ndim == 0.
# below do not support when x.ndim == 0.
if x.type == node.outputs[0].type:
return [x]
elif (all([c != "output" and isinstance(c.op, GpuFromHost)
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论