提交 f0e9354b authored 作者: Virgile Andreani's avatar Virgile Andreani 提交者: Ricardo Vieira

Replace more instances of `not x.owner` with `x.owner is None`

上级 639b0871
......@@ -221,7 +221,7 @@ def function(
if not isinstance(node.op.scalar_op, log):
return
inp = node.inputs[0]
if not inp.owner:
if inp.owner is None:
return
if not isinstance(inp.owner.op, add):
return
......
......@@ -1896,11 +1896,10 @@ def equal_computations(
if isinstance(x, Constant):
return np.array_equal(x.data, y)
return False
if x.owner and not y.owner:
x_is_owned, y_is_owned = (x.owner is not None, y.owner is not None)
if x_is_owned != y_is_owned:
return False
if y.owner and not x.owner:
return False
if x.owner and y.owner:
if x_is_owned and y_is_owned:
if x.owner.outputs.index(x) != y.owner.outputs.index(y):
return False
if x not in in_xs and not (y.type.in_same_class(x.type)):
......@@ -1918,7 +1917,7 @@ def equal_computations(
for dx, dy in zip(xs, ys):
assert isinstance(dx, Variable)
# We checked above that both dx and dy have an owner or not
if not dx.owner:
if dx.owner is None:
if isinstance(dx, Constant) and isinstance(dy, Constant):
if not dx.equals(dy):
return False
......
......@@ -252,7 +252,7 @@ class FunctionGraph(MetaObject):
# Now, `var` has no more clients, so check if we need to remove it
# and its `Apply` node
if not var.owner:
if var.owner is None:
self.variables.remove(var)
else:
apply_node = var.owner
......
......@@ -1321,7 +1321,7 @@ class SequentialNodeRewriter(NodeRewriter):
# only 1 iteration
if not self.apply_all_rewrites:
return new_repl
if not new_vars[0].owner:
if new_vars[0].owner is None:
# We are at the start of the graph.
return new_repl
if len(new_repl) > 1:
......
......@@ -662,7 +662,7 @@ def local_cast_cast(fgraph, node):
return
x = node.inputs[0]
if (
not x.owner
x.owner is None
or not isinstance(x.owner.op, Elemwise)
or not isinstance(x.owner.op.scalar_op, ps.Cast)
):
......@@ -1189,7 +1189,7 @@ def local_merge_alloc(fgraph, node):
"""
if not isinstance(node.op, Alloc):
return False
if not node.inputs[0].owner or not isinstance(node.inputs[0].owner.op, Alloc):
if not (node.inputs[0].owner and isinstance(node.inputs[0].owner.op, Alloc)):
return False
inputs_outer = node.inputs
inputs_inner = node.inputs[0].owner.inputs
......
......@@ -370,7 +370,7 @@ def apply_local_dimshuffle_lift(fgraph, var):
"""
lift recursively
"""
if not var.owner:
if var.owner is None:
return var
new = local_dimshuffle_lift.transform(fgraph, var.owner)
if new:
......
......@@ -1248,7 +1248,7 @@ def local_sum_prod_of_mul_or_div(fgraph, node):
"""
[node_inps] = node.inputs
if not node_inps.owner:
if node_inps.owner is None:
return None
inner_op = node_inps.owner.op
......@@ -2711,13 +2711,13 @@ def local_grad_log_erfc_neg(fgraph, node):
Make it so that the test does not generate an error in that case!
"""
if not node.inputs[1].owner or node.inputs[1].owner.op != erfc:
if not (node.inputs[1].owner and node.inputs[1].owner.op == erfc):
return False
erfc_in = node.inputs[1]
erfc_x = erfc_in.owner.inputs[0]
if not node.inputs[0].owner:
if node.inputs[0].owner is None:
return False
# TODO: All of this should be replaced with a single, simple unification
......@@ -2744,7 +2744,7 @@ def local_grad_log_erfc_neg(fgraph, node):
y = mul_in.owner.inputs[:]
del y[idx]
if not exp_in.owner.inputs[0].owner:
if exp_in.owner.inputs[0].owner is None:
return False
if exp_in.owner.inputs[0].owner.op == neg:
......
......@@ -286,7 +286,7 @@ def local_subtensor_of_dot(fgraph, node):
"""
if not isinstance(node.op, Subtensor):
return
if not node.inputs[0].owner or not isinstance(node.inputs[0].owner.op, Dot):
if not (node.inputs[0].owner and isinstance(node.inputs[0].owner.op, Dot)):
return
# If there is other node that use the outputs of the dot
# We don't want to compute twice the sub part.
......@@ -1445,7 +1445,7 @@ def local_adv_sub1_adv_inc_sub1(fgraph, node):
if not isinstance(node.op, AdvancedSubtensor1):
return
inp = node.inputs[0]
if not inp.owner or not isinstance(inp.owner.op, AdvancedIncSubtensor1):
if not (inp.owner and isinstance(inp.owner.op, AdvancedIncSubtensor1)):
return
idx = node.inputs[1]
idx2 = inp.owner.inputs[2]
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论