提交 f0e9354b authored 作者: Virgile Andreani's avatar Virgile Andreani 提交者: Ricardo Vieira

Replace more instances of `not x.owner` with `x.owner is None`

上级 639b0871
...@@ -221,7 +221,7 @@ def function( ...@@ -221,7 +221,7 @@ def function(
if not isinstance(node.op.scalar_op, log): if not isinstance(node.op.scalar_op, log):
return return
inp = node.inputs[0] inp = node.inputs[0]
if not inp.owner: if inp.owner is None:
return return
if not isinstance(inp.owner.op, add): if not isinstance(inp.owner.op, add):
return return
......
...@@ -1896,11 +1896,10 @@ def equal_computations( ...@@ -1896,11 +1896,10 @@ def equal_computations(
if isinstance(x, Constant): if isinstance(x, Constant):
return np.array_equal(x.data, y) return np.array_equal(x.data, y)
return False return False
if x.owner and not y.owner: x_is_owned, y_is_owned = (x.owner is not None, y.owner is not None)
if x_is_owned != y_is_owned:
return False return False
if y.owner and not x.owner: if x_is_owned and y_is_owned:
return False
if x.owner and y.owner:
if x.owner.outputs.index(x) != y.owner.outputs.index(y): if x.owner.outputs.index(x) != y.owner.outputs.index(y):
return False return False
if x not in in_xs and not (y.type.in_same_class(x.type)): if x not in in_xs and not (y.type.in_same_class(x.type)):
...@@ -1918,7 +1917,7 @@ def equal_computations( ...@@ -1918,7 +1917,7 @@ def equal_computations(
for dx, dy in zip(xs, ys): for dx, dy in zip(xs, ys):
assert isinstance(dx, Variable) assert isinstance(dx, Variable)
# We checked above that both dx and dy have an owner or not # We checked above that both dx and dy have an owner or not
if not dx.owner: if dx.owner is None:
if isinstance(dx, Constant) and isinstance(dy, Constant): if isinstance(dx, Constant) and isinstance(dy, Constant):
if not dx.equals(dy): if not dx.equals(dy):
return False return False
......
...@@ -252,7 +252,7 @@ class FunctionGraph(MetaObject): ...@@ -252,7 +252,7 @@ class FunctionGraph(MetaObject):
# Now, `var` has no more clients, so check if we need to remove it # Now, `var` has no more clients, so check if we need to remove it
# and its `Apply` node # and its `Apply` node
if not var.owner: if var.owner is None:
self.variables.remove(var) self.variables.remove(var)
else: else:
apply_node = var.owner apply_node = var.owner
......
...@@ -1321,7 +1321,7 @@ class SequentialNodeRewriter(NodeRewriter): ...@@ -1321,7 +1321,7 @@ class SequentialNodeRewriter(NodeRewriter):
# only 1 iteration # only 1 iteration
if not self.apply_all_rewrites: if not self.apply_all_rewrites:
return new_repl return new_repl
if not new_vars[0].owner: if new_vars[0].owner is None:
# We are at the start of the graph. # We are at the start of the graph.
return new_repl return new_repl
if len(new_repl) > 1: if len(new_repl) > 1:
......
...@@ -662,7 +662,7 @@ def local_cast_cast(fgraph, node): ...@@ -662,7 +662,7 @@ def local_cast_cast(fgraph, node):
return return
x = node.inputs[0] x = node.inputs[0]
if ( if (
not x.owner x.owner is None
or not isinstance(x.owner.op, Elemwise) or not isinstance(x.owner.op, Elemwise)
or not isinstance(x.owner.op.scalar_op, ps.Cast) or not isinstance(x.owner.op.scalar_op, ps.Cast)
): ):
...@@ -1189,7 +1189,7 @@ def local_merge_alloc(fgraph, node): ...@@ -1189,7 +1189,7 @@ def local_merge_alloc(fgraph, node):
""" """
if not isinstance(node.op, Alloc): if not isinstance(node.op, Alloc):
return False return False
if not node.inputs[0].owner or not isinstance(node.inputs[0].owner.op, Alloc): if not (node.inputs[0].owner and isinstance(node.inputs[0].owner.op, Alloc)):
return False return False
inputs_outer = node.inputs inputs_outer = node.inputs
inputs_inner = node.inputs[0].owner.inputs inputs_inner = node.inputs[0].owner.inputs
......
...@@ -370,7 +370,7 @@ def apply_local_dimshuffle_lift(fgraph, var): ...@@ -370,7 +370,7 @@ def apply_local_dimshuffle_lift(fgraph, var):
""" """
lift recursively lift recursively
""" """
if not var.owner: if var.owner is None:
return var return var
new = local_dimshuffle_lift.transform(fgraph, var.owner) new = local_dimshuffle_lift.transform(fgraph, var.owner)
if new: if new:
......
...@@ -1248,7 +1248,7 @@ def local_sum_prod_of_mul_or_div(fgraph, node): ...@@ -1248,7 +1248,7 @@ def local_sum_prod_of_mul_or_div(fgraph, node):
""" """
[node_inps] = node.inputs [node_inps] = node.inputs
if not node_inps.owner: if node_inps.owner is None:
return None return None
inner_op = node_inps.owner.op inner_op = node_inps.owner.op
...@@ -2711,13 +2711,13 @@ def local_grad_log_erfc_neg(fgraph, node): ...@@ -2711,13 +2711,13 @@ def local_grad_log_erfc_neg(fgraph, node):
Make it so that the test does not generate an error in that case! Make it so that the test does not generate an error in that case!
""" """
if not node.inputs[1].owner or node.inputs[1].owner.op != erfc: if not (node.inputs[1].owner and node.inputs[1].owner.op == erfc):
return False return False
erfc_in = node.inputs[1] erfc_in = node.inputs[1]
erfc_x = erfc_in.owner.inputs[0] erfc_x = erfc_in.owner.inputs[0]
if not node.inputs[0].owner: if node.inputs[0].owner is None:
return False return False
# TODO: All of this should be replaced with a single, simple unification # TODO: All of this should be replaced with a single, simple unification
...@@ -2744,7 +2744,7 @@ def local_grad_log_erfc_neg(fgraph, node): ...@@ -2744,7 +2744,7 @@ def local_grad_log_erfc_neg(fgraph, node):
y = mul_in.owner.inputs[:] y = mul_in.owner.inputs[:]
del y[idx] del y[idx]
if not exp_in.owner.inputs[0].owner: if exp_in.owner.inputs[0].owner is None:
return False return False
if exp_in.owner.inputs[0].owner.op == neg: if exp_in.owner.inputs[0].owner.op == neg:
......
...@@ -286,7 +286,7 @@ def local_subtensor_of_dot(fgraph, node): ...@@ -286,7 +286,7 @@ def local_subtensor_of_dot(fgraph, node):
""" """
if not isinstance(node.op, Subtensor): if not isinstance(node.op, Subtensor):
return return
if not node.inputs[0].owner or not isinstance(node.inputs[0].owner.op, Dot): if not (node.inputs[0].owner and isinstance(node.inputs[0].owner.op, Dot)):
return return
# If there is other node that use the outputs of the dot # If there is other node that use the outputs of the dot
# We don't want to compute twice the sub part. # We don't want to compute twice the sub part.
...@@ -1445,7 +1445,7 @@ def local_adv_sub1_adv_inc_sub1(fgraph, node): ...@@ -1445,7 +1445,7 @@ def local_adv_sub1_adv_inc_sub1(fgraph, node):
if not isinstance(node.op, AdvancedSubtensor1): if not isinstance(node.op, AdvancedSubtensor1):
return return
inp = node.inputs[0] inp = node.inputs[0]
if not inp.owner or not isinstance(inp.owner.op, AdvancedIncSubtensor1): if not (inp.owner and isinstance(inp.owner.op, AdvancedIncSubtensor1)):
return return
idx = node.inputs[1] idx = node.inputs[1]
idx2 = inp.owner.inputs[2] idx2 = inp.owner.inputs[2]
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论