Commit f0e9354

Armavica authored and ricardoV94 committed
Replace more `not x.owner` by `x.owner is None`
1 parent 639b087 commit f0e9354

8 files changed: +16 -17 lines
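
For context on why the substitution matters: in a PyTensor graph, a `Variable`'s `owner` attribute is either `None` (for inputs and constants) or the `Apply` node that produced it. `x.owner is None` asks exactly that question, while `not x.owner` also depends on the truthiness of `Apply` objects. The sketch below uses hypothetical stand-in classes, not PyTensor's real ones, to show how a container-like `Apply` could make the truthiness test lie:

```python
# Hypothetical stand-ins (not PyTensor's actual classes), deliberately
# contrived: an object that defines __len__ becomes falsy when "empty".


class Apply:
    """Stand-in graph node; container-like, so bool() falls back to len()."""

    def __init__(self, outputs=()):
        self.outputs = list(outputs)

    def __len__(self):
        return len(self.outputs)


class Variable:
    """Stand-in graph variable; `owner` is None for graph inputs."""

    def __init__(self, owner=None):
        self.owner = owner


node = Apply()  # outputs not attached yet, so bool(node) is False
var = Variable(owner=node)

print(not var.owner)      # True  -- wrongly suggests `var` has no owner
print(var.owner is None)  # False -- correctly reports that an owner exists
```

Even where the two spellings agree, `is None` documents the intent (a presence check) and is immune to any future `__bool__` or `__len__` surprises.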

Diff for: pytensor/compile/function/__init__.py

+1 -1

```diff
@@ -221,7 +221,7 @@ def opt_log1p(node):
     if not isinstance(node.op.scalar_op, log):
         return
     inp = node.inputs[0]
-    if not inp.owner:
+    if inp.owner is None:
         return
     if not isinstance(inp.owner.op, add):
         return
```

Diff for: pytensor/graph/basic.py

+4 -5

```diff
@@ -1896,11 +1896,10 @@ def equal_computations(
             if isinstance(x, Constant):
                 return np.array_equal(x.data, y)
             return False
-        if x.owner and not y.owner:
+        x_is_owned, y_is_owned = (x.owner is not None, y.owner is not None)
+        if x_is_owned != y_is_owned:
             return False
-        if y.owner and not x.owner:
-            return False
-        if x.owner and y.owner:
+        if x_is_owned and y_is_owned:
             if x.owner.outputs.index(x) != y.owner.outputs.index(y):
                 return False
             if x not in in_xs and not (y.type.in_same_class(x.type)):
@@ -1918,7 +1917,7 @@ def equal_computations(
     for dx, dy in zip(xs, ys):
         assert isinstance(dx, Variable)
         # We checked above that both dx and dy have an owner or not
-        if not dx.owner:
+        if dx.owner is None:
             if isinstance(dx, Constant) and isinstance(dy, Constant):
                 if not dx.equals(dy):
                     return False
```
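
The `equal_computations` hunk above is slightly more than a mechanical substitution: the two one-sided guards collapse into a single parity test. A quick illustrative truth-table check (plain Python, nothing PyTensor-specific) confirms they reject exactly the same cases:

```python
from itertools import product

# `x_is_owned != y_is_owned` (boolean XOR) fires in exactly the cases the
# two original one-sided guards covered.
for x_owned, y_owned in product((False, True), repeat=2):
    old = (x_owned and not y_owned) or (y_owned and not x_owned)
    new = x_owned != y_owned
    assert old == new
```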

Diff for: pytensor/graph/fg.py

+1 -1

```diff
@@ -252,7 +252,7 @@ def remove_client(
 
         # Now, `var` has no more clients, so check if we need to remove it
         # and its `Apply` node
-        if not var.owner:
+        if var.owner is None:
            self.variables.remove(var)
        else:
            apply_node = var.owner
```

Diff for: pytensor/graph/rewriting/basic.py

+1 -1

```diff
@@ -1321,7 +1321,7 @@ def transform(self, fgraph, node):
             # only 1 iteration
             if not self.apply_all_rewrites:
                 return new_repl
-            if not new_vars[0].owner:
+            if new_vars[0].owner is None:
                 # We are at the start of the graph.
                 return new_repl
             if len(new_repl) > 1:
```

Diff for: pytensor/tensor/rewriting/basic.py

+2 -2

```diff
@@ -662,7 +662,7 @@ def local_cast_cast(fgraph, node):
         return
     x = node.inputs[0]
     if (
-        not x.owner
+        x.owner is None
         or not isinstance(x.owner.op, Elemwise)
         or not isinstance(x.owner.op.scalar_op, ps.Cast)
     ):
@@ -1189,7 +1189,7 @@ def local_merge_alloc(fgraph, node):
     """
     if not isinstance(node.op, Alloc):
         return False
-    if not node.inputs[0].owner or not isinstance(node.inputs[0].owner.op, Alloc):
+    if not (node.inputs[0].owner and isinstance(node.inputs[0].owner.op, Alloc)):
         return False
     inputs_outer = node.inputs
     inputs_inner = node.inputs[0].owner.inputs
```
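
The `local_merge_alloc` guard (like the similar rewrites in `math.py` and `subtensor.py` further down, where the inner comparison also flips from `!=` to `==`) is De Morgan's law: `not a or not b` becomes `not (a and b)`. Short-circuiting keeps it safe, because `and` never evaluates `isinstance(...owner.op, ...)` when the owner is falsy. A small illustrative check:

```python
from itertools import product


def boom():
    raise RuntimeError("must never be evaluated")


# De Morgan: `not a or not b` is equivalent to `not (a and b)`.
for a, b in product((False, True), repeat=2):
    assert (not a or not b) == (not (a and b))

# With a None "owner", `and` short-circuits before its second operand,
# mirroring how the rewritten guard never touches `owner.op`.
owner = None
assert not (owner and boom())  # boom() is never called
```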

Diff for: pytensor/tensor/rewriting/elemwise.py

+1 -1

```diff
@@ -370,7 +370,7 @@ def apply_local_dimshuffle_lift(fgraph, var):
     """
     lift recursively
     """
-    if not var.owner:
+    if var.owner is None:
         return var
     new = local_dimshuffle_lift.transform(fgraph, var.owner)
     if new:
```

Diff for: pytensor/tensor/rewriting/math.py

+4 -4

```diff
@@ -1248,7 +1248,7 @@ def local_sum_prod_of_mul_or_div(fgraph, node):
     """
 
     [node_inps] = node.inputs
-    if not node_inps.owner:
+    if node_inps.owner is None:
         return None
 
     inner_op = node_inps.owner.op
@@ -2711,13 +2711,13 @@ def local_grad_log_erfc_neg(fgraph, node):
     Make it so that the test does not generate an error in that case!
 
     """
-    if not node.inputs[1].owner or node.inputs[1].owner.op != erfc:
+    if not (node.inputs[1].owner and node.inputs[1].owner.op == erfc):
         return False
 
     erfc_in = node.inputs[1]
     erfc_x = erfc_in.owner.inputs[0]
 
-    if not node.inputs[0].owner:
+    if node.inputs[0].owner is None:
         return False
 
     # TODO: All of this should be replaced with a single, simple unification
@@ -2744,7 +2744,7 @@ def local_grad_log_erfc_neg(fgraph, node):
         y = mul_in.owner.inputs[:]
         del y[idx]
 
-        if not exp_in.owner.inputs[0].owner:
+        if exp_in.owner.inputs[0].owner is None:
             return False
 
         if exp_in.owner.inputs[0].owner.op == neg:
```

Diff for: pytensor/tensor/rewriting/subtensor.py

+2 -2

```diff
@@ -286,7 +286,7 @@ def local_subtensor_of_dot(fgraph, node):
     """
     if not isinstance(node.op, Subtensor):
         return
-    if not node.inputs[0].owner or not isinstance(node.inputs[0].owner.op, Dot):
+    if not (node.inputs[0].owner and isinstance(node.inputs[0].owner.op, Dot)):
         return
     # If there is other node that use the outputs of the dot
     # We don't want to compute twice the sub part.
@@ -1445,7 +1445,7 @@ def local_adv_sub1_adv_inc_sub1(fgraph, node):
     if not isinstance(node.op, AdvancedSubtensor1):
         return
     inp = node.inputs[0]
-    if not inp.owner or not isinstance(inp.owner.op, AdvancedIncSubtensor1):
+    if not (inp.owner and isinstance(inp.owner.op, AdvancedIncSubtensor1)):
         return
     idx = node.inputs[1]
     idx2 = inp.owner.inputs[2]
```

0 commit comments