Commit 9f88e1f

OpFromGraph subclasses shouldn't have __props__
When __props__ is specified, Ops with identical __props__ are considered identical: one can be swapped for the other and given the original inputs while still being expected to produce the same output.
1 parent 1509cee commit 9f88e1f
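
Background: when an Op defines __props__, equality and hashing are derived from those attributes, so two instances with equal prop values are treated as the same Op and may be merged by graph rewrites. For OpFromGraph subclasses such as AllocDiag, the props did not capture the wrapped inner graph, so distinct instances could be merged incorrectly. A minimal sketch of that equality mechanism, using a hypothetical FakeOp class rather than PyTensor's actual Op base class:

class FakeOp:
    """Hypothetical stand-in illustrating props-based equality; not PyTensor code."""

    __props__ = ("axis1", "axis2")

    def __init__(self, axis1, axis2, inner_graph):
        self.axis1 = axis1
        self.axis2 = axis2
        self.inner_graph = inner_graph  # not listed in __props__, so ignored below

    def _prop_values(self):
        return tuple(getattr(self, name) for name in self.__props__)

    def __eq__(self, other):
        return type(self) is type(other) and self._prop_values() == other._prop_values()

    def __hash__(self):
        return hash((type(self), self._prop_values()))


a = FakeOp(0, 1, inner_graph="graph producing a (2, 2) output")
b = FakeOp(0, 1, inner_graph="graph producing a (3, 3) output")
assert a == b  # considered identical even though the inner graphs differ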

3 files changed: +19 -4 lines changed

pytensor/tensor/basic.py

Lines changed: 3 additions & 2 deletions
@@ -3780,15 +3780,16 @@ class AllocDiag(OpFromGraph):
     Wrapper Op for alloc_diag graphs
     """

-    __props__ = ("axis1", "axis2")
-
     def __init__(self, *args, axis1, axis2, offset, **kwargs):
         self.axis1 = axis1
         self.axis2 = axis2
         self.offset = offset

         super().__init__(*args, **kwargs, strict=True)

+    def __str__(self):
+        return f"AllocDiag{{{self.axis1=}, {self.axis2=}, {self.offset=}}}"
+
     @staticmethod
     def is_offset_zero(node) -> bool:
         """

pytensor/tensor/einsum.py

Lines changed: 3 additions & 2 deletions
@@ -52,14 +52,15 @@ class Einsum(OpFromGraph):
     desired. We haven't decided whether we want to provide this functionality.
     """

-    __props__ = ("subscripts", "path", "optimized")
-
     def __init__(self, *args, subscripts: str, path: PATH, optimized: bool, **kwargs):
         self.subscripts = subscripts
         self.path = path
         self.optimized = optimized
         super().__init__(*args, **kwargs, strict=True)

+    def __str__(self):
+        return f"Einsum{{{self.subscripts=}, {self.path=}, {self.optimized=}}}"
+

 def _iota(shape: TensorVariable, axis: int) -> TensorVariable:
     """

tests/tensor/test_basic.py

Lines changed: 13 additions & 0 deletions
@@ -37,6 +37,7 @@
     TensorFromScalar,
     Tri,
     alloc,
+    alloc_diag,
     arange,
     as_tensor_variable,
     atleast_Nd,

@@ -3793,6 +3794,18 @@ def test_alloc_diag_values(self):
         )
         assert np.all(true_grad_input == grad_input)

+    def test_multiple_ops_same_graph(self):
+        """Regression test when AllocDiag OFG was given insufficient props, causing incompatible Ops to be merged."""
+        v1 = vector("v1", shape=(2,), dtype="float64")
+        v2 = vector("v2", shape=(3,), dtype="float64")
+        a1 = alloc_diag(v1)
+        a2 = alloc_diag(v2)
+
+        fn = function([v1, v2], [a1, a2])
+        res1, res2 = fn(v1=[np.e, np.e], v2=[np.pi, np.pi, np.pi])
+        np.testing.assert_allclose(res1, np.eye(2) * np.e)
+        np.testing.assert_allclose(res2, np.eye(3) * np.pi)
+

 def test_diagonal_negative_axis():
     x = np.arange(2 * 3 * 3).reshape((2, 3, 3))
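
As intuition for the regression test above: each alloc_diag call builds an OpFromGraph whose inner graph is specialised to its input's length, so the two Ops produce outputs of different shapes and must not be merged. A rough standalone check, assuming the public pytensor.tensor API shown (variable names and values are illustrative):

import numpy as np
import pytensor.tensor as pt
from pytensor.tensor.basic import alloc_diag

v1 = pt.vector("v1", shape=(2,), dtype="float64")
v2 = pt.vector("v2", shape=(3,), dtype="float64")

# Evaluate each diagonal-allocation graph on its own input.
out1 = alloc_diag(v1).eval({v1: np.array([1.0, 2.0])})
out2 = alloc_diag(v2).eval({v2: np.array([1.0, 2.0, 3.0])})

print(out1.shape, out2.shape)  # expected: (2, 2) (3, 3) -- the inner graphs differ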
