Commit 825b307

Remove all uses of Variable.fgraph
This change requires the addition of an `fgraph` argument to the `Op.infer_shape` method used by `ShapeFeature`.
1 parent fe70d40 commit 825b307
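For readers skimming the diff, here is a minimal sketch (not part of this commit) of what an `Op` looks like under the new calling convention; the `DoubleOp` name and body are illustrative only, mirroring the documentation example updated below:

    import theano.tensor as tt
    from theano.gof import Apply, Op


    class DoubleOp(Op):
        # Illustrative Op that doubles its input; only infer_shape matters here.
        __props__ = ()

        def make_node(self, x):
            x = tt.as_tensor_variable(x)
            return Apply(self, [x], [x.type()])

        def perform(self, node, inputs, output_storage):
            (x,) = inputs
            output_storage[0][0] = x * 2

        # Previously: def infer_shape(self, node, input_shapes)
        # The new signature also receives the FunctionGraph that ShapeFeature is working on.
        def infer_shape(self, fgraph, node, input_shapes):
            # The output has the same shape as the input, so nothing needs computing.
            return input_shapes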

Some content is hidden: large commits have some of their content hidden by default.

62 files changed: +367, -492 lines

doc/extending/cop.txt

Lines changed: 5 additions & 5 deletions
@@ -177,13 +177,13 @@ There are less methods to define for an Op than for a Type:
 .. method:: c_cleanup_code_struct(node, name)
 
     Allows you to specify code that will be inserted in the struct
-    destructor of the Op. This is for cleaninp up allocations and
+    destructor of the `Op`. This is for cleaninp up allocations and
     stuff like this when the thunk is released (when you "free" a
     compiled function using this op).
 
-.. method:: infer_shape(node, (i0_shapes,i1_shapes,...))
+.. method:: infer_shape(fgraph, node, (i0_shapes,i1_shapes,...))
 
-    Allow optimizations to lift the Shape op over this op. An
+    Allow optimizations to lift the `Shape` `Op` over this `Op`. An
     example of why this is good is when we only need the shape of a
     variable: we will be able to obtain it without computing the
     variable itself.
@@ -192,8 +192,8 @@ There are less methods to define for an Op than for a Type:
     the shape of one output.
 
     For example, for the matrix-matrix product ``infer_shape`` will
-    have as inputs (node, ((x0,x1), (y0,y1))) and should return
-    [(x0, y1)]. Both the inputs and the return value may be Theano
+    have as inputs ``(fgraph, node, ((x0,x1), (y0,y1)))`` and should return
+    ``[(x0, y1)]``. Both the inputs and the return value may be Theano
     variables.
 
 .. method:: c_code_cache_version()
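
To make the matrix-matrix product example in the hunk above concrete, a hypothetical `infer_shape` under the new signature could read as follows (names are illustrative, not part of the commit):

    def infer_shape(self, fgraph, node, input_shapes):
        # Shapes arrive as symbolic variables: ((x0, x1), (y0, y1)).
        # A matrix-matrix product of those operands has shape (x0, y1).
        (x0, x1), (y0, y1) = input_shapes
        return [(x0, y1)]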

doc/extending/extending_theano.txt

Lines changed: 11 additions & 10 deletions
@@ -114,7 +114,7 @@ possibilities you may encounter or need. For that refer to
     def R_op(self, inputs, eval_points):
         pass
 
-    def infer_shape(node, input_shapes):
+    def infer_shape(self, fgraph, node, input_shapes):
         pass
 
 An op has to implement some methods defined in the the interface of
@@ -237,9 +237,9 @@ There are other methods that can be optionally defined by the op:
 :attr:`__props__` will also generate a suitable :func:`__str__` for your op.
 This requires development version after September 1st, 2014 or version 0.7.
 
-The :func:`infer_shape` method allows to infer the shape of the op
-output variables, without actually computing the outputs.
-It takes as input ``node``, a reference to the op Apply node,
+The :func:`infer_shape` method allows an `Op` to infer the shape of its
+output variables without actually computing them.
+It takes as input ``fgraph``, a `FunctionGraph`; ``node``, a reference to the op Apply node;
 and a list of Theano symbolic Varables (``i0_shape``, ``i1_shape``, ...)
 which are the shape of the op input Variables.
 :func:`infer_shape` returns a list where each element is a tuple representing
@@ -302,7 +302,7 @@ Example: Op definition
         z = output_storage[0]
         z[0] = x * 2
 
-    def infer_shape(self, node, i0_shapes):
+    def infer_shape(self, fgraph, node, i0_shapes):
         return i0_shapes
 
     def grad(self, inputs, output_grads):
@@ -333,7 +333,7 @@ Example: Op definition
         z = output_storage[0]
         z[0] = x * 2
 
-    def infer_shape(self, node, i0_shapes):
+    def infer_shape(self, fgraph, node, i0_shapes):
         return i0_shapes
 
     def grad(self, inputs, output_grads):
@@ -508,7 +508,7 @@ and ``b`` are equal.
         z = output_storage[0]
         z[0] = self.a * x + self.b
 
-    def infer_shape(self, node, i0_shapes):
+    def infer_shape(self, fgraph, node, i0_shapes):
         return i0_shapes
 
    def grad(self, inputs, output_grads):
@@ -750,12 +750,13 @@ signature:
 
 .. code-block:: none
 
-    def infer_shape(node, input_shapes):
+    def infer_shape(fgraph, node, input_shapes):
         # ...
         return output_shapes
 
 - `input_shapes` and `output_shapes` are lists of tuples that
-  represent the shape of the corresponding inputs/outputs.
+  represent the shape of the corresponding inputs/outputs, and `fgraph`
+  is a `FunctionGraph`.
 
 .. note::
 
@@ -788,7 +789,7 @@ as_op Example
 from theano import function
 from theano.compile.ops import as_op
 
-def infer_shape_numpy_dot(node, input_shapes):
+def infer_shape_numpy_dot(fgraph, node, input_shapes):
     ashp, bshp = input_shapes
     return [ashp[:-1] + bshp[-1:]]
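
The ``as_op`` example touched in the last hunk is typically completed by passing the shape helper to the decorator; a hedged sketch of that usage with the new signature (the types are chosen for illustration):

    import numpy as np
    import theano.tensor as tt
    from theano import function
    from theano.compile.ops import as_op


    def infer_shape_numpy_dot(fgraph, node, input_shapes):
        ashp, bshp = input_shapes
        return [ashp[:-1] + bshp[-1:]]


    @as_op(itypes=[tt.dmatrix, tt.dmatrix], otypes=[tt.dmatrix],
           infer_shape=infer_shape_numpy_dot)
    def numpy_dot(a, b):
        return np.dot(a, b)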

doc/extending/extending_theano_solution_1.py

Lines changed: 4 additions & 4 deletions
@@ -34,7 +34,7 @@ def perform(self, node, inputs, output_storage):
         z = output_storage[0]
         z[0] = x * y
 
-    def infer_shape(self, node, i0_shapes):
+    def infer_shape(self, fgraph, node, i0_shapes):
         return [i0_shapes[0]]
 
     def grad(self, inputs, output_grads):
@@ -71,7 +71,7 @@ def perform(self, node, inputs, output_storage):
         z1[0] = x + y
         z2[0] = x - y
 
-    def infer_shape(self, node, i0_shapes):
+    def infer_shape(self, fgraph, node, i0_shapes):
         return [i0_shapes[0], i0_shapes[0]]
 
     def grad(self, inputs, output_grads):
@@ -172,7 +172,7 @@ def test_infer_shape(self):
 from theano.compile.ops import as_op
 
 
-def infer_shape_numpy_dot(node, input_shapes):
+def infer_shape_numpy_dot(fgraph, node, input_shapes):
     ashp, bshp = input_shapes
     return [ashp[:-1] + bshp[-1:]]
 
@@ -183,7 +183,7 @@ def numpy_add(a, b):
     return np.add(a, b)
 
 
-def infer_shape_numpy_add_sub(node, input_shapes):
+def infer_shape_numpy_add_sub(fgraph, node, input_shapes):
     ashp, bshp = input_shapes
     # Both inputs should have that same shape, so we just return one of them.
     return [ashp[0]]
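
As a reminder of why these methods matter: once ``infer_shape`` is defined, a graph that only needs an output's shape can skip the computation entirely. A minimal sketch, assuming the element-wise product ``Op`` from the hunk above is instantiated as ``prod_op`` (a hypothetical name):

    import theano
    import theano.tensor as tt

    x = tt.dmatrix("x")
    y = tt.dmatrix("y")
    z = prod_op(x, y)

    # The shape optimization can answer this from infer_shape alone,
    # without ever evaluating x * y.
    f = theano.function([x, y], z.shape)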

doc/extending/op.txt

Lines changed: 1 addition & 1 deletion
@@ -215,7 +215,7 @@ Optional methods or attributes
     will use your Op and build the graphs that you want and call that
     instead of the Op instance directly.
 
-.. function:: infer_shape(node, shapes)
+.. function:: infer_shape(fgraph, node, shapes)
 
     This function is needed for shape optimization. ``shapes`` is a
     list with one tuple for each input of the Apply node (which corresponds

doc/hpcs2011_tutorial/presentation.tex

Lines changed: 1 addition & 1 deletion
@@ -1390,7 +1390,7 @@ \subsection{Theano}
 {\color{gray}# optional:}
 def __init__(self, ...):
 def grad(self, inputs, g):
-def infer_shape(node, (i0_shapes, ...))
+def infer_shape(fgraph, node, (i0_shapes, ...))
 \end{Verbatim}
 \end{frame}

tests/compile/test_monitormode.py

Lines changed: 3 additions & 3 deletions
@@ -11,7 +11,7 @@ def test_detect_nan():
 
     nan_detected = [False]
 
-    def detect_nan(i, node, fn):
+    def detect_nan(fgraph, i, node, fn):
        for output in fn.outputs:
            if np.isnan(output[0]).any():
                print("*** NaN detected ***")
@@ -41,7 +41,7 @@ def test_optimizer():
 
     nan_detected = [False]
 
-    def detect_nan(i, node, fn):
+    def detect_nan(fgraph, i, node, fn):
        for output in fn.outputs:
            if np.isnan(output[0]).any():
                print("*** NaN detected ***")
@@ -73,7 +73,7 @@ def test_not_inplace():
 
     nan_detected = [False]
 
-    def detect_nan(i, node, fn):
+    def detect_nan(fgraph, i, node, fn):
        for output in fn.outputs:
            if np.isnan(output[0]).any():
                print("*** NaN detected ***")

tests/compile/test_ops.py

Lines changed: 1 addition & 1 deletion
@@ -69,7 +69,7 @@ def test_infer_shape(self):
         y = dvector("y")
         y.tag.test_value = [0, 0, 0, 0]
 
-        def infer_shape(node, shapes):
+        def infer_shape(fgraph, node, shapes):
             x, y = shapes
             return [y]

tests/gof/test_fg.py

Lines changed: 0 additions & 9 deletions
@@ -216,13 +216,6 @@ def test_replace(self):
         var5 = op3(var4, var2, var2)
         fg = FunctionGraph([var1, var2], [var3, var5], clone=False)
 
-        with pytest.raises(Exception, match="Cannot replace.*"):
-            var4.fgraph = object()
-            # Trigger a `FunctionGraph` ownership error
-            fg.replace(var4, var1, verbose=True)
-
-        var4.fgraph = fg
-
         with pytest.raises(BadOptimization):
             var0 = MyVariable2("var0")
             # The types don't match and one cannot be converted to the other
@@ -262,7 +255,6 @@ def test_replace_bad_state(self):
 
         with pytest.raises(MissingInputError):
             var0 = MyVariable("var0")
-            var0.fgraph = object()
 
             # FIXME TODO XXX: This breaks the state of the `FunctionGraph`,
             # because it doesn't check for validity of the replacement *first*.
@@ -299,7 +291,6 @@ def test_check_integrity(self):
 
         with pytest.raises(Exception, match="Undeclared input.*"):
             var6 = MyVariable2("var6")
-            var6.fgraph = fg
             fg.clients[var6] = [(var5.owner, 3)]
             fg.variables.add(var6)
             var5.owner.inputs.append(var6)

tests/gof/test_link.py

Lines changed: 2 additions & 2 deletions
@@ -138,7 +138,7 @@ class TestWrapLinker:
     def test_0(self):
         nodes = []
 
-        def wrap(i, node, th):
+        def wrap(fgraph, i, node, th):
             nodes.append(node.op)
 
         x, y, z = inputs()
@@ -155,7 +155,7 @@ def wrap(i, node, th):
     def test_1(self):
         nodes = []
 
-        def wrap(i, node, th):
+        def wrap(fgraph, i, node, th):
             nodes.append(node.op)
             th()

tests/gpuarray/test_cgpukernelbase.py

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@ def make_node(self, n, m):
 
         return Apply(self, [n, m], [otype()])
 
-    def infer_shape(self, node, in_shapes):
+    def infer_shape(self, fgraph, node, in_shapes):
         out_shape = [node.inputs[0], node.inputs[1]]
         return [out_shape]
