-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathDigraph.gv
179 lines (179 loc) · 6.59 KB
/
Digraph.gv
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
// Graphviz DOT source — appears to be a PyTorch autograd graph in the format
// emitted by torchviz.make_dot: numeric node IDs are Python object ids,
// labels are autograd Function class names (AddmmBackward0, AccumulateGrad,
// ...) or "parameter name\n(shape)". lightblue boxes are leaf parameter
// tensors, the darkolivegreen1 box is the traced output tensor, and each
// edge points from an input node to the grad_fn that consumes it.
// NOTE(review): machine-generated file — node ids change on every run, so
// regenerate it rather than hand-editing ids or edges.
digraph {
// Graph-wide size cap plus default node styling for every node below.
// NOTE(review): "align" and "ranksep" are not standard *node* attributes
// (ranksep is a graph attribute); Graphviz tolerates them, so this renders.
graph [size="23.099999999999998,23.099999999999998"]
node [align=left fontname=monospace fontsize=10 height=0.2 ranksep=0.1 shape=box style=filled]
// Final output tensor of the traced forward pass, shape (1, 8).
139905123795248 [label="
(1, 8)" fillcolor=darkolivegreen1]
// --- Classifier head: fc (Addmm = x @ W^T + b) followed by Softmax. ---
139905123782864 [label=SoftmaxBackward0]
139905123782912 -> 139905123782864
139905123782912 [label=AddmmBackward0]
139905123783056 -> 139905123782912
// fc.bias leaf, shape (8) — matches the 8-way output above.
139905143331472 [label="fc.bias
(8)" fillcolor=lightblue]
139905143331472 -> 139905123783056
139905123783056 [label=AccumulateGrad]
139905123783152 -> 139905123782912
// --- Attention block: Bmm(scores, values) where scores = Softmax(Bmm(...)). ---
139905123783152 [label=SqueezeBackward1]
139905123783632 -> 139905123783152
139905123783632 [label=BmmBackward0]
139905123783824 -> 139905123783632
139905123783824 [label=SoftmaxBackward0]
139905123783968 -> 139905123783824
139905123783968 [label=BmmBackward0]
139905123784160 -> 139905123783968
139905123784160 [label=UnsqueezeBackward0]
139905123784256 -> 139905123784160
// attn.0 linear layer (Addmm) + ReLU feeding the attention scores.
139905123784256 [label=ReluBackward0]
139905123784352 -> 139905123784256
139905123784352 [label=AddmmBackward0]
139905123784400 -> 139905123784352
139905143331312 [label="attn.0.bias
(64)" fillcolor=lightblue]
139905143331312 -> 139905123784400
139905123784400 [label=AccumulateGrad]
139905123784064 -> 139905123784352
139905123784064 [label=SqueezeBackward1]
139905123784736 -> 139905123784064
139905123784736 [label=SumBackward1]
139905123784928 -> 139905123784736
139905123784928 [label=PermuteBackward0]
139905123785024 -> 139905123784928
// --- Recurrent stage: cuDNN RNN kernel (rnn.gru.* leaves attach below). ---
139905123785024 [label=CudnnRnnBackward0]
139905123785120 -> 139905123785024
139905123785120 [label=PermuteBackward0]
139905123785600 -> 139905123785120
139905123785600 [label=PermuteBackward0]
139905123785648 -> 139905123785600
139905123785648 [label=TanhBackward0]
139905123785792 -> 139905123785648
139905123785792 [label=ViewBackward0]
// --- CNN front-end, block 2: conv2 -> bn2 -> ReLU -> MaxPool2d. ---
139905123785936 -> 139905123785792
139905123785936 [label=MaxPool2DWithIndicesBackward0]
139905123786080 -> 139905123785936
139905123786080 [label=ReluBackward0]
139905123786224 -> 139905123786080
139905123786224 [label=CudnnBatchNormBackward0]
139905123786368 -> 139905123786224
139905123786368 [label=AddBackward0]
139905123786656 -> 139905123786368
139905123786656 [label=CudnnConvolutionBackward0]
139905123786704 -> 139905123786656
// --- CNN front-end, block 1: conv1 -> bn1 -> ReLU -> MaxPool2d. ---
139905123786704 [label=MaxPool2DWithIndicesBackward0]
139905123881216 -> 139905123786704
139905123881216 [label=ReluBackward0]
139905123881264 -> 139905123881216
139905123881264 [label=CudnnBatchNormBackward0]
139905123881408 -> 139905123881264
139905123881408 [label=AddBackward0]
139905123881696 -> 139905123881408
139905123881696 [label=CudnnConvolutionBackward0]
139905123881840 -> 139905123881696
// conv1 weights: 16 output channels, 1 input channel, 3x3 kernel.
139905143331632 [label="conv1.weight
(16, 1, 3, 3)" fillcolor=lightblue]
139905143331632 -> 139905123881840
139905123881840 [label=AccumulateGrad]
139905123881648 -> 139905123881408
139905123881648 [label=ReshapeAliasBackward0]
139905123882032 -> 139905123881648
139905143331712 [label="conv1.bias
(16)" fillcolor=lightblue]
139905143331712 -> 139905123882032
139905123882032 [label=AccumulateGrad]
139905123881360 -> 139905123881264
139905143331792 [label="bn1.weight
(16)" fillcolor=lightblue]
139905143331792 -> 139905123881360
139905123881360 [label=AccumulateGrad]
139905123881504 -> 139905123881264
139905143331872 [label="bn1.bias
(16)" fillcolor=lightblue]
139905143331872 -> 139905123881504
139905123881504 [label=AccumulateGrad]
139905123881072 -> 139905123786656
// conv2 weights: 32 output channels over the 16 channels from block 1.
139905143332272 [label="conv2.weight
(32, 16, 3, 3)" fillcolor=lightblue]
139905143332272 -> 139905123881072
139905123881072 [label=AccumulateGrad]
139905123786608 -> 139905123786368
139905123786608 [label=ReshapeAliasBackward0]
139905123881120 -> 139905123786608
139905143332352 [label="conv2.bias
(32)" fillcolor=lightblue]
139905143332352 -> 139905123881120
139905123881120 [label=AccumulateGrad]
139905123786320 -> 139905123786224
139905143332432 [label="bn2.weight
(32)" fillcolor=lightblue]
139905143332432 -> 139905123786320
139905123786320 [label=AccumulateGrad]
139905123786464 -> 139905123786224
139905143332512 [label="bn2.bias
(32)" fillcolor=lightblue]
139905143332512 -> 139905123786464
139905123786464 [label=AccumulateGrad]
139905123785072 -> 139905123785024
// --- GRU parameter leaves, all feeding CudnnRnnBackward0. The *_reverse
// --- names suggest a bidirectional GRU (TODO confirm against the model).
139905143330192 [label="rnn.gru.weight_ih_l0
(192, 32)" fillcolor=lightblue]
139905143330192 -> 139905123785072
139905123785072 [label=AccumulateGrad]
139905123784832 -> 139905123785024
139905143330272 [label="rnn.gru.weight_hh_l0
(192, 64)" fillcolor=lightblue]
139905143330272 -> 139905123784832
139905123784832 [label=AccumulateGrad]
139905123785168 -> 139905123785024
139905143330352 [label="rnn.gru.bias_ih_l0
(192)" fillcolor=lightblue]
139905143330352 -> 139905123785168
139905123785168 [label=AccumulateGrad]
139905123785216 -> 139905123785024
139905143330432 [label="rnn.gru.bias_hh_l0
(192)" fillcolor=lightblue]
139905143330432 -> 139905123785216
139905123785216 [label=AccumulateGrad]
139905123785264 -> 139905123785024
139905143330592 [label="rnn.gru.weight_ih_l0_reverse
(192, 32)" fillcolor=lightblue]
139905143330592 -> 139905123785264
139905123785264 [label=AccumulateGrad]
139905123785312 -> 139905123785024
139905143330672 [label="rnn.gru.weight_hh_l0_reverse
(192, 64)" fillcolor=lightblue]
139905143330672 -> 139905123785312
139905123785312 [label=AccumulateGrad]
139905123785360 -> 139905123785024
139905143330752 [label="rnn.gru.bias_ih_l0_reverse
(192)" fillcolor=lightblue]
139905143330752 -> 139905123785360
139905123785360 [label=AccumulateGrad]
139905123785408 -> 139905123785024
139905143330832 [label="rnn.gru.bias_hh_l0_reverse
(192)" fillcolor=lightblue]
139905143330832 -> 139905123785408
139905123785408 [label=AccumulateGrad]
139905123784496 -> 139905123784352
// attn.0.weight enters its Addmm through a transpose (TBackward0).
139905123784496 [label=TBackward0]
139905123784976 -> 139905123784496
139905143331232 [label="attn.0.weight
(64, 64)" fillcolor=lightblue]
139905143331232 -> 139905123784976
139905123784976 [label=AccumulateGrad]
139905123784016 -> 139905123783968
// RNN output is split/added/permuted, then reused by BOTH attention Bmm
// nodes (see the two edges out of 139905123783776 below).
139905123784016 [label=TransposeBackward0]
139905123783776 -> 139905123784016
139905123783776 [label=PermuteBackward0]
139905123784880 -> 139905123783776
139905123784880 [label=AddBackward0]
139905123785456 -> 139905123784880
139905123785456 [label=SplitBackward0]
139905123785024 -> 139905123785456
// Second edge Split -> Add: both Add operands come from the same split.
139905123785456 -> 139905123784880
139905123783776 -> 139905123783632
139905123782816 -> 139905123782912
// fc.weight enters its Addmm through a transpose (TBackward0).
139905123782816 [label=TBackward0]
139905123783920 -> 139905123782816
139905143331392 [label="fc.weight
(8, 64)" fillcolor=lightblue]
139905143331392 -> 139905123783920
139905123783920 [label=AccumulateGrad]
// Softmax result feeds the (1, 8) output node.
139905123782864 -> 139905123795248
}