We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 52b1bb4 · commit e192179 (Copy full SHA for e192179)
pocket/models/transformers.py
@@ -206,7 +206,7 @@ def forward(self,
206
x = self.output(m, x)
207
return x, attn_data
208
209
-class FeedFowardNetwork(nn.Module):
+class FeedForwardNetwork(nn.Module):
210
"""
211
Position-wise feed-forward networks succeeding the attention layer
212
@@ -269,7 +269,7 @@ def __init__(self,
269
dropout_prob=dropout_prob,
270
return_weights=return_weights
271
)
272
- self.ffn = FeedFowardNetwork(
+ self.ffn = FeedForwardNetwork(
273
hidden_size=hidden_size,
274
intermediate_size=intermediate_size,
275
dropout_prob=dropout_prob
0 commit comments