
Commit 7192a52: update reference graphs
Parent: 5ab00f5

File tree: 4 files changed, +920 -898 lines

nncf/experimental/torch2/function_hook/handle_inner_functions.py (+3 -3)
@@ -202,8 +202,8 @@ def multi_head_attention_forward(
     src_len = k.size(1)
 
     if key_padding_mask is not None:
-        if not torch.jit.is_scripting() and not torch.jit.is_tracing():
-            _check_key_padding_mask(key_padding_mask, src_len, bsz)
+        if not torch.jit.is_scripting() and not torch.jit.is_tracing():  # type: ignore
+            _check_key_padding_mask(key_padding_mask, src_len, bsz)  # type: ignore
 
         key_padding_mask = (
             key_padding_mask.view(bsz, 1, 1, src_len).expand(-1, num_heads, -1, -1).reshape(bsz * num_heads, 1, src_len)
@@ -217,7 +217,7 @@ def multi_head_attention_forward(
     dropout_p = 0.0
 
     if need_weights:
-        _B, _Nt, E = q.shape # noqa: F841
+        _B, _Nt, E = q.shape  # noqa: F841
         q_scaled = q * math.sqrt(1.0 / float(E))
 
         assert not (is_causal and attn_mask is None), "FIXME: is_causal not implemented for need_weights"
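
For context: the first hunk adds # type: ignore to the two torch.jit guard calls, presumably to quiet a static type checker on these untyped calls, while the second hunk appears to adjust only the spacing before the inline noqa comment. Below is a minimal, self-contained sketch (not part of the commit; all sizes are hypothetical) of the two operations visible in the surrounding context lines: the per-head broadcast of key_padding_mask and the 1/sqrt(E) query scaling applied when attention weights are requested.

import math
import torch

# Hypothetical sizes for illustration only.
bsz, num_heads, src_len, head_dim = 2, 4, 5, 8

# (bsz, src_len) boolean mask -> (bsz * num_heads, 1, src_len),
# mirroring the view/expand/reshape in the first hunk above.
key_padding_mask = torch.zeros(bsz, src_len, dtype=torch.bool)
key_padding_mask[:, -1] = True  # mark the last source position as padding
expanded = (
    key_padding_mask.view(bsz, 1, 1, src_len)
    .expand(-1, num_heads, -1, -1)
    .reshape(bsz * num_heads, 1, src_len)
)
assert expanded.shape == (bsz * num_heads, 1, src_len)

# Query scaling from the second hunk: q is scaled by sqrt(1/E), i.e. 1/sqrt(E),
# before the q @ k^T product when need_weights is True.
q = torch.randn(bsz * num_heads, src_len, head_dim)
_B, _Nt, E = q.shape
q_scaled = q * math.sqrt(1.0 / float(E))
assert torch.allclose(q_scaled, q / math.sqrt(E))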
