Mirror of https://github.com/huggingface/transformers.git, last synced 2025-07-03 12:50:06 +06:00.
Fix attention mask expansion when converting to executorch (#38637)
This commit is contained in:
parent
19224c3642
commit
282d6684dc
@@ -193,7 +193,7 @@ class AttentionMaskConverter:

         expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)

-        inverted_mask = 1.0 - expanded_mask
+        inverted_mask = torch.tensor(1.0, dtype=dtype) - expanded_mask

         return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min)
Loading…
Reference in New Issue
Block a user