mirror of https://github.com/huggingface/transformers.git
synced 2025-07-31 18:22:34 +06:00
parent 419d979f7f
commit a70da86b84
@@ -807,7 +807,7 @@ class ModuleUtilsMixin:
         return extended_attention_mask
 
     def get_extended_attention_mask(
-        self, attention_mask: Tensor, input_shape: Tuple[int], device: device = None, dtype: torch.float = None
+        self, attention_mask: Tensor, input_shape: Tuple[int], device: torch.device = None, dtype: torch.float = None
     ) -> Tensor:
         """
         Makes broadcastable attention and causal masks so that future and masked tokens are ignored.
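For context, the docstring above describes turning a 2D padding mask into a broadcastable 4D mask. Below is a minimal, illustrative sketch of that idea, not the library's exact implementation; the tensor values and the (batch_size, seq_length) shape are assumed for the example.

import torch

# Illustrative sketch: expand a 2D padding mask (batch_size, seq_length) to a
# 4D mask (batch_size, 1, 1, seq_length) that broadcasts over attention heads
# and query positions, then convert 0s into a large negative bias so masked
# positions are effectively ignored by the attention softmax.
attention_mask = torch.tensor([[1, 1, 1, 0], [1, 1, 0, 0]])  # 1 = keep, 0 = pad
dtype = torch.float32

extended_attention_mask = attention_mask[:, None, None, :].to(dtype)          # (2, 1, 1, 4)
extended_attention_mask = (1.0 - extended_attention_mask) * torch.finfo(dtype).min

print(extended_attention_mask.shape)  # torch.Size([2, 1, 1, 4])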