Fix hint in src/transformers/modeling_utils.py (#22074)

fix hint
J-shang 2023-03-10 21:56:42 +08:00 committed by GitHub
parent 419d979f7f
commit a70da86b84

@@ -807,7 +807,7 @@ class ModuleUtilsMixin:
         return extended_attention_mask
 
     def get_extended_attention_mask(
-        self, attention_mask: Tensor, input_shape: Tuple[int], device: device = None, dtype: torch.float = None
+        self, attention_mask: Tensor, input_shape: Tuple[int], device: torch.device = None, dtype: torch.float = None
     ) -> Tensor:
         """
         Makes broadcastable attention and causal masks so that future and masked tokens are ignored.
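
For context, a minimal sketch of the corrected signature in isolation. The MaskUtils class, the toy body, and the Optional[...] spellings are illustrative assumptions, not the transformers implementation; the point is only that the fully qualified torch.device annotation is unambiguous where a bare device could resolve to another name in scope (such as a device property on the class).

    from typing import Optional, Tuple

    import torch
    from torch import Tensor


    class MaskUtils:  # hypothetical stand-in for ModuleUtilsMixin, not the real class
        def get_extended_attention_mask(
            self,
            attention_mask: Tensor,
            input_shape: Tuple[int],
            device: Optional[torch.device] = None,  # qualified hint, as in the commit
            dtype: Optional[torch.dtype] = None,
        ) -> Tensor:
            # Toy body: broadcast a [batch, seq_len] padding mask to
            # [batch, 1, 1, seq_len] and turn 1/0 keep/mask flags into additive
            # biases (0 for kept positions, a large negative value for masked).
            # input_shape is accepted for signature parity but unused here.
            if dtype is None:
                dtype = torch.float32
            extended = attention_mask[:, None, None, :].to(dtype=dtype)
            if device is not None:
                extended = extended.to(device)
            return (1.0 - extended) * torch.finfo(dtype).min


    mask = torch.tensor([[1, 1, 0]])
    out = MaskUtils().get_extended_attention_mask(mask, (1, 3))
    print(out.shape)  # torch.Size([1, 1, 1, 3])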