Fix comment (#10886)
commit 86c6f8a8b1
parent 9856c9213d
@@ -904,6 +904,7 @@ class T5Stack(T5PreTrainedModel):
        if past_key_values is None:
            past_key_values = [None] * len(self.block)

        # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
        # ourselves in which case we just need to make it broadcastable to all heads.
        extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, inputs_embeds.device)
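The comment in the hunk refers to the mask-broadcasting step: get_extended_attention_mask reshapes a user-supplied attention mask so that one copy can be added to the attention scores of every head. A minimal sketch of that broadcasting logic follows, assuming the conventional 1/0 keep/mask encoding and the historical -10000.0 masking value; the helper name extend_attention_mask is illustrative, not the transformers API.

import torch

def extend_attention_mask(attention_mask: torch.Tensor) -> torch.Tensor:
    """Sketch of mask broadcasting (assumption: mirrors the intent of
    get_extended_attention_mask, not the exact transformers implementation).

    Accepts a 2D mask [batch_size, to_seq_length] or a 3D mask
    [batch_size, from_seq_length, to_seq_length] and returns a 4D tensor
    that broadcasts across all attention heads.
    """
    if attention_mask.dim() == 3:
        # [batch_size, from_seq_length, to_seq_length] -> insert a head dimension
        extended = attention_mask[:, None, :, :]
    elif attention_mask.dim() == 2:
        # [batch_size, to_seq_length] -> broadcastable over heads and query positions
        extended = attention_mask[:, None, None, :]
    else:
        raise ValueError(f"Unexpected attention_mask shape: {attention_mask.shape}")
    # Convert 1.0 (keep) / 0.0 (mask) into additive scores: 0.0 for kept
    # positions, a large negative value for masked ones, so that masked
    # positions vanish after the softmax over attention logits.
    extended = extended.to(dtype=torch.float32)
    return (1.0 - extended) * -10000.0

For example, a padding mask torch.tensor([[1, 1, 1, 0]]) of shape [1, 4] becomes a [1, 1, 1, 4] tensor of [0, 0, 0, -10000], which is added to the attention scores of every head and every query position.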