Mirror of https://github.com/huggingface/transformers.git (synced 2025-08-01 18:51:14 +06:00)
Fix copies for DBRX - neuron fix (#30610)
parent f95302584b
commit 4ad5adaf1d
@@ -1256,8 +1256,11 @@ class DbrxModel(DbrxPreTrainedModel):
             causal_mask = causal_mask.clone()  # copy to contiguous memory for in-place edit
             if attention_mask.dim() == 2:
                 mask_length = attention_mask.shape[-1]
-                padding_mask = causal_mask[..., :mask_length].eq(0.0) * attention_mask[:, None, None, :].eq(0.0)
-                causal_mask[..., :mask_length] = causal_mask[..., :mask_length].masked_fill(padding_mask, min_dtype)
+                padding_mask = causal_mask[:, :, :, :mask_length] + attention_mask[:, None, None, :]
+                padding_mask = padding_mask == 0
+                causal_mask[:, :, :, :mask_length] = causal_mask[:, :, :, :mask_length].masked_fill(
+                    padding_mask, min_dtype
+                )
             elif attention_mask.dim() == 4:
                 # backwards compatibility: we allow passing a 4D attention mask shorter than the input length with
                 # cache. In that case, the 4D attention mask attends to the newest tokens only.
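For context, the hunk above replaces the two-step `.eq(0.0)`-and-multiply construction of the padding mask with an addition followed by a single comparison against zero. The standalone sketch below (not part of the commit; tensor shapes and values are illustrative) checks that both formulations select the same positions: a position is padded out exactly when the causal mask allows it (value 0.0) and the 2D attention mask marks it as padding (value 0).

import torch

# Minimal sketch comparing the two padding-mask formulations touched by this diff.
dtype = torch.float32
min_dtype = torch.finfo(dtype).min

seq_len, mask_length = 4, 4
# Causal mask: 0.0 where attention is allowed, min_dtype strictly above the diagonal.
causal_mask = torch.full((seq_len, mask_length), min_dtype, dtype=dtype)
causal_mask = torch.triu(causal_mask, diagonal=1)[None, None, :, :]  # (1, 1, seq_len, mask_length)

# 2D padding mask: 1 for real tokens, 0 for padding (last position padded here).
attention_mask = torch.tensor([[1.0, 1.0, 1.0, 0.0]], dtype=dtype)

# Old formulation: two elementwise equality checks multiplied together.
old_padding = causal_mask[..., :mask_length].eq(0.0) * attention_mask[:, None, None, :].eq(0.0)

# New formulation: add the masks, then a single comparison against zero.
new_padding = causal_mask[:, :, :, :mask_length] + attention_mask[:, None, None, :]
new_padding = new_padding == 0

# Both boolean masks mark the same (causally visible, padded) positions.
assert torch.equal(old_padding, new_padding)

# Either mask is then applied the same way as in the diff.
merged = causal_mask[:, :, :, :mask_length].masked_fill(new_padding, min_dtype)
print(merged)

Per the commit title, the addition-based variant is the neuron fix propagated to DBRX through the repository's fix-copies mechanism; the sketch only demonstrates that the two boolean masks coincide, not why the new form is preferable on that backend.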