Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)
Fix copies for DBRX - neuron fix (#30610)
commit 4ad5adaf1d
parent f95302584b
@@ -1256,8 +1256,11 @@ class DbrxModel(DbrxPreTrainedModel):
             causal_mask = causal_mask.clone()  # copy to contiguous memory for in-place edit
             if attention_mask.dim() == 2:
                 mask_length = attention_mask.shape[-1]
-                padding_mask = causal_mask[..., :mask_length].eq(0.0) * attention_mask[:, None, None, :].eq(0.0)
-                causal_mask[..., :mask_length] = causal_mask[..., :mask_length].masked_fill(padding_mask, min_dtype)
+                padding_mask = causal_mask[:, :, :, :mask_length] + attention_mask[:, None, None, :]
+                padding_mask = padding_mask == 0
+                causal_mask[:, :, :, :mask_length] = causal_mask[:, :, :, :mask_length].masked_fill(
+                    padding_mask, min_dtype
+                )
             elif attention_mask.dim() == 4:
                 # backwards compatibility: we allow passing a 4D attention mask shorter than the input length with
                 # cache. In that case, the 4D attention mask attends to the newest tokens only.
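
For readers outside the diff view: both formulations mark the same positions, namely entries of the causal mask that are attendable (0.0) but fall on padded tokens (attention_mask == 0). The new version builds the padding mask with an addition and a comparison against zero instead of multiplying two boolean .eq(0.0) masks, which the commit title attributes to an AWS Neuron compatibility fix. Below is a minimal sketch (illustrative toy values, not from the commit) checking that the two formulations agree:

import torch

min_dtype = torch.finfo(torch.float32).min  # assumption: float32 masks, as in the default path

# causal_mask: 0.0 where attention is allowed, min_dtype where it is blocked
causal_mask = torch.tensor(
    [[[[0.0, min_dtype, min_dtype],
       [0.0, 0.0, min_dtype],
       [0.0, 0.0, 0.0]]]]
)
# attention_mask: 1 for real tokens, 0 for padding (last token is padding here)
attention_mask = torch.tensor([[1, 1, 0]])
mask_length = attention_mask.shape[-1]

# old formulation: multiply two boolean masks
old = causal_mask[..., :mask_length].eq(0.0) * attention_mask[:, None, None, :].eq(0.0)

# new formulation: add, then compare the sum against zero
new = (causal_mask[:, :, :, :mask_length] + attention_mask[:, None, None, :]) == 0

assert torch.equal(old, new)  # both select the attendable positions of padded tokens

The "Fix copies" in the title suggests this hunk is the mechanical fix-copies propagation of the same mask change from the shared implementation it was copied from into the DBRX modeling file.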