Fix SwitchTransformers: correct `isinstance(...) is not None` check (always truthy) when selecting the self-attention cache

This commit is contained in:
raushan 2025-07-02 11:45:46 +02:00
parent f19d166d2b
commit ab7fac4a01

View File

@ -979,9 +979,7 @@ class SwitchTransformersStack(SwitchTransformersPreTrainedModel):
attention_mask,
inputs_embeds,
cache_position,
past_key_values.self_attention_cache
if isinstance(past_key_values, EncoderDecoderCache) is not None
else None,
past_key_values.self_attention_cache if isinstance(past_key_values, EncoderDecoderCache) else None,
output_attentions,
)
else: