Fix gradient checkpointing bug in Roformer (#21946)

Karim Foda 2023-03-04 16:44:33 +01:00 committed by GitHub
parent 6386eb9721
commit 6feb39b43c


@@ -556,6 +556,12 @@ class RoFormerEncoder(nn.Module):
         output_hidden_states=False,
         return_dict=True,
     ):
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
         all_hidden_states = () if output_hidden_states else None
         all_self_attentions = () if output_attentions else None
         all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
@@ -572,11 +578,6 @@
             past_key_value = past_key_values[i] if past_key_values is not None else None
             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False
                 def create_custom_forward(module):
                     def custom_forward(*inputs):
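
For context, the change hoists the `use_cache` check out of the per-layer loop so the flag is cleared once, before any layer runs, instead of being re-checked on every iteration after caching may already have started. Below is a minimal, self-contained sketch of that pattern; `SimpleEncoder`, its `layers`, and the toy cache are illustrative stand-ins, not the actual RoFormer code.

import torch
from torch import nn
from torch.utils.checkpoint import checkpoint


class SimpleEncoder(nn.Module):
    """Toy encoder illustrating the hoisted use_cache check (not the real RoFormer code)."""

    def __init__(self, hidden_size: int = 16, num_layers: int = 2):
        super().__init__()
        self.layers = nn.ModuleList(nn.Linear(hidden_size, hidden_size) for _ in range(num_layers))
        self.gradient_checkpointing = True

    def forward(self, hidden_states: torch.Tensor, use_cache: bool = True):
        # Decide once, before the loop: checkpointed layers cannot return
        # past key/values, so use_cache is turned off up front.
        if self.gradient_checkpointing and self.training:
            if use_cache:
                print("`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...")
                use_cache = False

        next_cache = () if use_cache else None
        for layer in self.layers:
            if self.gradient_checkpointing and self.training:
                # Recompute activations in backward instead of storing them.
                hidden_states = checkpoint(layer, hidden_states, use_reentrant=False)
            else:
                hidden_states = layer(hidden_states)
            if use_cache:
                next_cache += (hidden_states,)  # stand-in for a real past_key_value
        return hidden_states, next_cache


model = SimpleEncoder().train()
out, cache = model(torch.randn(2, 4, 16), use_cache=True)
assert cache is None  # use_cache was disabled before the loop ran

With the check outside the loop, the warning fires at most once per forward pass and `use_cache` is already `False` by the time the first layer would have tried to populate a cache.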