Fix gradient checkpointing bug in git (#21818)

Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>
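For context: the code path touched by this fix only runs when gradient checkpointing is enabled on the model during training. A minimal usage sketch, assuming the standard Transformers gradient_checkpointing_enable API, with microsoft/git-base as an example checkpoint:

from transformers import GitModel

model = GitModel.from_pretrained("microsoft/git-base")
model.gradient_checkpointing_enable()  # sets gradient_checkpointing=True on supporting submodules
model.train()  # the guard in GitEncoder.forward is active only in training mode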
Karim Foda 2023-02-28 12:46:33 +00:00 committed by GitHub
parent 50db741417
commit e07a3d95f8

@@ -431,6 +431,13 @@ class GitEncoder(nn.Module):
         pixel_values_present: Optional[bool] = False,
         return_dict: Optional[bool] = True,
     ) -> Union[Tuple[torch.Tensor], BaseModelOutputWithPast]:
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
+
         all_hidden_states = () if output_hidden_states else None
         all_self_attentions = () if output_attentions else None
@@ -443,11 +450,6 @@ class GitEncoder(nn.Module):
             past_key_value = past_key_values[i] if past_key_values is not None else None

             if self.gradient_checkpointing and self.training:

-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False
                 def create_custom_forward(module):
                     def custom_forward(*inputs):
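In pattern form, the change hoists the `use_cache` guard out of the per-layer loop: it now runs once at the top of GitEncoder.forward, before any per-call state is initialized, so everything derived from `use_cache` sees its final value. Below is a minimal, self-contained sketch of that pattern; TinyEncoder, its sizes, and the warnings.warn call are illustrative stand-ins, not the actual GitEncoder code.

import warnings

import torch
from torch import nn
from torch.utils.checkpoint import checkpoint


class TinyEncoder(nn.Module):
    def __init__(self, num_layers: int = 4, hidden: int = 32):
        super().__init__()
        self.layers = nn.ModuleList(nn.Linear(hidden, hidden) for _ in range(num_layers))
        self.gradient_checkpointing = True

    def forward(self, hidden_states: torch.Tensor, use_cache: bool = True):
        # Hoisted guard: resolve use_cache once, before any state that
        # depends on it (like the cache tuple below) is created.
        if self.gradient_checkpointing and self.training:
            if use_cache:
                warnings.warn(
                    "`use_cache=True` is incompatible with gradient checkpointing. "
                    "Setting `use_cache=False`..."
                )
                use_cache = False

        next_cache = () if use_cache else None  # sees the final flag value
        for layer in self.layers:
            if self.gradient_checkpointing and self.training:
                # Recompute this layer's activations during backward
                # instead of keeping them in memory.
                hidden_states = checkpoint(layer, hidden_states, use_reentrant=False)
            else:
                hidden_states = layer(hidden_states)
            if use_cache:
                next_cache += (hidden_states,)
        return hidden_states, next_cache


model = TinyEncoder().train()
out, cache = model(torch.randn(2, 32))
assert cache is None  # disabled up front, before the cache was initialized

With the old placement the flag was only flipped inside the loop, so state set up before the loop (such as an empty cache tuple) could be created from the stale value; hoisting the check makes the warning and the caching behavior consistent for the whole forward pass.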