[GPTJ] Fix gradient checkpointing bug (#21794)

* If applied, this commit fixes the `generate` bug in GPT-J

* Remove the duplicated code block

* Formatting and test fixes

* Fix merge conflict and declaration error

---------

Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>
commit 31fa2b6c68
parent eec76042f4
Author: Herumb Shandilya
Date:   2023-02-28 20:42:42 +05:30 (committed by GitHub)

@@ -633,6 +633,13 @@ class GPTJModel(GPTJPreTrainedModel):
         output_shape = input_shape + (hidden_states.size(-1),)
 
+        if self.gradient_checkpointing and self.training:
+            if use_cache:
+                logger.warning_once(
+                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
+                )
+                use_cache = False
+
         presents = () if use_cache else None
         all_self_attentions = () if output_attentions else None
         all_hidden_states = () if output_hidden_states else None
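
Why the hoisting matters: `presents` is initialized from `use_cache` immediately after the added block, so when the flag was only flipped later, inside the per-layer loop, `presents` had already been created as an empty tuple, as if caching were still enabled. Below is a minimal, runnable sketch of the corrected ordering; the helper `resolve_use_cache` is hypothetical (introduced here only for illustration), and plain `logging` stands in for transformers' `logger.warning_once`:

import logging

logger = logging.getLogger(__name__)

def resolve_use_cache(gradient_checkpointing: bool, training: bool, use_cache: bool) -> bool:
    # Hypothetical helper: mirrors the check the hunk hoists above the
    # layer loop in GPTJModel.forward.
    if gradient_checkpointing and training and use_cache:
        logger.warning(
            "`use_cache=True` is incompatible with gradient checkpointing. "
            "Setting `use_cache=False`..."
        )
        use_cache = False
    return use_cache

# With the check applied first, the cache container is created consistently:
use_cache = resolve_use_cache(gradient_checkpointing=True, training=True, use_cache=True)
presents = () if use_cache else None
print(presents)  # None: no stale empty-tuple cache when checkpointing is active

The second hunk then deletes the now-redundant copy of this check from inside the layer loop: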
@@ -652,11 +659,6 @@ class GPTJModel(GPTJPreTrainedModel):
                 all_hidden_states = all_hidden_states + (hidden_states,)
 
             if self.gradient_checkpointing and self.training:
-                if use_cache:
-                    logger.warning_once(
-                        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
-                    )
-                    use_cache = False
 
                 def create_custom_forward(module):
                     def custom_forward(*inputs):
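
The hunk is truncated at `custom_forward`, but the closure it keeps is the standard PyTorch activation-checkpointing pattern: wrap each block so that `torch.utils.checkpoint.checkpoint` stores no intermediate activations and recomputes the block during backward instead. Below is a self-contained sketch of that pattern under assumed shapes; `TinyBlock`, its dimensions, and the keyword flags are placeholders rather than GPT-J's real block signature:

import torch
import torch.nn as nn
from torch.utils.checkpoint import checkpoint

# TinyBlock is a stand-in for a transformer block; GPT-J's real block also
# takes attention masks, position ids, and a layer_past.
class TinyBlock(nn.Module):
    def __init__(self, dim: int = 16):
        super().__init__()
        self.linear = nn.Linear(dim, dim)

    def forward(self, hidden_states, use_cache=False, output_attentions=False):
        # Return a tuple, mirroring how transformer blocks return
        # (hidden_states, present, attentions, ...).
        return (torch.relu(self.linear(hidden_states)),)

def create_custom_forward(module):
    # checkpoint() re-invokes this function with tensor inputs only, so
    # non-tensor flags are captured by the closure instead of passed through.
    def custom_forward(*inputs):
        return module(*inputs, use_cache=False, output_attentions=False)

    return custom_forward

blocks = nn.ModuleList([TinyBlock() for _ in range(2)])
hidden_states = torch.randn(1, 4, 16, requires_grad=True)
for block in blocks:
    # Activations inside each block are recomputed during backward.
    outputs = checkpoint(create_custom_forward(block), hidden_states)
    hidden_states = outputs[0]
hidden_states.sum().backward()

Hard-coding `use_cache=False` inside the closure is consistent with the hunks above: under checkpointing, any `present` tensors a block produced would be recomputed rather than stored, so caching cannot be honored and the flag is disabled up front.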