Fix TF Causal LM models' returned logits (#15256)

* Fix TF Causal LM models' returned logits

* Fix expected shape in the tests

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
Yih-Dar 2022-02-01 12:04:07 +01:00 committed by GitHub
parent af5c3329d7
commit dc05dd539f
10 changed files with 18 additions and 18 deletions
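
Every model hunk below applies the same pattern: the full-length logits stay untouched in the returned output, and only a shifted copy feeds the loss. The following is a minimal standalone sketch of that pattern in plain TensorFlow; the stand-in hf_compute_loss and all shapes here are illustrative assumptions, not the library's actual implementation.

import tensorflow as tf

# Stand-in for TFCausalLanguageModelingLoss.hf_compute_loss (assumption: a
# sparse categorical cross-entropy over the vocabulary dimension).
def hf_compute_loss(labels, logits):
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(
        from_logits=True, reduction=tf.keras.losses.Reduction.NONE
    )
    return loss_fn(labels, logits)

batch_size, seq_len, vocab_size = 2, 5, 11
logits = tf.random.normal((batch_size, seq_len, vocab_size))
labels = tf.random.uniform((batch_size, seq_len), maxval=vocab_size, dtype=tf.int32)

# The buggy code overwrote `logits` with the shifted slice, so callers received
# logits of shape (batch, seq_len - 1, vocab). The fix keeps `logits` intact and
# uses a separate `shifted_logits` only for the loss.
shifted_logits = logits[:, :-1]     # cut the last logit token
shifted_labels = labels[:, 1:]      # shift labels to the left
loss = hf_compute_loss(labels=shifted_labels, logits=shifted_logits)

print(logits.shape)  # (2, 5, 11) -- full sequence length, as the updated tests expect
print(loss.shape)    # (2, 4)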


@@ -1542,9 +1542,9 @@ class TFBertLMHeadModel(TFBertPreTrainedModel, TFCausalLanguageModelingLoss):
         if inputs["labels"] is not None:
             # shift labels to the left and cut last logit token
-            logits = logits[:, :-1]
+            shifted_logits = logits[:, :-1]
             labels = inputs["labels"][:, 1:]
-            loss = self.hf_compute_loss(labels=labels, logits=logits)
+            loss = self.hf_compute_loss(labels=labels, logits=shifted_logits)
         if not inputs["return_dict"]:
             output = (logits,) + outputs[2:]


@@ -735,9 +735,9 @@ class TFCTRLLMHeadModel(TFCTRLPreTrainedModel, TFCausalLanguageModelingLoss):
         loss = None
         if inputs["labels"] is not None:
             # shift labels to the left and cut last logit token
-            logits = logits[:, :-1]
+            shifted_logits = logits[:, :-1]
             labels = inputs["labels"][:, 1:]
-            loss = self.hf_compute_loss(labels, logits)
+            loss = self.hf_compute_loss(labels, shifted_logits)
         if not inputs["return_dict"]:
             output = (logits,) + transformer_outputs[1:]


@@ -949,9 +949,9 @@ class TFGPT2LMHeadModel(TFGPT2PreTrainedModel, TFCausalLanguageModelingLoss):
         loss = None
         if inputs["labels"] is not None:
             # shift labels to the left and cut last logit token
-            logits = logits[:, :-1]
+            shifted_logits = logits[:, :-1]
             labels = inputs["labels"][:, 1:]
-            loss = self.hf_compute_loss(labels, logits)
+            loss = self.hf_compute_loss(labels, shifted_logits)
         if not inputs["return_dict"]:
             output = (logits,) + transformer_outputs[1:]


@@ -656,9 +656,9 @@ class TFOpenAIGPTLMHeadModel(TFOpenAIGPTPreTrainedModel, TFCausalLanguageModelingLoss):
         loss = None
         if inputs["labels"] is not None:
             # shift labels to the left and cut last logit token
-            logits = logits[:, :-1]
+            shifted_logits = logits[:, :-1]
             labels = inputs["labels"][:, 1:]
-            loss = self.hf_compute_loss(labels, logits)
+            loss = self.hf_compute_loss(labels, shifted_logits)
         if not inputs["return_dict"]:
             output = (logits,) + transformer_outputs[1:]


@@ -1275,9 +1275,9 @@ class TFRemBertForCausalLM(TFRemBertPreTrainedModel, TFCausalLanguageModelingLoss):
         if inputs["labels"] is not None:
             # shift labels to the left and cut last logit token
-            logits = logits[:, :-1]
+            shifted_logits = logits[:, :-1]
             labels = inputs["labels"][:, 1:]
-            loss = self.hf_compute_loss(labels=labels, logits=logits)
+            loss = self.hf_compute_loss(labels=labels, logits=shifted_logits)
         if not inputs["return_dict"]:
             output = (logits,) + outputs[2:]


@@ -1310,9 +1310,9 @@ class TFRobertaForCausalLM(TFRobertaPreTrainedModel, TFCausalLanguageModelingLoss):
         if inputs["labels"] is not None:
             # shift labels to the left and cut last logit token
-            logits = logits[:, :-1]
+            shifted_logits = logits[:, :-1]
             labels = inputs["labels"][:, 1:]
-            loss = self.hf_compute_loss(labels=labels, logits=logits)
+            loss = self.hf_compute_loss(labels=labels, logits=shifted_logits)
         if not inputs["return_dict"]:
             output = (logits,) + outputs[2:]


@@ -1035,9 +1035,9 @@ class TFRoFormerForCausalLM(TFRoFormerPreTrainedModel, TFCausalLanguageModelingLoss):
         if inputs["labels"] is not None:
             # shift labels to the left and cut last logit token
-            logits = logits[:, :-1]
+            shifted_logits = logits[:, :-1]
             labels = inputs["labels"][:, 1:]
-            loss = self.hf_compute_loss(labels=labels, logits=logits)
+            loss = self.hf_compute_loss(labels=labels, logits=shifted_logits)
         if not inputs["return_dict"]:
             output = (logits,) + outputs[2:]


@@ -1262,9 +1262,9 @@ class TF{{cookiecutter.camelcase_modelname}}ForCausalLM(TF{{cookiecutter.camelcase_modelname}}PreTrainedModel, TFCausalLanguageModelingLoss):
         if inputs["labels"] is not None:
             # shift labels to the left and cut last logit token
-            logits = logits[:, :-1]
+            shifted_logits = logits[:, :-1]
             labels = inputs["labels"][:, 1:]
-            loss = self.hf_compute_loss(labels=labels, logits=logits)
+            loss = self.hf_compute_loss(labels=labels, logits=shifted_logits)
         if not inputs["return_dict"]:
             output = (logits,) + outputs[2:]


@@ -240,7 +240,7 @@ class TFEncoderDecoderMixin:
         assert "loss" in outputs_encoder_decoder
         batch_size, seq_len = decoder_input_ids.shape
-        expected_shape = (batch_size, seq_len - 1, decoder_config.vocab_size)
+        expected_shape = (batch_size, seq_len, decoder_config.vocab_size)
         self.assertEqual(outputs_encoder_decoder["logits"].shape, expected_shape)
         self.assertEqual(
             outputs_encoder_decoder["encoder_last_hidden_state"].shape, (input_ids.shape + (config.hidden_size,))


@@ -231,7 +231,7 @@ class TFVisionEncoderDecoderMixin:
         self.assertIn("loss", outputs_encoder_decoder)
         batch_size, seq_len = decoder_input_ids.shape
-        expected_shape = (batch_size, seq_len - 1, decoder_config.vocab_size)
+        expected_shape = (batch_size, seq_len, decoder_config.vocab_size)
         self.assertEqual(outputs_encoder_decoder["logits"].shape, expected_shape)
         self.assertEqual(outputs_encoder_decoder["encoder_last_hidden_state"].shape[0], pixel_values.shape[0])
         self.assertEqual(outputs_encoder_decoder["encoder_last_hidden_state"].shape[-1], config.hidden_size)
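
For reference, a small usage sketch of the behaviour the updated tests expect, built around a tiny, arbitrarily configured TF GPT-2 model; it assumes a transformers version that includes this fix, and the config values are illustrative, not taken from the tests.

import tensorflow as tf
from transformers import GPT2Config, TFGPT2LMHeadModel

# Tiny, arbitrary config so the check runs quickly (random weights, no download).
config = GPT2Config(vocab_size=50, n_positions=16, n_embd=8, n_layer=1, n_head=2)
model = TFGPT2LMHeadModel(config)

input_ids = tf.constant([[1, 2, 3, 4, 5]])
outputs = model(input_ids, labels=input_ids)

# The returned logits keep the full sequence length; the shift to
# (seq_len - 1) now only happens internally when the loss is computed.
assert outputs.logits.shape == (1, 5, config.vocab_size)
assert outputs.loss is not None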