Do not change the output from tuple to list - to match PT's version (#15918)

* Do not change the output from tuple to list - to match PT's version

* Fix the same issues for 5 other models and the template

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
Yih-Dar 2022-03-04 17:50:24 +01:00 committed by GitHub
parent 10b76987fc
commit f0aacc140b
7 changed files with 0 additions and 42 deletions
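
In short: the TF decoders accumulate per-layer attention tensors into Python tuples, and the deleted branch converted those tuples to lists just before the return, so the TF outputs diverged from the PyTorch decoders, which return tuples. A minimal, self-contained sketch of that difference (collect_attentions and the dummy string values are illustrative only, not part of the modeling code):

def collect_attentions(layer_attentions, convert_to_list=False):
    # Same accumulation pattern as the TF decoders below: grow a tuple per layer.
    all_self_attns = ()
    for attn in layer_attentions:
        all_self_attns += (attn,)
    if convert_to_list:  # the behaviour this commit removes
        all_self_attns = list(all_self_attns)
    return all_self_attns

dummy_layers = ["attn_layer_0", "attn_layer_1"]
assert isinstance(collect_attentions(dummy_layers, convert_to_list=True), list)    # old TF behaviour
assert isinstance(collect_attentions(dummy_layers, convert_to_list=False), tuple)  # new TF behaviour, matching PT

Returning tuples in both frameworks keeps the TF and PT decoder output structures directly comparable.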


@@ -1012,12 +1012,6 @@ class TFBartDecoder(tf.keras.layers.Layer):
         if inputs["output_hidden_states"]:
             all_hidden_states += (hidden_states,)
 
-        if inputs["output_attentions"]:
-            all_self_attns = list(all_self_attns)
-
-            if inputs["encoder_hidden_states"] is not None:
-                all_cross_attns = list(all_cross_attns)
-
         if inputs["use_cache"]:
             present_key_values = (inputs["encoder_hidden_states"], present_key_values)


@@ -1011,12 +1011,6 @@ class TFBlenderbotDecoder(tf.keras.layers.Layer):
         if inputs["output_hidden_states"]:
             all_hidden_states += (hidden_states,)
 
-        if inputs["output_attentions"]:
-            all_self_attns = list(all_self_attns)
-
-            if inputs["encoder_hidden_states"] is not None:
-                all_cross_attns = list(all_cross_attns)
-
         if inputs["use_cache"]:
             present_key_values = (inputs["encoder_hidden_states"], present_key_values)


@@ -1010,12 +1010,6 @@ class TFBlenderbotSmallDecoder(tf.keras.layers.Layer):
         if inputs["output_hidden_states"]:
             all_hidden_states += (hidden_states,)
 
-        if inputs["output_attentions"]:
-            all_self_attns = list(all_self_attns)
-
-            if inputs["encoder_hidden_states"] is not None:
-                all_cross_attns = list(all_cross_attns)
-
         if inputs["use_cache"]:
             present_key_values = (inputs["encoder_hidden_states"], present_key_values)


@@ -1050,12 +1050,6 @@ class TFMarianDecoder(tf.keras.layers.Layer):
         if inputs["output_hidden_states"]:
             all_hidden_states += (hidden_states,)
 
-        if inputs["output_attentions"]:
-            all_self_attns = list(all_self_attns)
-
-            if inputs["encoder_hidden_states"] is not None:
-                all_cross_attns = list(all_cross_attns)
-
         if inputs["use_cache"]:
             present_key_values = (inputs["encoder_hidden_states"], present_key_values)


@@ -1034,12 +1034,6 @@ class TFMBartDecoder(tf.keras.layers.Layer):
         if inputs["output_hidden_states"]:
             all_hidden_states += (hidden_states,)
 
-        if inputs["output_attentions"]:
-            all_self_attns = list(all_self_attns)
-
-            if inputs["encoder_hidden_states"] is not None:
-                all_cross_attns = list(all_cross_attns)
-
         if inputs["use_cache"]:
             present_key_values = (inputs["encoder_hidden_states"], present_key_values)


@@ -1058,12 +1058,6 @@ class TFPegasusDecoder(tf.keras.layers.Layer):
         if inputs["output_hidden_states"]:
             all_hidden_states += (hidden_states,)
 
-        if inputs["output_attentions"]:
-            all_self_attns = list(all_self_attns)
-
-            if inputs["encoder_hidden_states"] is not None:
-                all_cross_attns = list(all_cross_attns)
-
         if inputs["use_cache"]:
             present_key_values = (inputs["encoder_hidden_states"], present_key_values)


@@ -2736,12 +2736,6 @@ class TF{{cookiecutter.camelcase_modelname}}Decoder(tf.keras.layers.Layer):
         if inputs["output_hidden_states"]:
             all_hidden_states += (hidden_states,)
 
-        if inputs["output_attentions"]:
-            all_self_attns = list(all_self_attns)
-
-            if inputs["encoder_hidden_states"] is not None:
-                all_cross_attns = list(all_cross_attns)
-
         if inputs["use_cache"]:
             present_key_values = (inputs["encoder_hidden_states"], present_key_values)