Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-31 02:02:21 +06:00.

Commit: Fix `serving_output` for TF composite models (encoder-decoder-like models) (#22743)

* fix
* style
* fix

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
This commit is contained in:
parent
410b61ad7e
commit
a6752a7d3c
@@ -633,14 +633,18 @@ class TFEncoderDecoderModel(TFPreTrainedModel, TFCausalLanguageModelingLoss):
         )

     def serving_output(self, output):
-        pkv = tf.tuple(output.past_key_values)[1] if self.config.use_cache else None
-        dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None
-        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None
-        enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None
-        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None
+        pkv = tf.tuple(output.past_key_values)[1] if self.config.decoder.use_cache else None
+        dec_hs = (
+            tf.convert_to_tensor(output.decoder_hidden_states) if self.config.decoder.output_hidden_states else None
+        )
+        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.decoder.output_attentions else None
+        enc_hs = (
+            tf.convert_to_tensor(output.encoder_hidden_states) if self.config.encoder.output_hidden_states else None
+        )
+        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.encoder.output_attentions else None
         cross_attns = (
             tf.convert_to_tensor(output.cross_attentions)
-            if self.config.output_attentions and output.cross_attentions is not None
+            if self.config.decoder.output_attentions and output.cross_attentions is not None
             else None
         )
@@ -662,14 +662,18 @@ class TFVisionEncoderDecoderModel(TFPreTrainedModel, TFCausalLanguageModelingLos
         )

     def serving_output(self, output):
-        pkv = tf.tuple(output.past_key_values)[1] if self.config.use_cache else None
-        dec_hs = tf.convert_to_tensor(output.decoder_hidden_states) if self.config.output_hidden_states else None
-        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.output_attentions else None
-        enc_hs = tf.convert_to_tensor(output.encoder_hidden_states) if self.config.output_hidden_states else None
-        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.output_attentions else None
+        pkv = tf.tuple(output.past_key_values)[1] if self.config.decoder.use_cache else None
+        dec_hs = (
+            tf.convert_to_tensor(output.decoder_hidden_states) if self.config.decoder.output_hidden_states else None
+        )
+        dec_attns = tf.convert_to_tensor(output.decoder_attentions) if self.config.decoder.output_attentions else None
+        enc_hs = (
+            tf.convert_to_tensor(output.encoder_hidden_states) if self.config.encoder.output_hidden_states else None
+        )
+        enc_attns = tf.convert_to_tensor(output.encoder_attentions) if self.config.encoder.output_attentions else None
         cross_attns = (
             tf.convert_to_tensor(output.cross_attentions)
-            if self.config.output_attentions and output.cross_attentions is not None
+            if self.config.decoder.output_attentions and output.cross_attentions is not None
             else None
         )
Loading…
Reference in New Issue
Block a user