Mirror of https://github.com/huggingface/transformers.git, synced 2025-08-01 18:51:14 +06:00.

Commit: [TFGPT2] - Fix flaky past_key_values test (#9460)

Commit message:
* fix tf flaky test
* remove test files
This commit is contained in:
Parent: 758ed3332b
Commit: f33a6f3446
@@ -218,6 +218,11 @@ class TFGPT2ModelTester:
         ):
             model = TFGPT2Model(config=config)

+            input_ids = input_ids[:1, :]
+            input_mask = input_mask[:1, :]
+            token_type_ids = token_type_ids[:1, :]
+            self.batch_size = 1
+
             # first forward pass
             outputs = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, use_cache=True)

@@ -225,13 +230,13 @@ class TFGPT2ModelTester:

             # create hypothetical next token and extent to next_input_ids
             next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size)
-            next_token_types = ids_tensor((self.batch_size, 3), self.type_vocab_size)
             next_attn_mask = ids_tensor((self.batch_size, 3), 2)
+            next_token_types = ids_tensor((self.batch_size, 3), self.type_vocab_size)

             # append to next input_ids and token_type_ids
             next_input_ids = tf.concat([input_ids, next_tokens], axis=-1)
-            next_token_type_ids = tf.concat([token_type_ids, next_token_types], axis=-1)
             next_attention_mask = tf.concat([input_mask, next_attn_mask], axis=-1)
+            next_token_type_ids = tf.concat([token_type_ids, next_token_types], axis=-1)

             output_from_no_past = model(
                 next_input_ids, token_type_ids=next_token_type_ids, attention_mask=next_attention_mask
Loading…
Reference in New Issue
Block a user