Mirror of https://github.com/huggingface/transformers.git
Hotfixing tests (blenderbot decoder-only tests; also need to remove `encoder_no_repeat_ngram_size` from their config) (#10003)
parent 8c3b1fcb67
commit d5888ef0ab
@@ -325,6 +325,7 @@ class BlenderbotStandaloneDecoderModelTester:
         decoder_attention_heads=4,
         max_position_embeddings=30,
         is_encoder_decoder=False,
+        encoder_no_repeat_ngram_size=0,
         pad_token_id=0,
         bos_token_id=1,
         eos_token_id=2,
@@ -355,6 +356,7 @@ class BlenderbotStandaloneDecoderModelTester:
         self.use_cache = use_cache
         self.max_position_embeddings = max_position_embeddings
         self.is_encoder_decoder = is_encoder_decoder
+        self.encoder_no_repeat_ngram_size = encoder_no_repeat_ngram_size

         self.scope = None
         self.decoder_key_length = decoder_seq_length
@@ -386,6 +388,7 @@ class BlenderbotStandaloneDecoderModelTester:
             decoder_start_token_id=self.decoder_start_token_id,
             max_position_embeddings=self.max_position_embeddings,
             is_encoder_decoder=self.is_encoder_decoder,
+            encoder_no_repeat_ngram_size=self.encoder_no_repeat_ngram_size,
         )

         return (
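The fix makes sense given what the parameter does: `encoder_no_repeat_ngram_size` bans the decoder from reproducing n-grams that occur in the encoder input, which a standalone decoder never receives, so the tester must pin it to 0. Below is a minimal sketch (not part of this commit; the vocab_size/d_model values are illustrative test-sized numbers) of the kind of config the tester now builds:

# Minimal sketch, not part of the commit: a standalone-decoder config should
# set encoder_no_repeat_ngram_size=0, since the constraint refers to encoder
# input that a decoder-only model does not have.
from transformers import BlenderbotConfig

config = BlenderbotConfig(
    vocab_size=99,               # illustrative test-sized values
    d_model=16,
    decoder_attention_heads=4,
    max_position_embeddings=30,
    is_encoder_decoder=False,
    encoder_no_repeat_ngram_size=0,  # disabled: no encoder input to ban n-grams from
    pad_token_id=0,
    bos_token_id=1,
    eos_token_id=2,
)
assert config.encoder_no_repeat_ngram_size == 0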