mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-31 02:02:21 +06:00
skip test for XGLM (#15445)
This commit is contained in:
parent
0c17e766cb
commit
d4f201b860
@@ -152,7 +152,8 @@ class TextGenerationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseM
                 return
             # We don't care about infinite range models.
             # They already work.
-            if tokenizer.model_max_length &lt; 10000:
+            # Skip this test for XGLM, since it uses sinusoidal positional embeddings which are resized on-the-fly.
+            if tokenizer.model_max_length &lt; 10000 and "XGLM" not in tokenizer.__class__.__name__:
                 # Handling of large generations
                 with self.assertRaises((RuntimeError, IndexError, ValueError, AssertionError)):
                     text_generator("This is a test" * 500, max_new_tokens=20)
|
Loading…
Reference in New Issue
Block a user