Fixing tests on master. (#14317)

* Fixing tests on master.

* Better fix.

* Lxmert doesn't have feature extractor but is bimodal.
This commit is contained in:
Nicolas Patry 2021-11-08 14:28:26 +01:00 committed by GitHub
parent df1f94eb4a
commit 9b78b070ef
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -15,12 +15,11 @@
 import unittest
 from transformers import (
+    FEATURE_EXTRACTOR_MAPPING,
     MODEL_MAPPING,
     TF_MODEL_MAPPING,
-    CLIPConfig,
     FeatureExtractionPipeline,
     LxmertConfig,
-    Wav2Vec2Config,
     pipeline,
 )
 from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch
@@ -73,11 +72,8 @@ class FeatureExtractionPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
         if tokenizer is None:
             self.skipTest("No tokenizer")
             return
-        elif isinstance(model.config, (LxmertConfig, CLIPConfig, Wav2Vec2Config)):
-            self.skipTest(
-                "This is an Lxmert bimodal model, we need to find a more consistent way to switch on those models."
-            )
+        elif type(model.config) in FEATURE_EXTRACTOR_MAPPING or isinstance(model.config, LxmertConfig):
+            self.skipTest("This is a bimodal model, we need to find a more consistent way to switch on those models.")
             return
         elif model.config.is_encoder_decoder:
             self.skipTest(