Fixing tests on master. (#14317)

* Fixing tests on master.
* Better fix.
* Lxmert doesn't have a feature extractor but is bimodal.
This commit is contained in:

parent df1f94eb4a
commit 9b78b070ef
@@ -15,12 +15,11 @@
 import unittest
 
 from transformers import (
+    FEATURE_EXTRACTOR_MAPPING,
     MODEL_MAPPING,
     TF_MODEL_MAPPING,
-    CLIPConfig,
     FeatureExtractionPipeline,
     LxmertConfig,
-    Wav2Vec2Config,
     pipeline,
 )
 from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch
@@ -73,11 +72,8 @@ class FeatureExtractionPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
         if tokenizer is None:
             self.skipTest("No tokenizer")
             return
-
-        elif isinstance(model.config, (LxmertConfig, CLIPConfig, Wav2Vec2Config)):
-            self.skipTest(
-                "This is an Lxmert bimodal model, we need to find a more consistent way to switch on those models."
-            )
+        elif type(model.config) in FEATURE_EXTRACTOR_MAPPING or isinstance(model.config, LxmertConfig):
+            self.skipTest("This is a bimodal model, we need to find a more consistent way to switch on those models.")
             return
         elif model.config.is_encoder_decoder:
             self.skipTest(
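Context on the new condition: FEATURE_EXTRACTOR_MAPPING is keyed by config class, so testing type(model.config) against it covers every model that registers a feature extractor (CLIP, Wav2Vec2, and any future ones), while Lxmert is bimodal but has no feature extractor and so still needs the explicit isinstance check. A minimal sketch, not part of the commit; the expected values assume the mappings as described in the commit message:

# Sketch of the skip condition introduced by this commit. All imported
# names come from the diff above; the "expected" comments are assumptions
# based on the commit message, not guaranteed output for every version.
from transformers import FEATURE_EXTRACTOR_MAPPING, CLIPConfig, LxmertConfig, Wav2Vec2Config

# CLIP and Wav2Vec2 register feature extractors, so the mapping-membership
# clause catches them without naming them one by one.
print(CLIPConfig in FEATURE_EXTRACTOR_MAPPING)      # expected: True
print(Wav2Vec2Config in FEATURE_EXTRACTOR_MAPPING)  # expected: True

# Lxmert has no feature extractor, so it falls through the mapping test
# and is caught by the isinstance(model.config, LxmertConfig) fallback.
print(LxmertConfig in FEATURE_EXTRACTOR_MAPPING)    # expected: False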