diff --git a/.circleci/create_circleci_config.py b/.circleci/create_circleci_config.py
index 338f2508246..8d30eae0327 100644
--- a/.circleci/create_circleci_config.py
+++ b/.circleci/create_circleci_config.py
@@ -24,7 +24,7 @@ from typing import Any, Dict, List, Optional

 import yaml


-COMMON_ENV_VARIABLES = {"OMP_NUM_THREADS": 1, "TRANSFORMERS_IS_CI": True, "PYTEST_TIMEOUT": 120}
+COMMON_ENV_VARIABLES = {"OMP_NUM_THREADS": 1, "TRANSFORMERS_IS_CI": True, "PYTEST_TIMEOUT": 120, "RUN_PIPELINE_TESTS": False}
 COMMON_PYTEST_OPTIONS = {"max-worker-restart": 0, "dist": "loadfile", "s": None}
 DEFAULT_DOCKER_IMAGE = [{"image": "cimg/python:3.7.12"}]
@@ -64,10 +64,12 @@ class CircleCIJob:
             self.parallelism = 1

     def to_dict(self):
+        env = COMMON_ENV_VARIABLES.copy()
+        env.update(self.additional_env)
         job = {
             "working_directory": self.working_directory,
             "docker": self.docker_image,
-            "environment": {**COMMON_ENV_VARIABLES, **self.additional_env},
+            "environment": env,
         }
         if self.resource_class is not None:
             job["resource_class"] = self.resource_class
@@ -239,25 +241,27 @@ flax_job = CircleCIJob(

 pipelines_torch_job = CircleCIJob(
     "pipelines_torch",
+    additional_env={"RUN_PIPELINE_TESTS": True},
     install_steps=[
         "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng",
         "pip install --upgrade pip",
         "pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm,video]",
     ],
     pytest_options={"rA": None},
-    tests_to_run="tests/pipelines/"
+    marker="is_pipeline_test",
 )


 pipelines_tf_job = CircleCIJob(
     "pipelines_tf",
+    additional_env={"RUN_PIPELINE_TESTS": True},
     install_steps=[
         "pip install --upgrade pip",
         "pip install .[sklearn,tf-cpu,testing,sentencepiece,vision]",
         "pip install tensorflow_probability",
     ],
     pytest_options={"rA": None},
-    tests_to_run="tests/pipelines/"
+    marker="is_pipeline_test",
 )
diff --git a/conftest.py b/conftest.py
index c3d4f70326d..f1d6a69a1eb 100644
--- a/conftest.py
+++ b/conftest.py
@@ -38,6 +38,9 @@ def pytest_configure(config):
     config.addinivalue_line(
         "markers", "is_pt_flax_cross_test: mark test to run only when PT and FLAX interactions are tested"
     )
+    config.addinivalue_line(
+        "markers", "is_pipeline_test: mark test to run only when pipelines are tested"
+    )
     config.addinivalue_line("markers", "is_staging_test: mark test to run only in the staging environment")
diff --git a/src/transformers/testing_utils.py b/src/transformers/testing_utils.py
index 2c9d9dccfed..db744e2fb3a 100644
--- a/src/transformers/testing_utils.py
+++ b/src/transformers/testing_utils.py
@@ -145,6 +145,7 @@ _run_custom_tokenizers = parse_flag_from_env("RUN_CUSTOM_TOKENIZERS", default=Fa
 _run_staging = parse_flag_from_env("HUGGINGFACE_CO_STAGING", default=False)
 _run_git_lfs_tests = parse_flag_from_env("RUN_GIT_LFS_TESTS", default=False)
 _tf_gpu_memory_limit = parse_int_from_env("TF_GPU_MEMORY_LIMIT", default=None)
+_run_pipeline_tests = parse_flag_from_env("RUN_PIPELINE_TESTS", default=True)


 def is_pt_tf_cross_test(test_case):
@@ -202,6 +203,22 @@ def is_staging_test(test_case):
         return pytest.mark.is_staging_test()(test_case)


+def is_pipeline_test(test_case):
+    """
+    Decorator marking a test as a pipeline test. If RUN_PIPELINE_TESTS is set to a falsy value, those tests will be
+    skipped.
+    """
+    if not _run_pipeline_tests:
+        return unittest.skip("test is pipeline test")(test_case)
+    else:
+        try:
+            import pytest  # We don't need a hard dependency on pytest in the main library
+        except ImportError:
+            return test_case
+        else:
+            return pytest.mark.is_pipeline_test()(test_case)
+
+
 def slow(test_case):
     """
     Decorator marking a test as slow.
diff --git a/tests/pipelines/test_pipelines_audio_classification.py b/tests/pipelines/test_pipelines_audio_classification.py
index f33ccd46ca3..b0ff5517973 100644
--- a/tests/pipelines/test_pipelines_audio_classification.py
+++ b/tests/pipelines/test_pipelines_audio_classification.py
@@ -18,11 +18,19 @@ import numpy as np

 from transformers import MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING
 from transformers.pipelines import AudioClassificationPipeline, pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torchaudio, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_torchaudio,
+    slow,
+)

 from .test_pipelines_common import ANY


+@is_pipeline_test
 @require_torch
 class AudioClassificationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING
diff --git a/tests/pipelines/test_pipelines_automatic_speech_recognition.py b/tests/pipelines/test_pipelines_automatic_speech_recognition.py
index 84b54d1aa83..fcabc0ad356 100644
--- a/tests/pipelines/test_pipelines_automatic_speech_recognition.py
+++ b/tests/pipelines/test_pipelines_automatic_speech_recognition.py
@@ -33,6 +33,7 @@ from transformers.pipelines import AutomaticSpeechRecognitionPipeline, pipeline
 from transformers.pipelines.audio_utils import chunk_bytes_iter
 from transformers.pipelines.automatic_speech_recognition import _find_timestamp_sequence, chunk_iter
 from transformers.testing_utils import (
+    is_pipeline_test,
     is_torch_available,
     nested_simplify,
     require_pyctcdecode,
@@ -53,6 +54,7 @@ if is_torch_available():
 # from .test_pipelines_common import CustomInputPipelineCommonMixin


+@is_pipeline_test
 class AutomaticSpeechRecognitionPipelineTests(unittest.TestCase):
     model_mapping = {
         k: v
diff --git a/tests/pipelines/test_pipelines_common.py b/tests/pipelines/test_pipelines_common.py
index 8e876957d99..68207b9ca18 100644
--- a/tests/pipelines/test_pipelines_common.py
+++ b/tests/pipelines/test_pipelines_common.py
@@ -39,6 +39,7 @@ from transformers.testing_utils import (
     USER,
     CaptureLogger,
     RequestCounter,
+    is_pipeline_test,
     is_staging_test,
     nested_simplify,
     require_tensorflow_probability,
@@ -77,6 +78,7 @@ class ANY:
         return f"ANY({', '.join(_type.__name__ for _type in self._types)})"


+@is_pipeline_test
 class CommonPipelineTest(unittest.TestCase):
     @require_torch
     def test_pipeline_iteration(self):
@@ -194,6 +196,7 @@ class CommonPipelineTest(unittest.TestCase):
         self.assertEqual(len(outputs), 20)


+@is_pipeline_test
 class PipelineScikitCompatTest(unittest.TestCase):
     @require_torch
     def test_pipeline_predict_pt(self):
@@ -244,6 +247,7 @@ class PipelineScikitCompatTest(unittest.TestCase):
         self.assertEqual(expected_output, actual_output)


+@is_pipeline_test
 class PipelinePadTest(unittest.TestCase):
     @require_torch
     def test_pipeline_padding(self):
@@ -325,6 +329,7 @@ class PipelinePadTest(unittest.TestCase):
         )


+@is_pipeline_test
 class PipelineUtilsTest(unittest.TestCase):
     @require_torch
     def test_pipeline_dataset(self):
@@ -620,6 +625,7 @@ class CustomPipeline(Pipeline):
         return model_outputs["logits"].softmax(-1).numpy()


+@is_pipeline_test
 class CustomPipelineTest(unittest.TestCase):
     def test_warning_logs(self):
         transformers_logging.set_verbosity_debug()
diff --git a/tests/pipelines/test_pipelines_conversational.py b/tests/pipelines/test_pipelines_conversational.py
index 199b8d6ba5f..70ecbc31042 100644
--- a/tests/pipelines/test_pipelines_conversational.py
+++ b/tests/pipelines/test_pipelines_conversational.py
@@ -29,7 +29,7 @@ from transformers import (
     TFAutoModelForCausalLM,
     pipeline,
 )
-from transformers.testing_utils import require_tf, require_torch, slow, torch_device
+from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, slow, torch_device

 from .test_pipelines_common import ANY

@@ -37,6 +37,7 @@ from .test_pipelines_common import ANY
 DEFAULT_DEVICE_NUM = -1 if torch_device == "cpu" else 0


+@is_pipeline_test
 class ConversationalPipelineTests(unittest.TestCase):
     model_mapping = dict(
         list(MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING.items())
diff --git a/tests/pipelines/test_pipelines_depth_estimation.py b/tests/pipelines/test_pipelines_depth_estimation.py
index fd4d3a6ca18..054574b4fd8 100644
--- a/tests/pipelines/test_pipelines_depth_estimation.py
+++ b/tests/pipelines/test_pipelines_depth_estimation.py
@@ -17,7 +17,15 @@ import unittest

 from transformers import MODEL_FOR_DEPTH_ESTIMATION_MAPPING, is_torch_available, is_vision_available
 from transformers.pipelines import DepthEstimationPipeline, pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_timm, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_timm,
+    require_torch,
+    require_vision,
+    slow,
+)

 from .test_pipelines_common import ANY

@@ -40,6 +48,7 @@ def hashimage(image: Image) -> str:
     return m.hexdigest()


+@is_pipeline_test
 @require_vision
 @require_timm
 @require_torch
diff --git a/tests/pipelines/test_pipelines_document_question_answering.py b/tests/pipelines/test_pipelines_document_question_answering.py
index edd23834465..388be9247b3 100644
--- a/tests/pipelines/test_pipelines_document_question_answering.py
+++ b/tests/pipelines/test_pipelines_document_question_answering.py
@@ -18,6 +18,7 @@ from transformers import MODEL_FOR_DOCUMENT_QUESTION_ANSWERING_MAPPING, AutoToke
 from transformers.pipelines import pipeline
 from transformers.pipelines.document_question_answering import apply_tesseract
 from transformers.testing_utils import (
+    is_pipeline_test,
     nested_simplify,
     require_detectron2,
     require_pytesseract,
@@ -52,6 +53,7 @@ INVOICE_URL = (
 )


+@is_pipeline_test
 @require_torch
 @require_vision
 class DocumentQuestionAnsweringPipelineTests(unittest.TestCase):
diff --git a/tests/pipelines/test_pipelines_feature_extraction.py b/tests/pipelines/test_pipelines_feature_extraction.py
index f191bea57a6..87c5a151175 100644
--- a/tests/pipelines/test_pipelines_feature_extraction.py
+++ b/tests/pipelines/test_pipelines_feature_extraction.py
@@ -27,7 +27,7 @@ from transformers import (
     is_torch_available,
     pipeline,
 )
-from transformers.testing_utils import nested_simplify, require_tf, require_torch
+from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch


 if is_torch_available():
@@ -37,6 +37,7 @@ if is_tf_available():
     import tensorflow as tf


+@is_pipeline_test
 class FeatureExtractionPipelineTests(unittest.TestCase):
     model_mapping = MODEL_MAPPING
     tf_model_mapping = TF_MODEL_MAPPING
diff --git a/tests/pipelines/test_pipelines_fill_mask.py b/tests/pipelines/test_pipelines_fill_mask.py
index ec69860d20d..5426a854252 100644
--- a/tests/pipelines/test_pipelines_fill_mask.py
+++ b/tests/pipelines/test_pipelines_fill_mask.py
@@ -16,11 +16,19 @@ import unittest

 from transformers import MODEL_FOR_MASKED_LM_MAPPING, TF_MODEL_FOR_MASKED_LM_MAPPING, FillMaskPipeline, pipeline
 from transformers.pipelines import PipelineException
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_gpu, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_torch_gpu,
+    slow,
+)

 from .test_pipelines_common import ANY


+@is_pipeline_test
 class FillMaskPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_MASKED_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_MASKED_LM_MAPPING
diff --git a/tests/pipelines/test_pipelines_image_classification.py b/tests/pipelines/test_pipelines_image_classification.py
index e42f7f67795..0b5a51fb3c9 100644
--- a/tests/pipelines/test_pipelines_image_classification.py
+++ b/tests/pipelines/test_pipelines_image_classification.py
@@ -22,6 +22,7 @@ from transformers import (
 )
 from transformers.pipelines import ImageClassificationPipeline, pipeline
 from transformers.testing_utils import (
+    is_pipeline_test,
     nested_simplify,
     require_tf,
     require_torch,
@@ -43,6 +44,7 @@ else:
     pass


+@is_pipeline_test
 @require_torch_or_tf
 @require_vision
 class ImageClassificationPipelineTests(unittest.TestCase):
diff --git a/tests/pipelines/test_pipelines_image_segmentation.py b/tests/pipelines/test_pipelines_image_segmentation.py
index 631b8b079d7..b06672047a3 100644
--- a/tests/pipelines/test_pipelines_image_segmentation.py
+++ b/tests/pipelines/test_pipelines_image_segmentation.py
@@ -34,7 +34,15 @@ from transformers import (
     is_vision_available,
     pipeline,
 )
-from transformers.testing_utils import nested_simplify, require_tf, require_timm, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_timm,
+    require_torch,
+    require_vision,
+    slow,
+)

 from .test_pipelines_common import ANY

@@ -67,6 +75,7 @@ def mask_to_test_readable_only_shape(mask: Image) -> Dict:
     return {"shape": shape}


+@is_pipeline_test
 @require_vision
 @require_timm
 @require_torch
diff --git a/tests/pipelines/test_pipelines_image_to_text.py b/tests/pipelines/test_pipelines_image_to_text.py
index e685b51e13d..97fe3a398f5 100644
--- a/tests/pipelines/test_pipelines_image_to_text.py
+++ b/tests/pipelines/test_pipelines_image_to_text.py
@@ -16,7 +16,7 @@ import unittest

 from transformers import MODEL_FOR_VISION_2_SEQ_MAPPING, TF_MODEL_FOR_VISION_2_SEQ_MAPPING, is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, require_vision, slow

 from .test_pipelines_common import ANY

@@ -31,6 +31,7 @@ else:
     pass


+@is_pipeline_test
 @require_vision
 class ImageToTextPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_VISION_2_SEQ_MAPPING
diff --git a/tests/pipelines/test_pipelines_object_detection.py b/tests/pipelines/test_pipelines_object_detection.py
index 80afae3254f..4196db36d76 100644
--- a/tests/pipelines/test_pipelines_object_detection.py
+++ b/tests/pipelines/test_pipelines_object_detection.py
@@ -23,6 +23,7 @@ from transformers import (
     pipeline,
 )
 from transformers.testing_utils import (
+    is_pipeline_test,
     nested_simplify,
     require_pytesseract,
     require_tf,
@@ -45,6 +46,7 @@ else:
     pass


+@is_pipeline_test
 @require_vision
 @require_timm
 @require_torch
diff --git a/tests/pipelines/test_pipelines_question_answering.py b/tests/pipelines/test_pipelines_question_answering.py
index 037b60c152a..cac2b399b27 100644
--- a/tests/pipelines/test_pipelines_question_answering.py
+++ b/tests/pipelines/test_pipelines_question_answering.py
@@ -22,11 +22,19 @@ from transformers import (
 )
 from transformers.data.processors.squad import SquadExample
 from transformers.pipelines import QuestionAnsweringArgumentHandler, pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_or_tf, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_torch_or_tf,
+    slow,
+)

 from .test_pipelines_common import ANY


+@is_pipeline_test
 class QAPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_QUESTION_ANSWERING_MAPPING
     tf_model_mapping = TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING
diff --git a/tests/pipelines/test_pipelines_summarization.py b/tests/pipelines/test_pipelines_summarization.py
index ac85dd3944a..e6aaebb31d0 100644
--- a/tests/pipelines/test_pipelines_summarization.py
+++ b/tests/pipelines/test_pipelines_summarization.py
@@ -21,7 +21,7 @@ from transformers import (
     TFPreTrainedModel,
     pipeline,
 )
-from transformers.testing_utils import get_gpu_count, require_tf, require_torch, slow, torch_device
+from transformers.testing_utils import get_gpu_count, is_pipeline_test, require_tf, require_torch, slow, torch_device
 from transformers.tokenization_utils import TruncationStrategy

 from .test_pipelines_common import ANY

@@ -30,6 +30,7 @@ from .test_pipelines_common import ANY
 DEFAULT_DEVICE_NUM = -1 if torch_device == "cpu" else 0


+@is_pipeline_test
 class SummarizationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
diff --git a/tests/pipelines/test_pipelines_table_question_answering.py b/tests/pipelines/test_pipelines_table_question_answering.py
index 43176e0c7c8..6c427d840c5 100644
--- a/tests/pipelines/test_pipelines_table_question_answering.py
+++ b/tests/pipelines/test_pipelines_table_question_answering.py
@@ -22,9 +22,17 @@ from transformers import (
     TFAutoModelForTableQuestionAnswering,
     pipeline,
 )
-from transformers.testing_utils import require_pandas, require_tensorflow_probability, require_tf, require_torch, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    require_pandas,
+    require_tensorflow_probability,
+    require_tf,
+    require_torch,
+    slow,
+)


+@is_pipeline_test
 class TQAPipelineTests(unittest.TestCase):
     # Putting it there for consistency, but TQA do not have fast tokenizer
     # which are needed to generate automatic tests
diff --git a/tests/pipelines/test_pipelines_text2text_generation.py b/tests/pipelines/test_pipelines_text2text_generation.py
index 8528dd8f6b9..eccae9850b3 100644
--- a/tests/pipelines/test_pipelines_text2text_generation.py
+++ b/tests/pipelines/test_pipelines_text2text_generation.py
@@ -20,7 +20,7 @@ from transformers import (
     Text2TextGenerationPipeline,
     pipeline,
 )
-from transformers.testing_utils import require_tf, require_torch
+from transformers.testing_utils import is_pipeline_test, require_tf, require_torch
 from transformers.utils import is_torch_available

 from .test_pipelines_common import ANY

@@ -30,6 +30,7 @@ if is_torch_available():
     import torch


+@is_pipeline_test
 class Text2TextGenerationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
diff --git a/tests/pipelines/test_pipelines_text_classification.py b/tests/pipelines/test_pipelines_text_classification.py
index 82237d75f0a..14ff62aad92 100644
--- a/tests/pipelines/test_pipelines_text_classification.py
+++ b/tests/pipelines/test_pipelines_text_classification.py
@@ -20,11 +20,12 @@ from transformers import (
     TextClassificationPipeline,
     pipeline,
 )
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, slow
+from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch, slow

 from .test_pipelines_common import ANY


+@is_pipeline_test
 class TextClassificationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
diff --git a/tests/pipelines/test_pipelines_text_generation.py b/tests/pipelines/test_pipelines_text_generation.py
index 24f2d75a3a5..34dbef6df2d 100644
--- a/tests/pipelines/test_pipelines_text_generation.py
+++ b/tests/pipelines/test_pipelines_text_generation.py
@@ -16,6 +16,7 @@ import unittest

 from transformers import MODEL_FOR_CAUSAL_LM_MAPPING, TF_MODEL_FOR_CAUSAL_LM_MAPPING, TextGenerationPipeline, pipeline
 from transformers.testing_utils import (
+    is_pipeline_test,
     require_accelerate,
     require_tf,
     require_torch,
@@ -26,6 +27,7 @@ from transformers.testing_utils import (
 from .test_pipelines_common import ANY


+@is_pipeline_test
 @require_torch_or_tf
 class TextGenerationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_CAUSAL_LM_MAPPING
diff --git a/tests/pipelines/test_pipelines_token_classification.py b/tests/pipelines/test_pipelines_token_classification.py
index 2d167eb6358..39c54fddb49 100644
--- a/tests/pipelines/test_pipelines_token_classification.py
+++ b/tests/pipelines/test_pipelines_token_classification.py
@@ -25,7 +25,14 @@ from transformers import (
     pipeline,
 )
 from transformers.pipelines import AggregationStrategy, TokenClassificationArgumentHandler
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_gpu, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_torch_gpu,
+    slow,
+)

 from .test_pipelines_common import ANY

@@ -33,6 +40,7 @@ from .test_pipelines_common import ANY
 VALID_INPUTS = ["A simple string", ["list of strings", "A simple string that is quite a bit longer"]]


+@is_pipeline_test
 class TokenClassificationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING
diff --git a/tests/pipelines/test_pipelines_translation.py b/tests/pipelines/test_pipelines_translation.py
index cbcf4b4341d..61d390fe76e 100644
--- a/tests/pipelines/test_pipelines_translation.py
+++ b/tests/pipelines/test_pipelines_translation.py
@@ -25,11 +25,12 @@ from transformers import (
     TranslationPipeline,
     pipeline,
 )
-from transformers.testing_utils import require_tf, require_torch, slow
+from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, slow

 from .test_pipelines_common import ANY


+@is_pipeline_test
 class TranslationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
diff --git a/tests/pipelines/test_pipelines_video_classification.py b/tests/pipelines/test_pipelines_video_classification.py
index 32c280c03a7..33e06e30f5a 100644
--- a/tests/pipelines/test_pipelines_video_classification.py
+++ b/tests/pipelines/test_pipelines_video_classification.py
@@ -19,6 +19,7 @@ from huggingface_hub import hf_hub_download
 from transformers import MODEL_FOR_VIDEO_CLASSIFICATION_MAPPING, VideoMAEFeatureExtractor
 from transformers.pipelines import VideoClassificationPipeline, pipeline
 from transformers.testing_utils import (
+    is_pipeline_test,
     nested_simplify,
     require_decord,
     require_tf,
@@ -30,6 +31,7 @@ from transformers.testing_utils import (
 from .test_pipelines_common import ANY


+@is_pipeline_test
 @require_torch_or_tf
 @require_vision
 @require_decord
diff --git a/tests/pipelines/test_pipelines_visual_question_answering.py b/tests/pipelines/test_pipelines_visual_question_answering.py
index 1c82705d4c0..63a5cc70978 100644
--- a/tests/pipelines/test_pipelines_visual_question_answering.py
+++ b/tests/pipelines/test_pipelines_visual_question_answering.py
@@ -16,7 +16,14 @@ import unittest

 from transformers import MODEL_FOR_VISUAL_QUESTION_ANSWERING_MAPPING, is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_vision,
+    slow,
+)

 from .test_pipelines_common import ANY

@@ -31,6 +38,7 @@ else:
     pass


+@is_pipeline_test
 @require_torch
 @require_vision
 class VisualQuestionAnsweringPipelineTests(unittest.TestCase):
diff --git a/tests/pipelines/test_pipelines_zero_shot.py b/tests/pipelines/test_pipelines_zero_shot.py
index 81df7298993..caf8ee473c5 100644
--- a/tests/pipelines/test_pipelines_zero_shot.py
+++ b/tests/pipelines/test_pipelines_zero_shot.py
@@ -21,11 +21,12 @@ from transformers import (
     ZeroShotClassificationPipeline,
     pipeline,
 )
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, slow
+from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch, slow

 from .test_pipelines_common import ANY


+@is_pipeline_test
 class ZeroShotClassificationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
diff --git a/tests/pipelines/test_pipelines_zero_shot_audio_classification.py b/tests/pipelines/test_pipelines_zero_shot_audio_classification.py
index c2d1b89de49..87f91a7d27e 100644
--- a/tests/pipelines/test_pipelines_zero_shot_audio_classification.py
+++ b/tests/pipelines/test_pipelines_zero_shot_audio_classification.py
@@ -17,9 +17,10 @@ import unittest

 from datasets import load_dataset

 from transformers.pipelines import pipeline
-from transformers.testing_utils import nested_simplify, require_torch, slow
+from transformers.testing_utils import is_pipeline_test, nested_simplify, require_torch, slow


+@is_pipeline_test
 @require_torch
 class ZeroShotAudioClassificationPipelineTests(unittest.TestCase):
     # Deactivating auto tests since we don't have a good MODEL_FOR_XX mapping,
diff --git a/tests/pipelines/test_pipelines_zero_shot_image_classification.py b/tests/pipelines/test_pipelines_zero_shot_image_classification.py
index 1be534350d9..fb101f42104 100644
--- a/tests/pipelines/test_pipelines_zero_shot_image_classification.py
+++ b/tests/pipelines/test_pipelines_zero_shot_image_classification.py
@@ -16,7 +16,14 @@ import unittest

 from transformers import is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_vision,
+    slow,
+)

 from .test_pipelines_common import ANY

@@ -31,6 +38,7 @@ else:
     pass


+@is_pipeline_test
 @require_vision
 class ZeroShotImageClassificationPipelineTests(unittest.TestCase):
     # Deactivating auto tests since we don't have a good MODEL_FOR_XX mapping,
diff --git a/tests/pipelines/test_pipelines_zero_shot_object_detection.py b/tests/pipelines/test_pipelines_zero_shot_object_detection.py
index 8995d0da9dd..c8b424483fa 100644
--- a/tests/pipelines/test_pipelines_zero_shot_object_detection.py
+++ b/tests/pipelines/test_pipelines_zero_shot_object_detection.py
@@ -15,7 +15,14 @@ import unittest

 from transformers import MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING, is_vision_available, pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_vision,
+    slow,
+)

 from .test_pipelines_common import ANY

@@ -30,6 +37,7 @@ else:
     pass


+@is_pipeline_test
 @require_vision
 @require_torch
 class ZeroShotObjectDetectionPipelineTests(unittest.TestCase):
diff --git a/tests/test_pipeline_mixin.py b/tests/test_pipeline_mixin.py
index 142028dc9c0..84d90e2724b 100644
--- a/tests/test_pipeline_mixin.py
+++ b/tests/test_pipeline_mixin.py
@@ -20,6 +20,7 @@ import random
 from pathlib import Path

 from transformers.testing_utils import (
+    is_pipeline_test,
     require_decord,
     require_pytesseract,
     require_timm,
@@ -104,6 +105,7 @@ PATH_TO_TRANSFORMERS = os.path.join(Path(__file__).parent.parent, "src/transform
 transformers_module = direct_transformers_import(PATH_TO_TRANSFORMERS)


+@is_pipeline_test
 class PipelineTesterMixin:
     model_tester = None
     pipeline_model_mapping = None
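
For illustration only (not part of the patch above): a minimal sketch of how a new test module would opt into the `is_pipeline_test` marker this patch introduces. The decorator, the `RUN_PIPELINE_TESTS` flag, and the registered pytest marker come from the changes shown; the module name, class name, and model checkpoint below are hypothetical placeholders.

# Hypothetical module, e.g. tests/pipelines/test_pipelines_example.py
import unittest

from transformers import pipeline
from transformers.testing_utils import is_pipeline_test, require_torch


@is_pipeline_test  # skipped entirely when RUN_PIPELINE_TESTS is falsy, otherwise tagged with the pytest marker
@require_torch
class ExamplePipelineTests(unittest.TestCase):
    def test_small_model_pt(self):
        # Placeholder checkpoint name; any tiny text-classification model would do here.
        classifier = pipeline("text-classification", model="some-org/tiny-random-text-classifier")
        outputs = classifier("This restaurant is awesome")
        # A single input string yields a single {"label": ..., "score": ...} prediction.
        self.assertEqual(len(outputs), 1)
        self.assertIn("label", outputs[0])
        self.assertIn("score", outputs[0])

Locally, `RUN_PIPELINE_TESTS=1 python -m pytest -m is_pipeline_test tests/pipelines` would then collect only the marked classes; presumably the new `marker="is_pipeline_test"` field on the CircleCI jobs is translated into the same `-m` filter elsewhere in `create_circleci_config.py`, which is outside the hunks shown here.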