Mark pipeline tests to skip them easily (#21887)
* Mark pipeline tests to skip them easily
* Mark the mixin as pipeline test
* Update src/transformers/testing_utils.py

Co-authored-by: Yih-Dar <2521628+ydshieh@users.noreply.github.com>
parent d9e28d91a8
commit 50a8ed3ee0
@@ -24,7 +24,7 @@ from typing import Any, Dict, List, Optional
 import yaml
 
 
-COMMON_ENV_VARIABLES = {"OMP_NUM_THREADS": 1, "TRANSFORMERS_IS_CI": True, "PYTEST_TIMEOUT": 120}
+COMMON_ENV_VARIABLES = {"OMP_NUM_THREADS": 1, "TRANSFORMERS_IS_CI": True, "PYTEST_TIMEOUT": 120, "RUN_PIPELINE_TESTS": False}
 COMMON_PYTEST_OPTIONS = {"max-worker-restart": 0, "dist": "loadfile", "s": None}
 DEFAULT_DOCKER_IMAGE = [{"image": "cimg/python:3.7.12"}]
 
@@ -64,10 +64,12 @@ class CircleCIJob:
             self.parallelism = 1
 
     def to_dict(self):
+        env = COMMON_ENV_VARIABLES.copy()
+        env.update(self.additional_env)
         job = {
             "working_directory": self.working_directory,
             "docker": self.docker_image,
-            "environment": {**COMMON_ENV_VARIABLES, **self.additional_env},
+            "environment": env,
         }
         if self.resource_class is not None:
             job["resource_class"] = self.resource_class
@@ -239,25 +241,27 @@ flax_job = CircleCIJob(
 
 pipelines_torch_job = CircleCIJob(
     "pipelines_torch",
+    additional_env={"RUN_PIPELINE_TESTS": True},
     install_steps=[
         "sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng",
         "pip install --upgrade pip",
         "pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm,video]",
     ],
     pytest_options={"rA": None},
-    tests_to_run="tests/pipelines/"
+    marker="is_pipeline_test",
 )
 
 
 pipelines_tf_job = CircleCIJob(
     "pipelines_tf",
+    additional_env={"RUN_PIPELINE_TESTS": True},
     install_steps=[
         "pip install --upgrade pip",
         "pip install .[sklearn,tf-cpu,testing,sentencepiece,vision]",
         "pip install tensorflow_probability",
     ],
     pytest_options={"rA": None},
-    tests_to_run="tests/pipelines/"
+    marker="is_pipeline_test",
 )
 
 
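
Note (not part of the diff): with the to_dict() change above, each job's additional_env is merged over COMMON_ENV_VARIABLES, so only the two pipelines jobs flip RUN_PIPELINE_TESTS to True while every other CI job keeps the new default of False. A minimal sketch of that merge, using the values from this diff; merged_env is a hypothetical helper used only for illustration:

# Minimal sketch of the environment merge introduced in to_dict() above,
# using the values that appear in this diff.
COMMON_ENV_VARIABLES = {"OMP_NUM_THREADS": 1, "TRANSFORMERS_IS_CI": True, "PYTEST_TIMEOUT": 120, "RUN_PIPELINE_TESTS": False}


def merged_env(additional_env):
    # Same copy-then-update pattern as the new to_dict() body.
    env = COMMON_ENV_VARIABLES.copy()
    env.update(additional_env)
    return env


print(merged_env({})["RUN_PIPELINE_TESTS"])                            # False -> regular jobs skip pipeline tests
print(merged_env({"RUN_PIPELINE_TESTS": True})["RUN_PIPELINE_TESTS"])  # True  -> pipelines_torch_job / pipelines_tf_job
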
@@ -38,6 +38,9 @@ def pytest_configure(config):
     config.addinivalue_line(
         "markers", "is_pt_flax_cross_test: mark test to run only when PT and FLAX interactions are tested"
     )
+    config.addinivalue_line(
+        "markers", "is_pipeline_test: mark test to run only when pipelines are tested"
+    )
     config.addinivalue_line("markers", "is_staging_test: mark test to run only in the staging environment")
 
 
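
Note (not part of the diff): registering the marker in pytest_configure keeps pytest from warning about an unknown is_pipeline_test mark, and it lets the marked tests be selected with a marker expression, which is presumably what the marker="is_pipeline_test" job option above surfaces as in the generated CircleCI config. A hedged sketch of an equivalent local invocation; the tests/pipelines path is taken from the removed tests_to_run value, not from this hunk:

# Hedged sketch: select only tests carrying the is_pipeline_test marker.
# Roughly equivalent to running `pytest -m is_pipeline_test tests/pipelines` from the shell.
import sys

import pytest

if __name__ == "__main__":
    sys.exit(pytest.main(["-m", "is_pipeline_test", "tests/pipelines"]))
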
@@ -145,6 +145,7 @@ _run_custom_tokenizers = parse_flag_from_env("RUN_CUSTOM_TOKENIZERS", default=Fa
 _run_staging = parse_flag_from_env("HUGGINGFACE_CO_STAGING", default=False)
 _run_git_lfs_tests = parse_flag_from_env("RUN_GIT_LFS_TESTS", default=False)
 _tf_gpu_memory_limit = parse_int_from_env("TF_GPU_MEMORY_LIMIT", default=None)
+_run_pipeline_tests = parse_flag_from_env("RUN_PIPELINE_TESTS", default=True)
 
 
 def is_pt_tf_cross_test(test_case):
@@ -202,6 +203,22 @@ def is_staging_test(test_case):
             return pytest.mark.is_staging_test()(test_case)
 
 
+def is_pipeline_test(test_case):
+    """
+    Decorator marking a test as a pipeline test. If RUN_PIPELINE_TESTS is set to a falsy value, those tests will be
+    skipped.
+    """
+    if not _run_pipeline_tests:
+        return unittest.skip("test is pipeline test")(test_case)
+    else:
+        try:
+            import pytest  # We don't need a hard dependency on pytest in the main library
+        except ImportError:
+            return test_case
+        else:
+            return pytest.mark.is_pipeline_test()(test_case)
+
+
 def slow(test_case):
     """
     Decorator marking a test as slow.
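
Note (not part of the diff): a minimal usage sketch of the new decorator; MyPipelineTests is hypothetical. When RUN_PIPELINE_TESTS parses as false the whole class is skipped via unittest.skip; otherwise it only gains the pytest marker and runs normally, so it can still be selected or deselected with -m is_pipeline_test.

# Hedged usage sketch; MyPipelineTests is not part of this commit.
import unittest

from transformers.testing_utils import is_pipeline_test


@is_pipeline_test
class MyPipelineTests(unittest.TestCase):
    def test_something(self):
        # Runs only when RUN_PIPELINE_TESTS is truthy (the library default added above is True,
        # while the CircleCI config sets it to False everywhere except the pipelines jobs).
        self.assertTrue(True)
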
@@ -18,11 +18,19 @@ import numpy as np
 
 from transformers import MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING
 from transformers.pipelines import AudioClassificationPipeline, pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torchaudio, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_torchaudio,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
 
+@is_pipeline_test
 @require_torch
 class AudioClassificationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING
@@ -33,6 +33,7 @@ from transformers.pipelines import AutomaticSpeechRecognitionPipeline, pipeline
 from transformers.pipelines.audio_utils import chunk_bytes_iter
 from transformers.pipelines.automatic_speech_recognition import _find_timestamp_sequence, chunk_iter
 from transformers.testing_utils import (
+    is_pipeline_test,
     is_torch_available,
     nested_simplify,
     require_pyctcdecode,
@@ -53,6 +54,7 @@ if is_torch_available():
 # from .test_pipelines_common import CustomInputPipelineCommonMixin
 
 
+@is_pipeline_test
 class AutomaticSpeechRecognitionPipelineTests(unittest.TestCase):
     model_mapping = {
         k: v
@@ -39,6 +39,7 @@ from transformers.testing_utils import (
     USER,
     CaptureLogger,
     RequestCounter,
+    is_pipeline_test,
     is_staging_test,
     nested_simplify,
     require_tensorflow_probability,
@@ -77,6 +78,7 @@ class ANY:
         return f"ANY({', '.join(_type.__name__ for _type in self._types)})"
 
 
+@is_pipeline_test
 class CommonPipelineTest(unittest.TestCase):
     @require_torch
     def test_pipeline_iteration(self):
@@ -194,6 +196,7 @@ class CommonPipelineTest(unittest.TestCase):
         self.assertEqual(len(outputs), 20)
 
 
+@is_pipeline_test
 class PipelineScikitCompatTest(unittest.TestCase):
     @require_torch
     def test_pipeline_predict_pt(self):
@@ -244,6 +247,7 @@ class PipelineScikitCompatTest(unittest.TestCase):
         self.assertEqual(expected_output, actual_output)
 
 
+@is_pipeline_test
 class PipelinePadTest(unittest.TestCase):
     @require_torch
     def test_pipeline_padding(self):
@@ -325,6 +329,7 @@ class PipelinePadTest(unittest.TestCase):
         )
 
 
+@is_pipeline_test
 class PipelineUtilsTest(unittest.TestCase):
     @require_torch
     def test_pipeline_dataset(self):
@@ -620,6 +625,7 @@ class CustomPipeline(Pipeline):
         return model_outputs["logits"].softmax(-1).numpy()
 
 
+@is_pipeline_test
 class CustomPipelineTest(unittest.TestCase):
     def test_warning_logs(self):
         transformers_logging.set_verbosity_debug()
@@ -29,7 +29,7 @@ from transformers import (
     TFAutoModelForCausalLM,
     pipeline,
 )
-from transformers.testing_utils import require_tf, require_torch, slow, torch_device
+from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, slow, torch_device
 
 from .test_pipelines_common import ANY
 
@@ -37,6 +37,7 @@ from .test_pipelines_common import ANY
 DEFAULT_DEVICE_NUM = -1 if torch_device == "cpu" else 0
 
 
+@is_pipeline_test
 class ConversationalPipelineTests(unittest.TestCase):
     model_mapping = dict(
         list(MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING.items())
@@ -17,7 +17,15 @@ import unittest
 
 from transformers import MODEL_FOR_DEPTH_ESTIMATION_MAPPING, is_torch_available, is_vision_available
 from transformers.pipelines import DepthEstimationPipeline, pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_timm, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_timm,
+    require_torch,
+    require_vision,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
@@ -40,6 +48,7 @@ def hashimage(image: Image) -> str:
     return m.hexdigest()
 
 
+@is_pipeline_test
 @require_vision
 @require_timm
 @require_torch
@@ -18,6 +18,7 @@ from transformers import MODEL_FOR_DOCUMENT_QUESTION_ANSWERING_MAPPING, AutoToke
 from transformers.pipelines import pipeline
 from transformers.pipelines.document_question_answering import apply_tesseract
 from transformers.testing_utils import (
+    is_pipeline_test,
     nested_simplify,
     require_detectron2,
     require_pytesseract,
@@ -52,6 +53,7 @@ INVOICE_URL = (
 )
 
 
+@is_pipeline_test
 @require_torch
 @require_vision
 class DocumentQuestionAnsweringPipelineTests(unittest.TestCase):
@@ -27,7 +27,7 @@ from transformers import (
     is_torch_available,
     pipeline,
 )
-from transformers.testing_utils import nested_simplify, require_tf, require_torch
+from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch
 
 
 if is_torch_available():
@@ -37,6 +37,7 @@ if is_tf_available():
     import tensorflow as tf
 
 
+@is_pipeline_test
 class FeatureExtractionPipelineTests(unittest.TestCase):
     model_mapping = MODEL_MAPPING
     tf_model_mapping = TF_MODEL_MAPPING
@@ -16,11 +16,19 @@ import unittest
 
 from transformers import MODEL_FOR_MASKED_LM_MAPPING, TF_MODEL_FOR_MASKED_LM_MAPPING, FillMaskPipeline, pipeline
 from transformers.pipelines import PipelineException
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_gpu, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_torch_gpu,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
 
+@is_pipeline_test
 class FillMaskPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_MASKED_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_MASKED_LM_MAPPING
@@ -22,6 +22,7 @@ from transformers import (
 )
 from transformers.pipelines import ImageClassificationPipeline, pipeline
 from transformers.testing_utils import (
+    is_pipeline_test,
     nested_simplify,
     require_tf,
     require_torch,
@@ -43,6 +44,7 @@ else:
     pass
 
 
+@is_pipeline_test
 @require_torch_or_tf
 @require_vision
 class ImageClassificationPipelineTests(unittest.TestCase):
@@ -34,7 +34,15 @@ from transformers import (
     is_vision_available,
     pipeline,
 )
-from transformers.testing_utils import nested_simplify, require_tf, require_timm, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_timm,
+    require_torch,
+    require_vision,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
@@ -67,6 +75,7 @@ def mask_to_test_readable_only_shape(mask: Image) -> Dict:
     return {"shape": shape}
 
 
+@is_pipeline_test
 @require_vision
 @require_timm
 @require_torch
@@ -16,7 +16,7 @@ import unittest
 
 from transformers import MODEL_FOR_VISION_2_SEQ_MAPPING, TF_MODEL_FOR_VISION_2_SEQ_MAPPING, is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, require_vision, slow
 
 from .test_pipelines_common import ANY
 
@@ -31,6 +31,7 @@ else:
     pass
 
 
+@is_pipeline_test
 @require_vision
 class ImageToTextPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_VISION_2_SEQ_MAPPING
@@ -23,6 +23,7 @@ from transformers import (
     pipeline,
 )
 from transformers.testing_utils import (
+    is_pipeline_test,
     nested_simplify,
     require_pytesseract,
     require_tf,
@@ -45,6 +46,7 @@ else:
     pass
 
 
+@is_pipeline_test
 @require_vision
 @require_timm
 @require_torch
@@ -22,11 +22,19 @@ from transformers import (
 )
 from transformers.data.processors.squad import SquadExample
 from transformers.pipelines import QuestionAnsweringArgumentHandler, pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_or_tf, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_torch_or_tf,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
 
+@is_pipeline_test
 class QAPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_QUESTION_ANSWERING_MAPPING
     tf_model_mapping = TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING
@@ -21,7 +21,7 @@ from transformers import (
     TFPreTrainedModel,
     pipeline,
 )
-from transformers.testing_utils import get_gpu_count, require_tf, require_torch, slow, torch_device
+from transformers.testing_utils import get_gpu_count, is_pipeline_test, require_tf, require_torch, slow, torch_device
 from transformers.tokenization_utils import TruncationStrategy
 
 from .test_pipelines_common import ANY
@@ -30,6 +30,7 @@ from .test_pipelines_common import ANY
 DEFAULT_DEVICE_NUM = -1 if torch_device == "cpu" else 0
 
 
+@is_pipeline_test
 class SummarizationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
@@ -22,9 +22,17 @@ from transformers import (
     TFAutoModelForTableQuestionAnswering,
     pipeline,
 )
-from transformers.testing_utils import require_pandas, require_tensorflow_probability, require_tf, require_torch, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    require_pandas,
+    require_tensorflow_probability,
+    require_tf,
+    require_torch,
+    slow,
+)
 
 
+@is_pipeline_test
 class TQAPipelineTests(unittest.TestCase):
     # Putting it there for consistency, but TQA do not have fast tokenizer
     # which are needed to generate automatic tests
@@ -20,7 +20,7 @@ from transformers import (
     Text2TextGenerationPipeline,
     pipeline,
 )
-from transformers.testing_utils import require_tf, require_torch
+from transformers.testing_utils import is_pipeline_test, require_tf, require_torch
 from transformers.utils import is_torch_available
 
 from .test_pipelines_common import ANY
@@ -30,6 +30,7 @@ if is_torch_available():
     import torch
 
 
+@is_pipeline_test
 class Text2TextGenerationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
@@ -20,11 +20,12 @@ from transformers import (
     TextClassificationPipeline,
     pipeline,
 )
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, slow
+from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch, slow
 
 from .test_pipelines_common import ANY
 
 
+@is_pipeline_test
 class TextClassificationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
@@ -16,6 +16,7 @@ import unittest
 
 from transformers import MODEL_FOR_CAUSAL_LM_MAPPING, TF_MODEL_FOR_CAUSAL_LM_MAPPING, TextGenerationPipeline, pipeline
 from transformers.testing_utils import (
+    is_pipeline_test,
     require_accelerate,
     require_tf,
     require_torch,
@@ -26,6 +27,7 @@ from transformers.testing_utils import (
 from .test_pipelines_common import ANY
 
 
+@is_pipeline_test
 @require_torch_or_tf
 class TextGenerationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_CAUSAL_LM_MAPPING
@@ -25,7 +25,14 @@ from transformers import (
     pipeline,
 )
 from transformers.pipelines import AggregationStrategy, TokenClassificationArgumentHandler
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_gpu, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_torch_gpu,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
@@ -33,6 +40,7 @@ from .test_pipelines_common import ANY
 VALID_INPUTS = ["A simple string", ["list of strings", "A simple string that is quite a bit longer"]]
 
 
+@is_pipeline_test
 class TokenClassificationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING
@@ -25,11 +25,12 @@ from transformers import (
     TranslationPipeline,
     pipeline,
 )
-from transformers.testing_utils import require_tf, require_torch, slow
+from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, slow
 
 from .test_pipelines_common import ANY
 
 
+@is_pipeline_test
 class TranslationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
@@ -19,6 +19,7 @@ from huggingface_hub import hf_hub_download
 from transformers import MODEL_FOR_VIDEO_CLASSIFICATION_MAPPING, VideoMAEFeatureExtractor
 from transformers.pipelines import VideoClassificationPipeline, pipeline
 from transformers.testing_utils import (
+    is_pipeline_test,
     nested_simplify,
     require_decord,
     require_tf,
@@ -30,6 +31,7 @@ from transformers.testing_utils import (
 from .test_pipelines_common import ANY
 
 
+@is_pipeline_test
 @require_torch_or_tf
 @require_vision
 @require_decord
@@ -16,7 +16,14 @@ import unittest
 
 from transformers import MODEL_FOR_VISUAL_QUESTION_ANSWERING_MAPPING, is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_vision,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
@@ -31,6 +38,7 @@ else:
     pass
 
 
+@is_pipeline_test
 @require_torch
 @require_vision
 class VisualQuestionAnsweringPipelineTests(unittest.TestCase):
@@ -21,11 +21,12 @@ from transformers import (
     ZeroShotClassificationPipeline,
     pipeline,
 )
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, slow
+from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch, slow
 
 from .test_pipelines_common import ANY
 
 
+@is_pipeline_test
 class ZeroShotClassificationPipelineTests(unittest.TestCase):
     model_mapping = MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
@@ -17,9 +17,10 @@ import unittest
 from datasets import load_dataset
 
 from transformers.pipelines import pipeline
-from transformers.testing_utils import nested_simplify, require_torch, slow
+from transformers.testing_utils import is_pipeline_test, nested_simplify, require_torch, slow
 
 
+@is_pipeline_test
 @require_torch
 class ZeroShotAudioClassificationPipelineTests(unittest.TestCase):
     # Deactivating auto tests since we don't have a good MODEL_FOR_XX mapping,
@@ -16,7 +16,14 @@ import unittest
 
 from transformers import is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_vision,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
@@ -31,6 +38,7 @@ else:
     pass
 
 
+@is_pipeline_test
 @require_vision
 class ZeroShotImageClassificationPipelineTests(unittest.TestCase):
     # Deactivating auto tests since we don't have a good MODEL_FOR_XX mapping,
@@ -15,7 +15,14 @@
 import unittest
 
 from transformers import MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING, is_vision_available, pipeline
-from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import (
+    is_pipeline_test,
+    nested_simplify,
+    require_tf,
+    require_torch,
+    require_vision,
+    slow,
+)
 
 from .test_pipelines_common import ANY
 
@@ -30,6 +37,7 @@ else:
     pass
 
 
+@is_pipeline_test
 @require_vision
 @require_torch
 class ZeroShotObjectDetectionPipelineTests(unittest.TestCase):
@@ -20,6 +20,7 @@ import random
 from pathlib import Path
 
 from transformers.testing_utils import (
+    is_pipeline_test,
     require_decord,
     require_pytesseract,
     require_timm,
@@ -104,6 +105,7 @@ PATH_TO_TRANSFORMERS = os.path.join(Path(__file__).parent.parent, "src/transform
 transformers_module = direct_transformers_import(PATH_TO_TRANSFORMERS)
 
 
+@is_pipeline_test
 class PipelineTesterMixin:
     model_tester = None
     pipeline_model_mapping = None
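
Note (not part of the diff): pytest stores class-level marks on a pytestmark attribute, which subclasses inherit, so decorating PipelineTesterMixin is presumably enough to mark every model test class that mixes it in ("Mark the mixin as pipeline test" in the commit message). A hedged sketch with a hypothetical subclass; BertPipelineIntegrationTests and the toy mixin body are for illustration only:

# Hedged sketch; BertPipelineIntegrationTests is not part of this commit.
import unittest

from transformers.testing_utils import is_pipeline_test


@is_pipeline_test
class PipelineTesterMixin:
    pipeline_model_mapping = None


class BertPipelineIntegrationTests(PipelineTesterMixin, unittest.TestCase):
    # Inherits the mark (or the unittest.skip applied when RUN_PIPELINE_TESTS is false)
    # from the mixin, so `pytest -m is_pipeline_test` picks this class up as well.
    pass
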