Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-20 21:18:21 +06:00)
Rework pipeline tests (#19366)
* Rework pipeline tests
* Try to fix Flax tests
* Try to put it before
* Use a new decorator instead
* Remove ignore marker since it doesn't work
* Filter pipeline tests
* Woopsie
* Use the fitlered list
* Clean up and fake modif
* Remove init
* Revert fake modif
This commit is contained in:
parent 983451a13e
commit 9ac586b3c8
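In short: pipeline tests now live under tests/pipelines and the dedicated CI jobs run that directory by path, while every other job consumes a filtered_test_list.txt produced by the new --filter_pipeline_tests option of utils/tests_fetcher.py; the is_pipeline_test marker and decorator are removed, with require_torch_or_tf taking over for the few framework-agnostic cases. A minimal sketch of the two invocations as this diff wires them up (a Python stand-in for the CI shell steps; pytest-xdist assumed, transformers-specific report options omitted):

    # Sketch only, not part of the diff: run from a transformers checkout.
    import pytest

    # Pipeline tests: point pytest at the directory, no marker selection needed.
    pytest.main(["-n", "8", "--dist=loadfile", "-rA", "tests/pipelines"])

    # Everything else: run whatever tests_fetcher.py left in the filtered list.
    with open("test_preparation/filtered_test_list.txt", encoding="utf-8") as f:
        pytest.main(["-n", "8", "--dist=loadfile", "-rA", *f.read().split()])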
@@ -79,10 +79,19 @@ jobs:
           path: ~/transformers/tests_fetched_summary.txt
       - run: |
           if [ -f test_list.txt ]; then
-            mv test_list.txt test_preparation/test_list.txt
+            cp test_list.txt test_preparation/test_list.txt
           else
             touch test_preparation/test_list.txt
           fi
+      - run: python utils/tests_fetcher.py --filter_pipeline_tests
+      - run: |
+          if [ -f test_list.txt ]; then
+            mv test_list.txt test_preparation/filtered_test_list.txt
+          else
+            touch test_preparation/filtered_test_list.txt
+          fi
+      - store_artifacts:
+          path: ~/transformers/test_preparation/filtered_test_list.txt
       - run: python utils/tests_fetcher.py --filters tests examples | tee examples_tests_fetched_summary.txt
       - store_artifacts:
           path: ~/transformers/examples_tests_fetched_summary.txt
@@ -97,6 +106,7 @@ jobs:
           root: test_preparation/
           paths:
             test_list.txt
+            filtered_test_list.txt
             examples_test_list.txt
 
     # To run all tests for the nightly build
@@ -110,6 +120,8 @@ jobs:
           mkdir test_preparation
           echo "tests" > test_preparation/test_list.txt
           echo "tests" > test_preparation/examples_test_list.txt
+      - run: python utils/tests_fetcher.py --filter_pipeline_tests
+      - run: mv test_list.txt test_preparation/filtered_test_list.txt
 
       - persist_to_workspace:
           root: test_preparation/
@@ -132,7 +144,7 @@ jobs:
       - attach_workspace:
           at: ~/transformers/test_preparation
       - run: |
-          if [ ! -s test_preparation/test_list.txt ]; then
+          if [ ! -s test_preparation/filtered_test_list.txt ]; then
             echo "No tests to run, exiting early!"
             circleci-agent step halt
           fi
@@ -152,7 +164,7 @@ jobs:
           key: v0.5-torch_and_tf-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_tf $(cat test_preparation/test_list.txt) -m is_pt_tf_cross_test --durations=0 | tee tests_output.txt
+      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_tf $(cat test_preparation/filtered_test_list.txt) -m is_pt_tf_cross_test --durations=0 | tee tests_output.txt
       - store_artifacts:
           path: ~/transformers/tests_output.txt
       - store_artifacts:
@@ -174,7 +186,7 @@ jobs:
       - attach_workspace:
           at: ~/transformers/test_preparation
       - run: |
-          if [ ! -s test_preparation/test_list.txt ]; then
+          if [ ! -s test_preparation/filtered_test_list.txt ]; then
             echo "No tests to run, exiting early!"
             circleci-agent step halt
           fi
@@ -192,7 +204,7 @@ jobs:
           key: v0.5-torch_and_flax-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_flax $(cat test_preparation/test_list.txt) -m is_pt_flax_cross_test --durations=0 | tee tests_output.txt
+      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_flax $(cat test_preparation/filtered_test_list.txt) -m is_pt_flax_cross_test --durations=0 | tee tests_output.txt
       - store_artifacts:
           path: ~/transformers/tests_output.txt
       - store_artifacts:
@@ -213,7 +225,7 @@ jobs:
       - attach_workspace:
           at: ~/transformers/test_preparation
       - run: |
-          if [ ! -s test_preparation/test_list.txt ]; then
+          if [ ! -s test_preparation/filtered_test_list.txt ]; then
             echo "No tests to run, exiting early!"
             circleci-agent step halt
           fi
@@ -231,7 +243,7 @@ jobs:
           key: v0.5-torch-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest -n 3 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_torch $(cat test_preparation/test_list.txt) | tee tests_output.txt
+      - run: python -m pytest -n 3 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_torch $(cat test_preparation/filtered_test_list.txt) | tee tests_output.txt
       - store_artifacts:
           path: ~/transformers/tests_output.txt
       - store_artifacts:
@@ -252,7 +264,7 @@ jobs:
       - attach_workspace:
           at: ~/transformers/test_preparation
       - run: |
-          if [ ! -s test_preparation/test_list.txt ]; then
+          if [ ! -s test_preparation/filtered_test_list.txt ]; then
             echo "No tests to run, exiting early!"
             circleci-agent step halt
           fi
@@ -269,7 +281,7 @@ jobs:
           key: v0.5-tf-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_tf $(cat test_preparation/test_list.txt) | tee tests_output.txt
+      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_tf $(cat test_preparation/filtered_test_list.txt) | tee tests_output.txt
       - store_artifacts:
           path: ~/transformers/tests_output.txt
       - store_artifacts:
@@ -290,7 +302,7 @@ jobs:
       - attach_workspace:
           at: ~/transformers/test_preparation
       - run: |
-          if [ ! -s test_preparation/test_list.txt ]; then
+          if [ ! -s test_preparation/filtered_test_list.txt ]; then
             echo "No tests to run, exiting early!"
             circleci-agent step halt
           fi
@@ -306,7 +318,7 @@ jobs:
           key: v0.5-flax-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_flax $(cat test_preparation/test_list.txt) | tee tests_output.txt
+      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_flax $(cat test_preparation/filtered_test_list.txt) | tee tests_output.txt
       - store_artifacts:
           path: ~/transformers/tests_output.txt
       - store_artifacts:
@@ -318,7 +330,6 @@ jobs:
       - image: cimg/python:3.7.12
     environment:
         OMP_NUM_THREADS: 1
-        RUN_PIPELINE_TESTS: yes
         TRANSFORMERS_IS_CI: yes
         PYTEST_TIMEOUT: 120
     resource_class: xlarge
@@ -345,7 +356,7 @@ jobs:
           key: v0.5-torch-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_torch -m is_pipeline_test $(cat test_preparation/test_list.txt) | tee tests_output.txt
+      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_torch tests/pipelines | tee tests_output.txt
       - store_artifacts:
           path: ~/transformers/tests_output.txt
       - store_artifacts:
@@ -357,7 +368,6 @@ jobs:
       - image: cimg/python:3.7.12
     environment:
         OMP_NUM_THREADS: 1
-        RUN_PIPELINE_TESTS: yes
         TRANSFORMERS_IS_CI: yes
         PYTEST_TIMEOUT: 120
     resource_class: xlarge
@@ -382,7 +392,7 @@ jobs:
           key: v0.5-tf-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_tf $(cat test_preparation/test_list.txt) -m is_pipeline_test | tee tests_output.txt
+      - run: python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_tf tests/pipelines | tee tests_output.txt
       - store_artifacts:
           path: ~/transformers/tests_output.txt
       - store_artifacts:
@@ -401,7 +411,7 @@ jobs:
       - attach_workspace:
           at: ~/transformers/test_preparation
       - run: |
-          if [ ! -s test_preparation/test_list.txt ]; then
+          if [ ! -s test_preparation/filtered_test_list.txt ]; then
             echo "No tests to run, exiting early!"
             circleci-agent step halt
           fi
@@ -557,7 +567,7 @@ jobs:
       - attach_workspace:
           at: ~/transformers/test_preparation
       - run: |
-          if [ ! -s test_preparation/test_list.txt ]; then
+          if [ ! -s test_preparation/filtered_test_list.txt ]; then
             echo "No tests to run, exiting early!"
             circleci-agent step halt
           fi
@@ -575,7 +585,7 @@ jobs:
           key: v0.5-hub-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest --max-worker-restart=0 -sv --make-reports=tests_hub $(cat test_preparation/test_list.txt) -m is_staging_test | tee tests_output.txt
+      - run: python -m pytest --max-worker-restart=0 -sv --make-reports=tests_hub $(cat test_preparation/filtered_test_list.txt) -m is_staging_test | tee tests_output.txt
       - store_artifacts:
           path: ~/transformers/tests_output.txt
       - store_artifacts:
@@ -596,7 +606,7 @@ jobs:
       - attach_workspace:
           at: ~/transformers/test_preparation
       - run: |
-          if [ ! -s test_preparation/test_list.txt ]; then
+          if [ ! -s test_preparation/filtered_test_list.txt ]; then
             echo "No tests to run, exiting early!"
             circleci-agent step halt
           fi
@@ -610,7 +620,7 @@ jobs:
           key: v0.5-onnx-{{ checksum "setup.py" }}
           paths:
             - '~/.cache/pip'
-      - run: python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_onnx $(cat test_preparation/test_list.txt) -k onnx | tee tests_output.txt
+      - run: python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_onnx $(cat test_preparation/filtered_test_list.txt) -k onnx | tee tests_output.txt
 
       - store_artifacts:
           path: ~/transformers/tests_output.txt
@@ -690,7 +700,7 @@ jobs:
     steps:
       - checkout
       - attach_workspace:
-          at: ~/transformers/test_preparation
+          at: ~/transformers/filtered_test_list.txt
       - run: |
           if [ ! -s test_preparation/test_list.txt ]; then
             echo "No tests to run, exiting early!"
.github/workflows/self-scheduled.yml (vendored, 8 changed lines)
@@ -256,10 +256,8 @@ jobs:
 
       - name: Run all pipeline tests on GPU
         working-directory: /transformers
-        env:
-          RUN_PIPELINE_TESTS: yes
         run: |
-          python3 -m pytest -n 1 -v --dist=loadfile -m is_pipeline_test --make-reports=${{ matrix.machine_type }}_tests_torch_pipeline_gpu tests
+          python3 -m pytest -n 1 -v --dist=loadfile --make-reports=${{ matrix.machine_type }}_tests_torch_pipeline_gpu tests/pipelines
 
       - name: Failure short reports
         if: ${{ failure() }}
@@ -301,10 +299,8 @@ jobs:
 
       - name: Run all pipeline tests on GPU
         working-directory: /transformers
-        env:
-          RUN_PIPELINE_TESTS: yes
         run: |
-          python3 -m pytest -n 1 -v --dist=loadfile -m is_pipeline_test --make-reports=${{ matrix.machine_type }}_tests_tf_pipeline_gpu tests
+          python3 -m pytest -n 1 -v --dist=loadfile --make-reports=${{ matrix.machine_type }}_tests_tf_pipeline_gpu tests/pipelines
 
       - name: Failure short reports
         if: ${{ always() }}
@@ -32,7 +32,6 @@ warnings.simplefilter(action="ignore", category=FutureWarning)
 
 
 def pytest_configure(config):
-    config.addinivalue_line("markers", "is_pipeline_test: mark test to run only when pipeline are tested")
     config.addinivalue_line(
         "markers", "is_pt_tf_cross_test: mark test to run only when PT and TF interactions are tested"
     )
@@ -133,7 +133,6 @@ _run_pt_tf_cross_tests = parse_flag_from_env("RUN_PT_TF_CROSS_TESTS", default=Fa
 _run_pt_flax_cross_tests = parse_flag_from_env("RUN_PT_FLAX_CROSS_TESTS", default=False)
 _run_custom_tokenizers = parse_flag_from_env("RUN_CUSTOM_TOKENIZERS", default=False)
 _run_staging = parse_flag_from_env("HUGGINGFACE_CO_STAGING", default=False)
-_run_pipeline_tests = parse_flag_from_env("RUN_PIPELINE_TESTS", default=False)
 _run_git_lfs_tests = parse_flag_from_env("RUN_GIT_LFS_TESTS", default=False)
 _tf_gpu_memory_limit = parse_int_from_env("TF_GPU_MEMORY_LIMIT", default=None)
 
@@ -176,25 +175,6 @@ def is_pt_flax_cross_test(test_case):
         return pytest.mark.is_pt_flax_cross_test()(test_case)
 
 
-def is_pipeline_test(test_case):
-    """
-    Decorator marking a test as a pipeline test.
-
-    Pipeline tests are skipped by default and we can run only them by setting RUN_PIPELINE_TESTS environment variable
-    to a truthy value and selecting the is_pipeline_test pytest mark.
-
-    """
-    if not _run_pipeline_tests:
-        return unittest.skip("test is pipeline test")(test_case)
-    else:
-        try:
-            import pytest  # We don't need a hard dependency on pytest in the main library
-        except ImportError:
-            return test_case
-        else:
-            return pytest.mark.is_pipeline_test()(test_case)
-
-
 def is_staging_test(test_case):
     """
     Decorator marking a test as a staging test.
@@ -309,6 +289,18 @@ def require_torch(test_case):
     return unittest.skipUnless(is_torch_available(), "test requires PyTorch")(test_case)
 
 
+def require_torch_or_tf(test_case):
+    """
+    Decorator marking a test that requires PyTorch or TensorFlow.
+
+    These tests are skipped when neither PyTorch not TensorFlow is installed.
+
+    """
+    return unittest.skipUnless(is_torch_available() or is_tf_available(), "test requires PyTorch or TensorFlow")(
+        test_case
+    )
+
+
 def require_intel_extension_for_pytorch(test_case):
     """
     Decorator marking a test that requires Intel Extension for PyTorch.
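A hedged usage sketch of the decorator added above (the test class is illustrative only, not part of this diff):

    # Illustrative only: opting a test into the new helper.
    import unittest

    from transformers.testing_utils import require_torch_or_tf


    @require_torch_or_tf
    class ExamplePipelineSmokeTest(unittest.TestCase):
        # Runs when either PyTorch or TensorFlow is installed; skipped otherwise.
        def test_pipeline_import(self):
            from transformers import pipeline

            self.assertTrue(callable(pipeline))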
@@ -18,19 +18,11 @@ import numpy as np
 
 from transformers import MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING
 from transformers.pipelines import AudioClassificationPipeline, pipeline
-from transformers.testing_utils import (
-    is_pipeline_test,
-    nested_simplify,
-    require_tf,
-    require_torch,
-    require_torchaudio,
-    slow,
-)
+from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torchaudio, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
 
-@is_pipeline_test
 @require_torch
 class AudioClassificationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING
@@ -31,7 +31,6 @@ from transformers.pipelines import AutomaticSpeechRecognitionPipeline, pipeline
 from transformers.pipelines.audio_utils import chunk_bytes_iter
 from transformers.pipelines.automatic_speech_recognition import chunk_iter
 from transformers.testing_utils import (
-    is_pipeline_test,
     is_torch_available,
     nested_simplify,
     require_pyctcdecode,
@@ -52,7 +51,6 @@ if is_torch_available():
 # from .test_pipelines_common import CustomInputPipelineCommonMixin
 
 
-@is_pipeline_test
 class AutomaticSpeechRecognitionPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = {
         k: v
@@ -48,13 +48,13 @@ from transformers.testing_utils import (
     USER,
     CaptureLogger,
     RequestCounter,
-    is_pipeline_test,
     is_staging_test,
     nested_simplify,
     require_scatter,
     require_tensorflow_probability,
     require_tf,
     require_torch,
+    require_torch_or_tf,
     slow,
 )
 from transformers.utils import is_tf_available, is_torch_available
@@ -307,7 +307,6 @@ class PipelineTestCaseMeta(type):
         return type.__new__(mcs, name, bases, dct)
 
 
-@is_pipeline_test
 class CommonPipelineTest(unittest.TestCase):
     @require_torch
     def test_pipeline_iteration(self):
@@ -416,7 +415,6 @@ class CommonPipelineTest(unittest.TestCase):
         self.assertEqual(len(outputs), 20)
 
 
-@is_pipeline_test
 class PipelinePadTest(unittest.TestCase):
     @require_torch
     def test_pipeline_padding(self):
@@ -498,7 +496,6 @@ class PipelinePadTest(unittest.TestCase):
         )
 
 
-@is_pipeline_test
 class PipelineUtilsTest(unittest.TestCase):
     @require_torch
     def test_pipeline_dataset(self):
@@ -795,7 +792,6 @@ class CustomPipeline(Pipeline):
         return model_outputs["logits"].softmax(-1).numpy()
 
 
-@is_pipeline_test
 class CustomPipelineTest(unittest.TestCase):
     def test_warning_logs(self):
         transformers_logging.set_verbosity_debug()
@@ -835,6 +831,7 @@ class CustomPipelineTest(unittest.TestCase):
         # Clean registry for next tests.
         del PIPELINE_REGISTRY.supported_tasks["custom-text-classification"]
 
+    @require_torch_or_tf
     def test_dynamic_pipeline(self):
         PIPELINE_REGISTRY.register_pipeline(
             "pair-classification",
@@ -886,6 +883,7 @@ class CustomPipelineTest(unittest.TestCase):
             [{"label": "LABEL_0", "score": 0.505}],
         )
 
+    @require_torch_or_tf
     def test_cached_pipeline_has_minimum_calls_to_head(self):
         # Make sure we have cached the pipeline.
         _ = pipeline("text-classification", model="hf-internal-testing/tiny-random-bert")
@@ -29,7 +29,7 @@ from transformers import (
     TFAutoModelForCausalLM,
     pipeline,
 )
-from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, slow, torch_device
+from transformers.testing_utils import require_tf, require_torch, slow, torch_device
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
@@ -37,7 +37,6 @@ from .test_pipelines_common import ANY, PipelineTestCaseMeta
 DEFAULT_DEVICE_NUM = -1 if torch_device == "cpu" else 0
 
 
-@is_pipeline_test
 class ConversationalPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = dict(
         list(MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING.items())
@@ -18,7 +18,6 @@ from transformers import MODEL_FOR_DOCUMENT_QUESTION_ANSWERING_MAPPING, AutoToke
 from transformers.pipelines import pipeline
 from transformers.pipelines.document_question_answering import apply_tesseract
 from transformers.testing_utils import (
-    is_pipeline_test,
     nested_simplify,
     require_detectron2,
     require_pytesseract,
@@ -53,7 +52,6 @@ INVOICE_URL = (
 )
 
 
-@is_pipeline_test
 @require_torch
 @require_vision
 class DocumentQuestionAnsweringPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
@@ -22,12 +22,11 @@ from transformers import (
     LxmertConfig,
     pipeline,
 )
-from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch
+from transformers.testing_utils import nested_simplify, require_tf, require_torch
 
 from .test_pipelines_common import PipelineTestCaseMeta
 
 
-@is_pipeline_test
 class FeatureExtractionPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_MAPPING
     tf_model_mapping = TF_MODEL_MAPPING
@@ -16,19 +16,11 @@ import unittest
 
 from transformers import MODEL_FOR_MASKED_LM_MAPPING, TF_MODEL_FOR_MASKED_LM_MAPPING, FillMaskPipeline, pipeline
 from transformers.pipelines import PipelineException
-from transformers.testing_utils import (
-    is_pipeline_test,
-    nested_simplify,
-    require_tf,
-    require_torch,
-    require_torch_gpu,
-    slow,
-)
+from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_gpu, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
 
-@is_pipeline_test
 class FillMaskPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_MASKED_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_MASKED_LM_MAPPING
@@ -22,10 +22,10 @@ from transformers import (
 )
 from transformers.pipelines import ImageClassificationPipeline, pipeline
 from transformers.testing_utils import (
-    is_pipeline_test,
     nested_simplify,
     require_tf,
     require_torch,
+    require_torch_or_tf,
     require_vision,
     slow,
 )
@@ -43,7 +43,7 @@ else:
     pass
 
 
-@is_pipeline_test
+@require_torch_or_tf
 @require_vision
 class ImageClassificationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_IMAGE_CLASSIFICATION_MAPPING
@@ -31,15 +31,7 @@ from transformers import (
     is_vision_available,
     pipeline,
 )
-from transformers.testing_utils import (
-    is_pipeline_test,
-    nested_simplify,
-    require_tf,
-    require_timm,
-    require_torch,
-    require_vision,
-    slow,
-)
+from transformers.testing_utils import nested_simplify, require_tf, require_timm, require_torch, require_vision, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
@@ -62,7 +54,6 @@ def hashimage(image: Image) -> str:
 @require_vision
 @require_timm
 @require_torch
-@is_pipeline_test
 class ImageSegmentationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = {
         k: v
@@ -16,7 +16,7 @@ import unittest
 
 from transformers import MODEL_FOR_VISION_2_SEQ_MAPPING, TF_MODEL_FOR_VISION_2_SEQ_MAPPING, is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, require_vision, slow
+from transformers.testing_utils import require_tf, require_torch, require_vision, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
@@ -31,7 +31,6 @@ else:
     pass
 
 
-@is_pipeline_test
 @require_vision
 class ImageToTextPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_VISION_2_SEQ_MAPPING
@@ -22,15 +22,7 @@ from transformers import (
     is_vision_available,
     pipeline,
 )
-from transformers.testing_utils import (
-    is_pipeline_test,
-    nested_simplify,
-    require_tf,
-    require_timm,
-    require_torch,
-    require_vision,
-    slow,
-)
+from transformers.testing_utils import nested_simplify, require_tf, require_timm, require_torch, require_vision, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
@@ -48,7 +40,6 @@ else:
 @require_vision
 @require_timm
 @require_torch
-@is_pipeline_test
 class ObjectDetectionPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_OBJECT_DETECTION_MAPPING
 
@@ -22,12 +22,11 @@ from transformers import (
 )
 from transformers.data.processors.squad import SquadExample
 from transformers.pipelines import QuestionAnsweringArgumentHandler, pipeline
-from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch, slow
+from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_or_tf, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
 
-@is_pipeline_test
 class QAPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_QUESTION_ANSWERING_MAPPING
     tf_model_mapping = TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING
@@ -345,7 +344,7 @@ between them. It's straightforward to train your models with one before loading
         self.assertEqual(nested_simplify(outputs), {"score": 0.979, "start": 27, "end": 32, "answer": "Paris"})
 
 
-@is_pipeline_test
+@require_torch_or_tf
 class QuestionAnsweringArgumentHandlerTests(unittest.TestCase):
     def test_argument_handler(self):
         qa = QuestionAnsweringArgumentHandler()
@@ -23,7 +23,7 @@ from transformers import (
     T5Config,
     pipeline,
 )
-from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, slow, torch_device
+from transformers.testing_utils import require_tf, require_torch, slow, torch_device
 from transformers.tokenization_utils import TruncationStrategy
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
@@ -32,7 +32,6 @@ from .test_pipelines_common import ANY, PipelineTestCaseMeta
 DEFAULT_DEVICE_NUM = -1 if torch_device == "cpu" else 0
 
 
-@is_pipeline_test
 class SummarizationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
@@ -23,7 +23,6 @@ from transformers import (
     pipeline,
 )
 from transformers.testing_utils import (
-    is_pipeline_test,
     require_pandas,
     require_tensorflow_probability,
     require_tf,
@@ -35,7 +34,6 @@ from transformers.testing_utils import (
 from .test_pipelines_common import PipelineTestCaseMeta
 
 
-@is_pipeline_test
 class TQAPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     # Putting it there for consistency, but TQA do not have fast tokenizer
     # which are needed to generate automatic tests
@@ -20,7 +20,7 @@ from transformers import (
     Text2TextGenerationPipeline,
     pipeline,
 )
-from transformers.testing_utils import is_pipeline_test, require_tf, require_torch
+from transformers.testing_utils import require_tf, require_torch
 from transformers.utils import is_torch_available
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
@@ -30,7 +30,6 @@ if is_torch_available():
     import torch
 
 
-@is_pipeline_test
 class Text2TextGenerationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
@@ -20,12 +20,11 @@ from transformers import (
     TextClassificationPipeline,
     pipeline,
 )
-from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch, slow
+from transformers.testing_utils import nested_simplify, require_tf, require_torch, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
 
-@is_pipeline_test
 class TextClassificationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
@@ -16,17 +16,17 @@ import unittest
 
 from transformers import MODEL_FOR_CAUSAL_LM_MAPPING, TF_MODEL_FOR_CAUSAL_LM_MAPPING, TextGenerationPipeline, pipeline
 from transformers.testing_utils import (
-    is_pipeline_test,
     require_accelerate,
     require_tf,
     require_torch,
     require_torch_gpu,
+    require_torch_or_tf,
 )
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
 
-@is_pipeline_test
+@require_torch_or_tf
 class TextGenerationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_CAUSAL_LM_MAPPING
@@ -25,14 +25,7 @@ from transformers import (
     pipeline,
 )
 from transformers.pipelines import AggregationStrategy, TokenClassificationArgumentHandler
-from transformers.testing_utils import (
-    is_pipeline_test,
-    nested_simplify,
-    require_tf,
-    require_torch,
-    require_torch_gpu,
-    slow,
-)
+from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_torch_gpu, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
@@ -40,7 +33,6 @@ from .test_pipelines_common import ANY, PipelineTestCaseMeta
 VALID_INPUTS = ["A simple string", ["list of strings", "A simple string that is quite a bit longer"]]
 
 
-@is_pipeline_test
 class TokenClassificationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING
@@ -770,7 +762,6 @@ class TokenClassificationPipelineTests(unittest.TestCase, metaclass=PipelineTest
         )
 
 
-@is_pipeline_test
 class TokenClassificationArgumentHandlerTestCase(unittest.TestCase):
     def setUp(self):
         self.args_parser = TokenClassificationArgumentHandler()
@@ -25,12 +25,11 @@ from transformers import (
     TranslationPipeline,
     pipeline,
 )
-from transformers.testing_utils import is_pipeline_test, require_tf, require_torch, slow
+from transformers.testing_utils import require_tf, require_torch, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
 
-@is_pipeline_test
 class TranslationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
@@ -118,7 +117,6 @@ class TranslationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta
         )
 
 
-@is_pipeline_test
 class TranslationNewFormatPipelineTests(unittest.TestCase):
     @require_torch
     @slow
@@ -16,14 +16,7 @@ import unittest
 
 from transformers import MODEL_FOR_VISUAL_QUESTION_ANSWERING_MAPPING, is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import (
-    is_pipeline_test,
-    nested_simplify,
-    require_tf,
-    require_torch,
-    require_vision,
-    slow,
-)
+from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_vision, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
@@ -38,7 +31,6 @@ else:
     pass
 
 
-@is_pipeline_test
 @require_torch
 @require_vision
 class VisualQuestionAnsweringPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
@@ -21,12 +21,11 @@ from transformers import (
     ZeroShotClassificationPipeline,
     pipeline,
 )
-from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch, slow
+from transformers.testing_utils import nested_simplify, require_tf, require_torch, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
 
-@is_pipeline_test
 class ZeroShotClassificationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     model_mapping = MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
     tf_model_mapping = TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING
@@ -16,14 +16,7 @@ import unittest
 
 from transformers import is_vision_available
 from transformers.pipelines import pipeline
-from transformers.testing_utils import (
-    is_pipeline_test,
-    nested_simplify,
-    require_tf,
-    require_torch,
-    require_vision,
-    slow,
-)
+from transformers.testing_utils import nested_simplify, require_tf, require_torch, require_vision, slow
 
 from .test_pipelines_common import ANY, PipelineTestCaseMeta
 
@@ -39,7 +32,6 @@ else:
 
 
 @require_vision
-@is_pipeline_test
 class ZeroShotImageClassificationPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
     # Deactivating auto tests since we don't have a good MODEL_FOR_XX mapping,
     # and only CLIP would be there for now.
@@ -619,6 +619,25 @@ def infer_tests_to_run(output_file, diff_with_last_commit=False, filters=None, j
         json.dump(test_map, fp, ensure_ascii=False)
 
 
+def filter_pipeline_tests(output_file):
+    if not os.path.isfile(output_file):
+        print("No test file found.")
+        return
+    with open(output_file, "r", encoding="utf-8") as f:
+        test_files = f.read().split(" ")
+
+    if len(test_files) == 0:
+        print("No tests to filter.")
+        return
+    if test_files == ["tests"]:
+        test_files = [os.path.join("tests", f) for f in os.listdir("tests") if f not in ["__init__.py", "pipelines"]]
+    else:
+        test_files = [f for f in test_files if not f.startswith(os.path.join("tests", "pipelines"))]
+
+    with open(output_file, "w", encoding="utf-8") as f:
+        f.write(" ".join(test_files))
+
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
     parser.add_argument(
@@ -645,6 +664,11 @@ if __name__ == "__main__":
         default=["tests"],
         help="Only keep the test files matching one of those filters.",
     )
+    parser.add_argument(
+        "--filter_pipeline_tests",
+        action="store_true",
+        help="Will filter the pipeline tests outside of the generated list of tests.",
+    )
     parser.add_argument(
         "--print_dependencies_of",
         type=str,
@@ -656,6 +680,8 @@ if __name__ == "__main__":
         print_tree_deps_of(args.print_dependencies_of)
     elif args.sanity_check:
         sanity_check()
+    elif args.filter_pipeline_tests:
+        filter_pipeline_tests(args.output_file)
     else:
         repo = Repo(PATH_TO_TRANFORMERS)
 
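A hedged sketch of how the new flag is meant to be chained after the regular fetch step, mirroring the CircleCI changes above (assumes the fetcher's default output file, test_list.txt, and a transformers checkout as the working directory):

    # Sketch only: fetch the impacted tests, then strip tests/pipelines entries
    # from the same file before handing it to pytest.
    import subprocess

    subprocess.run(["python", "utils/tests_fetcher.py"], check=True)
    subprocess.run(["python", "utils/tests_fetcher.py", "--filter_pipeline_tests"], check=True)

    with open("test_list.txt", encoding="utf-8") as f:
        print("Non-pipeline test files:", f.read().split())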