diff --git a/templates/adding_a_new_model/tests/test_tokenization_xxx.py b/templates/adding_a_new_model/tests/test_tokenization_xxx.py
index 0f46cfa3a38..2a7b58edb69 100644
--- a/templates/adding_a_new_model/tests/test_tokenization_xxx.py
+++ b/templates/adding_a_new_model/tests/test_tokenization_xxx.py
@@ -19,7 +19,7 @@ from io import open
 
 from transformers.tokenization_bert import VOCAB_FILES_NAMES, XxxTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 
 
 class XxxTokenizationTest(CommonTestCases.CommonTokenizerTester):
diff --git a/tests/test_configuration_common.py b/tests/test_configuration_common.py
index a94ddaea7ba..16bf9ea5721 100644
--- a/tests/test_configuration_common.py
+++ b/tests/test_configuration_common.py
@@ -17,7 +17,7 @@ from __future__ import absolute_import, division, print_function
 import json
 import os
 
-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_common import TemporaryDirectory
 
 
 class ConfigTester(object):
diff --git a/tests/test_model_card.py b/tests/test_model_card.py
index 9fcae1dbd8a..8f4b1d24b7f 100644
--- a/tests/test_model_card.py
+++ b/tests/test_model_card.py
@@ -20,7 +20,7 @@ import unittest
 
 from transformers.modelcard import ModelCard
 
-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_common import TemporaryDirectory
 
 
 class ModelCardTester(unittest.TestCase):
diff --git a/tests/test_optimization.py b/tests/test_optimization.py
index 267fbf09862..fa628be8ef4 100644
--- a/tests/test_optimization.py
+++ b/tests/test_optimization.py
@@ -19,7 +19,7 @@ import unittest
 
 from transformers import is_torch_available
 
-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_common import TemporaryDirectory
 from .utils import require_torch
 
 
diff --git a/tests/test_tokenization_albert.py b/tests/test_tokenization_albert.py
index d22e8760a9b..3a3c47537f9 100644
--- a/tests/test_tokenization_albert.py
+++ b/tests/test_tokenization_albert.py
@@ -18,7 +18,7 @@ import os
 
 from transformers.tokenization_albert import AlbertTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 
 
 SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/spiece.model")
diff --git a/tests/test_tokenization_bert.py b/tests/test_tokenization_bert.py
index 2081570fb3a..4b1cb5b9c90 100644
--- a/tests/test_tokenization_bert.py
+++ b/tests/test_tokenization_bert.py
@@ -27,7 +27,7 @@ from transformers.tokenization_bert import (
     _is_whitespace,
 )
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 from .utils import slow
 
 
diff --git a/tests/test_tokenization_bert_japanese.py b/tests/test_tokenization_bert_japanese.py
index 526f823b705..519cc199f87 100644
--- a/tests/test_tokenization_bert_japanese.py
+++ b/tests/test_tokenization_bert_japanese.py
@@ -25,7 +25,7 @@ from transformers.tokenization_bert_japanese import (
     MecabTokenizer,
 )
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 from .utils import custom_tokenizers, slow
 
 
diff --git a/tests/tokenization_tests_commons.py b/tests/test_tokenization_common.py
similarity index 100%
rename from tests/tokenization_tests_commons.py
rename to tests/test_tokenization_common.py
diff --git a/tests/test_tokenization_ctrl.py b/tests/test_tokenization_ctrl.py
index 612e01213f7..77ff6a86ea0 100644
--- a/tests/test_tokenization_ctrl.py
+++ b/tests/test_tokenization_ctrl.py
@@ -19,7 +19,7 @@ from io import open
 
 from transformers.tokenization_ctrl import VOCAB_FILES_NAMES, CTRLTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 
 
 class CTRLTokenizationTest(CommonTestCases.CommonTokenizerTester):
diff --git a/tests/test_tokenization_gpt2.py b/tests/test_tokenization_gpt2.py
index 0f916db3c9b..fbc45738f07 100644
--- a/tests/test_tokenization_gpt2.py
+++ b/tests/test_tokenization_gpt2.py
@@ -20,7 +20,7 @@ from io import open
 
 from transformers.tokenization_gpt2 import VOCAB_FILES_NAMES, GPT2Tokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 
 
 class GPT2TokenizationTest(CommonTestCases.CommonTokenizerTester):
diff --git a/tests/test_tokenization_openai.py b/tests/test_tokenization_openai.py
index c1a0fe10f73..a9e8cc38eaf 100644
--- a/tests/test_tokenization_openai.py
+++ b/tests/test_tokenization_openai.py
@@ -19,7 +19,7 @@ import os
 
 from transformers.tokenization_openai import VOCAB_FILES_NAMES, OpenAIGPTTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 
 
 class OpenAIGPTTokenizationTest(CommonTestCases.CommonTokenizerTester):
diff --git a/tests/test_tokenization_roberta.py b/tests/test_tokenization_roberta.py
index 9f70b1d27ec..bacfd51555a 100644
--- a/tests/test_tokenization_roberta.py
+++ b/tests/test_tokenization_roberta.py
@@ -20,7 +20,7 @@ from io import open
 
 from transformers.tokenization_roberta import VOCAB_FILES_NAMES, RobertaTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 from .utils import slow
 
 
diff --git a/tests/test_tokenization_t5.py b/tests/test_tokenization_t5.py
index e02d45af5f2..68ceebb83cb 100644
--- a/tests/test_tokenization_t5.py
+++ b/tests/test_tokenization_t5.py
@@ -19,7 +19,7 @@ import os
 from transformers.tokenization_t5 import T5Tokenizer
 from transformers.tokenization_xlnet import SPIECE_UNDERLINE
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 
 
 SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model")
diff --git a/tests/test_tokenization_transfo_xl.py b/tests/test_tokenization_transfo_xl.py
index df9d6cd6ab6..1d275f591fc 100644
--- a/tests/test_tokenization_transfo_xl.py
+++ b/tests/test_tokenization_transfo_xl.py
@@ -19,7 +19,7 @@ from io import open
 
 from transformers import is_torch_available
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 from .utils import require_torch
 
 
diff --git a/tests/test_tokenization_xlm.py b/tests/test_tokenization_xlm.py
index fe8cf1c974e..12bff7f6183 100644
--- a/tests/test_tokenization_xlm.py
+++ b/tests/test_tokenization_xlm.py
@@ -19,7 +19,7 @@ import os
 
 from transformers.tokenization_xlm import VOCAB_FILES_NAMES, XLMTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 from .utils import slow
 
 
diff --git a/tests/test_tokenization_xlnet.py b/tests/test_tokenization_xlnet.py
index 1dca9cef0e3..ed6430959e9 100644
--- a/tests/test_tokenization_xlnet.py
+++ b/tests/test_tokenization_xlnet.py
@@ -18,7 +18,7 @@ import os
 
 from transformers.tokenization_xlnet import SPIECE_UNDERLINE, XLNetTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
 from .utils import slow