Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-23 14:29:01 +06:00)
Rename file for consistency.

commit a3c5883f2c
parent daf8bebcdd
@@ -19,7 +19,7 @@ from io import open
 
 from transformers.tokenization_bert import VOCAB_FILES_NAMES, XxxTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 
 
 class XxxTokenizationTest(CommonTestCases.CommonTokenizerTester):
@@ -17,7 +17,7 @@ from __future__ import absolute_import, division, print_function
 import json
 import os
 
-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_commo import TemporaryDirectory
 
 
 class ConfigTester(object):
@@ -20,7 +20,7 @@ import unittest
 
 from transformers.modelcard import ModelCard
 
-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_commo import TemporaryDirectory
 
 
 class ModelCardTester(unittest.TestCase):
@@ -19,7 +19,7 @@ import unittest
 
 from transformers import is_torch_available
 
-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_commo import TemporaryDirectory
 from .utils import require_torch
 
 
@@ -18,7 +18,7 @@ import os
 
 from transformers.tokenization_albert import AlbertTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 
 
 SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/spiece.model")
@@ -27,7 +27,7 @@ from transformers.tokenization_bert import (
     _is_whitespace,
 )
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 from .utils import slow
 
 
@@ -25,7 +25,7 @@ from transformers.tokenization_bert_japanese import (
     MecabTokenizer,
 )
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 from .utils import custom_tokenizers, slow
 
 
@@ -19,7 +19,7 @@ from io import open
 
 from transformers.tokenization_ctrl import VOCAB_FILES_NAMES, CTRLTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 
 
 class CTRLTokenizationTest(CommonTestCases.CommonTokenizerTester):
@@ -20,7 +20,7 @@ from io import open
 
 from transformers.tokenization_gpt2 import VOCAB_FILES_NAMES, GPT2Tokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 
 
 class GPT2TokenizationTest(CommonTestCases.CommonTokenizerTester):
@@ -19,7 +19,7 @@ import os
 
 from transformers.tokenization_openai import VOCAB_FILES_NAMES, OpenAIGPTTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 
 
 class OpenAIGPTTokenizationTest(CommonTestCases.CommonTokenizerTester):
@@ -20,7 +20,7 @@ from io import open
 
 from transformers.tokenization_roberta import VOCAB_FILES_NAMES, RobertaTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 from .utils import slow
 
 
@@ -19,7 +19,7 @@ import os
 from transformers.tokenization_t5 import T5Tokenizer
 from transformers.tokenization_xlnet import SPIECE_UNDERLINE
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 
 
 SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model")
@@ -19,7 +19,7 @@ from io import open
 
 from transformers import is_torch_available
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 from .utils import require_torch
 
 
@@ -19,7 +19,7 @@ import os
 
 from transformers.tokenization_xlm import VOCAB_FILES_NAMES, XLMTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 from .utils import slow
 
 
@@ -18,7 +18,7 @@ import os
 
 from transformers.tokenization_xlnet import SPIECE_UNDERLINE, XLNetTokenizer
 
-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_commo import CommonTestCases
 from .utils import slow
 
 
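Every hunk above makes the same one-line change: the shared test helpers (CommonTestCases, TemporaryDirectory) are now imported from the renamed module rather than from tokenization_tests_commons. Below is a minimal sketch of how such an import rewrite could be automated across a test directory; the tests path, the file glob, and the rewrite-in-place approach are illustrative assumptions, not part of this commit, and the target module name is copied verbatim from the diff (it may be truncated in this view).

# Hypothetical helper to update imports after renaming a shared test module.
# Assumptions (not from the commit): test files live under ./tests and use
# relative imports of the form "from .<module> import ...".
import re
from pathlib import Path

def rewrite_imports(tests_dir, old_module, new_module):
    pattern = re.compile(rf"\bfrom \.{re.escape(old_module)} import\b")
    replacement = f"from .{new_module} import"
    for path in Path(tests_dir).glob("*.py"):
        text = path.read_text(encoding="utf-8")
        updated = pattern.sub(replacement, text)
        if updated != text:
            path.write_text(updated, encoding="utf-8")
            print(f"updated {path}")

if __name__ == "__main__":
    # Module names taken from the diff above.
    rewrite_imports("tests", "tokenization_tests_commons", "test_tokenization_commo")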