Herbert tokenizer auto load (#7968)
parent 4abb7ffc18
commit 95792a948e
@@ -73,6 +73,7 @@ from .tokenization_flaubert import FlaubertTokenizer
 from .tokenization_fsmt import FSMTTokenizer
 from .tokenization_funnel import FunnelTokenizer
 from .tokenization_gpt2 import GPT2Tokenizer
+from .tokenization_herbert import HerbertTokenizer
 from .tokenization_layoutlm import LayoutLMTokenizer
 from .tokenization_longformer import LongformerTokenizer
 from .tokenization_lxmert import LxmertTokenizer
@@ -123,6 +124,7 @@ if is_tokenizers_available():
     from .tokenization_electra_fast import ElectraTokenizerFast
     from .tokenization_funnel_fast import FunnelTokenizerFast
     from .tokenization_gpt2_fast import GPT2TokenizerFast
+    from .tokenization_herbert_fast import HerbertTokenizerFast
     from .tokenization_layoutlm_fast import LayoutLMTokenizerFast
     from .tokenization_longformer_fast import LongformerTokenizerFast
     from .tokenization_lxmert_fast import LxmertTokenizerFast
@@ -147,6 +149,7 @@ else:
     ElectraTokenizerFast = None
     FunnelTokenizerFast = None
     GPT2TokenizerFast = None
+    HerbertTokenizerFast = None
     LayoutLMTokenizerFast = None
     LongformerTokenizerFast = None
     LxmertTokenizerFast = None
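For context, the two hunks above follow the file's optional-dependency pattern: the fast (Rust-backed) tokenizer classes are imported only when is_tokenizers_available() reports that the tokenizers package is installed, and otherwise the same names are bound to None so TOKENIZER_MAPPING can still reference them unconditionally. A minimal sketch of that pattern, not the file's exact code:

# Sketch of the guarded-import pattern used above; the availability check here
# is a simple stand-in, not transformers' own is_tokenizers_available().
import importlib.util

def tokenizers_installed() -> bool:
    # True when the Rust-backed `tokenizers` package can be imported.
    return importlib.util.find_spec("tokenizers") is not None

if tokenizers_installed():
    from tokenizers import Tokenizer  # fast backend really available
else:
    Tokenizer = None  # placeholder so lookups like (SlowClass, FastClass) still resolve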
@@ -191,6 +194,7 @@ TOKENIZER_MAPPING = OrderedDict(
         (LayoutLMConfig, (LayoutLMTokenizer, LayoutLMTokenizerFast)),
         (DPRConfig, (DPRQuestionEncoderTokenizer, DPRQuestionEncoderTokenizerFast)),
         (SqueezeBertConfig, (SqueezeBertTokenizer, SqueezeBertTokenizerFast)),
+        (BertConfig, (HerbertTokenizer, HerbertTokenizerFast)),
         (BertConfig, (BertTokenizer, BertTokenizerFast)),
         (OpenAIGPTConfig, (OpenAIGPTTokenizer, OpenAIGPTTokenizerFast)),
         (GPT2Config, (GPT2Tokenizer, GPT2TokenizerFast)),
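With the new (BertConfig, (HerbertTokenizer, HerbertTokenizerFast)) entry registered ahead of the plain BERT entry, AutoTokenizer should be able to resolve HerBERT checkpoints to the HerBERT tokenizer classes rather than the generic BERT ones. A quick usage sketch; the checkpoint name below is an assumed example, substitute any HerBERT model:

# Illustrative only: "allegro/herbert-base-cased" is an example checkpoint name.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("allegro/herbert-base-cased")
# Expected to print HerbertTokenizerFast (or HerbertTokenizer with use_fast=False)
# now that the mapping entry exists, instead of BertTokenizerFast.
print(type(tokenizer).__name__)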