Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-31 02:02:21 +06:00

🔫 Python35

Commit: b20f11d4ca
Parent: 0304628590
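Context for the change: the commit title refers to Python 3.5, which cannot parse PEP 526 variable annotations ("name: Type = value"); those arrived in Python 3.6, so the annotated mapping definitions below broke the entire module import on 3.5 with a SyntaxError. The diff therefore drops the annotations, along with the typing import and the base-class imports that appear to have been used only by them. A minimal sketch of the incompatibility and the 3.5-safe spelling, using stub classes in place of the real transformers ones:

from collections import OrderedDict
from typing import Dict, Type

# Stubs standing in for transformers' real base classes.
class PretrainedConfig:
    pass

class PreTrainedModel:
    pass

# PEP 526 variable annotation: fine on Python 3.6+, but a SyntaxError
# on Python 3.5 before any line of the module runs.
MODEL_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedModel]] = OrderedDict()

# Python 3.5-compatible form, as rewritten in this commit; a type comment
# can preserve the information for static checkers if wanted.
MODEL_MAPPING = OrderedDict()  # type: Dict[Type[PretrainedConfig], Type[PreTrainedModel]]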
modeling_auto.py

@@ -17,7 +17,6 @@

 import logging
 from collections import OrderedDict
-from typing import Dict, Type

 from .configuration_auto import (
     AlbertConfig,
@@ -78,7 +77,6 @@ from .modeling_roberta import (
 )
 from .modeling_t5 import T5_PRETRAINED_MODEL_ARCHIVE_MAP, T5Model, T5WithLMHeadModel
 from .modeling_transfo_xl import TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP, TransfoXLLMHeadModel, TransfoXLModel
-from .modeling_utils import PreTrainedModel
 from .modeling_xlm import (
     XLM_PRETRAINED_MODEL_ARCHIVE_MAP,
     XLMForQuestionAnswering,
@@ -126,7 +124,7 @@ ALL_PRETRAINED_MODEL_ARCHIVE_MAP = dict(
     for key, value, in pretrained_map.items()
 )

-MODEL_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedModel]] = OrderedDict(
+MODEL_MAPPING = OrderedDict(
     [
         (T5Config, T5Model),
         (DistilBertConfig, DistilBertModel),
@@ -144,7 +142,7 @@ MODEL_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedModel]] = OrderedDict
     ]
 )

-MODEL_WITH_LM_HEAD_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedModel]] = OrderedDict(
+MODEL_WITH_LM_HEAD_MAPPING = OrderedDict(
     [
         (T5Config, T5WithLMHeadModel),
         (DistilBertConfig, DistilBertForMaskedLM),
@@ -162,7 +160,7 @@ MODEL_WITH_LM_HEAD_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedModel]]
     ]
 )

-MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedModel]] = OrderedDict(
+MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING = OrderedDict(
     [
         (DistilBertConfig, DistilBertForSequenceClassification),
         (AlbertConfig, AlbertForSequenceClassification),
@@ -175,7 +173,7 @@ MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING: Dict[Type[PretrainedConfig], Type[Pre
     ]
 )

-MODEL_FOR_QUESTION_ANSWERING_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedModel]] = OrderedDict(
+MODEL_FOR_QUESTION_ANSWERING_MAPPING = OrderedDict(
     [
         (DistilBertConfig, DistilBertForQuestionAnswering),
         (AlbertConfig, AlbertForQuestionAnswering),
@@ -185,7 +183,7 @@ MODEL_FOR_QUESTION_ANSWERING_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrain
     ]
 )

-MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedModel]] = OrderedDict(
+MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING = OrderedDict(
     [
         (DistilBertConfig, DistilBertForTokenClassification),
         (CamembertConfig, CamembertForTokenClassification),
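For orientation, these OrderedDict tables are what the Auto classes walk to resolve a config instance to a concrete class; because the lookup is isinstance-based, order encodes specificity (CamembertConfig derives from RobertaConfig upstream, so it has to be listed first). A minimal sketch of that dispatch pattern, with hypothetical stub classes rather than the library's actual code:

from collections import OrderedDict

class PretrainedConfig:
    pass

class RobertaConfig(PretrainedConfig):
    pass

class CamembertConfig(RobertaConfig):  # subclass: must precede its parent
    pass

class RobertaModel:
    pass

class CamembertModel:
    pass

MODEL_MAPPING = OrderedDict(
    [
        (CamembertConfig, CamembertModel),  # checked before RobertaConfig
        (RobertaConfig, RobertaModel),
    ]
)

def model_class_for(config):
    # First isinstance match wins, so insertion order is significant.
    for config_class, model_class in MODEL_MAPPING.items():
        if isinstance(config, config_class):
            return model_class
    raise ValueError("Unrecognized configuration class {}".format(config.__class__))

assert model_class_for(CamembertConfig()) is CamembertModel
assert model_class_for(RobertaConfig()) is RobertaModel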
modeling_tf_auto.py

@@ -17,7 +17,6 @@

 import logging
 from collections import OrderedDict
-from typing import Dict, Type

 from .configuration_auto import (
     AlbertConfig,
@@ -72,7 +71,6 @@ from .modeling_tf_transfo_xl import (
     TFTransfoXLLMHeadModel,
     TFTransfoXLModel,
 )
-from .modeling_tf_utils import TFPreTrainedModel
 from .modeling_tf_xlm import (
     TF_XLM_PRETRAINED_MODEL_ARCHIVE_MAP,
     TFXLMForQuestionAnsweringSimple,
@@ -111,8 +109,9 @@ TF_ALL_PRETRAINED_MODEL_ARCHIVE_MAP = dict(
     for key, value, in pretrained_map.items()
 )

-TF_MODEL_MAPPING: Dict[Type[PretrainedConfig], Type[TFPreTrainedModel]] = OrderedDict(
+TF_MODEL_MAPPING = OrderedDict(
     [
         (T5Config, TFT5Model),
         (DistilBertConfig, TFDistilBertModel),
+        (AlbertConfig, TFAlbertModel),
         (RobertaConfig, TFRobertaModel),
@@ -126,8 +125,9 @@ TF_MODEL_MAPPING: Dict[Type[PretrainedConfig], Type[TFPreTrainedModel]] = Ordere
     ]
 )

-TF_MODEL_WITH_LM_HEAD_MAPPING: Dict[Type[PretrainedConfig], Type[TFPreTrainedModel]] = OrderedDict(
+TF_MODEL_WITH_LM_HEAD_MAPPING = OrderedDict(
     [
         (T5Config, TFT5WithLMHeadModel),
         (DistilBertConfig, TFDistilBertForMaskedLM),
+        (AlbertConfig, TFAlbertForMaskedLM),
         (RobertaConfig, TFRobertaForMaskedLM),
@@ -141,7 +141,7 @@ TF_MODEL_WITH_LM_HEAD_MAPPING: Dict[Type[PretrainedConfig], Type[TFPreTrainedMod
     ]
 )

-TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING: Dict[Type[PretrainedConfig], Type[TFPreTrainedModel]] = OrderedDict(
+TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING = OrderedDict(
     [
         (DistilBertConfig, TFDistilBertForSequenceClassification),
         (AlbertConfig, TFAlbertForSequenceClassification),
@@ -152,7 +152,7 @@ TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING: Dict[Type[PretrainedConfig], Type[
     ]
 )

-TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING: Dict[Type[PretrainedConfig], Type[TFPreTrainedModel]] = OrderedDict(
+TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING = OrderedDict(
     [
         (DistilBertConfig, TFDistilBertForQuestionAnswering),
         (BertConfig, TFBertForQuestionAnswering),
@@ -161,7 +161,7 @@ TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING: Dict[Type[PretrainedConfig], Type[TFPre
     ]
 )

-TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING: Dict[Type[PretrainedConfig], Type[TFPreTrainedModel]] = OrderedDict(
+TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING = OrderedDict(
     [
         (DistilBertConfig, TFDistilBertForTokenClassification),
         (RobertaConfig, TFRobertaForTokenClassification),
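The TensorFlow module gets the same treatment as the PyTorch one. A cheap way to guard against this class of regression is to byte-compile the file under the oldest supported interpreter, since the annotation SyntaxError surfaces at compile time without importing torch or tensorflow; a hypothetical check along these lines (file path assumed):

import py_compile

# Run this under Python 3.5: a PEP 526 annotation anywhere in the file
# raises py_compile.PyCompileError here, long before any heavy imports.
py_compile.compile("modeling_tf_auto.py", doraise=True)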
tokenization_auto.py

@@ -17,7 +17,6 @@

 import logging
 from collections import OrderedDict
-from typing import Dict, Type

 from .configuration_auto import (
     AlbertConfig,
@@ -47,7 +46,6 @@ from .tokenization_openai import OpenAIGPTTokenizer
 from .tokenization_roberta import RobertaTokenizer
 from .tokenization_t5 import T5Tokenizer
 from .tokenization_transfo_xl import TransfoXLTokenizer
-from .tokenization_utils import PreTrainedTokenizer
 from .tokenization_xlm import XLMTokenizer
 from .tokenization_xlm_roberta import XLMRobertaTokenizer
 from .tokenization_xlnet import XLNetTokenizer
@@ -56,7 +54,7 @@ from .tokenization_xlnet import XLNetTokenizer

 logger = logging.getLogger(__name__)


-TOKENIZER_MAPPING: Dict[Type[PretrainedConfig], Type[PreTrainedTokenizer]] = OrderedDict(
+TOKENIZER_MAPPING = OrderedDict(
     [
         (T5Config, T5Tokenizer),
         (DistilBertConfig, DistilBertTokenizer),
@@ -183,6 +181,6 @@ class AutoTokenizer(object):
         raise ValueError(
             "Unrecognized configuration class {} to build an AutoTokenizer.\n"
             "Model type should be one of {}.".format(
-                config.__class__, ", ".join(c.__name__ for c in MODEL_MAPPING.keys())
+                config.__class__, ", ".join(c.__name__ for c in TOKENIZER_MAPPING.keys())
             )
         )
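The last hunk is a small bug fix riding along with the annotation cleanup: on an unrecognized config, the AutoTokenizer error message enumerated MODEL_MAPPING, a table defined in the modeling module and seemingly not imported here, so reaching this branch would likely have raised a NameError rather than the intended ValueError; it now enumerates TOKENIZER_MAPPING, the table actually consulted. A self-contained reproduction of the corrected behavior, with stub classes standing in for the real ones:

from collections import OrderedDict

# Hypothetical stand-ins for the real config and tokenizer classes.
class T5Config:
    pass

class T5Tokenizer:
    pass

TOKENIZER_MAPPING = OrderedDict([(T5Config, T5Tokenizer)])

class MysteryConfig:  # a config type with no registered tokenizer
    pass

config = MysteryConfig()
for config_class, tokenizer_class in TOKENIZER_MAPPING.items():
    if isinstance(config, config_class):
        break
else:
    # The corrected message lists the keys of TOKENIZER_MAPPING, the dict
    # that was actually searched, rather than the unrelated MODEL_MAPPING.
    raise ValueError(
        "Unrecognized configuration class {} to build an AutoTokenizer.\n"
        "Model type should be one of {}.".format(
            config.__class__, ", ".join(c.__name__ for c in TOKENIZER_MAPPING.keys())
        )
    )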