[EncoderDecoder] Add xlm-roberta to encoder decoder (#6878)
* finish xlm-roberta
* finish docs
* expose XLMRobertaForCausalLM
parent 311992630c
commit 4d1a3ffde8
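What this commit enables: with XLMRobertaForCausalLM exposed, an XLM-RoBERTa checkpoint can serve as the decoder of an EncoderDecoderModel (the decoder is resolved through AutoModelForCausalLM). A minimal sketch of the intended usage, not taken from this diff; it assumes the public xlm-roberta-base checkpoint and the generic encoder-decoder API:

from transformers import EncoderDecoderModel, XLMRobertaTokenizer

# Tie two pretrained XLM-RoBERTa checkpoints into one encoder-decoder model.
# The decoder is instantiated as XLMRobertaForCausalLM with is_decoder=True and
# cross-attention layers added (randomly initialized, so fine-tuning is required).
model = EncoderDecoderModel.from_encoder_decoder_pretrained(
    "xlm-roberta-base", "xlm-roberta-base"
)
tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")

input_ids = tokenizer("Hello, my dog is cute", return_tensors="pt").input_ids

# Denoising-autoencoder-style training step: reconstruct the input from itself.
outputs = model(input_ids=input_ids, decoder_input_ids=input_ids, labels=input_ids)
loss = outputs[0]  # first tuple element is the loss when labels are given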
docs/source/model_doc/xlm-roberta.rst

@@ -56,6 +56,13 @@ XLMRobertaModel
     :members:
 
 
+XLMRobertaForCausalLM
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. autoclass:: transformers.XLMRobertaForCausalLM
+    :members:
+
+
 XLMRobertaForMaskedLM
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -130,4 +137,4 @@ TFXLMRobertaForQuestionAnswering
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 .. autoclass:: transformers.TFXLMRobertaForQuestionAnswering
-    :members:
\ No newline at end of file
+    :members:
src/transformers/__init__.py

@@ -418,6 +418,7 @@ if is_torch_available():
     )
     from .modeling_xlm_roberta import (
         XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
+        XLMRobertaForCausalLM,
         XLMRobertaForMaskedLM,
         XLMRobertaForMultipleChoice,
         XLMRobertaForQuestionAnswering,
src/transformers/modeling_auto.py

@@ -156,6 +156,7 @@ from .modeling_xlm import (
     XLMWithLMHeadModel,
 )
 from .modeling_xlm_roberta import (
+    XLMRobertaForCausalLM,
     XLMRobertaForMaskedLM,
     XLMRobertaForMultipleChoice,
     XLMRobertaForQuestionAnswering,

@@ -255,6 +256,7 @@ MODEL_WITH_LM_HEAD_MAPPING = OrderedDict(
 MODEL_FOR_CAUSAL_LM_MAPPING = OrderedDict(
     [
         (CamembertConfig, CamembertForCausalLM),
+        (XLMRobertaConfig, XLMRobertaForCausalLM),
         (RobertaConfig, RobertaForCausalLM),
         (BertConfig, BertLMHeadModel),
         (OpenAIGPTConfig, OpenAIGPTLMHeadModel),
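Registering (XLMRobertaConfig, XLMRobertaForCausalLM) in MODEL_FOR_CAUSAL_LM_MAPPING is what lets AutoModelForCausalLM (and hence EncoderDecoderModel's decoder loading) resolve XLM-RoBERTa checkpoints. A short sketch of that lookup, again assuming the public xlm-roberta-base checkpoint:

from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("xlm-roberta-base")
config.is_decoder = True  # enable causal masking for standalone LM use

# The auto class dispatches on config type: XLMRobertaConfig -> XLMRobertaForCausalLM.
model = AutoModelForCausalLM.from_pretrained("xlm-roberta-base", config=config)
print(type(model).__name__)  # XLMRobertaForCausalLM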
src/transformers/modeling_xlm_roberta.py

@@ -18,6 +18,7 @@
 from .configuration_xlm_roberta import XLMRobertaConfig
 from .file_utils import add_start_docstrings
 from .modeling_roberta import (
+    RobertaForCausalLM,
     RobertaForMaskedLM,
     RobertaForMultipleChoice,
     RobertaForQuestionAnswering,
@@ -67,6 +68,19 @@ class XLMRobertaModel(RobertaModel):
     config_class = XLMRobertaConfig
 
 
+@add_start_docstrings(
+    "XLM-RoBERTa Model with a `language modeling` head on top for CLM fine-tuning.",
+    XLM_ROBERTA_START_DOCSTRING,
+)
+class XLMRobertaForCausalLM(RobertaForCausalLM):
+    """
+    This class overrides :class:`~transformers.RobertaForCausalLM`. Please check the
+    superclass for the appropriate documentation alongside usage examples.
+    """
+
+    config_class = XLMRobertaConfig
+
+
 @add_start_docstrings(
     """XLM-RoBERTa Model with a `language modeling` head on top. """,
     XLM_ROBERTA_START_DOCSTRING,
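Since the new class only overrides config_class, all behaviour is inherited from RobertaForCausalLM. A standalone usage sketch (assumes xlm-roberta-base; is_decoder must be set, otherwise the inherited forward pass is not causally masked):

from transformers import XLMRobertaConfig, XLMRobertaForCausalLM, XLMRobertaTokenizer

tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")

config = XLMRobertaConfig.from_pretrained("xlm-roberta-base")
config.is_decoder = True  # causal self-attention instead of bidirectional
model = XLMRobertaForCausalLM.from_pretrained("xlm-roberta-base", config=config)

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
outputs = model(**inputs)
prediction_logits = outputs[0]  # shape: (batch_size, sequence_length, vocab_size)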