Add XLMRobertaForQuestionAnswering (#4855)

* Add XLMRobertaForQuestionAnswering

* Formatting

* Make test happy
Sylvain Gugger 2020-06-08 21:22:37 -04:00 committed by GitHub
parent a139d1a160
commit 41a1d27cde
4 changed files with 25 additions and 0 deletions


@@ -84,6 +84,13 @@ XLMRobertaForTokenClassification
     :members:
 
 
+XLMRobertaForQuestionAnswering
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. autoclass:: transformers.XLMRobertaForQuestionAnswering
+    :members:
+
+
 TFXLMRobertaModel
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


@@ -298,6 +298,7 @@ if is_torch_available():
         XLMRobertaForMultipleChoice,
         XLMRobertaForSequenceClassification,
         XLMRobertaForTokenClassification,
+        XLMRobertaForQuestionAnswering,
         XLM_ROBERTA_PRETRAINED_MODEL_ARCHIVE_LIST,
     )
     from .modeling_mmbt import ModalEmbeddings, MMBTModel, MMBTForClassification
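
With the class re-exported from the package root, it can be imported directly from `transformers` when torch is available. A minimal sanity-check sketch; the tiny config values below are arbitrary and only keep the randomly initialized model small:

# Minimal check that the new head is exposed at the top level of the package.
# The config values are arbitrary; they just keep the random model tiny.
from transformers import XLMRobertaConfig, XLMRobertaForQuestionAnswering

config = XLMRobertaConfig(
    vocab_size=100, hidden_size=32, num_hidden_layers=1,
    num_attention_heads=2, intermediate_size=64,
)
model = XLMRobertaForQuestionAnswering(config)
print(type(model).__name__)  # -> XLMRobertaForQuestionAnswering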


@@ -121,6 +121,7 @@ from .modeling_xlm import (
 from .modeling_xlm_roberta import (
     XLMRobertaForMaskedLM,
     XLMRobertaForMultipleChoice,
+    XLMRobertaForQuestionAnswering,
     XLMRobertaForSequenceClassification,
     XLMRobertaForTokenClassification,
     XLMRobertaModel,
@@ -230,6 +231,7 @@ MODEL_FOR_QUESTION_ANSWERING_MAPPING = OrderedDict(
         (DistilBertConfig, DistilBertForQuestionAnswering),
         (AlbertConfig, AlbertForQuestionAnswering),
         (LongformerConfig, LongformerForQuestionAnswering),
+        (XLMRobertaConfig, XLMRobertaForQuestionAnswering),
         (RobertaConfig, RobertaForQuestionAnswering),
         (BertConfig, BertForQuestionAnswering),
         (XLNetConfig, XLNetForQuestionAnsweringSimple),
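
The mapping entry is what lets the auto classes pick the XLM-R head, and its position matters: in this version of the library the auto classes walk `MODEL_FOR_QUESTION_ANSWERING_MAPPING` in order and match the config with `isinstance`, and since `XLMRobertaConfig` subclasses `RobertaConfig`, the more specific entry has to sit before the Roberta one. A minimal sketch of the effect; `xlm-roberta-base` is the public XLM-R checkpoint and the QA head built here is randomly initialized:

# Sketch: resolve an XLM-R config through the auto class and confirm it maps
# to the new question-answering head. Needs network access to fetch the config.
from transformers import AutoConfig, AutoModelForQuestionAnswering

config = AutoConfig.from_pretrained("xlm-roberta-base")
model = AutoModelForQuestionAnswering.from_config(config)  # head weights are random
print(type(model).__name__)  # -> XLMRobertaForQuestionAnswering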


@@ -23,6 +23,7 @@ from .file_utils import add_start_docstrings
 from .modeling_roberta import (
     RobertaForMaskedLM,
     RobertaForMultipleChoice,
+    RobertaForQuestionAnswering,
     RobertaForSequenceClassification,
     RobertaForTokenClassification,
     RobertaModel,
@@ -120,3 +121,17 @@ class XLMRobertaForTokenClassification(RobertaForTokenClassification):
     """
 
     config_class = XLMRobertaConfig
+
+
+@add_start_docstrings(
+    """XLM-RoBERTa Model with a span classification head on top for extractive question-answering tasks like SQuAD (a
+    linear layer on top of the hidden-states output to compute `span start logits` and `span end logits`).""",
+    XLM_ROBERTA_START_DOCSTRING,
+)
+class XLMRobertaForQuestionAnswering(RobertaForQuestionAnswering):
+    """
+    This class overrides :class:`~transformers.RobertaForQuestionAnswering`. Please check the
+    superclass for the appropriate documentation alongside usage examples.
+    """
+
+    config_class = XLMRobertaConfig
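
End to end, the new class behaves like the other XLM-R heads. A usage sketch, with the caveats that loading the plain `xlm-roberta-base` weights leaves the span-classification head randomly initialized (you would fine-tune on a QA dataset such as SQuAD before expecting real answers), and that at the time of this commit the model returns plain tuples rather than the named outputs of later releases:

# Usage sketch: extractive QA with the new head. The base checkpoint has no
# trained QA head, so the decoded span below is noise until you fine-tune.
import torch
from transformers import XLMRobertaTokenizer, XLMRobertaForQuestionAnswering

tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")
model = XLMRobertaForQuestionAnswering.from_pretrained("xlm-roberta-base")
model.eval()

question = "Who wrote the report?"
context = "The report was written by the audit team in early 2020."
inputs = tokenizer.encode_plus(question, context, return_tensors="pt")

with torch.no_grad():
    # Without labels the first two outputs are the span logits, in both the
    # tuple-style and the later named-output return formats.
    start_logits, end_logits = model(**inputs)[:2]

start = int(start_logits.argmax())
end = int(end_logits.argmax()) + 1  # end index is exclusive
answer = tokenizer.decode(inputs["input_ids"][0][start:end].tolist())
print(answer)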