[doc] fix broken ref (#12597)
commit ce111feed1
parent f0dde60127
@@ -56,7 +56,7 @@ def load_vocab(vocab_file):
 
 class XLMProphetNetTokenizer(PreTrainedTokenizer):
     """
-    Adapted from :class:`~transformers.RobertaTokenizer` and class:`~transformers.XLNetTokenizer`. Based on
+    Adapted from :class:`~transformers.RobertaTokenizer` and :class:`~transformers.XLNetTokenizer`. Based on
     `SentencePiece <https://github.com/google/sentencepiece>`__.
 
     This tokenizer inherits from :class:`~transformers.PreTrainedTokenizer` which contains most of the main methods.
@@ -54,7 +54,7 @@ PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
 
 class XLMRobertaTokenizer(PreTrainedTokenizer):
     """
-    Adapted from :class:`~transformers.RobertaTokenizer` and class:`~transformers.XLNetTokenizer`. Based on
+    Adapted from :class:`~transformers.RobertaTokenizer` and :class:`~transformers.XLNetTokenizer`. Based on
     `SentencePiece <https://github.com/google/sentencepiece>`__.
 
     This tokenizer inherits from :class:`~transformers.PreTrainedTokenizer` which contains most of the main methods.
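Both hunks above touch docstrings of slow, SentencePiece-backed tokenizers that inherit from :class:`~transformers.PreTrainedTokenizer`. As a minimal usage sketch of that shared API (the `xlm-roberta-base` checkpoint is used here only as an example; any XLM-RoBERTa checkpoint behaves the same way):

from transformers import XLMRobertaTokenizer

# Slow tokenizer backed by SentencePiece; the encoding API comes from PreTrainedTokenizer.
tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")

encoding = tokenizer("Hello, world!")                          # returns input_ids, attention_mask, ...
print(encoding["input_ids"])                                   # ids include the <s> ... </s> special tokens
print(tokenizer.convert_ids_to_tokens(encoding["input_ids"]))  # back to SentencePiece pieces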
@@ -67,7 +67,7 @@ PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
 class XLMRobertaTokenizerFast(PreTrainedTokenizerFast):
     """
     Construct a "fast" XLM-RoBERTa tokenizer (backed by HuggingFace's `tokenizers` library). Adapted from
-    :class:`~transformers.RobertaTokenizer` and class:`~transformers.XLNetTokenizer`. Based on `BPE
+    :class:`~transformers.RobertaTokenizer` and :class:`~transformers.XLNetTokenizer`. Based on `BPE
     <https://huggingface.co/docs/tokenizers/python/latest/components.html?highlight=BPE#models>`__.
 
     This tokenizer inherits from :class:`~transformers.PreTrainedTokenizerFast` which contains most of the main
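The fast variant above is backed by HuggingFace's `tokenizers` library and inherits from :class:`~transformers.PreTrainedTokenizerFast` instead. A minimal sketch of the equivalent fast usage, again assuming the `xlm-roberta-base` checkpoint:

from transformers import XLMRobertaTokenizerFast

# "Fast" tokenizer backed by the Rust tokenizers library; inherits from PreTrainedTokenizerFast.
tokenizer = XLMRobertaTokenizerFast.from_pretrained("xlm-roberta-base")

print(tokenizer.is_fast)                    # True
print(tokenizer.tokenize("Hello, world!"))  # subword pieces produced by the underlying tokenizer model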