Fix syntax for class references (#14644)

Sylvain Gugger authored 2021-12-06 13:31:27 -05:00, committed by GitHub
parent e9688875bf
commit e513c16e82
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 25 additions and 25 deletions
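
For context, the swap below is between two Sphinx reST roles used throughout the docstrings: :class:`~transformers.Foo` cross-links to the documentation page of the class (the tilde makes Sphinx display only the final name component), while :obj: is the generic any-object role that the project's docs conventionally reserve for literals and plain values. A minimal illustration of the convention (a hypothetical docstring, not code from this commit):

    def tokenize(text, tokenizer=None):
        """
        Tokenize a string.

        Args:
            text (:obj:`str`):
                Plain value types keep the generic :obj: role.
            tokenizer (:class:`~transformers.PreTrainedTokenizer`, `optional`):
                Class references use :class: so Sphinx renders a link to the class page.
        """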

View File

@@ -326,8 +326,8 @@ class FlaxGenerationMixin:
         self, top_k: int = None, top_p: float = None, temperature: float = None
     ) -> FlaxLogitsProcessorList:
         """
-        This class returns a :obj:`~transformers.FlaxLogitsProcessorList` list object that contains all relevant
-        :obj:`~transformers.FlaxLogitsWarper` instances used for multinomial sampling.
+        This class returns a :class:`~transformers.FlaxLogitsProcessorList` list object that contains all relevant
+        :class:`~transformers.FlaxLogitsWarper` instances used for multinomial sampling.
         """
         # init warp parameters
@@ -358,8 +358,8 @@ class FlaxGenerationMixin:
         forced_eos_token_id: int,
     ) -> FlaxLogitsProcessorList:
         """
-        This class returns a :obj:`~transformers.FlaxLogitsProcessorList` list object that contains all relevant
-        :obj:`~transformers.FlaxLogitsProcessor` instances used to modify the scores of the language model head.
+        This class returns a :class:`~transformers.FlaxLogitsProcessorList` list object that contains all relevant
+        :class:`~transformers.FlaxLogitsProcessor` instances used to modify the scores of the language model head.
         """
         processors = FlaxLogitsProcessorList()
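
Both methods only assemble a container of warper/processor objects. A hedged sketch of what the multinomial-sampling path builds (the class names are the Flax logits warpers transformers exports, assuming a Flax install):

    from transformers import (
        FlaxLogitsProcessorList,
        FlaxTemperatureLogitsWarper,
        FlaxTopKLogitsWarper,
        FlaxTopPLogitsWarper,
    )

    # Warpers rescale or filter the next-token logits before sampling.
    warpers = FlaxLogitsProcessorList()
    warpers.append(FlaxTemperatureLogitsWarper(0.7))   # soften/sharpen the distribution
    warpers.append(FlaxTopKLogitsWarper(top_k=50))     # keep the 50 most likely tokens
    warpers.append(FlaxTopPLogitsWarper(top_p=0.95))   # nucleus (top-p) filtering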

View File

@@ -535,8 +535,8 @@ class GenerationMixin:
         self, top_k: int = None, top_p: float = None, temperature: float = None, num_beams: int = None
     ) -> LogitsProcessorList:
         """
-        This class returns a :obj:`~transformers.LogitsProcessorList` list object that contains all relevant
-        :obj:`~transformers.LogitsWarper` instances used for multinomial sampling.
+        This class returns a :class:`~transformers.LogitsProcessorList` list object that contains all relevant
+        :class:`~transformers.LogitsWarper` instances used for multinomial sampling.
         """
         # init warp parameters
@@ -575,8 +575,8 @@ class GenerationMixin:
         remove_invalid_values: bool,
     ) -> LogitsProcessorList:
         """
-        This class returns a :obj:`~transformers.LogitsProcessorList` list object that contains all relevant
-        :obj:`~transformers.LogitsProcessor` instances used to modify the scores of the language model head.
+        This class returns a :class:`~transformers.LogitsProcessorList` list object that contains all relevant
+        :class:`~transformers.LogitsProcessor` instances used to modify the scores of the language model head.
        """
         processors = LogitsProcessorList()
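
The PyTorch twins behave the same way and can be driven by hand; a hedged sketch using processors that transformers exports (the shapes and token ids are made up):

    import torch
    from transformers import (
        LogitsProcessorList,
        MinLengthLogitsProcessor,
        TopKLogitsWarper,
    )

    processors = LogitsProcessorList([MinLengthLogitsProcessor(10, eos_token_id=2)])
    warpers = LogitsProcessorList([TopKLogitsWarper(top_k=50)])

    input_ids = torch.tensor([[0, 5, 7]])   # fake prefix, batch of 1
    scores = torch.randn(1, 32000)          # fake next-token logits
    scores = processors(input_ids, scores)  # e.g. forbids EOS before min_length
    scores = warpers(input_ids, scores)     # filters down to the top-k logits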

View File

@@ -737,7 +737,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
         Prepare the output of the saved model. Each model must implement this function.

         Args:
-            output (:obj:`~transformers.TFBaseModelOutput`):
+            output (:class:`~transformers.TFBaseModelOutput`):
                 The output returned by the model.
         """
         raise NotImplementedError
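
A sketch of the contract this docstring describes, written as a free function for brevity; real implementations are methods on TFPreTrainedModel subclasses, and this mirrors the pattern the TF models in the library tend to follow:

    import tensorflow as tf
    from transformers.modeling_tf_outputs import TFBaseModelOutput

    def serving_output(output: TFBaseModelOutput, output_hidden_states: bool, output_attentions: bool) -> TFBaseModelOutput:
        # Tuples of per-layer tensors must become single tensors for SavedModel signatures.
        hs = tf.convert_to_tensor(output.hidden_states) if output_hidden_states else None
        attns = tf.convert_to_tensor(output.attentions) if output_attentions else None
        return TFBaseModelOutput(
            last_hidden_state=output.last_hidden_state,
            hidden_states=hs,
            attentions=attns,
        )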

View File

@@ -1277,7 +1277,7 @@ class TapasTokenizer(PreTrainedTokenizer):
                 Total number of table columns
             max_length (:obj:`int`):
                 Total maximum length.
-            truncation_strategy (:obj:`str` or :obj:`~transformers.TapasTruncationStrategy`):
+            truncation_strategy (:obj:`str` or :class:`~transformers.TapasTruncationStrategy`):
                 Truncation strategy to use. Seeing as this method should only be called when truncating, the only
                 available strategy is the :obj:`"drop_rows_to_fit"` strategy.
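
A hedged usage sketch of the public call that routes through this private method (the checkpoint name is an example, and pandas is required; TAPAS expects string-valued cells):

    import pandas as pd
    from transformers import TapasTokenizer

    tokenizer = TapasTokenizer.from_pretrained("google/tapas-base")
    table = pd.DataFrame({"City": ["Paris", "Lyon"], "Population": ["2175000", "516000"]})
    encoding = tokenizer(
        table=table,
        queries=["Which city is the largest?"],
        padding="max_length",
        truncation="drop_rows_to_fit",  # the only strategy valid when truncating, per the docstring
    )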

View File

@@ -372,13 +372,13 @@ def pipeline(
             - :obj:`"summarization"`: will return a :class:`~transformers.SummarizationPipeline`:.
             - :obj:`"zero-shot-classification"`: will return a :class:`~transformers.ZeroShotClassificationPipeline`:.
-        model (:obj:`str` or :obj:`~transformers.PreTrainedModel` or :obj:`~transformers.TFPreTrainedModel`, `optional`):
+        model (:obj:`str` or :class:`~transformers.PreTrainedModel` or :class:`~transformers.TFPreTrainedModel`, `optional`):
             The model that will be used by the pipeline to make predictions. This can be a model identifier or an
             actual instance of a pretrained model inheriting from :class:`~transformers.PreTrainedModel` (for PyTorch)
             or :class:`~transformers.TFPreTrainedModel` (for TensorFlow).

             If not provided, the default for the :obj:`task` will be loaded.
-        config (:obj:`str` or :obj:`~transformers.PretrainedConfig`, `optional`):
+        config (:obj:`str` or :class:`~transformers.PretrainedConfig`, `optional`):
             The configuration that will be used by the pipeline to instantiate the model. This can be a model
             identifier or an actual pretrained model configuration inheriting from
             :class:`~transformers.PretrainedConfig`.
@@ -386,7 +386,7 @@ def pipeline(
             If not provided, the default configuration file for the requested model will be used. That means that if
             :obj:`model` is given, its default configuration will be used. However, if :obj:`model` is not supplied,
             this :obj:`task`'s default model's config is used instead.
-        tokenizer (:obj:`str` or :obj:`~transformers.PreTrainedTokenizer`, `optional`):
+        tokenizer (:obj:`str` or :class:`~transformers.PreTrainedTokenizer`, `optional`):
             The tokenizer that will be used by the pipeline to encode data for the model. This can be a model
             identifier or an actual pretrained tokenizer inheriting from :class:`~transformers.PreTrainedTokenizer`.
@@ -394,7 +394,7 @@ def pipeline(
             :obj:`model` is not specified or not a string, then the default tokenizer for :obj:`config` is loaded (if
             it is a string). However, if :obj:`config` is also not given or not a string, then the default tokenizer
             for the given :obj:`task` will be loaded.
-        feature_extractor (:obj:`str` or :obj:`~transformers.PreTrainedFeatureExtractor`, `optional`):
+        feature_extractor (:obj:`str` or :class:`~transformers.PreTrainedFeatureExtractor`, `optional`):
             The feature extractor that will be used by the pipeline to encode data for the model. This can be a model
             identifier or an actual pretrained feature extractor inheriting from
             :class:`~transformers.PreTrainedFeatureExtractor`.
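
A usage sketch for the parameters above (the model id is an example, not something this commit mandates):

    from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer, pipeline

    # Simplest form: let the task pick its default model.
    classifier = pipeline("sentiment-analysis")

    # Explicit form: pass model/config/tokenizer as objects instead of strings.
    name = "distilbert-base-uncased-finetuned-sst-2-english"
    config = AutoConfig.from_pretrained(name)
    model = AutoModelForSequenceClassification.from_pretrained(name, config=config)
    tokenizer = AutoTokenizer.from_pretrained(name)
    classifier = pipeline("sentiment-analysis", model=model, config=config, tokenizer=tokenizer)

    print(classifier("This commit cleans up the docs."))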

View File

@@ -93,7 +93,7 @@ class AudioClassificationPipeline(Pipeline):
         **kwargs,
     ):
         """
-        Classify the sequence(s) given as inputs. See the :obj:`~transformers.AutomaticSpeechRecognitionPipeline`
+        Classify the sequence(s) given as inputs. See the :class:`~transformers.AutomaticSpeechRecognitionPipeline`
         documentation for more information.

         Args:
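
A hedged usage sketch of this pipeline (the checkpoint is an example keyword-spotting model, and the audio path is a hypothetical local file):

    from transformers import pipeline

    audio_classifier = pipeline(
        "audio-classification",
        model="superb/hubert-base-superb-ks",
    )
    print(audio_classifier("sample.wav"))  # list of {"label": ..., "score": ...}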

View File

@@ -77,13 +77,13 @@ class AutomaticSpeechRecognitionPipeline(Pipeline):
     def __init__(self, feature_extractor: Union["SequenceFeatureExtractor", str], *args, **kwargs):
         """
         Arguments:
-            feature_extractor (:obj:`~transformers.SequenceFeatureExtractor`):
+            feature_extractor (:class:`~transformers.SequenceFeatureExtractor`):
                 The feature extractor that will be used by the pipeline to encode waveform for the model.
-            model (:obj:`~transformers.PreTrainedModel` or :obj:`~transformers.TFPreTrainedModel`):
+            model (:class:`~transformers.PreTrainedModel` or :class:`~transformers.TFPreTrainedModel`):
                 The model that will be used by the pipeline to make predictions. This needs to be a model inheriting
                 from :class:`~transformers.PreTrainedModel` for PyTorch and :class:`~transformers.TFPreTrainedModel`
                 for TensorFlow.
-            tokenizer (:obj:`~transformers.PreTrainedTokenizer`):
+            tokenizer (:class:`~transformers.PreTrainedTokenizer`):
                 The tokenizer that will be used by the pipeline to encode data for the model. This object inherits from
                 :class:`~transformers.PreTrainedTokenizer`.
             modelcard (:obj:`str` or :class:`~transformers.ModelCard`, `optional`):
@@ -114,7 +114,7 @@ class AutomaticSpeechRecognitionPipeline(Pipeline):
         **kwargs,
     ):
         """
-        Classify the sequence(s) given as inputs. See the :obj:`~transformers.AutomaticSpeechRecognitionPipeline`
+        Classify the sequence(s) given as inputs. See the :class:`~transformers.AutomaticSpeechRecognitionPipeline`
         documentation for more information.

         Args:
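
A hedged usage sketch (the checkpoint is an example CTC model, and the audio path is a hypothetical local file):

    from transformers import pipeline

    asr = pipeline(
        "automatic-speech-recognition",
        model="facebook/wav2vec2-base-960h",
    )
    print(asr("sample.flac"))  # -> {"text": "..."}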

View File

@@ -644,11 +644,11 @@ class _ScikitCompat(ABC):
 PIPELINE_INIT_ARGS = r"""
     Arguments:
-        model (:obj:`~transformers.PreTrainedModel` or :obj:`~transformers.TFPreTrainedModel`):
+        model (:class:`~transformers.PreTrainedModel` or :class:`~transformers.TFPreTrainedModel`):
             The model that will be used by the pipeline to make predictions. This needs to be a model inheriting from
             :class:`~transformers.PreTrainedModel` for PyTorch and :class:`~transformers.TFPreTrainedModel` for
             TensorFlow.
-        tokenizer (:obj:`~transformers.PreTrainedTokenizer`):
+        tokenizer (:class:`~transformers.PreTrainedTokenizer`):
             The tokenizer that will be used by the pipeline to encode data for the model. This object inherits from
             :class:`~transformers.PreTrainedTokenizer`.
         modelcard (:obj:`str` or :class:`~transformers.ModelCard`, `optional`):
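
PIPELINE_INIT_ARGS is a shared docstring block rather than code; a sketch of how the codebase splices it into each pipeline class, assuming the helper import paths of the time:

    from transformers import Pipeline
    from transformers.file_utils import add_end_docstrings
    from transformers.pipelines.base import PIPELINE_INIT_ARGS

    @add_end_docstrings(PIPELINE_INIT_ARGS)
    class MyPipeline(Pipeline):
        """A hypothetical pipeline; the shared argument docs are appended to this docstring."""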

View File

@@ -16,11 +16,11 @@ class FeatureExtractionPipeline(Pipeline):
     `huggingface.co/models <https://huggingface.co/models>`__.

     Arguments:
-        model (:obj:`~transformers.PreTrainedModel` or :obj:`~transformers.TFPreTrainedModel`):
+        model (:class:`~transformers.PreTrainedModel` or :class:`~transformers.TFPreTrainedModel`):
             The model that will be used by the pipeline to make predictions. This needs to be a model inheriting from
             :class:`~transformers.PreTrainedModel` for PyTorch and :class:`~transformers.TFPreTrainedModel` for
             TensorFlow.
-        tokenizer (:obj:`~transformers.PreTrainedTokenizer`):
+        tokenizer (:class:`~transformers.PreTrainedTokenizer`):
             The tokenizer that will be used by the pipeline to encode data for the model. This object inherits from
             :class:`~transformers.PreTrainedTokenizer`.
         modelcard (:obj:`str` or :class:`~transformers.ModelCard`, `optional`):
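
A hedged usage sketch (the model id is an example):

    from transformers import pipeline

    extractor = pipeline("feature-extraction", model="distilbert-base-uncased")
    features = extractor("A sentence to embed.")
    print(len(features[0]))  # one hidden-state vector per token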

View File

@@ -154,7 +154,7 @@ class ZeroShotClassificationPipeline(Pipeline):
         **kwargs,
     ):
         """
-        Classify the sequence(s) given as inputs. See the :obj:`~transformers.ZeroShotClassificationPipeline`
+        Classify the sequence(s) given as inputs. See the :class:`~transformers.ZeroShotClassificationPipeline`
         documentation for more information.

         Args:
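
A hedged usage sketch (the checkpoint is an example NLI model):

    from transformers import pipeline

    classifier = pipeline("zero-shot-classification", model="facebook/bart-large-mnli")
    result = classifier(
        "The GPU budget was approved yesterday.",
        candidate_labels=["finance", "sports", "politics"],
    )
    print(result["labels"][0], round(result["scores"][0], 3))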

View File

@@ -239,7 +239,7 @@ class Trainer:
         compute_metrics (:obj:`Callable[[EvalPrediction], Dict]`, `optional`):
             The function that will be used to compute metrics at evaluation. Must take a
             :class:`~transformers.EvalPrediction` and return a dictionary string to metric values.
-        callbacks (List of :obj:`~transformers.TrainerCallback`, `optional`):
+        callbacks (List of :class:`~transformers.TrainerCallback`, `optional`):
             A list of callbacks to customize the training loop. Will add those to the list of default callbacks
             detailed in :doc:`here <callback>`.
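
A hedged sketch of wiring both parameters together; the model and datasets are placeholders, and EarlyStoppingCallback is a stock TrainerCallback that transformers ships:

    import numpy as np
    from transformers import EarlyStoppingCallback, Trainer, TrainingArguments

    def compute_metrics(eval_pred):
        logits, labels = eval_pred  # an EvalPrediction unpacks to (predictions, label_ids)
        return {"accuracy": (np.argmax(logits, axis=-1) == labels).mean()}

    args = TrainingArguments(
        output_dir="out",
        evaluation_strategy="epoch",
        save_strategy="epoch",
        load_best_model_at_end=True,       # required by EarlyStoppingCallback
        metric_for_best_model="accuracy",  # matches the key returned above
    )
    trainer = Trainer(
        model=model,                  # placeholder: any PreTrainedModel
        args=args,
        train_dataset=train_dataset,  # placeholder datasets
        eval_dataset=eval_dataset,
        compute_metrics=compute_metrics,
        callbacks=[EarlyStoppingCallback(early_stopping_patience=3)],
    )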