[doc] :class: hunt (#14955)
* [doc] :class: hunt
* Apply suggestions from code review
* fix the fix + style

Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>
parent 2c5597f6c7
commit 10fd4fa1a6
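Every hunk below makes the same kind of substitution: Sphinx `:class:` cross-reference roles are rewritten into the Markdown-style links understood by the new doc builder. As a purely illustrative sketch (no such script ships with this commit), the docstring cases reduce to a one-line regex; note that the first hunk keeps the `transformers.` prefix because its target is a plain error message rather than rendered documentation.

import re

# Hypothetical helper, not part of this commit: the mechanical form of the
# ":class: hunt". Sphinx cross-reference roles become Markdown-style links.
CLASS_ROLE = re.compile(r":class:`~transformers\.(\w+)`")

def convert_class_refs(text: str) -> str:
    """Rewrite :class:`~transformers.X` as [`X`]."""
    return CLASS_ROLE.sub(r"[`\1`]", text)

print(convert_class_refs("see :class:`~transformers.RagRetriever`"))
# -> see [`RagRetriever`]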
@@ -139,7 +139,7 @@ class SequenceFeatureExtractor(FeatureExtractionMixin):
         # The model's main input name, usually `input_values`, has to be passed for padding
         if self.model_input_names[0] not in processed_features:
             raise ValueError(
-                "You should supply an instance of :class:`~transformers.BatchFeature` or list of :class:`~transformers.BatchFeature` to this method "
+                "You should supply an instance of `transformers.BatchFeature` or list of `transformers.BatchFeature` to this method "
                 f"that includes {self.model_input_names[0]}, but you provided {list(processed_features.keys())}"
             )

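For context, the guard above fails loudly when the features passed for padding lack the model's main input name. A standalone recreation with made-up values shows the message a caller would see:

# Made-up values; mirrors the check shown in the hunk above.
model_input_names = ["input_values"]
processed_features = {"attention_mask": [[1, 1, 1]]}

if model_input_names[0] not in processed_features:
    raise ValueError(
        "You should supply an instance of `transformers.BatchFeature` or list of "
        "`transformers.BatchFeature` to this method "
        f"that includes {model_input_names[0]}, but you provided {list(processed_features.keys())}"
    )
# ValueError: ... that includes input_values, but you provided ['attention_mask']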
@@ -411,8 +411,8 @@ ALL_PRETRAINED_CONFIG_ARCHIVE_MAP = _LazyLoadAllMappings(CONFIG_ARCHIVE_MAP_MAPP

 def _get_class_name(model_class: Union[str, List[str]]):
     if isinstance(model_class, (list, tuple)):
-        return " or ".join([f":class:`~transformers.{c}`" for c in model_class if c is not None])
-    return f":class:`~transformers.{model_class}`"
+        return " or ".join([f"[`{c}`]" for c in model_class if c is not None])
+    return f"[`{model_class}`]"


 def _list_model_options(indent, config_to_class=None, use_model_types=True):
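The before/after behavior of `_get_class_name` is easy to verify in isolation. This is a standalone copy of the new version plus a quick check (example class names chosen arbitrarily):

from typing import List, Union

# Standalone copy of the new _get_class_name, for demonstration only.
def _get_class_name(model_class: Union[str, List[str]]):
    if isinstance(model_class, (list, tuple)):
        return " or ".join([f"[`{c}`]" for c in model_class if c is not None])
    return f"[`{model_class}`]"

print(_get_class_name("BertModel"))                   # [`BertModel`]
print(_get_class_name(["BertModel", "TFBertModel"]))  # [`BertModel`] or [`TFBertModel`]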
@@ -420,9 +420,7 @@ def _list_model_options(indent, config_to_class=None, use_model_types=True):
         raise ValueError("Using `use_model_types=False` requires a `config_to_class` dictionary.")
     if use_model_types:
         if config_to_class is None:
-            model_type_to_name = {
-                model_type: f":class:`~transformers.{config}`" for model_type, config in CONFIG_MAPPING_NAMES.items()
-            }
+            model_type_to_name = {model_type: f"[`{config}`]" for model_type, config in CONFIG_MAPPING_NAMES.items()}
         else:
             model_type_to_name = {
                 model_type: _get_class_name(model_class)
@@ -443,7 +441,7 @@ def _list_model_options(indent, config_to_class=None, use_model_types=True):
             config: MODEL_NAMES_MAPPING[model_type] for model_type, config in CONFIG_MAPPING_NAMES.items()
         }
     lines = [
-        f"{indent}- :class:`~transformers.{config_name}` configuration class: {config_to_name[config_name]} ({config_to_model_name[config_name]} model)"
+        f"{indent}- [`{config_name}`] configuration class: {config_to_name[config_name]} ({config_to_model_name[config_name]} model)"
         for config_name in sorted(config_to_name.keys())
     ]
     return "\n".join(lines)
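The f-string above renders one bullet per configuration class. A toy rendering with invented single-entry mappings shows the output format the docs see:

# Invented mappings, just to show the rendered line format.
indent = "    "
config_to_name = {"BertConfig": "[`BertModel`]"}
config_to_model_name = {"BertConfig": "BERT"}

lines = [
    f"{indent}- [`{config_name}`] configuration class: {config_to_name[config_name]} "
    f"({config_to_model_name[config_name]} model)"
    for config_name in sorted(config_to_name.keys())
]
print("\n".join(lines))
#     - [`BertConfig`] configuration class: [`BertModel`] (BERT model)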
@@ -94,9 +94,9 @@ MMBT_START_DOCSTRING = r"""
         config ([`MMBTConfig`]): Model configuration class with all the parameters of the model.
             Initializing with a config file does not load the weights associated with the model, only the
             configuration.
-        transformer (:class: *~nn.Module*): A text transformer that is used by MMBT.
+        transformer (`nn.Module`): A text transformer that is used by MMBT.
             It should have embeddings, encoder, and pooler attributes.
-        encoder (:class: *~nn.Module*): Encoder for the second modality.
+        encoder (`nn.Module`): Encoder for the second modality.
             It should take in a batch of modal inputs and return k, n dimension embeddings.
 """
@@ -737,7 +737,7 @@ class Speech2Text2DecoderWrapper(Speech2Text2PreTrainedModel):


 @add_start_docstrings(
-    "The Speech2Text2 Decoder with a language modeling head. Can be used as the decoder part of :class:`~transformers.EncoderDecoderModel` and :class:`~transformers.SpeechEncoderDecoder`.",
+    "The Speech2Text2 Decoder with a language modeling head. Can be used as the decoder part of [`EncoderDecoderModel`] and [`SpeechEncoderDecoder`].",
     SPEECH_TO_TEXT_2_START_DOCSTRING,
 )
 class Speech2Text2ForCausalLM(Speech2Text2PreTrainedModel):
@@ -770,7 +770,7 @@ class TrOCRDecoderWrapper(TrOCRPreTrainedModel):


 @add_start_docstrings(
-    "The TrOCR Decoder with a language modeling head. Can be used as the decoder part of :class:`~transformers.EncoderDecoderModel` and :class:`~transformers.VisionEncoderDecoder`.",
+    "The TrOCR Decoder with a language modeling head. Can be used as the decoder part of [`EncoderDecoderModel`] and [`VisionEncoderDecoder`].",
     TROCR_START_DOCSTRING,
 )
 class TrOCRForCausalLM(TrOCRPreTrainedModel):
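Both decorator hunks change only the string handed to `add_start_docstrings`, which prepends its arguments to the decorated object's docstring. A minimal sketch of that mechanism, not the library's exact implementation:

# Minimal sketch: prepend the given strings to the wrapped object's docstring.
def add_start_docstrings(*docstr):
    def decorator(obj):
        obj.__doc__ = "".join(docstr) + (obj.__doc__ or "")
        return obj
    return decorator

@add_start_docstrings("The TrOCR Decoder with a language modeling head. ")
class Example:
    """Further details would follow here."""

print(Example.__doc__)
# The TrOCR Decoder with a language modeling head. Further details would follow here.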
@@ -84,7 +84,7 @@ def _assert_tensors_equal(a, b, atol=1e-12, prefix=""):
 def require_retrieval(test_case):
     """
     Decorator marking a test that requires a set of dependencies necessary for performing retrieval with
-    :class:`~transformers.RagRetriever`.
+    [`RagRetriever`].

     These tests are skipped when respective libraries are not installed.

@@ -44,7 +44,7 @@ TOLERANCE = 1e-3
 def require_retrieval(test_case):
     """
     Decorator marking a test that requires a set of dependencies necessary for performing retrieval with
-    :class:`~transformers.RagRetriever`.
+    [`RagRetriever`].

     These tests are skipped when respective libraries are not installed.

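Both test hunks touch the same docstring of `require_retrieval`. Such decorators are typically thin wrappers over `unittest.skipUnless`; a hedged sketch, assuming the retrieval dependencies are `faiss` and `datasets` (the real dependency list lives in the test modules):

import importlib.util
import unittest

# Sketch only: skip the test unless the assumed retrieval deps are importable.
def require_retrieval(test_case):
    has_deps = all(
        importlib.util.find_spec(pkg) is not None for pkg in ("faiss", "datasets")
    )
    return unittest.skipUnless(
        has_deps, "test requires retrieval dependencies (faiss, datasets)"
    )(test_case)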