From 711d901c49bbc896f508920b70bfd8a83f11e5da Mon Sep 17 00:00:00 2001
From: qqaatw
Date: Wed, 14 Jul 2021 00:08:15 +0800
Subject: [PATCH] Fix minor docstring typos. (#12682)

---
 src/transformers/modeling_flax_utils.py | 2 +-
 src/transformers/modeling_tf_utils.py   | 2 +-
 src/transformers/modeling_utils.py      | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/transformers/modeling_flax_utils.py b/src/transformers/modeling_flax_utils.py
index b93a97c6283..6c4a30a35c0 100644
--- a/src/transformers/modeling_flax_utils.py
+++ b/src/transformers/modeling_flax_utils.py
@@ -199,7 +199,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin):
             from_pt (:obj:`bool`, `optional`, defaults to :obj:`False`):
                 Load the model weights from a PyTorch checkpoint save file (see docstring of
                 ``pretrained_model_name_or_path`` argument).
-            ignore_mismatched_size (:obj:`bool`, `optional`, defaults to :obj:`False`):
+            ignore_mismatched_sizes (:obj:`bool`, `optional`, defaults to :obj:`False`):
                 Whether or not to raise an error if some of the weights from the checkpoint do not have the same size
                 as the weights of the model (if for instance, you are instantiating a model with 10 labels from a
                 checkpoint with 3 labels).
diff --git a/src/transformers/modeling_tf_utils.py b/src/transformers/modeling_tf_utils.py
index ff84b80af09..da6c6c32d59 100644
--- a/src/transformers/modeling_tf_utils.py
+++ b/src/transformers/modeling_tf_utils.py
@@ -1132,7 +1132,7 @@ class TFPreTrainedModel(tf.keras.Model, TFModelUtilsMixin, TFGenerationMixin, Pu
             from_pt: (:obj:`bool`, `optional`, defaults to :obj:`False`):
                 Load the model weights from a PyTorch state_dict save file (see docstring of
                 ``pretrained_model_name_or_path`` argument).
-            ignore_mismatched_size (:obj:`bool`, `optional`, defaults to :obj:`False`):
+            ignore_mismatched_sizes (:obj:`bool`, `optional`, defaults to :obj:`False`):
                 Whether or not to raise an error if some of the weights from the checkpoint do not have the same size
                 as the weights of the model (if for instance, you are instantiating a model with 10 labels from a
                 checkpoint with 3 labels).
diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py
index 207503ccf9a..20102c51a54 100644
--- a/src/transformers/modeling_utils.py
+++ b/src/transformers/modeling_utils.py
@@ -1037,7 +1037,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
             from_flax (:obj:`bool`, `optional`, defaults to :obj:`False`):
                 Load the model weights from a Flax checkpoint save file (see docstring of
                 ``pretrained_model_name_or_path`` argument).
-            ignore_mismatched_size (:obj:`bool`, `optional`, defaults to :obj:`False`):
+            ignore_mismatched_sizes (:obj:`bool`, `optional`, defaults to :obj:`False`):
                 Whether or not to raise an error if some of the weights from the checkpoint do not have the same size
                 as the weights of the model (if for instance, you are instantiating a model with 10 labels from a
                 checkpoint with 3 labels).
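
For context (not part of the patch itself): the corrected argument name, ``ignore_mismatched_sizes``, matches the keyword actually accepted by ``from_pretrained``. The sketch below shows the scenario the docstring describes, loading a checkpoint whose classification head has a different number of labels; the checkpoint name and ``num_labels`` value are illustrative assumptions, not taken from the patch.

```python
# Minimal usage sketch (assumptions: checkpoint name and label count are arbitrary).
from transformers import AutoModelForSequenceClassification

# The checkpoint's head was trained with a different number of labels than we
# want here. Passing ignore_mismatched_sizes=True tells from_pretrained to skip
# the mismatched weights and freshly initialize them instead of raising an error.
model = AutoModelForSequenceClassification.from_pretrained(
    "bert-base-uncased",           # hypothetical checkpoint, for illustration only
    num_labels=10,                 # differs from the checkpoint's head size
    ignore_mismatched_sizes=True,  # continue loading despite the size mismatch
)
```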