Replace deprecated logger.warn with warning (#16876)
parent e03966e404
commit fea94d6790
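Context for the change: in CPython's standard logging module, Logger.warn is a deprecated alias of Logger.warning. A minimal sketch (not part of this commit) showing that only the old spelling triggers a DeprecationWarning:

import logging
import warnings

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    logger.warn("old spelling")     # deprecated alias, forwards to warning()
    logger.warning("new spelling")  # preferred API

# Only the logger.warn call emitted a DeprecationWarning.
assert any(issubclass(w.category, DeprecationWarning) for w in caught)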
@@ -269,7 +269,7 @@ def set_quantizer(name, mod, quantizer, k, v):
         assert hasattr(quantizer_mod, k)
         setattr(quantizer_mod, k, v)
     else:
-        logger.warn(f"{name} has no {quantizer}")
+        logger.warning(f"{name} has no {quantizer}")
 
 
 def set_quantizers(name, mod, which="both", **kwargs):
@@ -306,7 +306,7 @@ class PretrainedConfig(PushToHubMixin):
         if self.id2label is not None:
             num_labels = kwargs.pop("num_labels", None)
             if num_labels is not None and len(self.id2label) != num_labels:
-                logger.warn(
+                logger.warning(
                     f"You passed along `num_labels={num_labels}` with an incompatible id to label map: "
                     f"{self.id2label}. The number of labels wil be overwritten to {self.num_labels}."
                 )
@@ -641,7 +641,7 @@ class FlaxPreTrainedModel(PushToHubMixin, FlaxGenerationMixin):
         unexpected_keys = set(state.keys()) - model.required_params
 
         if missing_keys and not _do_init:
-            logger.warn(
+            logger.warning(
                 f"The checkpoint {pretrained_model_name_or_path} is missing required keys: {missing_keys}. "
                 f"Make sure to call model.init_weights to initialize the missing weights."
             )