Move usage of deprecated logging.warn to logging.warning (#25310)
The former spelling is deprecated and has been discouraged for a while. The latter spelling seems to be more common in this project anyway, so this change ought to be safe. Fixes https://github.com/huggingface/transformers/issues/25283
This commit: 67683095a6 (parent: 641adca558)
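For context, `Logger.warn` is only a deprecated alias for `Logger.warning` in the standard library's `logging` module, and calling it may emit a `DeprecationWarning` on recent Python versions. A minimal, self-contained sketch (using a throwaway `demo` logger rather than transformers' own `logging.get_logger`) showing the two spellings side by side:

import logging
import warnings

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")  # stand-in for the library's module-level logger

# DeprecationWarnings are hidden by default; surface them for this demo.
warnings.simplefilter("always", DeprecationWarning)

logger.warn("old spelling")      # deprecated alias; may emit a DeprecationWarning
logger.warning("new spelling")   # the supported API, identical log output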
@@ -2853,7 +2853,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
         if "special_dtypes" in inspect.signature(infer_auto_device_map).parameters:
             kwargs["special_dtypes"] = special_dtypes
         elif len(special_dtypes) > 0:
-            logger.warn(
+            logger.warning(
                 "This model has some weights that should be kept in higher precision, you need to upgrade "
                 "`accelerate` to properly deal with them (`pip install --upgrade accelerate`)."
             )
@@ -3359,7 +3359,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
 
         if len(unexpected_keys) > 0:
             archs = [] if model.config.architectures is None else model.config.architectures
-            warner = logger.warn if model.__class__.__name__ in archs else logger.info
+            warner = logger.warning if model.__class__.__name__ in archs else logger.info
             warner(
                 f"Some weights of the model checkpoint at {pretrained_model_name_or_path} were not used when"
                 f" initializing {model.__class__.__name__}: {unexpected_keys}\n- This IS expected if you are"
@@ -403,7 +403,7 @@ class BlipTextEncoder(nn.Module):
     ) -> Union[Tuple[torch.Tensor], BaseModelOutputWithPastAndCrossAttentions]:
         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warn(
+                logger.warning(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False
@@ -940,7 +940,7 @@ class Blip2QFormerEncoder(nn.Module):
 
         if getattr(self.config, "gradient_checkpointing", False) and self.training:
             if use_cache:
-                logger.warn(
+                logger.warning(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False
@@ -39,7 +39,7 @@ try:
     from xformers import ops as xops
 except ImportError:
     xops = None
-    logger.warn(
+    logger.warning(
         "Xformers is not installed correctly. If you want to use memory_efficient_attention to accelerate training use the following command to install Xformers\npip install xformers."
     )
 
@@ -930,7 +930,7 @@ class InstructBlipQFormerEncoder(nn.Module):
 
         if getattr(self.config, "gradient_checkpointing", False) and self.training:
             if use_cache:
-                logger.warn(
+                logger.warning(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False
@@ -1273,7 +1273,7 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel):
             raise ValueError(f"Cannot load_adapter for {target_lang} if `config.adapter_attn_dim` is not defined.")
 
         if target_lang == self.target_lang and not force_load:
-            logger.warn(f"Adapter weights are already set to {target_lang}.")
+            logger.warning(f"Adapter weights are already set to {target_lang}.")
             return
 
         cache_dir = kwargs.pop("cache_dir", None)
@@ -224,12 +224,12 @@ class Agent:
             self._toolbox.update(additional_tools)
             if len(replacements) > 1:
                 names = "\n".join([f"- {n}: {t}" for n, t in replacements.items()])
-                logger.warn(
+                logger.warning(
                     f"The following tools have been replaced by the ones provided in `additional_tools`:\n{names}."
                 )
             elif len(replacements) == 1:
                 name = list(replacements.keys())[0]
-                logger.warn(f"{name} has been replaced by {replacements[name]} as provided in `additional_tools`.")
+                logger.warning(f"{name} has been replaced by {replacements[name]} as provided in `additional_tools`.")
 
         self.prepare_for_new_chat()
 
@@ -264,7 +264,7 @@ class Tool:
         if len(tool_class.name) == 0:
             tool_class.name = custom_tool["name"]
         if tool_class.name != custom_tool["name"]:
-            logger.warn(
+            logger.warning(
                 f"{tool_class.__name__} implements a different name in its configuration and class. Using the tool "
                 "configuration name."
             )
@@ -273,7 +273,7 @@ class Tool:
         if len(tool_class.description) == 0:
             tool_class.description = custom_tool["description"]
         if tool_class.description != custom_tool["description"]:
-            logger.warn(
+            logger.warning(
                 f"{tool_class.__name__} implements a different description in its configuration and class. Using the "
                 "tool configuration description."
             )