From aa43a765380693cbb0657b3de216886e0a6a674c Mon Sep 17 00:00:00 2001 From: Quentin Ambard Date: Thu, 20 Apr 2023 14:08:03 +0200 Subject: [PATCH] fix warning function call creating logger error (max_length and max_new_tokens) (#22889) --- src/transformers/generation/flax_utils.py | 5 ++--- src/transformers/generation/tf_utils.py | 5 ++--- src/transformers/generation/utils.py | 5 ++--- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/src/transformers/generation/flax_utils.py b/src/transformers/generation/flax_utils.py index 4ff1164c88e..c28e0afbb99 100644 --- a/src/transformers/generation/flax_utils.py +++ b/src/transformers/generation/flax_utils.py @@ -387,12 +387,11 @@ class FlaxGenerationMixin: elif generation_config.max_new_tokens is not None: generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length if not has_default_max_length: - logger.warn( + logger.warning( f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(=" f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. " "Please refer to the documentation for more information. 
" - "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)", - UserWarning, + "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)" ) if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length: diff --git a/src/transformers/generation/tf_utils.py b/src/transformers/generation/tf_utils.py index cc95cb31a4a..5cd8153c2bf 100644 --- a/src/transformers/generation/tf_utils.py +++ b/src/transformers/generation/tf_utils.py @@ -860,12 +860,11 @@ class TFGenerationMixin: elif generation_config.max_new_tokens is not None: generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length if not has_default_max_length: - logger.warn( + logger.warning( f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(=" f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. " "Please refer to the documentation for more information. " - "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)", - UserWarning, + "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)" ) # If the input length is a tensor (i.e. dynamic length), skip length checks diff --git a/src/transformers/generation/utils.py b/src/transformers/generation/utils.py index ebc3ab14dca..62a04d6dcf4 100644 --- a/src/transformers/generation/utils.py +++ b/src/transformers/generation/utils.py @@ -1347,12 +1347,11 @@ class GenerationMixin: elif generation_config.max_new_tokens is not None: generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length if not has_default_max_length: - logger.warn( + logger.warning( f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(=" f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. " "Please refer to the documentation for more information. 
" - "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)", - UserWarning, + "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)" ) if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length: