Fix warning function call causing a logger error (max_length and max_new_tokens) (#22889)

Author: Quentin Ambard, 2023-04-20 14:08:03 +02:00, committed by GitHub
Parent: aa4316757d
Commit: aa43a76538
Signature: No known key found for this signature in database (GPG Key ID: 4AEE18F83AFDEB23)
3 changed files with 6 additions and 9 deletions


@@ -387,12 +387,11 @@ class FlaxGenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )

         if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length:
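
For context on the fix: the dropped `UserWarning` argument belongs to the `warnings.warn(message, category)` signature, not to the `logging` API. `logging.Logger.warning` treats extra positional arguments as %-format values for the message, and since the message above contains no format placeholders, record formatting fails and the handler prints a "--- Logging error ---" traceback at runtime. (`logger.warn` is also a deprecated alias of `logger.warning` in the standard library, hence the rename.) A minimal standalone sketch of both patterns, using a plain stdlib logger rather than the transformers one:

    import logging

    logging.basicConfig()
    logger = logging.getLogger("demo")  # illustrative logger name, not the transformers logger

    # Buggy pattern: UserWarning is taken as a %-format argument for the
    # message. The message has no placeholders, so formatting the record
    # raises TypeError and logging reports "--- Logging error ---" on stderr.
    logger.warning("`max_new_tokens` will take precedence over `max_length`.", UserWarning)

    # Fixed pattern: pass only the fully formatted message.
    logger.warning("`max_new_tokens` will take precedence over `max_length`.")

The same one-for-one correction is applied in the TF and PyTorch mixins below.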


@@ -860,12 +860,11 @@ class TFGenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )

         # If the input length is a tensor (i.e. dynamic length), skip length checks


@@ -1347,12 +1347,11 @@ class GenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )

         if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length: