Mirror of https://github.com/huggingface/transformers.git
fix warning function call creating logger error (max_length and max_new_tokens) (#22889)
parent aa4316757d
commit aa43a76538
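For context, here is a minimal, self-contained sketch (not part of the patch; message text and logger name are illustrative) of the bug this commit fixes. `Logger.warning` treats extra positional arguments as %-format arguments for the message, so passing a `UserWarning` category the way `warnings.warn` expects makes the logging module fail while formatting the record and print a "--- Logging error ---" traceback. The fix is to call `logger.warning` with the message only (or use `warnings.warn` when a real warning category is wanted); `logger.warn` is also replaced because it is a deprecated alias of `logger.warning`.

import logging
import warnings

logging.basicConfig()
logger = logging.getLogger("transformers.generation")

# Buggy pattern removed by this commit: the extra positional argument is
# interpreted as a %-format argument, but the message has no % placeholder,
# so the handler reports "--- Logging error ---" with
# "TypeError: not all arguments converted during string formatting".
logger.warning("`max_new_tokens` will take precedence over `max_length`.", UserWarning)

# Fixed pattern (what the diff below does): message only, via warning()
# rather than the deprecated warn() alias.
logger.warning("`max_new_tokens` will take precedence over `max_length`.")

# Alternative when a genuine UserWarning is desired: warnings.warn does
# accept a category argument.
warnings.warn("`max_new_tokens` will take precedence over `max_length`.", UserWarning)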
@@ -387,12 +387,11 @@ class FlaxGenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
 
         if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length:
@@ -860,12 +860,11 @@ class TFGenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
 
         # If the input length is a tensor (i.e. dynamic length), skip length checks
@@ -1347,12 +1347,11 @@ class GenerationMixin:
         elif generation_config.max_new_tokens is not None:
             generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
             if not has_default_max_length:
-                logger.warn(
+                logger.warning(
                     f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
                     f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
                     "Please refer to the documentation for more information. "
-                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)",
-                    UserWarning,
+                    "(https://huggingface.co/docs/transformers/main/en/main_classes/text_generation)"
                 )
 
         if generation_config.min_length is not None and generation_config.min_length > generation_config.max_length: