Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)
Examples: check max_position_embeddings in the translation example (#29600)
check max_position_embeddings
This commit is contained in:
parent 6b660d5ed5
commit d47966536c
@@ -469,6 +469,19 @@ def main():
     source_lang = data_args.source_lang.split("_")[0]
     target_lang = data_args.target_lang.split("_")[0]
 
+    # Check whether the source/target length fits in the model, if it has absolute positional embeddings
+    if (
+        hasattr(model.config, "max_position_embeddings")
+        and not hasattr(model.config, "relative_attention_max_distance")
+        and model.config.max_position_embeddings < data_args.max_source_length
+    ):
+        raise ValueError(
+            f"`--max_source_length` is set to {data_args.max_source_length}, but the model only has"
+            f" {model.config.max_position_embeddings} position encodings. Consider either reducing"
+            f" `--max_source_length` to {model.config.max_position_embeddings} or using a model with larger position "
+            "embeddings"
+        )
+
     # Temporarily set max_target_length for training.
     max_target_length = data_args.max_target_length
     padding = "max_length" if data_args.pad_to_max_length else False
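The guard only applies to models with absolute position embeddings: it fires when the config exposes max_position_embeddings but not relative_attention_max_distance (the attribute carried by T5-style relative-attention configs, which are not bounded this way), and the requested --max_source_length exceeds the available positions. Below is a minimal, self-contained sketch of the same condition; ToyConfig, check_source_length, and the example sizes are hypothetical illustrations and not part of the commit.

from dataclasses import dataclass


@dataclass
class ToyConfig:
    """Hypothetical stand-in for a model config; only the field the check reads."""

    max_position_embeddings: int = 512


def check_source_length(config, max_source_length):
    # Same condition as in the diff: only configs with absolute position embeddings
    # (i.e. no relative_attention_max_distance attribute) are limited by max_position_embeddings.
    if (
        hasattr(config, "max_position_embeddings")
        and not hasattr(config, "relative_attention_max_distance")
        and config.max_position_embeddings < max_source_length
    ):
        raise ValueError(
            f"`--max_source_length` is set to {max_source_length}, but the model only has"
            f" {config.max_position_embeddings} position encodings."
        )


check_source_length(ToyConfig(), max_source_length=512)  # fits: no error
try:
    check_source_length(ToyConfig(), max_source_length=1024)  # exceeds the 512 positions
except ValueError as err:
    print(err)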