From e6522e49a7ff8a58793d13653aa94adfe56b44ce Mon Sep 17 00:00:00 2001
From: Adam Louly
Date: Wed, 15 Nov 2023 10:16:42 -0800
Subject: [PATCH] Fixing the failure of models without max_position_embeddings
 attribute. (#27499)

fix max pos issue

Co-authored-by: Adam Louly
---
 examples/pytorch/language-modeling/run_clm.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/examples/pytorch/language-modeling/run_clm.py b/examples/pytorch/language-modeling/run_clm.py
index 4e153e72fe4..7c668919253 100755
--- a/examples/pytorch/language-modeling/run_clm.py
+++ b/examples/pytorch/language-modeling/run_clm.py
@@ -497,15 +497,20 @@ def main():
                 batched=True,
                 remove_columns=column_names,
             )
+    if hasattr(config, "max_position_embeddings"):
+        max_pos_embeddings = config.max_position_embeddings
+    else:
+        # Define a default value if the attribute is missing in the config.
+        max_pos_embeddings = 1024
 
     if data_args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > config.max_position_embeddings:
+        if block_size > max_pos_embeddings:
             logger.warning(
                 f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
-                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
+                f"Using block_size={min(1024, max_pos_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = min(1024, config.max_position_embeddings)
+            block_size = min(1024, max_pos_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(
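
Note: the sketch below is an illustrative, standalone rendering of the fallback behavior this patch introduces; it is not part of the patch. `DummyConfig` and `resolve_block_size` are hypothetical names used only to show how the block-size selection in run_clm.py behaves when a config has no `max_position_embeddings` attribute.

    # Minimal sketch, assuming a config object that lacks `max_position_embeddings`.
    class DummyConfig:
        # Intentionally has no `max_position_embeddings` attribute.
        vocab_size = 32000


    def resolve_block_size(config, tokenizer_model_max_length, requested_block_size=None):
        """Mirror the block-size selection in run_clm.py after this patch."""
        if hasattr(config, "max_position_embeddings"):
            max_pos_embeddings = config.max_position_embeddings
        else:
            # Same default the patch falls back to when the attribute is missing.
            max_pos_embeddings = 1024

        if requested_block_size is None:
            block_size = tokenizer_model_max_length
            if block_size > max_pos_embeddings:
                block_size = min(1024, max_pos_embeddings)
            return block_size
        return min(requested_block_size, tokenizer_model_max_length)


    if __name__ == "__main__":
        # Before the patch this path raised AttributeError on configs without the
        # attribute; with the fallback it resolves to 1024.
        print(resolve_block_size(DummyConfig(), tokenizer_model_max_length=10**30))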