Fixing the failure of models without max_position_embeddings attribute. (#27499)
fix max pos issue

Co-authored-by: Adam Louly <adamlouly@microsoft.com@orttrainingdev9.d32nl1ml4oruzj4qz3bqlggovf.px.internal.cloudapp.net>
This commit is contained in:
parent a0633c4483
commit e6522e49a7
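The fix is the standard attribute-probe pattern: check the config with hasattr and fall back to a default when the attribute is absent. A minimal sketch of the pattern in isolation, assuming a bare SimpleNamespace as a stand-in for a real transformers PretrainedConfig:

    from types import SimpleNamespace

    config = SimpleNamespace()  # stand-in config with no max_position_embeddings attribute

    if hasattr(config, "max_position_embeddings"):
        max_pos_embeddings = config.max_position_embeddings
    else:
        # Default used by the patch when the attribute is missing.
        max_pos_embeddings = 1024

    assert max_pos_embeddings == 1024

An equivalent one-liner is getattr(config, "max_position_embeddings", 1024); the patch keeps the explicit if/else so the fallback can carry an explanatory comment.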
@@ -497,15 +497,20 @@ def main():
                 batched=True,
                 remove_columns=column_names,
             )
+    if hasattr(config, "max_position_embeddings"):
+        max_pos_embeddings = config.max_position_embeddings
+    else:
+        # Define a default value if the attribute is missing in the config.
+        max_pos_embeddings = 1024
 
     if data_args.block_size is None:
         block_size = tokenizer.model_max_length
-        if block_size > config.max_position_embeddings:
+        if block_size > max_pos_embeddings:
             logger.warning(
                 f"The tokenizer picked seems to have a very large `model_max_length` ({tokenizer.model_max_length}). "
-                f"Using block_size={min(1024, config.max_position_embeddings)} instead. You can change that default value by passing --block_size xxx."
+                f"Using block_size={min(1024, max_pos_embeddings)} instead. You can change that default value by passing --block_size xxx."
             )
-            block_size = min(1024, config.max_position_embeddings)
+            block_size = min(1024, max_pos_embeddings)
     else:
         if data_args.block_size > tokenizer.model_max_length:
             logger.warning(