Don't crash if fine-tuned model doesn't end with a number (#3099)

This is the same fix as applied in https://github.com/huggingface/transformers/issues/2258, but for the GLUE example.
Davide Fiocco 2020-03-03 14:59:47 +01:00 committed by GitHub
parent eec5ec8071
commit c0c7ec3458


@@ -183,8 +183,11 @@ def train(args, train_dataset, model, tokenizer):
     steps_trained_in_current_epoch = 0
     # Check if continuing training from a checkpoint
     if os.path.exists(args.model_name_or_path):
-        # set global_step to gobal_step of last saved checkpoint from model path
-        global_step = int(args.model_name_or_path.split("-")[-1].split("/")[0])
+        # set global_step to global_step of last saved checkpoint from model path
+        try:
+            global_step = int(args.model_name_or_path.split("-")[-1].split("/")[0])
+        except ValueError:
+            global_step = 0
         epochs_trained = global_step // (len(train_dataloader) // args.gradient_accumulation_steps)
         steps_trained_in_current_epoch = global_step % (len(train_dataloader) // args.gradient_accumulation_steps)
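For context, here is a minimal sketch of the parsing logic the `try/except` now guards (not part of the commit; the helper name `resume_step` and the example paths are illustrative). Checkpoint directories saved during training are named `checkpoint-<step>`, so the step count can be read off the end of the path; an arbitrary fine-tuned model name such as `my-finetuned-bert` does not end in a number, so `int()` raises `ValueError` and training should simply start from step 0.

```python
# Minimal sketch of the guarded checkpoint-step parsing (illustrative, not the commit itself).

def resume_step(model_name_or_path: str) -> int:
    """Recover global_step from a checkpoint path like "output/checkpoint-500",
    or return 0 when the path does not name a numbered checkpoint."""
    try:
        # Take the text after the last "-", dropping any trailing path component.
        return int(model_name_or_path.split("-")[-1].split("/")[0])
    except ValueError:
        # Not a "checkpoint-<step>" directory: start from scratch instead of crashing.
        return 0

assert resume_step("output/checkpoint-500") == 500
assert resume_step("output/checkpoint-500/") == 500
assert resume_step("my-finetuned-bert") == 0  # previously raised ValueError
```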