mirror of https://github.com/huggingface/transformers.git
Revert "Not always consider a local model a checkpoint in run_glue"
This reverts commit f3660613bc
.
This commit is contained in:
parent
f3660613bc
commit
745ea78dcc
@@ -406,15 +406,12 @@ def main():
 
     # Training
     if training_args.do_train:
-        checkpoint = None
         if last_checkpoint is not None:
             checkpoint = last_checkpoint
         elif os.path.isdir(model_args.model_name_or_path):
-            # Check the config from that potential checkpoint has the right number of labels before using it as a
-            # checkpoint.
-            if AutoConfig.from_pretrained(model_args.model_name_or_path).num_labels == num_labels:
-                checkpoint = model_args.model_name_or_path
-
+            checkpoint = model_args.model_name_or_path
+        else:
+            checkpoint = None
         train_result = trainer.train(resume_from_checkpoint=checkpoint)
         metrics = train_result.metrics
 