mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-25 23:38:59 +06:00
small update to run_glue
This commit is contained in:
parent
d743f2f34e
commit
3b7cb7bf44
@@ -382,7 +382,8 @@ def main():
 
         # Save a trained model, configuration and tokenizer using `save_pretrained()`.
         # They can then be reloaded using `from_pretrained()`
-        model.save_pretrained(args.output_dir)
+        model_to_save = model.module if hasattr(model, 'module') else model  # Take care of distributed/parallel training
+        model_to_save.save_pretrained(args.output_dir)
         tokenizer.save_pretrained(args.output_dir)
 
         # Good practice: save your training arguments together with the trained model
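For context on why the added line unwraps the model before saving, here is a minimal sketch, not taken from the commit itself: the model name, output directory, and the use of DataParallel below are illustrative assumptions. When training uses torch.nn.DataParallel or DistributedDataParallel, the trained model is wrapped and save_pretrained() lives on model.module, not on the wrapper, which is exactly what the new model_to_save line handles.

# Minimal sketch; assumes torch and transformers are installed and that the
# model may have been wrapped in DataParallel during training.
import os
import torch
from transformers import BertForSequenceClassification, BertTokenizer

output_dir = "./glue_output"  # hypothetical output directory
os.makedirs(output_dir, exist_ok=True)

model = BertForSequenceClassification.from_pretrained("bert-base-uncased")
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")

if torch.cuda.device_count() > 1:
    # The DataParallel wrapper itself has no save_pretrained() method.
    model = torch.nn.DataParallel(model)

# Unwrap before saving: the underlying model sits on `.module` when wrapped.
model_to_save = model.module if hasattr(model, "module") else model
model_to_save.save_pretrained(output_dir)
tokenizer.save_pretrained(output_dir)

# Reloading with from_pretrained() works the same whether or not
# training was parallelized, since only the unwrapped weights were saved.
model = BertForSequenceClassification.from_pretrained(output_dir)
tokenizer = BertTokenizer.from_pretrained(output_dir)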