Update training_args.py (#12328)
Mention in the `save_strategy` param description that `load_best_model_at_end` can override it.
parent 2150dfed31
commit 3694484d0a
@@ -171,12 +171,13 @@ class TrainingArguments:
         logging_steps (:obj:`int`, `optional`, defaults to 500):
             Number of update steps between two logs if :obj:`logging_strategy="steps"`.
         save_strategy (:obj:`str` or :class:`~transformers.trainer_utils.IntervalStrategy`, `optional`, defaults to :obj:`"steps"`):
-            The checkpoint save strategy to adopt during training. Possible values are:
+            The checkpoint save strategy to adopt during training (Note that when :obj:`load_best_model_at_end=True`,
+            this parameter is ignored and the model is saved after each evaluation). Possible values are:

                 * :obj:`"no"`: No save is done during training.
                 * :obj:`"epoch"`: Save is done at the end of each epoch.
                 * :obj:`"steps"`: Save is done every :obj:`save_steps`.

         save_steps (:obj:`int`, `optional`, defaults to 500):
             Number of update steps before two checkpoint saves if :obj:`save_strategy="steps"`.
         save_total_limit (:obj:`int`, `optional`):
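For context, a minimal sketch of the interaction this docstring change documents, against the TrainingArguments API as of this commit; the output directory and metric choice below are illustrative assumptions, not part of the patch:

# Sketch of the documented save_strategy / load_best_model_at_end interaction.
# `output_dir` and `metric_for_best_model` values are illustrative assumptions.
from transformers import TrainingArguments

# Without load_best_model_at_end, checkpoints follow `save_strategy`:
# here, one checkpoint every 500 update steps, keeping at most two.
periodic = TrainingArguments(
    output_dir="checkpoints",
    save_strategy="steps",
    save_steps=500,
    save_total_limit=2,
)

# With load_best_model_at_end=True, `save_strategy` is ignored (per the
# docstring above) and a checkpoint is written after each evaluation
# instead, so the best model can be restored when training ends.
best_at_end = TrainingArguments(
    output_dir="checkpoints",
    evaluation_strategy="epoch",   # evaluation cadence drives saving here
    save_strategy="steps",         # documented as ignored in this configuration
    load_best_model_at_end=True,
    metric_for_best_model="loss",  # assumption: rank checkpoints by eval loss
)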