Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00).

Commit: Fix LR scheduler based on bs from auto bs finder (#24521)

* One solution
* args -> self
This commit is contained in:
parent
38db04ece0
commit
fb6a62762f
@@ -1553,7 +1553,7 @@ class Trainer:
         # number of training epochs: num_train_epochs
         # number of training steps per epoch: num_update_steps_per_epoch
         # total number of training steps to execute: max_steps
-        total_train_batch_size = args.train_batch_size * args.gradient_accumulation_steps * args.world_size
+        total_train_batch_size = self._train_batch_size * args.gradient_accumulation_steps * args.world_size

         len_dataloader = None
         if has_length(train_dataloader):
Loading…
Reference in New Issue
Block a user