Mirror of https://github.com/huggingface/transformers.git, synced 2025-08-02 19:21:31 +06:00
Do not prepare lr scheduler as it has the right number of steps (#24088)
* Do not prepare lr scheduler as it has the right number of steps
* Trigger CI
* Trigger CI
* Trigger CI
* Add fake comment
* Remove fake comment
* Trigger CI please!
This commit is contained in:
parent 12298cb65c
commit 2c887cf8e0
@@ -1747,9 +1747,7 @@ class Trainer:
         # prepare using `accelerator` prepare
         if use_accelerator_prepare:
-            model, self.optimizer, self.lr_scheduler = self.accelerator.prepare(
-                self.model, self.optimizer, self.lr_scheduler
-            )
+            model, self.optimizer = self.accelerator.prepare(self.model, self.optimizer)

         if self.is_fsdp_enabled:
             self.model = model
@@ -1996,6 +1994,7 @@ class Trainer:
                     optimizer_was_run = scale_before <= scale_after
                 else:
                     self.optimizer.step()
+                    optimizer_was_run = not self.accelerator.optimizer_step_was_skipped

                 if optimizer_was_run:
                     # Delay optimizer scheduling until metrics are generated
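For readers skimming the diff: the Trainer builds its lr scheduler for the total number of optimization steps, so passing it to `accelerator.prepare` would wrap it in Accelerate's scheduler wrapper, which steps once per process and would advance the learning rate too fast; the added `optimizer_step_was_skipped` check keeps the schedule in sync when the mixed-precision grad scaler skips a step. Below is a minimal sketch of that pattern, not the Trainer code itself; the model, optimizer, and scheduler choices are placeholders, assuming recent torch and accelerate releases.

# Minimal, hypothetical sketch of the pattern this commit adopts;
# not the Trainer code itself.
import torch
from accelerate import Accelerator

accelerator = Accelerator()
model = torch.nn.Linear(4, 4)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

# The scheduler is already sized for the total number of optimization
# steps, so it is deliberately NOT passed to `accelerator.prepare`:
# a prepared scheduler would step once per process and run ahead.
lr_scheduler = torch.optim.lr_scheduler.LinearLR(optimizer, total_iters=100)
model, optimizer = accelerator.prepare(model, optimizer)

for _ in range(100):
    loss = model(torch.randn(8, 4)).sum()
    accelerator.backward(loss)
    optimizer.step()
    optimizer.zero_grad()
    # Under mixed precision the grad scaler can skip a step; only
    # advance the lr schedule when the optimizer actually ran.
    if not accelerator.optimizer_step_was_skipped:
        lr_scheduler.step()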