Do not prepare lr scheduler as it has the right number of steps (#24088)

* Do not prepare lr scheduler as it has the right number of steps

* Trigger CI

* Trigger CI

* Trigger CI

* Add fake comment

* Remove fake comment

* Trigger CI please!
This commit is contained in:
Sylvain Gugger 2023-06-07 15:31:32 -04:00 committed by GitHub
parent 12298cb65c
commit 2c887cf8e0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -1747,9 +1747,7 @@ class Trainer:
# prepare using `accelerator` prepare
if use_accelerator_prepare:
model, self.optimizer, self.lr_scheduler = self.accelerator.prepare(
self.model, self.optimizer, self.lr_scheduler
)
model, self.optimizer = self.accelerator.prepare(self.model, self.optimizer)
if self.is_fsdp_enabled:
self.model = model
@ -1996,6 +1994,7 @@ class Trainer:
optimizer_was_run = scale_before <= scale_after
else:
self.optimizer.step()
optimizer_was_run = not self.accelerator.optimizer_step_was_skipped
if optimizer_was_run:
# Delay optimizer scheduling until metrics are generated