[trainer] fix "scheduler before optimizer step" warning (#11144)

* fix "scheduler before optimizer step" warning

* style

* correct the state evaluation test
This commit is contained in:
Stas Bekman 2021-04-08 11:28:48 -07:00 committed by GitHub
parent 02ec02d6d3
commit 1ed24afe91
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -1151,17 +1151,21 @@ class Trainer:
)
# Optimizer step
optimizer_was_run = True
if self.deepspeed:
pass # called outside the loop
elif is_torch_tpu_available():
xm.optimizer_step(self.optimizer)
elif self.use_amp:
scale_before = self.scaler.get_scale()
self.scaler.step(self.optimizer)
self.scaler.update()
scale_after = self.scaler.get_scale()
optimizer_was_run = scale_before <= scale_after
else:
self.optimizer.step()
if not self.deepspeed:
if optimizer_was_run and not self.deepspeed:
self.lr_scheduler.step()
model.zero_grad()