Mirror of https://github.com/huggingface/transformers.git, synced 2025-08-03 03:31:05 +06:00.
[trainer] solve "scheduler before optimizer step" warning (#11144)
* solve "scheduler before optimizer step" warning
* style
* correct the state evaluation test
This commit is contained in:
parent
02ec02d6d3
commit
1ed24afe91
@@ -1151,17 +1151,21 @@ class Trainer:
                 )

                 # Optimizer step
+                optimizer_was_run = True
                 if self.deepspeed:
                     pass  # called outside the loop
                 elif is_torch_tpu_available():
                     xm.optimizer_step(self.optimizer)
                 elif self.use_amp:
+                    scale_before = self.scaler.get_scale()
                     self.scaler.step(self.optimizer)
                     self.scaler.update()
+                    scale_after = self.scaler.get_scale()
+                    optimizer_was_run = scale_before <= scale_after
                 else:
                     self.optimizer.step()

-                if not self.deepspeed:
+                if optimizer_was_run and not self.deepspeed:
                     self.lr_scheduler.step()

                 model.zero_grad()
Loading…
Reference in New Issue
Block a user