mirror of https://github.com/huggingface/transformers.git
synced 2025-07-31 02:02:21 +06:00
Drop inplace operation for loss computation with gradient accumulation (#35416)
Fix inplace loss computation
This commit is contained in:
parent 24c91f095f
commit 4eb17b26e7
@@ -3700,7 +3700,7 @@ class Trainer:
         else:
             # Finally we need to normalize the loss for reporting
             if num_items_in_batch is None:
-                loss /= self.args.gradient_accumulation_steps
+                loss = loss / self.args.gradient_accumulation_steps

             self.accelerator.backward(loss, **kwargs)
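The change swaps the augmented in-place division for an out-of-place one when normalizing the loss by the number of gradient accumulation steps. Below is a minimal, standalone sketch (not the Trainer code itself; the variable names and values are illustrative) of the behavioral difference: an in-place division mutates the tensor that every other reference points at, while the out-of-place form creates a new tensor and leaves existing references untouched.

import torch

grad_accum_steps = 4  # illustrative value

# In-place: any other reference to the same tensor sees the scaled value.
loss = torch.tensor(8.0)
reported = loss                 # e.g. a reference kept around for logging
loss /= grad_accum_steps        # mutates the shared storage
print(reported)                 # tensor(2.) -- changed behind the caller's back

# Out-of-place: a new tensor is created; existing references stay intact.
loss = torch.tensor(8.0)
reported = loss
loss = loss / grad_accum_steps  # rebinds the name, original tensor untouched
print(reported)                 # tensor(8.)

Depending on how the loss tensor is produced and reused, the in-place form can also run into autograd's restrictions on mutating tensors that are still needed for the backward pass; the out-of-place form sidesteps that class of error at the cost of allocating a new scalar tensor.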