Mirror of https://github.com/huggingface/transformers.git
Fix batch size in evaluation loop (#16763)
* Fix batch size in evaluation loop

* remove debug statement
parent d8269eb4d5
commit b151ddb9b9
@@ -2411,7 +2411,7 @@ class Trainer:
         elif args.bf16_full_eval:
             model = model.to(dtype=torch.bfloat16, device=args.device)
 
-        batch_size = self.args.per_device_eval_batch_size
+        batch_size = self.args.eval_batch_size
 
         logger.info(f"***** Running {description} *****")
         if has_length(dataloader):
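For context, this one-line change matters on multi-GPU runs: `per_device_eval_batch_size` is the batch size on a single device, while `eval_batch_size` is a derived property of `TrainingArguments` that scales the per-device value by the number of devices, so the evaluation loop's size bookkeeping stays correct when more than one GPU is in use. A minimal sketch of the distinction, using a hypothetical `Args` class (not the library's full `TrainingArguments`) and assuming the property multiplies by `max(1, n_gpu)`:

    # Hypothetical stand-in for TrainingArguments, illustrating why
    # `eval_batch_size` is the right value for the evaluation loop.
    class Args:
        def __init__(self, per_device_eval_batch_size: int, n_gpu: int):
            self.per_device_eval_batch_size = per_device_eval_batch_size
            self.n_gpu = n_gpu

        @property
        def eval_batch_size(self) -> int:
            # Total batch size per evaluation step across all devices.
            return self.per_device_eval_batch_size * max(1, self.n_gpu)

    args = Args(per_device_eval_batch_size=8, n_gpu=4)
    print(args.per_device_eval_batch_size)  # 8  -- what the old code used
    print(args.eval_batch_size)             # 32 -- what the fixed code uses

With a single device the two values coincide, which is why the old code appeared to work in single-GPU evaluation and only miscounted under multi-GPU setups.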