Fix batch size in evaluation loop (#16763)

* Fix batch size in evaluation loop

* remove debug statement
Sylvain Gugger, 2022-04-14 09:22:54 -04:00 (committed by GitHub)
parent d8269eb4d5
commit b151ddb9b9

@@ -2411,7 +2411,7 @@ class Trainer:
 elif args.bf16_full_eval:
     model = model.to(dtype=torch.bfloat16, device=args.device)
-batch_size = self.args.per_device_eval_batch_size
+batch_size = self.args.eval_batch_size
 logger.info(f"***** Running {description} *****")
 if has_length(dataloader):
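
For context on the one-line change: `per_device_eval_batch_size` is the batch size on a single device, while `eval_batch_size` on `TrainingArguments` is the total batch size across all devices, so the old line under-reported the effective batch size in the evaluation loop on multi-device setups. Below is a minimal, hedged sketch of that distinction; the small class is a simplified stand-in that mirrors the attribute names used in transformers, not the library's actual implementation.

# Sketch (assumption): a per-device batch size scaled by the device count
# gives the total evaluation batch size, in the spirit of TrainingArguments.
from dataclasses import dataclass


@dataclass
class EvalArgs:
    per_device_eval_batch_size: int = 8
    n_gpu: int = 2  # number of devices taking part in evaluation

    @property
    def eval_batch_size(self) -> int:
        # Total samples consumed per evaluation step across all devices.
        return self.per_device_eval_batch_size * max(1, self.n_gpu)


args = EvalArgs()
print(args.per_device_eval_batch_size)  # 8  (what the old line reported)
print(args.eval_batch_size)             # 16 (what the fixed line reports)

With two devices in this sketch, the old line would log a batch size of 8 while 16 samples are actually processed per step, which is the discrepancy the commit corrects.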