From 0a289d1630ae32e867211a98dd5fc6102e556a28 Mon Sep 17 00:00:00 2001
From: Keshav Singh <130352102+KeshavSingh29@users.noreply.github.com>
Date: Thu, 19 Jun 2025 01:26:46 +0900
Subject: [PATCH] log: Add logging when using split_batches and
 per_device_train_batch_size (#38633)

* log: Add logging when user uses split_batches and per_device_train_batch_size

* refactor: remove whitespace from blank line

* Update src/transformers/training_args.py

Change logging level to info

Co-authored-by: Marc Sun <57196510+SunMarc@users.noreply.github.com>

---------

Co-authored-by: Marc Sun <57196510+SunMarc@users.noreply.github.com>
---
 src/transformers/training_args.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/transformers/training_args.py b/src/transformers/training_args.py
index 03ace47468a..1a6f680d65a 100644
--- a/src/transformers/training_args.py
+++ b/src/transformers/training_args.py
@@ -1753,6 +1753,11 @@ class TrainingArguments:
             )
         else:
             self.accelerator_config = AcceleratorConfig.from_json_file(self.accelerator_config)
+        if self.accelerator_config.split_batches:
+            logger.info(
+                "Using `split_batches=True` in `accelerator_config` will override the `per_device_train_batch_size` "
+                "Batches will be split across all processes equally when using `split_batches=True`."
+            )
 
         # Initialize device before we proceed
         if self.framework == "pt" and is_torch_available():
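
Note (not part of the patch): a minimal sketch of how the new INFO message can be surfaced
locally. It assumes a transformers install that already contains this change, plus torch and
accelerate available; the output_dir value and batch size below are purely illustrative.

    # Sketch: trigger the new split_batches log added in this patch (values are illustrative)
    from transformers import TrainingArguments
    from transformers.utils import logging

    # The message is emitted via logger.info, so raise verbosity to INFO to see it
    logging.set_verbosity_info()

    args = TrainingArguments(
        output_dir="tmp_trainer",                    # illustrative output directory
        per_device_train_batch_size=8,               # value the log warns is effectively overridden
        accelerator_config={"split_batches": True},  # dict form is converted to AcceleratorConfig in __post_init__
    )

With split_batches=True, the batch produced by the dataloader is split across processes rather
than multiplied per device, which is why the new log points users away from relying on
per_device_train_batch_size in that configuration.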