Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-03 04:40:06 +06:00
log: Add logging when using split_batches and per_device_train_batch_size (#38633)
* log: Add logging when user uses split_batches and per_device_train_batch_size

* refactor: remove whitespace from blank line

* Update src/transformers/training_args.py

  Change logging level to info

  Co-authored-by: Marc Sun <57196510+SunMarc@users.noreply.github.com>

---------

Co-authored-by: Marc Sun <57196510+SunMarc@users.noreply.github.com>
parent c55d806355
commit 0a289d1630
@@ -1753,6 +1753,11 @@ class TrainingArguments:
                 )
             else:
                 self.accelerator_config = AcceleratorConfig.from_json_file(self.accelerator_config)
+        if self.accelerator_config.split_batches:
+            logger.info(
+                "Using `split_batches=True` in `accelerator_config` will override the `per_device_train_batch_size` "
+                "Batches will be split across all processes equally when using `split_batches=True`."
+            )
 
         # Initialize device before we proceed
         if self.framework == "pt" and is_torch_available():