fix: bf16 with TPU is allowed in configuration (#38670)

* fix: tpu bf16

* fix: style

---------

Co-authored-by: Marc Sun <57196510+SunMarc@users.noreply.github.com>
Author: ye · 2025-06-11 21:35:01 +09:00 · committed by GitHub
parent c8c1e525ed
commit b426c2b313


@@ -1692,7 +1692,7 @@ class TrainingArguments:
                 # cpu
                 raise ValueError("Your setup doesn't support bf16/(cpu, tpu, neuroncore). You need torch>=1.10")
             elif not self.use_cpu:
-                if not is_torch_bf16_gpu_available():
+                if not is_torch_bf16_gpu_available() and not is_torch_xla_available():  # added for tpu support
                     error_message = "Your setup doesn't support bf16/gpu."
                     if is_torch_cuda_available():
                         error_message += " You need Ampere+ GPU with cuda>=11.0"
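
With this change, a failing is_torch_bf16_gpu_available() check is no longer fatal when is_torch_xla_available() is true, so bf16 can be enabled in TrainingArguments on TPU hosts. A minimal sketch of the now-accepted configuration (assumes a TPU host with torch_xla installed; the output_dir value is an arbitrary example):

from transformers import TrainingArguments

# On a TPU host where is_torch_xla_available() returns True, this no longer
# raises "Your setup doesn't support bf16/gpu." during __post_init__.
args = TrainingArguments(
    output_dir="out",  # arbitrary example path
    bf16=True,         # TPUs support bfloat16 natively
)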