Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-03 12:50:06 +06:00
fix: bf16 with TPU is allowed in configuration (#38670)
* fix: tpu bf16
* fix: style

Co-authored-by: Marc Sun <57196510+SunMarc@users.noreply.github.com>
parent: c8c1e525ed
commit: b426c2b313
@@ -1692,7 +1692,7 @@ class TrainingArguments:
                 # cpu
                 raise ValueError("Your setup doesn't support bf16/(cpu, tpu, neuroncore). You need torch>=1.10")
             elif not self.use_cpu:
-                if not is_torch_bf16_gpu_available():
+                if not is_torch_bf16_gpu_available() and not is_torch_xla_available():  # added for tpu support
                     error_message = "Your setup doesn't support bf16/gpu."
                     if is_torch_cuda_available():
                         error_message += " You need Ampere+ GPU with cuda>=11.0"
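For context, a minimal sketch of what this change permits, assuming a TPU host with torch_xla installed (the output_dir value is a hypothetical placeholder, not from the PR):

# Minimal sketch, not part of the PR: with this fix, requesting bf16 on an
# XLA/TPU setup no longer trips the GPU capability check, since TPUs
# support bf16 natively.
from transformers import TrainingArguments
from transformers.utils import is_torch_xla_available

if is_torch_xla_available():
    # Before this fix, this raised "Your setup doesn't support bf16/gpu."
    args = TrainingArguments(output_dir="out", bf16=True)  # "out" is a placeholder
    print(args.bf16)  # True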