Fix test for BF16 detection (#17803)
Parent: 7cced021fa
Commit: ef23fae596
@@ -1042,7 +1042,7 @@ class TrainingArguments:
             if self.no_cuda and not is_torch_bf16_cpu_available():
                 # cpu
                 raise ValueError("Your setup doesn't support bf16/cpu. You need torch>=1.10")
-            elif not is_torch_bf16_gpu_available():
+            elif not self.no_cuda and not is_torch_bf16_gpu_available():
                 # gpu
                 raise ValueError(
                     "Your setup doesn't support bf16/gpu. You need torch>=1.10, using Ampere GPU with cuda>=11.0"
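The fix adds `not self.no_cuda` to the `elif`, so the bf16/gpu error is only raised when CUDA is actually in use; previously a CPU-only run (`no_cuda=True`) with working bf16/cpu support could still trip the GPU check. Below is a minimal sketch of the corrected guard logic, assuming a standalone helper `check_bf16_support` with boolean stand-ins for `is_torch_bf16_cpu_available()` and `is_torch_bf16_gpu_available()`; it is illustrative only and not part of the library.

# Minimal sketch (not the actual TrainingArguments code) of the fixed guard:
# the GPU bf16 error should only fire when CUDA is actually being used.
def check_bf16_support(no_cuda: bool, bf16_cpu_ok: bool, bf16_gpu_ok: bool) -> None:
    # bf16_cpu_ok / bf16_gpu_ok stand in for is_torch_bf16_cpu_available()
    # and is_torch_bf16_gpu_available().
    if no_cuda and not bf16_cpu_ok:
        # cpu
        raise ValueError("Your setup doesn't support bf16/cpu. You need torch>=1.10")
    elif not no_cuda and not bf16_gpu_ok:
        # gpu: before the fix this branch lacked `not no_cuda`, so a CPU-only
        # run with bf16/cpu support could still fail here.
        raise ValueError(
            "Your setup doesn't support bf16/gpu. You need torch>=1.10, using Ampere GPU with cuda>=11.0"
        )

# Example: no_cuda=True with bf16 CPU support now passes instead of raising.
check_bf16_support(no_cuda=True, bf16_cpu_ok=True, bf16_gpu_ok=False)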