mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-24 23:08:57 +06:00
This PR fixes the "RuntimeError: No CUDA GPUs are available" error when running with the --bf16 option on Neuron. Related PRs: https://github.com/huggingface/transformers/pull/20684 https://github.com/huggingface/transformers/pull/22300
This commit is contained in:
parent
aef488c503
commit
ec9b18f62d
@ -588,7 +588,12 @@ class Trainer:
|
||||
|
||||
if args.fp16 or args.bf16:
|
||||
if args.half_precision_backend == "auto":
|
||||
if args.device == torch.device("cpu"):
|
||||
if is_torch_neuroncore_available():
|
||||
if args.fp16:
|
||||
raise ValueError("Tried to use `fp16` but this option is not yet supported on Neuron.")
|
||||
else:
|
||||
args.half_precision_backend = "cpu_amp"
|
||||
elif args.device == torch.device("cpu"):
|
||||
if args.fp16:
|
||||
raise ValueError("Tried to use `fp16` but it is not supported on cpu")
|
||||
elif _is_native_cpu_amp_available:
|
||||
|
Loading…
Reference in New Issue
Block a user