Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-31 02:02:21 +06:00
* Warn if debug requested without TPU fixes (#6308)

  Check whether a PyTorch-compatible TPU is available before attempting to print TPU metrics after training has completed. This way, users who pass `--debug` without reading the documentation aren't surprised by a stack trace.

* Style

Co-authored-by: Lysandre <lysandre.debut@reseau.eseo.fr>
parent
cdf1f7edb2
commit
87e124c245
@@ -624,8 +624,14 @@ class Trainer:
                     train_iterator.close()
                     break
                 if self.args.tpu_metrics_debug or self.args.debug:
-                    # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
-                    xm.master_print(met.metrics_report())
+                    if is_torch_tpu_available():
+                        # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
+                        xm.master_print(met.metrics_report())
+                    else:
+                        logger.warning(
+                            "You enabled PyTorch/XLA debug metrics but you don't have a TPU "
+                            "configured. Check your training configuration if this is unexpected."
+                        )
 
         if self.tb_writer:
             self.tb_writer.close()
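For readers who want the behavior outside the diff, the sketch below is a minimal, standalone illustration of the guarded pattern this commit introduces. It is not the Trainer code itself: the helper name `maybe_report_tpu_metrics` is hypothetical, and it assumes `is_torch_tpu_available` can be imported from `transformers.file_utils` (where it lived at the time of this commit) and that `torch_xla` is only importable in TPU-enabled environments.

# Minimal sketch of the guarded TPU-metrics reporting introduced by this commit.
# Standalone illustration only; `maybe_report_tpu_metrics` is a hypothetical
# helper, not part of transformers.
import logging

from transformers.file_utils import is_torch_tpu_available

logger = logging.getLogger(__name__)


def maybe_report_tpu_metrics(debug_requested: bool) -> None:
    """Print PyTorch/XLA debug metrics only if a TPU is actually configured."""
    if not debug_requested:
        return
    if is_torch_tpu_available():
        # Import torch_xla lazily: it is only present on TPU-enabled installs.
        import torch_xla.core.xla_model as xm
        import torch_xla.debug.metrics as met

        # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
        xm.master_print(met.metrics_report())
    else:
        logger.warning(
            "You enabled PyTorch/XLA debug metrics but you don't have a TPU "
            "configured. Check your training configuration if this is unexpected."
        )

With a guard like this in place, requesting debug metrics on a machine without a TPU produces the warning above instead of the stack trace mentioned in the commit message.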