fix the wandb logging issue (#33464)
* fix the wandb logging issue
* handle ConfigError in WandbCallback; move import to local scope
* update integration_utils.py; move import of ConfigError
* Update integration_utils.py: remove trailing whitespace
This commit is contained in:
parent 5427eaad43
commit 4f1e9bae4e
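For context, `WandbCallback` is attached automatically when a `Trainer` is configured to report to wandb, so the `setup` path patched below runs at the start of training. A minimal sketch of that configuration (the output directory is a placeholder, not part of this commit):

from transformers import TrainingArguments

# report_to="wandb" is what makes Trainer attach WandbCallback; its setup()
# method, patched in the diff below, then runs when training begins.
args = TrainingArguments(output_dir="out", report_to="wandb")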
@@ -803,6 +803,10 @@ class WandbCallback(TrainerCallback):
         if self._wandb is None:
             return
         self._initialized = True
+
+        # prepare to handle potential configuration issues during setup
+        from wandb.sdk.lib.config_util import ConfigError as WandbConfigError
+
         if state.is_world_process_zero:
             logger.info(
                 'Automatic Weights & Biases logging enabled, to disable set os.environ["WANDB_DISABLED"] = "true"'
@@ -852,7 +856,13 @@ class WandbCallback(TrainerCallback):
             try:
                 self._wandb.config["model/num_parameters"] = model.num_parameters()
             except AttributeError:
-                logger.info("Could not log the number of model parameters in Weights & Biases.")
+                logger.info(
+                    "Could not log the number of model parameters in Weights & Biases due to an AttributeError."
+                )
+            except WandbConfigError:
+                logger.warning(
+                    "A ConfigError was raised whilst setting the number of model parameters in Weights & Biases config."
+                )
 
             # log the initial model architecture to an artifact
             if self._log_model.is_enabled:
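Taken together, the two hunks leave the relevant part of `WandbCallback.setup` in roughly the following shape. This is a condensed sketch reconstructed from the diff context alone; everything outside the shown hunks is elided or simplified and may differ from the actual file.

import logging

from transformers import TrainerCallback

logger = logging.getLogger(__name__)

class WandbCallback(TrainerCallback):
    def setup(self, args, state, model, **kwargs):
        if self._wandb is None:
            return
        self._initialized = True

        # prepare to handle potential configuration issues during setup;
        # importing locally keeps wandb an optional dependency of this module
        from wandb.sdk.lib.config_util import ConfigError as WandbConfigError

        if state.is_world_process_zero:
            # ... wandb.init() and config updates elided ...
            try:
                self._wandb.config["model/num_parameters"] = model.num_parameters()
            except AttributeError:
                logger.info(
                    "Could not log the number of model parameters in Weights & Biases due to an AttributeError."
                )
            except WandbConfigError:
                logger.warning(
                    "A ConfigError was raised whilst setting the number of model parameters in Weights & Biases config."
                )

Moving the `ConfigError` import into `setup` means `integration_utils.py` can still be imported when wandb is not installed; the name only has to resolve once the callback is actually in use, by which point `self._wandb` is known to be available.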