feat: add trainer label to wandb run upon initialization (#26466)

Bharat Ramanathan authored 2023-10-04 18:27:41 +05:30, committed by GitHub
parent 4fdf47cd3c
commit 122b2657f8


@@ -753,6 +753,7 @@ class WandbCallback(TrainerCallback):
             _watch_model = os.getenv("WANDB_WATCH", "false")
             if not is_torch_tpu_available() and _watch_model in ("all", "parameters", "gradients"):
                 self._wandb.watch(model, log=_watch_model, log_freq=max(100, state.logging_steps))
+            self._wandb.run._label(code="transformers_trainer")

     def on_train_begin(self, args, state, control, model=None, **kwargs):
        if self._wandb is None:
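
For context, the new `_label()` call runs inside `WandbCallback.setup()`, which fires when a Trainer configured to report to Weights & Biases starts training. A minimal sketch of that path follows; the model name, output directory, and logging step count are placeholders and not part of this commit:

import os

from transformers import AutoModelForSequenceClassification, Trainer, TrainingArguments

# Optional: have the callback also call wandb.watch() on the model, the branch
# immediately above the new _label() line in the diff.
os.environ["WANDB_WATCH"] = "gradients"

model = AutoModelForSequenceClassification.from_pretrained("distilbert-base-uncased")

args = TrainingArguments(
    output_dir="out",        # placeholder path
    report_to="wandb",       # enables WandbCallback
    logging_steps=100,
)

trainer = Trainer(model=model, args=args)
# trainer.train() would trigger WandbCallback.setup(), which initializes the wandb
# run and, with this change, labels it with code="transformers_trainer".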