From 122b2657f8f36dbfc8e742ca5ec13fa69138ec64 Mon Sep 17 00:00:00 2001
From: Bharat Ramanathan
Date: Wed, 4 Oct 2023 18:27:41 +0530
Subject: [PATCH] feat: add trainer label to wandb run upon initialization
 (#26466)

---
 src/transformers/integrations/integration_utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/transformers/integrations/integration_utils.py b/src/transformers/integrations/integration_utils.py
index ae50dfbc06a..5904d11caff 100644
--- a/src/transformers/integrations/integration_utils.py
+++ b/src/transformers/integrations/integration_utils.py
@@ -753,6 +753,7 @@ class WandbCallback(TrainerCallback):
             _watch_model = os.getenv("WANDB_WATCH", "false")
             if not is_torch_tpu_available() and _watch_model in ("all", "parameters", "gradients"):
                 self._wandb.watch(model, log=_watch_model, log_freq=max(100, state.logging_steps))
+            self._wandb.run._label(code="transformers_trainer")
 
     def on_train_begin(self, args, state, control, model=None, **kwargs):
         if self._wandb is None:
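
For context, a minimal standalone sketch of what the added line does: it tags the active wandb run with a telemetry label attributing it to the transformers Trainer integration. Note that `Run._label` is a private wandb SDK helper whose signature may change between versions, and the project name and offline mode below are illustrative assumptions, not part of the patch.

import wandb

# Offline mode keeps this sketch runnable without a W&B account or API key
# (an illustrative choice; the real callback uses the user's configured run).
run = wandb.init(project="demo", mode="offline")

# The same call the patch adds inside WandbCallback.setup(): records an
# internal telemetry label identifying the code path that created the run.
run._label(code="transformers_trainer")

run.finish()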