diff --git a/examples/contrib/run_swag.py b/examples/contrib/run_swag.py index 5daad8e558f..497ddeca9de 100644 --- a/examples/contrib/run_swag.py +++ b/examples/contrib/run_swag.py @@ -341,7 +341,7 @@ def train(args, train_dataset, model, tokenizer): tr_loss, logging_loss = 0.0, 0.0 model.zero_grad() train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0]) - set_seed(args) # Added here for reproductibility (even between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): diff --git a/examples/distillation/run_squad_w_distillation.py b/examples/distillation/run_squad_w_distillation.py index 86be0010ea9..e5a2265ed63 100644 --- a/examples/distillation/run_squad_w_distillation.py +++ b/examples/distillation/run_squad_w_distillation.py @@ -159,7 +159,7 @@ def train(args, train_dataset, model, tokenizer, teacher=None): tr_loss, logging_loss = 0.0, 0.0 model.zero_grad() train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0]) - set_seed(args) # Added here for reproductibility (even between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): diff --git a/examples/mm-imdb/run_mmimdb.py b/examples/mm-imdb/run_mmimdb.py index eb12867f3a1..c7e9f7b47e0 100644 --- a/examples/mm-imdb/run_mmimdb.py +++ b/examples/mm-imdb/run_mmimdb.py @@ -164,7 +164,7 @@ def train(args, train_dataset, model, tokenizer, criterion): best_f1, n_no_improve = 0, 0 model.zero_grad() train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0]) - set_seed(args) # Added here for reproductibility (even 
between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): diff --git a/examples/run_glue.py b/examples/run_glue.py index a8e73569258..9f362c1151d 100644 --- a/examples/run_glue.py +++ b/examples/run_glue.py @@ -185,7 +185,7 @@ def train(args, train_dataset, model, tokenizer): train_iterator = trange( epochs_trained, int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0] ) - set_seed(args) # Added here for reproductibility (even between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): diff --git a/examples/run_lm_finetuning.py b/examples/run_lm_finetuning.py index 24fdefa1d6d..95cbb7c1637 100644 --- a/examples/run_lm_finetuning.py +++ b/examples/run_lm_finetuning.py @@ -281,7 +281,7 @@ def train(args, train_dataset, model, tokenizer): train_iterator = trange( epochs_trained, int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0] ) - set_seed(args) # Added here for reproducibility (even between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): diff --git a/examples/run_multiple_choice.py b/examples/run_multiple_choice.py index 8b3ac9cf541..79894228895 100644 --- a/examples/run_multiple_choice.py +++ b/examples/run_multiple_choice.py @@ -145,7 +145,7 @@ def train(args, train_dataset, model, tokenizer): best_steps = 0 model.zero_grad() train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0]) - set_seed(args) # Added here 
for reproductibility (even between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): diff --git a/examples/run_ner.py b/examples/run_ner.py index 3ec9fbe0f5b..63f81b077e6 100644 --- a/examples/run_ner.py +++ b/examples/run_ner.py @@ -169,7 +169,7 @@ def train(args, train_dataset, model, tokenizer, labels, pad_token_label_id): train_iterator = trange( epochs_trained, int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0] ) - set_seed(args) # Added here for reproductibility (even between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): diff --git a/examples/run_squad.py b/examples/run_squad.py index 3df30144c49..028557b79fd 100644 --- a/examples/run_squad.py +++ b/examples/run_squad.py @@ -185,7 +185,7 @@ def train(args, train_dataset, model, tokenizer): train_iterator = trange( epochs_trained, int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0] ) - # Added here for reproductibility (even between python 2 and 3) + # Added here for reproducibility set_seed(args) for _ in train_iterator: diff --git a/examples/run_xnli.py b/examples/run_xnli.py index 77842bb0eeb..2377db74d92 100644 --- a/examples/run_xnli.py +++ b/examples/run_xnli.py @@ -164,7 +164,7 @@ def train(args, train_dataset, model, tokenizer): train_iterator = trange( epochs_trained, int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0] ) - set_seed(args) # Added here for reproductibility (even between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", 
disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): diff --git a/setup.py b/setup.py index 6e9dbba2615..65845a2ad5b 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ To create the package for pypi. creating the wheel and the source distribution (obviously). For the wheel, run: "python setup.py bdist_wheel" in the top level directory. - (this will build a wheel for the python version you use to build it - make sure you use python 3.x). + (this will build a wheel for the python version you use to build it). For the sources, run: "python setup.py sdist" You should now have a /dist directory with both .whl and .tar.gz source versions. diff --git a/templates/adding_a_new_example_script/run_xxx.py b/templates/adding_a_new_example_script/run_xxx.py index d2453ad3056..77d9ae4c7c0 100644 --- a/templates/adding_a_new_example_script/run_xxx.py +++ b/templates/adding_a_new_example_script/run_xxx.py @@ -155,7 +155,7 @@ def train(args, train_dataset, model, tokenizer): tr_loss, logging_loss = 0.0, 0.0 model.zero_grad() train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0]) - set_seed(args) # Added here for reproductibility (even between python 2 and 3) + set_seed(args) # Added here for reproducibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator):