From 1718fb9e7444a0883a550ab90c61bb8bf8a97076 Mon Sep 17 00:00:00 2001
From: Aidan Kierans <31550769+aidankierans@users.noreply.github.com>
Date: Thu, 19 Dec 2019 16:23:18 -0500
Subject: [PATCH] Minor/basic text fixes (#2229)

* Small clarification

Matches line 431 to line 435 for additional clarity and consistency.

* Fixed minor typo

The letter "s" was previously omitted from the word "docstrings".
---
 CONTRIBUTING.md               | 2 +-
 examples/run_lm_finetuning.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8228dd59d8e..7d7f2c73ff6 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -168,7 +168,7 @@ Follow these steps to start contributing:
    to be merged;
 4. Make sure pre-existing tests still pass;
 5. Add high-coverage tests. No quality test, no merge;
-6. All public methods must have informative doctrings;
+6. All public methods must have informative docstrings;
 
 ### Style guide
 
diff --git a/examples/run_lm_finetuning.py b/examples/run_lm_finetuning.py
index c4c73e71afc..d8127e24a5e 100644
--- a/examples/run_lm_finetuning.py
+++ b/examples/run_lm_finetuning.py
@@ -428,9 +428,9 @@ def main():
     parser.add_argument('--gradient_accumulation_steps', type=int, default=1,
                         help="Number of updates steps to accumulate before performing a backward/update pass.")
     parser.add_argument("--learning_rate", default=5e-5, type=float,
-                        help="The initial learning rate for Adam.")
+                        help="The initial learning rate for Adam optimizer.")
     parser.add_argument("--weight_decay", default=0.0, type=float,
-                        help="Weight deay if we apply some.")
+                        help="Weight decay if we apply some.")
     parser.add_argument("--adam_epsilon", default=1e-8, type=float,
                         help="Epsilon for Adam optimizer.")
     parser.add_argument("--max_grad_norm", default=1.0, type=float,