Mirror of https://github.com/huggingface/transformers.git
Minor/basic text fixes (#2229)
* Small clarification: matches line 431 to line 435 for additional clarity and consistency.
* Fixed minor typo: the letter "s" was previously omitted from the word "docstrings".
commit 1718fb9e74 (parent 9a399ead25)
@@ -168,7 +168,7 @@ Follow these steps to start contributing:
    to be merged;
 4. Make sure pre-existing tests still pass;
 5. Add high-coverage tests. No quality test, no merge;
-6. All public methods must have informative doctrings;
+6. All public methods must have informative docstrings;
 
 ### Style guide
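The docstring fix above is small, but the rule it enforces is worth illustrating. Below is a minimal, hypothetical example of the kind of informative docstring expected on a public method; the function, its arguments, and its behavior are invented for this sketch and do not appear in the commit.

```python
def pad_batch(sequences, max_length, pad_value=0):
    """Pad a list of token-id sequences to a common length.

    Args:
        sequences (list of list of int): tokenized inputs of varying lengths.
        max_length (int): length to pad (or truncate) every sequence to.
        pad_value (int, optional): token id used for padding. Defaults to 0.

    Returns:
        list of list of int: the sequences, each exactly ``max_length`` long.
    """
    # Pad with pad_value, then truncate so every sequence has the same length.
    return [(seq + [pad_value] * max_length)[:max_length] for seq in sequences]
```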
@@ -428,9 +428,9 @@ def main():
     parser.add_argument('--gradient_accumulation_steps', type=int, default=1,
                         help="Number of updates steps to accumulate before performing a backward/update pass.")
     parser.add_argument("--learning_rate", default=5e-5, type=float,
-                        help="The initial learning rate for Adam.")
+                        help="The initial learning rate for Adam optimizer.")
     parser.add_argument("--weight_decay", default=0.0, type=float,
-                        help="Weight deay if we apply some.")
+                        help="Weight decay if we apply some.")
     parser.add_argument("--adam_epsilon", default=1e-8, type=float,
                         help="Epsilon for Adam optimizer.")
     parser.add_argument("--max_grad_norm", default=1.0, type=float,
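The hunk above only corrects help strings, but a sketch of how these flags usually feed a training loop makes the arguments concrete. This is a minimal, assumption-laden example, not the script itself: `model`, `batches`, and `compute_loss` are hypothetical stand-ins, `args` is the parsed argparse namespace, and `torch.optim.AdamW` stands in for whatever optimizer the script actually builds.

```python
import torch
from torch.optim import AdamW

def train(model, batches, compute_loss, args):
    # args carries the parsed flags: learning_rate, adam_epsilon,
    # weight_decay, gradient_accumulation_steps, and max_grad_norm.
    optimizer = AdamW(model.parameters(), lr=args.learning_rate,
                      eps=args.adam_epsilon, weight_decay=args.weight_decay)
    optimizer.zero_grad()
    for step, batch in enumerate(batches):
        loss = compute_loss(model, batch)
        # Scale the loss so the accumulated gradients average over
        # gradient_accumulation_steps mini-batches.
        (loss / args.gradient_accumulation_steps).backward()
        if (step + 1) % args.gradient_accumulation_steps == 0:
            # Clip the global gradient norm before applying the update.
            torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)
            optimizer.step()
            optimizer.zero_grad()
```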