Merge pull request #960 from ethanjperez/patch-1

Fixing unused weight_decay argument
Authored by Thomas Wolf on 2019-08-07 10:09:55 +02:00; committed by GitHub
Commit: b4f9464f90

@@ -205,7 +205,7 @@ def main():
     param_optimizer = list(model.named_parameters())
     no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
     optimizer_grouped_parameters = [
-        {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
+        {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': args.weight_decay},
         {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
     ]
     optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon)
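For context, a minimal runnable sketch of the grouped-parameter pattern this patch fixes. It swaps in torch.optim.AdamW for the repository's own AdamW import so the sketch runs with plain PyTorch, and the argparse defaults and the Linear stand-in model are illustrative assumptions, not values from the original script:

# Sketch of the post-patch optimizer setup; assumptions as noted above.
import argparse
import torch
from torch.optim import AdamW  # stand-in for the script's own AdamW import

parser = argparse.ArgumentParser()
parser.add_argument('--weight_decay', type=float, default=0.01)   # illustrative default
parser.add_argument('--learning_rate', type=float, default=5e-5)  # illustrative default
parser.add_argument('--adam_epsilon', type=float, default=1e-8)   # illustrative default
args = parser.parse_args([])  # empty list: use defaults rather than sys.argv

model = torch.nn.Linear(4, 2)  # hypothetical stand-in for the real model

param_optimizer = list(model.named_parameters())
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [
    # decay group: parameters whose names match none of the no_decay substrings;
    # before this patch the value was hardcoded to 0.01, silently ignoring the CLI flag
    {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)],
     'weight_decay': args.weight_decay},
    # no-decay group: biases and LayerNorm parameters are conventionally exempt
    {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)],
     'weight_decay': 0.0},
]
optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon)

Passing per-group 'weight_decay' entries overrides the optimizer-wide default for each group, which is why the fix must land inside the first dict rather than in the AdamW call.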