update optimizer run_squad

Author: thomwolf
Date: 2018-11-03 17:56:34 +01:00
Parent: f514cbbf30
Commit: 25f73add07


@@ -800,13 +800,17 @@ def main():
     if n_gpu > 1:
         model = torch.nn.DataParallel(model)
-    optimizer = BERTAdam([{'params': [p for n, p in model.named_parameters() if n != 'bias'], 'l2': 0.01},
-                          {'params': [p for n, p in model.named_parameters() if n == 'bias'], 'l2': 0.}
-                          ],
-                         lr=args.learning_rate, schedule='warmup_linear',
+    no_decay = ['bias', 'gamma', 'beta']
+    optimizer_parameters = [
+        {'params': [p for n, p in model.named_parameters() if n not in no_decay], 'weight_decay_rate': 0.01},
+        {'params': [p for n, p in model.named_parameters() if n in no_decay], 'weight_decay_rate': 0.0}
+        ]
+    optimizer = BERTAdam(optimizer_parameters,
+                         lr=args.learning_rate,
                          warmup=args.warmup_proportion,
                          t_total=num_train_steps)
     global_step = 0
     if args.do_train:
         train_features = convert_examples_to_features(
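
The change replaces the exact-name check (n != 'bias') with a decay/no-decay split keyed on the no_decay list, so that biases and the LayerNorm gamma/beta parameters are exempt from weight decay. Below is a minimal, self-contained sketch of that grouping pattern; it uses torch.optim.AdamW as a stand-in for the repo's BERTAdam and a toy two-layer model as a stand-in for BERT (both are assumptions, not code from this commit). One caveat: named_parameters() yields dotted names such as '0.bias' or 'bert.encoder.layer.0.attention.self.query.bias', so an exact membership test like n not in no_decay only matches a top-level parameter named exactly 'bias'; the sketch therefore uses substring matching, the form later versions of these examples adopt.

# Sketch of the decay/no-decay parameter grouping, assuming a toy model
# and torch.optim.AdamW in place of BERTAdam (which lives in this repo,
# not in stock PyTorch).
import torch
import torch.nn as nn

model = nn.Sequential(nn.Linear(768, 768), nn.LayerNorm(768))

# 'gamma'/'beta' match the repo's BERTLayerNorm parameter names; stock
# nn.LayerNorm names its parameters 'weight'/'bias' instead.
no_decay = ['bias', 'gamma', 'beta']
optimizer_grouped_parameters = [
    # decay group: every parameter whose name contains none of the
    # no_decay substrings
    {'params': [p for n, p in model.named_parameters()
                if not any(nd in n for nd in no_decay)],
     'weight_decay': 0.01},
    # no-decay group: biases and LayerNorm parameters
    {'params': [p for n, p in model.named_parameters()
                if any(nd in n for nd in no_decay)],
     'weight_decay': 0.0},
]
optimizer = torch.optim.AdamW(optimizer_grouped_parameters, lr=3e-5)

The per-group 'weight_decay' entries override the optimizer-level default, which is what makes this two-group layout work with any PyTorch optimizer that accepts parameter groups.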