Merge pull request #1026 from rabeehk/master

loads the tokenizer for each checkpoint, to solve the reproducibility…
Thomas Wolf 2019-08-30 14:15:36 +02:00 committed by GitHub
commit 01ad55f8cf


@@ -482,6 +482,7 @@ def main():
         for checkpoint in checkpoints:
             global_step = checkpoint.split('-')[-1] if len(checkpoints) > 1 else ""
             model = model_class.from_pretrained(checkpoint)
+            tokenizer = tokenizer_class.from_pretrained(checkpoint)
             model.to(args.device)
             result = evaluate(args, model, tokenizer, prefix=global_step)
             result = dict((k + '_{}'.format(global_step), v) for k, v in result.items())
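For context, a minimal sketch (not part of the diff) of the pattern the added line enables: reloading the tokenizer from the same checkpoint directory as the model, so evaluation uses the exact vocabulary and settings saved with that checkpoint. The concrete classes and the checkpoint path below are illustrative assumptions, not taken from the script.

# Sketch only: assumes a checkpoint directory saved during training that
# contains both model weights and tokenizer files.
from pytorch_transformers import BertForSequenceClassification, BertTokenizer

checkpoint = "output/checkpoint-500"  # hypothetical checkpoint directory
# Load the model and the tokenizer from the same checkpoint so they stay in sync
model = BertForSequenceClassification.from_pretrained(checkpoint)
tokenizer = BertTokenizer.from_pretrained(checkpoint)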