Resize model embeddings when a tokenizer with special tokens is present

This commit is contained in:
İbrahim Ethem Demirci 2019-11-16 16:49:37 +03:00 committed by Lysandre Debut
parent 07bf43074f
commit aa92a184d2

View File

@ -215,6 +215,7 @@ def train(args, train_dataset, model, tokenizer):
global_step = 0
tr_loss, logging_loss = 0.0, 0.0
model.resize_token_embeddings(len(tokenizer))
model.zero_grad()
train_iterator = trange(int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0])
set_seed(args) # Added here for reproducibility (even between python 2 and 3)