Fixed the RoBERTa checkpoint conversion script to match the LM head refactoring.
parent 770043eea2
commit 39d72bcc7b
@@ -123,7 +123,7 @@ def convert_roberta_checkpoint_to_pytorch(roberta_checkpoint_path, pytorch_dump_
     model.lm_head.layer_norm.weight = roberta.model.decoder.lm_head.layer_norm.weight
     model.lm_head.layer_norm.bias = roberta.model.decoder.lm_head.layer_norm.bias
     model.lm_head.layer_norm.variance_epsilon = roberta.model.decoder.lm_head.layer_norm.eps
-    model.lm_head.weight = roberta.model.decoder.lm_head.weight
+    model.lm_head.decoder.weight = roberta.model.decoder.lm_head.weight
     model.lm_head.bias = roberta.model.decoder.lm_head.bias

     # Let's check that we get the same results.
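To make the change concrete, here is a minimal sketch of an LM head laid out the way the fixed script expects. The class name, the gelu activation, and the use of nn.LayerNorm (whose epsilon attribute is `eps`, whereas the script targets a `variance_epsilon` attribute on the library's own layer norm of the time) are illustrative assumptions, not the actual transformers class; the point is only that after the refactoring the vocabulary projection lives in a `decoder` submodule, so the fairseq weight belongs in `lm_head.decoder.weight` rather than directly on `lm_head.weight`.

import torch
import torch.nn as nn
import torch.nn.functional as F

class RobertaLMHeadSketch(nn.Module):
    # Hypothetical sketch of the post-refactoring LM head layout.
    def __init__(self, hidden_size, vocab_size, layer_norm_eps=1e-5):
        super().__init__()
        self.dense = nn.Linear(hidden_size, hidden_size)
        self.layer_norm = nn.LayerNorm(hidden_size, eps=layer_norm_eps)
        # The vocabulary projection is now a submodule named `decoder`;
        # `decoder.weight` is the tensor the fixed script populates from
        # the fairseq checkpoint.
        self.decoder = nn.Linear(hidden_size, vocab_size, bias=False)
        self.bias = nn.Parameter(torch.zeros(vocab_size))

    def forward(self, features):
        x = F.gelu(self.dense(features))
        x = self.layer_norm(x)
        # Project back to the vocabulary, adding the separate output bias
        # that the script copies into `lm_head.bias`.
        return self.decoder(x) + self.bias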