Mirror of https://github.com/huggingface/transformers.git
converter: fix vocab size
commit d064009b72
parent a701a0cee1
@@ -47,7 +47,7 @@ def convert_roberta_checkpoint_to_pytorch(roberta_checkpoint_path, pytorch_dump_
     roberta = FairseqRobertaModel.from_pretrained(roberta_checkpoint_path, bpe = 'sentencepiece')
     roberta.eval()  # disable dropout
     config = BertConfig(
-        vocab_size_or_config_json_file=250004,
+        vocab_size_or_config_json_file=250002,
         hidden_size=roberta.args.encoder_embed_dim,
         num_hidden_layers=roberta.args.encoder_layers,
         num_attention_heads=roberta.args.encoder_attention_heads,
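Note (not part of the commit): 250002 is the vocabulary size of XLM-R style sentencepiece checkpoints, i.e. the length of the fairseq dictionary including its special tokens. A minimal sketch of how the size could be read from the loaded checkpoint instead of hardcoded; `roberta.task.source_dictionary` is the standard fairseq hub attribute, and the checkpoint path is a placeholder:

    # Sketch: derive the vocab size from the loaded fairseq checkpoint
    # instead of hardcoding it. Assumes fairseq is installed; the path
    # below is a placeholder, not from the commit.
    from fairseq.models.roberta import RobertaModel as FairseqRobertaModel

    roberta = FairseqRobertaModel.from_pretrained(
        "/path/to/xlmr_checkpoint", bpe="sentencepiece"
    )
    roberta.eval()  # disable dropout

    # The fairseq dictionary already counts the special tokens
    # (<s>, <pad>, </s>, <unk>), so its length is the embedding-matrix
    # row count the converted config must match.
    vocab_size = len(roberta.task.source_dictionary)
    print(vocab_size)  # 250002 for XLM-R style checkpoints

Deriving the size this way would have avoided the off-by-two that this commit corrects.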