fix tokenizer_type to avoid error when loading checkpoint back (#20062)
parent 3502c202f9
commit 19067711e7
@@ -648,7 +648,7 @@ def convert_checkpoint_from_transformers_to_megatron(args):
         "data_parallel_size": args.target_data_parallel_size,
         "make_vocab_size_divisible_by": args.make_vocab_size_divisible_by,
         "rank": 0,
-        "tokenizer_type": None,
+        "tokenizer_type": "GPT2BPETokenizer",
     }
 
     if config.activation_function == "gelu":
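For context, a minimal runnable sketch, not the conversion script itself, of why the value matters. The literal numbers below are placeholders for the script's real arguments; the point is that Megatron-LM rebuilds its tokenizer from the tokenizer_type field stored in the checkpoint, so a None there breaks reloading, while "GPT2BPETokenizer" (the type GPT-2 checkpoints use) does not.

from types import SimpleNamespace

# Stand-in for the args dict the conversion script embeds in the checkpoint.
megatron_args = {
    "data_parallel_size": 1,                # placeholder for args.target_data_parallel_size
    "make_vocab_size_divisible_by": 128,    # placeholder value
    "rank": 0,
    "tokenizer_type": "GPT2BPETokenizer",   # was None before this fix
}

# Megatron-LM turns the saved dict back into a namespace and dispatches on
# tokenizer_type to build the tokenizer; None would make that lookup fail.
margs = SimpleNamespace(**megatron_args)
assert margs.tokenizer_type is not None, "checkpoint could not be loaded back"
print(margs.tokenizer_type)  # GPT2BPETokenizer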