Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-30 09:42:22 +06:00)
Empty list in defaults for LLaMA special tokens during weights conversion (#32342)
empty list in defaults
This commit is contained in:
parent: 2229ebe722
commit: db8c7caeb6
@@ -449,7 +449,8 @@ def main():
     if args.model_size is None and args.num_shards is None:
         raise ValueError("You have to set at least `num_shards` if you are not giving the `model_size`")
     if args.special_tokens is None:
-        args.special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS[str(args.llama_version)]
+        # no special tokens by default
+        args.special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS.get(str(args.llama_version), [])

     spm_path = os.path.join(args.input_dir, "tokenizer.model")
     vocab_size = len(
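For context, a minimal sketch of why the change matters: plain dict indexing into DEFAULT_LLAMA_SPECIAL_TOKENS raises a KeyError when the given Llama version has no entry, whereas .get() with an empty-list default lets the conversion proceed with no special tokens. The mapping contents and token strings below are illustrative stand-ins, not the script's actual table.

# Illustrative sketch (hypothetical values); the real DEFAULT_LLAMA_SPECIAL_TOKENS
# lives in convert_llama_weights_to_hf.py and maps version strings to token lists.
DEFAULT_LLAMA_SPECIAL_TOKENS = {"3": ["<|begin_of_text|>", "<|end_of_text|>"]}

llama_version = "2"  # a version with no entry in the mapping

# Before the patch: plain indexing raises KeyError for a missing version.
try:
    special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS[str(llama_version)]
except KeyError:
    special_tokens = None  # the conversion script would have crashed here

# After the patch: .get() falls back to an empty list, i.e. no special tokens.
special_tokens = DEFAULT_LLAMA_SPECIAL_TOKENS.get(str(llama_version), [])
assert special_tokens == []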