Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-20 04:58:22 +06:00)
Raise an exception if the pipeline allocator can't determine the tokenizer from the model.
parent be5bf7b81b
commit 28e64ad5a4
@@ -370,11 +370,12 @@ def pipeline(task: str, model, config: Optional[PretrainedConfig] = None, tokeni
     Utility factory method to build pipeline.
     """
     # Try to infer tokenizer from model name (if provided as str)
-    if tokenizer is None and isinstance(model, str):
-        tokenizer = model
-    else:
-        # Impossible to guest what is the right tokenizer here
-        raise Exception('Tokenizer cannot be None if provided model is a PreTrainedModel instance')
+    if not isinstance(tokenizer, PreTrainedTokenizer):
+        if not isinstance(model, str):
+            # Impossible to guest what is the right tokenizer here
+            raise Exception('Tokenizer cannot be None if provided model is a PreTrainedModel instance')
+        else:
+            tokenizer = model
 
     tokenizer = tokenizer if isinstance(tokenizer, PreTrainedTokenizer) else AutoTokenizer.from_pretrained(tokenizer)
 
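For readers skimming the change, the following is a minimal, self-contained Python sketch of the tokenizer-resolution rule the new code implements. The names resolve_tokenizer, FakeTokenizer and FakeModel, and the model identifier "bert-base-cased", are illustrative assumptions, not part of this commit; in the actual change the logic sits inline in pipeline() and uses the real PreTrainedTokenizer, PreTrainedModel and AutoTokenizer classes.

# Sketch of the resolution rule introduced by this commit, using stand-in classes
# so it runs without transformers installed. resolve_tokenizer, FakeTokenizer and
# FakeModel are hypothetical names, not library API.

class FakeTokenizer:   # stands in for transformers.PreTrainedTokenizer
    pass

class FakeModel:       # stands in for a transformers.PreTrainedModel instance
    pass

def resolve_tokenizer(model, tokenizer=None):
    # Caller already supplied a tokenizer object: keep it as-is.
    if isinstance(tokenizer, FakeTokenizer):
        return tokenizer
    # Otherwise the tokenizer must be inferred from the model, which only works
    # when the model is a name/path (a str) that can be reused as the tokenizer name.
    if not isinstance(model, str):
        raise Exception("Tokenizer cannot be None if provided model is a PreTrainedModel instance")
    # In pipeline() this string is subsequently handed to AutoTokenizer.from_pretrained(...).
    return model

print(resolve_tokenizer("bert-base-cased"))              # reuses the model identifier
print(resolve_tokenizer(FakeModel(), FakeTokenizer()))   # keeps the supplied tokenizer
try:
    resolve_tokenizer(FakeModel())                       # nothing to infer from
except Exception as err:
    print(err)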
|