Mirror of https://github.com/huggingface/transformers.git (synced 2025-08-01 02:31:11 +06:00)
Update tokenization_openai.py
commit f5e2ed0fd8
parent 562b998366
@@ -91,7 +91,7 @@ class OpenAIGPTTokenizer(PreTrainedTokenizer):
             import ftfy
             from spacy.lang.en import English
             _nlp = English()
-            self.nlp = nlp.Defaults.create_tokenizer(_nlp)
+            self.nlp = _nlp.Defaults.create_tokenizer(_nlp)
             self.fix_text = ftfy.fix_text
         except ImportError:
             logger.warning("ftfy or spacy is not installed using BERT BasicTokenizer instead of SpaCy & ftfy.")
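For context, below is a minimal sketch of the optional spaCy/ftfy setup that this one-character rename repairs. The old line referenced an undefined name `nlp`, so when ftfy and spaCy were installed the constructor raised a NameError, which the `except ImportError` fallback could not catch; taking the tokenizer from `_nlp` (the English() instance defined two lines above) fixes that. The sketch assumes the spaCy 2.x API (`Defaults.create_tokenizer`) that the file targeted at the time; the standalone names used here (`build_openai_style_tokenizer`, the returned lambdas) are illustrative, not part of the transformers API.

    # Minimal sketch of the try/except pattern around the fixed line.
    # Assumes spaCy 2.x, where Language.Defaults.create_tokenizer(nlp) exists.
    import logging

    logger = logging.getLogger(__name__)

    def build_openai_style_tokenizer():
        """Return (tokenize_fn, fix_text_fn), falling back when spaCy/ftfy are missing."""
        try:
            import ftfy
            from spacy.lang.en import English

            _nlp = English()
            # The fix: the Defaults class and the vocab argument must come from
            # `_nlp`; the old `nlp.Defaults...` referenced an undefined name.
            spacy_tokenizer = _nlp.Defaults.create_tokenizer(_nlp)
            return (lambda text: [t.text for t in spacy_tokenizer(text)]), ftfy.fix_text
        except ImportError:
            logger.warning(
                "ftfy or spacy is not installed using BERT BasicTokenizer instead of SpaCy & ftfy."
            )
            # The real tokenizer falls back to BERT's BasicTokenizer here;
            # this sketch simply falls back to whitespace splitting.
            return (lambda text: text.split()), None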