From f5e2ed0fd89d5730126d71c03324fa07ae674ca7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Guillem=20Garc=C3=ADa=20Subies?= <37592763+GuillemGSubies@users.noreply.github.com>
Date: Tue, 20 Aug 2019 14:19:25 +0200
Subject: [PATCH] Update tokenization_openai.py

---
 pytorch_transformers/tokenization_openai.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_transformers/tokenization_openai.py b/pytorch_transformers/tokenization_openai.py
index 79eb023a8d1..51b418ebd36 100644
--- a/pytorch_transformers/tokenization_openai.py
+++ b/pytorch_transformers/tokenization_openai.py
@@ -91,7 +91,7 @@ class OpenAIGPTTokenizer(PreTrainedTokenizer):
             import ftfy
             from spacy.lang.en import English
             _nlp = English()
-            self.nlp = nlp.Defaults.create_tokenizer(_nlp)
+            self.nlp = _nlp.Defaults.create_tokenizer(_nlp)
             self.fix_text = ftfy.fix_text
         except ImportError:
             logger.warning("ftfy or spacy is not installed using BERT BasicTokenizer instead of SpaCy & ftfy.")
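
The one-character change fixes a NameError: the surrounding code only defines `_nlp`, so referencing `nlp.Defaults` fails as soon as ftfy and spaCy are installed. Below is a minimal standalone sketch (not part of the patch) illustrating the before/after behaviour; it assumes spaCy 2.x and ftfy are available.

```python
# Illustrative sketch only, assuming spaCy 2.x and ftfy are installed.
import ftfy
from spacy.lang.en import English

_nlp = English()

# Before the patch: `nlp` is never defined in this scope, so the call raises
# NameError the moment the spaCy/ftfy branch is taken.
# tokenizer = nlp.Defaults.create_tokenizer(_nlp)   # NameError: name 'nlp' is not defined

# After the patch: the Defaults class attribute is reached through the `_nlp`
# instance, and create_tokenizer returns a spaCy Tokenizer bound to `_nlp`.
tokenizer = _nlp.Defaults.create_tokenizer(_nlp)

print([t.text for t in tokenizer(ftfy.fix_text("don't"))])  # e.g. ['do', "n't"]
```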