Remove annoying tokenization message

This commit is contained in:
LysandreJik 2019-12-02 18:23:00 -05:00
parent e85855f2c4
commit fbaf05bd92

View File

@ -981,7 +981,6 @@ class PreTrainedTokenizer(object):
return (ids, pair_ids, overflowing_tokens)
def create_token_type_ids_from_sequences(self, token_ids_0, token_ids_1=None):
    """Create token type IDs for a sequence or a sequence pair.

    The base tokenizer uses no special tokens, so the mask is simply
    0 for every token of the first sequence and 1 for every token of
    the second sequence (if provided).

    Args:
        token_ids_0: list of ids for the first sequence.
        token_ids_1: optional list of ids for the second sequence.

    Returns:
        A list of ints the same total length as the input(s):
        ``[0] * len(token_ids_0)`` for a single sequence, or
        ``[0] * len(token_ids_0) + [1] * len(token_ids_1)`` for a pair.
    """
    # No warning here: this method is called on every encode, and an
    # unconditional logger.warning is pure noise for tokenizers that
    # legitimately use no special tokens.
    if token_ids_1 is None:
        return len(token_ids_0) * [0]
    return [0] * len(token_ids_0) + [1] * len(token_ids_1)
@ -994,7 +993,6 @@ class PreTrainedTokenizer(object):
single sequence: <s> X </s>
pair of sequences: <s> A </s></s> B </s>
"""
logger.warning("This tokenizer does not make use of special tokens. Input is returned with no modification.")
if token_ids_1 is None:
return token_ids_0
return token_ids_0 + token_ids_1