From a2e379743c9a4cf87e4af1f112bcc3f1ebcaa438 Mon Sep 17 00:00:00 2001
From: Sylvain Gugger
Date: Sat, 20 Feb 2021 15:46:54 -0500
Subject: [PATCH] Fix style

---
 src/transformers/tokenization_utils_base.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/transformers/tokenization_utils_base.py b/src/transformers/tokenization_utils_base.py
index b6e34986740..a895df01c09 100644
--- a/src/transformers/tokenization_utils_base.py
+++ b/src/transformers/tokenization_utils_base.py
@@ -3054,11 +3054,12 @@ class PreTrainedTokenizerBase(SpecialTokensMixin):
 
     def convert_tokens_to_string(self, tokens: List[str]) -> str:
         """
-        Converts a sequence of tokens in a single string. The most simple way to do it is ``" ".join(tokens)`` but
-        we often want to remove sub-word tokenization artifacts at the same time.
+        Converts a sequence of tokens in a single string. The most simple way to do it is ``" ".join(tokens)`` but we
+        often want to remove sub-word tokenization artifacts at the same time.
 
         Args:
             tokens (:obj:`List[str]`): The token to join in a string.
+
         Returns:
             :obj:`str`: The joined tokens.
         """
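
For reference, a minimal sketch of the behavior the reworded docstring describes, assuming a WordPiece checkpoint such as ``bert-base-uncased`` is reachable (the checkpoint name and sample text are only illustrations, not part of the patch):

    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
    tokens = tokenizer.tokenize("tokenization artifacts")
    # e.g. ['token', '##ization', 'artifacts'] with a WordPiece vocabulary

    # The naive join keeps the sub-word markers:
    print(" ".join(tokens))                             # "token ##ization artifacts"
    # convert_tokens_to_string also strips the "##" artifacts while joining:
    print(tokenizer.convert_tokens_to_string(tokens))   # "tokenization artifacts"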