From 9aeb0b9b8ad23edf4aa89ea5ad4139754fa6e49f Mon Sep 17 00:00:00 2001 From: Lysandre Date: Tue, 14 Jan 2020 17:43:00 -0500 Subject: [PATCH] Improve padding side documentation --- src/transformers/tokenization_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/tokenization_utils.py b/src/transformers/tokenization_utils.py index 3c9d69ddcd3..1c8a84b90dc 100644 --- a/src/transformers/tokenization_utils.py +++ b/src/transformers/tokenization_utils.py @@ -800,7 +800,7 @@ class PreTrainedTokenizer(object): - 'do_not_truncate': Does not truncate (raise an error if the input sequence is longer than max_length) pad_to_max_length: if set to True, the returned sequences will be padded according to the model's padding side and padding index, up to their max length. If no max length is specified, the padding is done up to the model's max length. - The tokenizer padding sides are handled by the following strings: + The tokenizer padding sides are handled by the class attribute `padding_side`, which can be set to the following strings: - 'left': pads on the left of the sequences - 'right': pads on the right of the sequences Defaults to False: no padding.