Mirror of https://github.com/huggingface/transformers.git (synced 2025-08-02 11:11:05 +06:00).

Commit: Fix tokenizer UnboundLocalError when padding is set to PaddingStrategy.MAX_LENGTH (#7610)

* Fix UnboundLocalError when PaddingStrategy is MAX_LENGTH
* Fix UnboundLocalError for TruncationStrategy
This commit is contained in:
parent
adfe6ace88
commit
e084089eb9
@@ -1781,6 +1781,8 @@ class PreTrainedTokenizerBase(SpecialTokensMixin):
             padding_strategy = PaddingStrategy.LONGEST  # Default to pad to the longest sequence in the batch
         elif not isinstance(padding, PaddingStrategy):
             padding_strategy = PaddingStrategy(padding)
+        elif isinstance(padding, PaddingStrategy):
+            padding_strategy = padding
         else:
             padding_strategy = PaddingStrategy.DO_NOT_PAD
@@ -1806,6 +1808,8 @@ class PreTrainedTokenizerBase(SpecialTokensMixin):
             )  # Default to truncate the longest sequences in pairs of inputs
         elif not isinstance(truncation, TruncationStrategy):
             truncation_strategy = TruncationStrategy(truncation)
+        elif isinstance(truncation, TruncationStrategy):
+            truncation_strategy = truncation
         else:
             truncation_strategy = TruncationStrategy.DO_NOT_TRUNCATE
Loading…
Reference in New Issue
Block a user