Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-31 02:02:21 +06:00
special_tokens_mask value was unused and calculated twice
parent fb0d2f1da1
commit 7f998b1b83
@@ -910,7 +910,7 @@ class PreTrainedTokenizer(object):
         token_type_ids = [0] * len(ids) + ([1] * len(pair_ids) if pair else [])
         special_tokens_mask = [0] * (len(ids) + (len(pair_ids) if pair else 0))
         if return_special_tokens_mask:
-            encoded_inputs["special_tokens_mask"] = self.get_special_tokens_mask(ids, pair_ids)
+            encoded_inputs["special_tokens_mask"] = special_tokens_mask

         # Prepare inputs as tensors if asked
         if return_tensors == 'tf' and is_tf_available():