Fix type of max_seq_length arg in run_swag.py (#12832)
Commit: fcf83011df
Parent: 27a8c9e4f1
@@ -106,7 +106,7 @@ class DataTrainingArguments:
         default=None,
         metadata={"help": "The number of processes to use for the preprocessing."},
     )
-    max_seq_length: int = field(
+    max_seq_length: Optional[int] = field(
         default=None,
         metadata={
             "help": "The maximum total input sequence length after tokenization. If passed, sequences longer "
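The point of the change: a dataclass field whose default is None should be annotated Optional[int], not int, so that type checkers and the annotation-driven argument parsing used by the example scripts see the correct type. Below is a minimal, self-contained sketch of the corrected annotation; it mirrors the field from the diff above but omits the rest of run_swag.py, so it is an illustration rather than the actual script.

# Minimal sketch (standard library only): the default is None, so the
# annotation must be Optional[int] rather than int.
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class DataTrainingArguments:
    max_seq_length: Optional[int] = field(
        default=None,
        metadata={
            "help": "The maximum total input sequence length after tokenization. If passed, sequences longer "
            "than this will be truncated, sequences shorter will be padded."
        },
    )


args = DataTrainingArguments()
# The field is genuinely optional: left unset it stays None, matching the annotation.
assert args.max_seq_length is None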