mirror of https://github.com/huggingface/transformers.git
Fix code quality
parent 1d5ea34f6a
commit d6175a4268
@@ -91,10 +91,11 @@ class DataTrainingArguments:
         default=None, metadata={"help": "The input training data file (a text file)."}
     )
     train_data_files: Optional[str] = field(
-        default=None, metadata={
+        default=None,
+        metadata={
             "help": "The input training data files (multiple files in glob format). "
             "Very often splitting large files to smaller files can prevent tokenizer going out of memory"
-        }
+        },
     )
     eval_data_file: Optional[str] = field(
         default=None,
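For readers skimming the hunk, a minimal, self-contained sketch of the pattern the new formatting follows (standard-library only; the class and field names are taken from the diff, everything outside the hunk is illustrative):

from dataclasses import dataclass, field
from typing import Optional


@dataclass
class DataTrainingArguments:
    # One argument per line and a trailing comma after the metadata dict,
    # matching the post-commit formatting shown in the hunk above.
    train_data_files: Optional[str] = field(
        default=None,
        metadata={
            "help": "The input training data files (multiple files in glob format). "
            "Very often splitting large files to smaller files can prevent tokenizer going out of memory"
        },
    )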