mirror of https://github.com/huggingface/transformers.git
Fix typo
This commit is contained in: parent 2152bfeae8, commit c0cf0a04d5
@@ -163,7 +163,7 @@ def main():
     datasets = (train_dataset, eval_dataset)
     encoded_datasets = tokenize_and_encode(datasets)
 
-    # Compute the mex input length for the Transformer
+    # Compute the max input length for the Transformer
     max_length = model.config.n_positions // 2 - 2
     input_length = max(len(story[:max_length]) + max(len(cont1[:max_length]), len(cont2[:max_length])) + 3 \
                            for dataset in encoded_datasets for story, cont1, cont2, _ in dataset)
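For context on the fixed line: max_length caps the story and each continuation at n_positions // 2 - 2 tokens, so a capped story plus the longer capped continuation plus the three special tokens (start, delimiter, classify) stays within the model's position budget. A minimal sketch of that arithmetic, assuming n_positions = 512 (the value used by OpenAI GPT) and hypothetical token lists:

# Minimal sketch of the length budget behind max_length; n_positions = 512
# and the token lists below are assumptions for illustration only.
n_positions = 512
max_length = n_positions // 2 - 2      # 254 tokens each for story and continuation

story = list(range(300))               # hypothetical over-long story tokens
cont1 = list(range(280))               # hypothetical continuation 1 tokens
cont2 = list(range(10))                # hypothetical continuation 2 tokens

# Per-example term from the script's max(...) expression:
# capped story + longest capped continuation + 3 special tokens.
input_length = len(story[:max_length]) + max(len(cont1[:max_length]), len(cont2[:max_length])) + 3

assert input_length <= n_positions     # 254 + 254 + 3 = 511 <= 512
print(input_length)                    # -> 511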