mirror of https://github.com/huggingface/transformers.git (synced 2025-08-02 19:21:31 +06:00)
parent 174aecd099
commit 03847ef451
@@ -144,7 +144,7 @@ class FlaxDataCollatorForWav2Vec2Pretraining:
             The Wav2Vec2 model used for pretraining. The data collator needs to have access
             to config and ``_get_feat_extract_output_lengths`` function for correct padding.
         feature_extractor (:class:`~transformers.Wav2Vec2FeatureExtractor`):
-            The processor used for proccessing the data.
+            The processor used for processing the data.
         padding (:obj:`bool`, :obj:`str` or :class:`~transformers.tokenization_utils_base.PaddingStrategy`, `optional`, defaults to :obj:`True`):
             Select a strategy to pad the returned sequences (according to the model's padding side and padding index)
             among:
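The docstring touched by this hunk documents the collator's ``padding`` argument, which is forwarded to the feature extractor when a batch is assembled. As a minimal sketch of that padding behaviour only (not the FlaxDataCollatorForWav2Vec2Pretraining implementation itself), assuming a default-configured Wav2Vec2FeatureExtractor, the "longest" strategy pads every clip in the batch to the length of the longest one:

# Sketch only: illustrates the padding strategies described in the docstring,
# not the data collator defined in the example script.
from transformers import Wav2Vec2FeatureExtractor

# Default-configured extractor (feature_size=1, sampling_rate=16000, padding_value=0.0).
feature_extractor = Wav2Vec2FeatureExtractor()

# Two raw mono clips of different lengths, as the collator would receive them.
features = [
    {"input_values": [0.1] * 8000},
    {"input_values": [0.1] * 16000},
]

# padding="longest" pads each example up to the longest one in the batch;
# this is also the strategy selected when padding=True is passed.
batch = feature_extractor.pad(features, padding="longest", return_tensors="np")
print(batch["input_values"].shape)  # (2, 16000)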
@@ -410,5 +410,5 @@ def generic_train(
         trainer.fit(model)
 
     else:
-        print("RAG modeling tests with new set functions successfuly executed!")
+        print("RAG modeling tests with new set functions successfully executed!")
     return trainer
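The second hunk sits in a helper that builds a PyTorch Lightning trainer, optionally fits the model, and returns the trainer to the caller. A small self-contained sketch of that fit-then-return-trainer pattern follows; TinyModule, tiny_generic_train, and the do_train flag are illustrative names, not the repo's own:

# Illustrative sketch of the pattern shown in the hunk; everything here is
# made up for the example except the PyTorch Lightning APIs themselves.
import pytorch_lightning as pl
import torch
from torch.utils.data import DataLoader, TensorDataset


class TinyModule(pl.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.mse_loss(self.layer(x), y)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.01)


def tiny_generic_train(model, do_train=True):
    trainer = pl.Trainer(max_epochs=1, logger=False)
    if do_train:
        data = DataLoader(TensorDataset(torch.randn(8, 4), torch.randn(8, 1)), batch_size=4)
        trainer.fit(model, data)
    else:
        print("Skipping training; returning an unfitted trainer.")
    # Returning the trainer lets the caller run trainer.test() or trainer.predict() later.
    return trainer


trainer = tiny_generic_train(TinyModule())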