mirror of
https://github.com/huggingface/transformers.git
synced 2025-07-05 13:50:13 +06:00

* Reorganize example folder * Continue reorganization * Change requirements for tests * Final cleanup * Finish regroup with tests all passing * Copyright * Requirements and readme * Make a full link for the documentation * Address review comments * Apply suggestions from code review Co-authored-by: Lysandre Debut <lysandre@huggingface.co> * Add symlink * Reorg again * Apply suggestions from code review Co-authored-by: Thomas Wolf <thomwolf@users.noreply.github.com> * Adapt title * Update to new structure * Remove test * Update READMEs Co-authored-by: Lysandre Debut <lysandre@huggingface.co> Co-authored-by: Thomas Wolf <thomwolf@users.noreply.github.com>
19 lines
598 B
Bash
Executable File
19 lines
598 B
Bash
Executable File
#!/usr/bin/env bash
# Fine-tune facebook/mbart-large-cc25 on English->Romanian translation
# via the PyTorch-Lightning finetune.py driver one directory up.
#
# Required environment variables (validated before launch):
#   ENRO_DIR - path to the WMT en-ro dataset directory
#   MAX_LEN  - max source/target sequence length (tokens)
#   BS       - per-device train/eval batch size
#
# Any extra CLI flags passed to this script are forwarded to finetune.py
# via "$@" (e.g. --output_dir, --gpus).

# finetune.py lives in the parent examples directory, so make it importable.
export PYTHONPATH="../":"${PYTHONPATH}"

# Fail fast with a clear message if a required variable is missing/empty.
: "${ENRO_DIR:?ENRO_DIR must point to the en-ro dataset directory}"
: "${MAX_LEN:?MAX_LEN must be set (max sequence length)}"
: "${BS:?BS must be set (batch size)}"

# All expansions are quoted so values containing spaces or glob
# characters cannot be word-split (ShellCheck SC2086).
python finetune.py \
  --learning_rate=3e-5 \
  --fp16 \
  --do_train \
  --val_check_interval=0.25 \
  --adam_eps 1e-06 \
  --num_train_epochs 6 --src_lang en_XX --tgt_lang ro_RO \
  --data_dir "$ENRO_DIR" \
  --max_source_length "$MAX_LEN" --max_target_length "$MAX_LEN" --val_max_target_length "$MAX_LEN" --test_max_target_length "$MAX_LEN" \
  --train_batch_size="$BS" --eval_batch_size="$BS" \
  --task translation \
  --warmup_steps 500 \
  --freeze_embeds \
  --model_name_or_path=facebook/mbart-large-cc25 \
  "$@"