Mirror of https://github.com/huggingface/transformers.git, synced 2025-07-03 12:50:06 +06:00.

* Created using Colaboratory * [examples] reorganize files * remove run_tpu_glue.py as superseded by TPU support in Trainer * Bugfix: int, not tuple * move files around
19 lines
488 B
Bash
Executable File
19 lines
488 B
Bash
Executable File
#!/usr/bin/env bash
# Launch BART fine-tuning on the CNN/DailyMail summarization dataset.
# Output (checkpoints, logs) goes to ./bart_sum under the current directory.
# Any extra CLI arguments are forwarded verbatim to finetune.py.
set -euo pipefail

export OUTPUT_DIR_NAME=bart_sum
export CURRENT_DIR=${PWD}
export OUTPUT_DIR=${CURRENT_DIR}/${OUTPUT_DIR_NAME}

# Make output directory if it doesn't exist
mkdir -p "$OUTPUT_DIR"

# Add parent directory to python path to access lightning_base.py.
# ${PYTHONPATH:-} — default to empty so 'set -u' doesn't abort when unset.
export PYTHONPATH="../../":"${PYTHONPATH:-}"

python finetune.py \
  --data_dir=./cnn-dailymail/cnn_dm \
  --model_name_or_path=bart-large \
  --learning_rate=3e-5 \
  --train_batch_size=4 \
  --eval_batch_size=4 \
  --output_dir="$OUTPUT_DIR" \
  --do_train "$@"