conda create --name t5-stable python=3.7
conda activate t5-stable
pip install torch==1.7.0+cu101 torchvision==0.8.1+cu101 torchaudio==0.7.0 -f https://download.pytorch.org/whl/torch_stable.html
pip install pytorch-lightning==0.9.0
git clone https://github.com/huggingface/transformers
cd transformers
pip install .
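With the environment in place, a quick sanity check is to load one of the UnifiedQA checkpoints straight from the Hugging Face Hub and answer a single question. The snippet below is independent of the repo's `models/t5/predict.py` and only verifies that torch and transformers are installed correctly; it follows UnifiedQA's usual convention of lowercased input with a literal `\n` separating the question from the answer choices.

```python
# Sanity check: load a UnifiedQA checkpoint and answer one question.
# This is not models/t5/predict.py; it only confirms the install works.
from transformers import T5ForConditionalGeneration, T5Tokenizer

model_name = "allenai/unifiedqa-t5-base"
tokenizer = T5Tokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)

# Question and choices joined by UnifiedQA's literal "\n" separator, lowercased.
question = "which is heavier? \\n (a) a ton of feathers (b) a kilogram of steel"
input_ids = tokenizer(question, return_tensors="pt").input_ids
output_ids = model.generate(input_ids, max_length=32)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```

The commands below run zero-shot prediction and evaluation with the repo's own scripts.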
python models/t5/predict.py \
--data_dir data_dir/ \
--model_name_or_path allenai/unifiedqa-t5-base \
--output_dir models/t5/outputs/unifiedqa-base-zeroshot \
--max_seq_length 128 \
--eval_batch_size 4 \
--model_parallel True
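predict.py itself is part of this repo and is not reproduced here; the sketch below only illustrates how --max_seq_length and --eval_batch_size typically map onto batched generation with transformers (tokenize with truncation at 128 tokens, generate in batches of 4, decode back to text). The actual script may differ; the function name and defaults are illustrative.

```python
# Hedged sketch of a batched prediction loop; not the repo's predict.py.
# Assumes inputs are UnifiedQA-style strings and predictions are the decoded generations.
import torch
from transformers import T5ForConditionalGeneration, T5Tokenizer

def predict(texts, model_name="allenai/unifiedqa-t5-base",
            max_seq_length=128, eval_batch_size=4, device="cuda"):
    tokenizer = T5Tokenizer.from_pretrained(model_name)
    model = T5ForConditionalGeneration.from_pretrained(model_name).to(device).eval()
    predictions = []
    for start in range(0, len(texts), eval_batch_size):
        batch = texts[start:start + eval_batch_size]
        enc = tokenizer(batch, padding=True, truncation=True,
                        max_length=max_seq_length, return_tensors="pt").to(device)
        with torch.no_grad():
            out = model.generate(**enc, max_length=32)
        predictions.extend(tokenizer.batch_decode(out, skip_special_tokens=True))
    return predictions
```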
python models/t5/eval.py \
--ground_truth_labels_dir data_dir/ \
--predicted_labels_dir models/t5/outputs/unifiedqa-base-zeroshot
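eval.py scores the predictions written to the output directory against the gold labels in data_dir/. The exact file layout is not shown in this section, so the following is only a sketch of an exact-match accuracy computation over hypothetical one-answer-per-line files; the repo's script may read a different format.

```python
# Hedged sketch of an exact-match accuracy check; the file names below are
# hypothetical, not the repo's actual layout under data_dir/ or the outputs directory.
def exact_match_accuracy(gold_path, pred_path):
    with open(gold_path) as f:
        gold = [line.strip().lower() for line in f]
    with open(pred_path) as f:
        pred = [line.strip().lower() for line in f]
    assert len(gold) == len(pred), "gold and predicted files must align line by line"
    return sum(g == p for g, p in zip(gold, pred)) / len(gold)

print(exact_match_accuracy(
    "data_dir/test_labels.txt",                                        # hypothetical
    "models/t5/outputs/unifiedqa-base-zeroshot/predictions.txt"))      # hypothetical
```

The same predict/eval pair is repeated below for the large, 3B, and 11B checkpoints.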
python models/t5/predict.py \
--data_dir data_dir/ \
--model_name_or_path allenai/unifiedqa-t5-large \
--output_dir models/t5/outputs/unifiedqa-large-zeroshot \
--max_seq_length 128 \
--eval_batch_size 4 \
--model_parallel True
python models/t5/eval.py \
--ground_truth_labels_dir data_dir/ \
--predicted_labels_dir models/t5/outputs/unifiedqa-large-zeroshot
python models/t5/predict.py \
--data_dir data_dir/ \
--model_name_or_path allenai/unifiedqa-t5-3b \
--output_dir models/t5/outputs/unifiedqa-3b-zeroshot \
--max_seq_length 128 \
--eval_batch_size 4 \
--model_parallel True
python models/t5/eval.py \
--ground_truth_labels_dir data_dir/ \
--predicted_labels_dir models/t5/outputs/unifiedqa-3b-zeroshot
python models/t5/predict.py \
--data_dir data_dir/ \
--model_name_or_path allenai/unifiedqa-t5-11b \
--output_dir models/t5/outputs/unifiedqa-11b-zeroshot \
--max_seq_length 128 \
--eval_batch_size 4 \
--model_parallel True
python models/t5/eval.py \
--ground_truth_labels_dir data_dir/ \
--predicted_labels_dir models/t5/outputs/unifiedqa-11b-zeroshot
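Every command above passes --model_parallel True, which matters mainly for the 3B and 11B checkpoints, since those generally do not fit on a single GPU. Newer transformers releases ship a naive model-parallel mode for T5 via parallelize(), which spreads the encoder and decoder blocks across all visible GPUs; the sketch below shows that mechanism, though the repo's scripts may implement the flag differently.

```python
# Hedged sketch: transformers' naive model parallelism for T5 (parallelize()
# shards the blocks across cuda:0, cuda:1, ...). Requires a transformers
# version that includes T5 parallelize support.
from transformers import T5ForConditionalGeneration, T5Tokenizer

model_name = "allenai/unifiedqa-t5-11b"
tokenizer = T5Tokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)
model.parallelize()  # split layers across all visible GPUs

# Inputs go to the first device (typically cuda:0).
inputs = tokenizer("which is heavier? \\n (a) a ton of feathers (b) a kilogram of steel",
                   return_tensors="pt").to("cuda:0")
outputs = model.generate(**inputs, max_length=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

The commands below fine-tune the UnifiedQA checkpoints on the data in data_dir/ and then re-run prediction and evaluation from the resulting checkpoints.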
python models/t5/finetune.py \
--data_dir data_dir/ \
--output_dir models/t5/checkpoints/unifiedqa-base \
--model_name_or_path allenai/unifiedqa-t5-base \
--tokenizer_name_or_path allenai/unifiedqa-t5-base \
--learning_rate 3e-4 \
--max_seq_length 128 \
--train_batch_size 16 \
--gradient_accumulation_steps 4 \
--model_parallel True
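With --train_batch_size 16 and --gradient_accumulation_steps 4, the effective batch size is 64. Since pytorch-lightning 0.9.0 is pinned above, finetune.py is presumably built around a LightningModule; the sketch below only shows how --learning_rate and --gradient_accumulation_steps would typically map onto a Lightning 0.9-style setup. It is not the repo's script, and the dataset/dataloader wiring is omitted.

```python
# Hedged sketch of a Lightning 0.9-style fine-tuning module; not the repo's finetune.py.
# The batch field names are hypothetical and the dataloader construction is omitted.
import pytorch_lightning as pl
import torch
from transformers import T5ForConditionalGeneration, T5Tokenizer

class UnifiedQAFineTuner(pl.LightningModule):
    def __init__(self, model_name="allenai/unifiedqa-t5-base", learning_rate=3e-4):
        super().__init__()
        self.model = T5ForConditionalGeneration.from_pretrained(model_name)
        self.tokenizer = T5Tokenizer.from_pretrained(model_name)
        self.learning_rate = learning_rate

    def training_step(self, batch, batch_idx):
        # batch is assumed to hold tokenized inputs and target token ids
        outputs = self.model(input_ids=batch["input_ids"],
                             attention_mask=batch["attention_mask"],
                             labels=batch["labels"])
        return {"loss": outputs[0]}  # first element is the LM loss

    def configure_optimizers(self):
        return torch.optim.AdamW(self.model.parameters(), lr=self.learning_rate)

trainer = pl.Trainer(gpus=1, accumulate_grad_batches=4, max_epochs=3)  # epoch count illustrative
# trainer.fit(UnifiedQAFineTuner(), train_dataloader)  # dataloader construction omitted
```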
Test (from fine-tuned base)
python models/t5/predict.py \
--data_dir data_dir/ \
--tokenizer_name_or_path allenai/unifiedqa-t5-base \
--checkpoint_dir models/t5/checkpoints/unifiedqa-base \
--output_dir models/t5/outputs/unifiedqa-base-finetune \
--max_seq_length 128 \
--eval_batch_size 4 \
--model_parallel True
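When testing from a fine-tuned model, the weights are taken from --checkpoint_dir while the tokenizer is still loaded by its Hub name, which is why both flags appear together. Assuming the checkpoint directory holds weights saved with save_pretrained(), the loading pattern looks roughly like this:

```python
# Hedged sketch: load fine-tuned weights from a local checkpoint directory while
# keeping the original UnifiedQA tokenizer. Assumes save_pretrained()-style output.
from transformers import T5ForConditionalGeneration, T5Tokenizer

tokenizer = T5Tokenizer.from_pretrained("allenai/unifiedqa-t5-base")
model = T5ForConditionalGeneration.from_pretrained("models/t5/checkpoints/unifiedqa-base")

inputs = tokenizer("which is heavier? \\n (a) a ton of feathers (b) a kilogram of steel",
                   return_tensors="pt")
outputs = model.generate(**inputs, max_length=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```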
python models/t5/eval.py \
--ground_truth_labels_dir data_dir/ \
--predicted_labels_dir models/t5/outputs/unifiedqa-base-finetune
sbatch models/t5/finetune_large.sh
python models/t5/finetune.py \
--data_dir data_dir/ \
--output_dir models/t5/checkpoints/unifiedqa-large \
--model_name_or_path allenai/unifiedqa-t5-large \
--tokenizer_name_or_path allenai/unifiedqa-t5-large \
--learning_rate 3e-4 \
--max_seq_length 128 \
--train_batch_size 16 \
--gradient_accumulation_steps 4 \
--model_parallel True
Test (from fine-tuned large)
python models/t5/predict.py \
--data_dir data_dir/ \
--tokenizer_name_or_path allenai/unifiedqa-t5-large \
--checkpoint_dir models/t5/checkpoints/unifiedqa-large \
--output_dir models/t5/outputs/unifiedqa-large-finetune \
--max_seq_length 128 \
--eval_batch_size 4 \
--model_parallel True
python models/t5/eval.py \
--ground_truth_labels_dir data_dir/ \
--predicted_labels_dir models/t5/outputs/unifiedqa-large-finetune
python models/t5/finetune.py \
--data_dir data_dir/ \
--output_dir models/t5/checkpoints/unifiedqa-3b \
--model_name_or_path allenai/unifiedqa-t5-3b \
--tokenizer_name_or_path allenai/unifiedqa-t5-3b \
--learning_rate 3e-4 \
--max_seq_length 128 \
--train_batch_size 16 \
--gradient_accumulation_steps 4 \
--model_parallel True
Test (from fine-tuned 3B)
python models/t5/predict.py \
--data_dir data_dir/ \
--tokenizer_name_or_path allenai/unifiedqa-t5-3b \
--checkpoint_dir models/t5/checkpoints/unifiedqa-3b \
--output_dir models/t5/outputs/unifiedqa-3b-finetune \
--max_seq_length 128 \
--eval_batch_size 4 \
--model_parallel True
python models/t5/eval.py \
--ground_truth_labels_dir data_dir/ \
--predicted_labels_dir models/t5/outputs/unifiedqa-3b-finetune