-
Notifications
You must be signed in to change notification settings - Fork 544
/
bert_qa.yaml
42 lines (36 loc) · 1.11 KB
/
bert_qa.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
# SkyPilot task: fine-tune bert-base-uncased on SQuAD (question answering),
# logging to Weights & Biases and checkpointing to a mounted cloud bucket.
name: bert_qa

resources:
  accelerators: V100:1

# Assume your working directory is under `~/transformers`.
# To make this example work, please run the following command:
# git clone https://github.com/huggingface/transformers.git ~/transformers
workdir: ~/transformers

file_mounts:
  # Bucket mounted at /checkpoint so training checkpoints survive preemption
  # and can be resumed from any node.
  /checkpoint:
    name: # NOTE: Fill in your bucket name
    mode: MOUNT

setup: |
  # Fill in your wandb key: copy from https://wandb.ai/authorize
  echo export WANDB_API_KEY=[YOUR-WANDB-API-KEY] >> ~/.bashrc
  git checkout v4.18.0
  pip install -e .
  cd examples/pytorch/question-answering/
  pip install -r requirements.txt
  pip install wandb

run: |
  cd examples/pytorch/question-answering/
  # --save_steps / --save_total_limit write rolling checkpoints into the
  # mounted bucket; --run_name ties the W&B run to this SkyPilot run.
  python run_qa.py \
    --model_name_or_path bert-base-uncased \
    --dataset_name squad \
    --do_train \
    --do_eval \
    --per_device_train_batch_size 12 \
    --learning_rate 3e-5 \
    --num_train_epochs 50 \
    --max_seq_length 384 \
    --doc_stride 128 \
    --report_to wandb \
    --run_name $SKYPILOT_RUN_ID \
    --output_dir /checkpoint/bert_qa/ \
    --save_total_limit 10 \
    --save_steps 1000