# script_pretrain_mamp.sh
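# Launches distributed MAMP pre-training (4 processes, one per GPU) on six
# skeleton benchmarks: NTU60 (xsub, xview), NTU120 (xset, xsub), PKU-MMD v1,
# and PKU-MMD v2. All runs bind the same master port (11234), so they must
# execute sequentially on a single machine, as written below.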
# Keep each worker single-threaded on the CPU side to avoid oversubscription.
export OMP_NUM_THREADS=1
# Expose GPUs 4-7 to the job; the GPU count must match --nproc_per_node below.
export CUDA_VISIBLE_DEVICES=4,5,6,7
# NTU60 xsub
python -m torch.distributed.launch --nproc_per_node=4 --master_port 11234 main_pretrain.py \
--config ./config/ntu60_xsub_joint/pretrain_mamp_t120_layer8+3_mask90.yaml \
--output_dir ./output_dir/ntu60_xsub_joint/pretrain_mamp_t120_layer8+3_mask90_tau0.80_ep400_noamp \
--log_dir ./output_dir/ntu60_xsub_joint/pretrain_mamp_t120_layer8+3_mask90_tau0.80_ep400_noamp
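# Note: torch.distributed.launch is deprecated in recent PyTorch releases in
# favor of torchrun. A sketch of the equivalent invocation, assuming
# main_pretrain.py reads LOCAL_RANK from the environment rather than a
# --local_rank argument (left commented out so this script runs unchanged):
# torchrun --nproc_per_node=4 --master_port=11234 main_pretrain.py \
#     --config ./config/ntu60_xsub_joint/pretrain_mamp_t120_layer8+3_mask90.yaml \
#     --output_dir ./output_dir/ntu60_xsub_joint/pretrain_mamp_t120_layer8+3_mask90_tau0.80_ep400_noamp \
#     --log_dir ./output_dir/ntu60_xsub_joint/pretrain_mamp_t120_layer8+3_mask90_tau0.80_ep400_noamp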
# NTU60 xview
python -m torch.distributed.launch --nproc_per_node=4 --master_port 11234 main_pretrain.py \
--config ./config/ntu60_xview_joint/pretrain_mamp_t120_layer8+3_mask90.yaml \
--output_dir ./output_dir/ntu60_xview_joint/pretrain_mamp_t120_layer8+3_mask90_tau0.80_ep400_noamp \
--log_dir ./output_dir/ntu60_xview_joint/pretrain_mamp_t120_layer8+3_mask90_tau0.80_ep400_noamp
# NTU120 xset
python -m torch.distributed.launch --nproc_per_node=4 --master_port 11234 main_pretrain.py \
--config ./config/ntu120_xset_joint/pretrain_mamp_t120_layer8+5_mask90.yaml \
--output_dir ./output_dir/ntu120_xset_joint/pretrain_mamp_t120_layer8+5_mask90_tau0.75_ep400_noamp \
--log_dir ./output_dir/ntu120_xset_joint/pretrain_mamp_t120_layer8+5_mask90_tau0.75_ep400_noamp
# NTU120 xsub
python -m torch.distributed.launch --nproc_per_node=4 --master_port 11234 main_pretrain.py \
--config ./config/ntu120_xsub_joint/pretrain_mamp_t120_layer8+5_mask90.yaml \
--output_dir ./output_dir/ntu120_xsub_joint/pretrain_mamp_t120_layer8+5_mask90_tau0.75_ep400_noamp \
--log_dir ./output_dir/ntu120_xsub_joint/pretrain_mamp_t120_layer8+5_mask90_tau0.75_ep400_noamp
# PKU v1
python -m torch.distributed.launch --nproc_per_node=4 --master_port 11234 main_pretrain.py \
--config ./config/pkuv1_xsub_joint/pretrain_mamp_t120_layer8+5_mask90.yaml \
--output_dir ./output_dir/pkuv1_xsub_joint/pretrain_mamp_t120_layer8+5_mask90_tau0.80_ep400_noamp \
--log_dir ./output_dir/pkuv1_xsub_joint/pretrain_mamp_t120_layer8+5_mask90_tau0.80_ep400_noamp
# PKU v2
python -m torch.distributed.launch --nproc_per_node=4 --master_port 11234 main_pretrain.py \
--config ./config/pkuv2_xsub_joint/pretrain_mamp_t120_layer8+5_mask90.yaml \
--output_dir ./output_dir/pkuv2_xsub_joint/pretrain_mamp_t120_layer8+5_mask90_tau0.80_ep400_noamp \
--log_dir ./output_dir/pkuv2_xsub_joint/pretrain_mamp_t120_layer8+5_mask90_tau0.80_ep400_noamp
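# Training progress can typically be monitored with TensorBoard, assuming
# main_pretrain.py writes event files to the --log_dir paths above:
# tensorboard --logdir ./output_dir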