-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathrun_job.sh
116 lines (92 loc) · 2.52 KB
/
run_job.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
# ---------------------------------------------------------------------------
# Environment and hyper-parameter configuration for TMixADA runs on SST-2.
# NOTE(review): script has no shebang / `set -euo pipefail`; consider adding
# strict mode so a failed training run does not silently proceed to eval.
# ---------------------------------------------------------------------------

# Train only on GPUs 2 and 3.
export CUDA_VISIBLE_DEVICES=2,3

# Clean SST-2 data plus adversarial variants generated against RoBERTa.
DATA_PATH="/home/sichenglei/sst-2"
PWWS_DATA_PATH="/home/sichenglei/sst-2/pwws-rbt"
TF_DATA_PATH="/home/sichenglei/sst-2/textfooler-rbt"
# DATA_PATH=/home/sichenglei/contrast_imdb_ori/textfooler
# DATA_PATH=/home/sichenglei/sst-2

# Pretrained base model to fine-tune.
MODEL_PATH="/home/sichenglei/roberta-base"
# ALPHA=0.4

# Shared training schedule.
EPOCHS=5
# OUTPUT_PATH=/data2/private/clsi/bert-imdb-iterADASmix-textfooler
SEQLEN=128
# mix_option: 0: no mix, 1: TMix, 2: SimMix
## RoBERTa + TMixADA (iterative) adversarial training against PWWS on SST-2.
# TMix interpolation at layers {7, 9, 12} with alpha=2.0; 300 adversarial
# examples are generated with the PWWS attacker (iterative mode).
# Fix: all variable expansions quoted (ShellCheck SC2086) and normalized to
# the ${VAR} form already used for DATA_PATH/MODEL_PATH.
python run_simMix.py \
  --model_type roberta \
  --mix_type tmix \
  --iterative true \
  --attacker pwws \
  --num_adv 300 \
  --task_name sst-2 \
  --data_dir "${DATA_PATH}" \
  --model_name_or_path "${MODEL_PATH}" \
  --output_dir /data1/private/clsi/rbt-sst-tmixada-pwws-iterative \
  --max_seq_length "${SEQLEN}" \
  --mix-layers-set 7 9 12 \
  --alpha 2.0 \
  --num_labels 2 \
  --do_lower_case \
  --per_gpu_train_batch_size 16 \
  --per_gpu_eval_batch_size 64 \
  --gradient_accumulation_steps 1 \
  --learning_rate 3e-5 \
  --weight_decay 0.0 \
  --adam_epsilon 1e-8 \
  --max_grad_norm 1.0 \
  --num_train_epochs "${EPOCHS}" \
  --warmup_steps 0 \
  --logging_steps 200 \
  --eval_all_checkpoints \
  --seed 2020 \
  --overwrite_output_dir \
  --overwrite_cache \
  --do_train \
  --fp16
# --second_data_dir $SECOND_DATA_PATH \
# --third_data_dir $THIRD_DATA_PATH \
# Evaluate robustness of the checkpoint trained above under the PWWS attack.
# Fix: point at the output of the training run in this script
# (.../rbt-sst-tmixada-pwws-iterative on /data1), not the stale
# non-iterative /data2 path — the textfooler section below already
# evaluates its own --output_dir this way.
python attackEval.py \
  --model_name_or_path /data1/private/clsi/rbt-sst-tmixada-pwws-iterative/final-checkpoint \
  --model_type roberta \
  --attacker pwws \
  --data_dir /home/sichenglei/sst-2/test.tsv \
  --max_seq_len "${SEQLEN}"
## RoBERTa + TMixADA (iterative) adversarial training against TextFooler on SST-2.
# Same recipe as the PWWS run but with the TextFooler attacker, 1500
# adversarial examples per iteration, and alpha=0.4.
# Fix: all variable expansions quoted (ShellCheck SC2086) and normalized to
# the ${VAR} form already used for DATA_PATH/MODEL_PATH.
python run_simMix.py \
  --model_type roberta \
  --mix_type tmix \
  --iterative true \
  --attacker textfooler \
  --num_adv 1500 \
  --task_name sst-2 \
  --data_dir "${DATA_PATH}" \
  --model_name_or_path "${MODEL_PATH}" \
  --output_dir /data1/private/clsi/rbt-sst-tmixada-textfooler-iterative \
  --max_seq_length "${SEQLEN}" \
  --mix-layers-set 7 9 12 \
  --alpha 0.4 \
  --num_labels 2 \
  --do_lower_case \
  --per_gpu_train_batch_size 16 \
  --per_gpu_eval_batch_size 64 \
  --gradient_accumulation_steps 1 \
  --learning_rate 3e-5 \
  --weight_decay 0.0 \
  --adam_epsilon 1e-8 \
  --max_grad_norm 1.0 \
  --num_train_epochs "${EPOCHS}" \
  --warmup_steps 0 \
  --logging_steps 200 \
  --eval_all_checkpoints \
  --seed 2020 \
  --overwrite_output_dir \
  --overwrite_cache \
  --do_train \
  --fp16
# --second_data_dir $SECOND_DATA_PATH \
# --third_data_dir $THIRD_DATA_PATH \
# Evaluate robustness of the TextFooler-trained checkpoint under the
# TextFooler attack (uses the --output_dir of the training run above).
# Fix: quote the $SEQLEN expansion (ShellCheck SC2086).
python attackEval.py \
  --model_name_or_path /data1/private/clsi/rbt-sst-tmixada-textfooler-iterative/final-checkpoint \
  --model_type roberta \
  --attacker textfooler \
  --data_dir /home/sichenglei/sst-2/test.tsv \
  --max_seq_len "${SEQLEN}"