#!/bin/bash
#SBATCH -p normal
#SBATCH -N 1
#SBATCH --exclusive
#SBATCH --mem=100G
#SBATCH -J data_wwm_128
#SBATCH -o data_wwm_128_rest.out
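
# Sbatch_Data_wwm_128_pre_rest.sbatch
# Build whole-word-masking (WWM) pretraining TFRecords at max_seq_length=128
# for the remaining raw corpus splits. Submit with:
#   sbatch Sbatch_Data_wwm_128_pre_rest.sbatch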
# One invocation of create_ernie_pretraining_data.py per corpus split; only the
# split name differs between runs, so the common paths are factored out here.
VOCAB_FILE=/public/home/zzx6320/lh/Projects/bert/models/chinese_L-12_H-768_A-12/chinese_L-12_H-768_A-12/vocab.txt
INPUT_DIR=/work1/zzx6320/lh/Projects/Data/Pretraining_Raw_New
OUTPUT_DIR=/work1/zzx6320/lh/Projects/bert/data/cscd_pre_wwm

for SPLIT in V X TB TD TE TF TG TH TJ TK TL TM TN TP TQ TS TU TV; do
    python create_ernie_pretraining_data.py \
        --vocab_file "${VOCAB_FILE}" \
        --output_file "${OUTPUT_DIR}/pre_training_${SPLIT}_128_wwm.tfrecord" \
        --input_file "${INPUT_DIR}/${SPLIT}.txt" \
        --max_seq_length 128
done