"""Train NAVAR on the CauseMe benchmark experiments and write the score files."""
import argparse
import bz2
import json
import zipfile

import numpy as np

from train_NAVAR import train_NAVAR
parser = argparse.ArgumentParser(description='Train NAVAR on CauseMe data')
parser.add_argument('--experiment', metavar='experiment', type=str,
                    help='name of the experiment (e.g. nonlinear-VAR_N-3_T-300)')
parser.add_argument('--method_sha', metavar='method_sha', type=str,
                    help='SHA identifying the method on the CauseMe platform')
parser.add_argument('--lstm', action='store_true',
                    help='use the LSTM variant of NAVAR instead of the default')
args = parser.parse_args()

experiment = args.experiment
method_sha = args.method_sha
lstm = args.lstm
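
# Example invocation (a sketch; the method SHA below is a placeholder for the
# hash assigned by the CauseMe platform, not a real value):
#   python run_causeme_experiments.py --experiment nonlinear-VAR_N-3_T-300 \
#       --method_sha <your-causeme-method-sha> --lstm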
# Tuned hyperparameters per experiment; the experiment name encodes the number
# of variables (N) and the time series length (T). Each experiment has separate
# settings for the LSTM variant and the default feed-forward variant of NAVAR.
# wd is passed as weight_decay and hl as hidden_layers to train_NAVAR below.
if experiment == 'nonlinear-VAR_N-3_T-300':
    if lstm:
        lambda1 = 0.1370
        batch_size = 64
        wd = 8.952e-4
        learning_rate = 0.0001
        hidden_nodes = 16
        hl = 1
        maxlags = 5
    else:
        lambda1 = 0.1344
        batch_size = 64
        wd = 2.903e-3
        hidden_nodes = 32
        learning_rate = 0.00005
        hl = 1
        maxlags = 5
elif experiment == 'nonlinear-VAR_N-5_T-300':
    if lstm:
        lambda1 = 0.2445
        batch_size = 32
        wd = 2.6756e-4
        learning_rate = 0.00005
        hidden_nodes = 32
        hl = 1
        maxlags = 5
    else:
        lambda1 = 0.1596
        batch_size = 64
        wd = 2.420e-3
        hidden_nodes = 16
        learning_rate = 0.0001
        hl = 1
        maxlags = 5
elif experiment == 'nonlinear-VAR_N-10_T-300':
    if lstm:
        lambda1 = 0.0784
        batch_size = 128
        wd = 7.1237e-4
        hidden_nodes = 64
        learning_rate = 0.0001
        hl = 1
        maxlags = 5
    else:
        lambda1 = 0.2014
        batch_size = 64
        wd = 8.557e-3
        hidden_nodes = 128
        learning_rate = 0.0005
        hl = 1
        maxlags = 5
elif experiment == 'nonlinear-VAR_N-20_T-300':
    if lstm:
        lambda1 = 0.3512
        batch_size = 64
        wd = 1.901e-6
        hidden_nodes = 128
        learning_rate = 0.00005
        hl = 1
        maxlags = 5
    else:
        lambda1 = 0.2434
        batch_size = 64
        wd = 4.508e-3
        hidden_nodes = 32
        learning_rate = 0.0002
        hl = 1
        maxlags = 5
elif experiment == 'TestCLIM_N-40_T-250':
    if lstm:
        lambda1 = 0.2334
        batch_size = 128
        wd = 6.231e-4
        hidden_nodes = 64
        learning_rate = 0.0002
        hl = 1
        maxlags = 2
    else:
        lambda1 = 0.3924
        batch_size = 16
        wd = 4.322e-3
        hidden_nodes = 32
        learning_rate = 0.0002
        hl = 1
        maxlags = 2
elif experiment == 'TestWEATH_N-10_T-2000':
    if lstm:
        lambda1 = 0.0172
        batch_size = 256
        wd = 1.678e-3
        hidden_nodes = 8
        learning_rate = 0.005
        hl = 1
        maxlags = 5
    else:
        lambda1 = 0.0560
        batch_size = 64
        wd = 4.903e-3
        hidden_nodes = 32
        learning_rate = 0.0001
        hl = 1
        maxlags = 5
elif experiment == 'river-runoff_N-12_T-4600':
    if lstm:
        lambda1 = 0.054430975523470315
        batch_size = 128
        wd = 4.465e-4
        hidden_nodes = 128
        learning_rate = 0.001
        hl = 1
        maxlags = 5
    else:
        lambda1 = 0.1708744133515745
        batch_size = 256
        wd = 0.0005092700042638143
        hidden_nodes = 8
        learning_rate = 0.0001
        hl = 1
        maxlags = 5
else:
    raise ValueError(f'unknown experiment: {experiment}')
# prepare the results dictionary in the CauseMe submission format
results = {}
results['method_sha'] = method_sha
results['parameter_values'] = f'maxlags: {maxlags}'
results['model'] = experiment.split('_')[0]
results['experiment'] = experiment
results_file = f'results/{experiment}.json.bz2'
scores = []
# load the data: each experiment is a zip archive of plain-text datasets
file = f'experiments/{experiment}.zip'
with zipfile.ZipFile(file, 'r') as zip_ref:
    datasets = sorted(zip_ref.namelist())
    for dataset in datasets:
        print(f'Training NAVAR on: {dataset}')
        data = np.loadtxt(zip_ref.open(dataset))
        # train NAVAR and keep the flattened causal score matrix
        score_matrix, _, _ = train_NAVAR(data, maxlags=maxlags, hidden_nodes=hidden_nodes, dropout=0, epochs=5000,
                                         learning_rate=learning_rate, batch_size=batch_size, lambda1=lambda1,
                                         val_proportion=0.0, weight_decay=wd, check_every=500, hidden_layers=hl,
                                         normalize=True, split_timeseries=False, lstm=lstm)
        scores.append(score_matrix.flatten())
# save the scores as bz2-compressed JSON
print('Writing results ...')
results['scores'] = np.array(scores).tolist()
results_json = bytes(json.dumps(results), encoding='latin1')
with bz2.BZ2File(results_file, 'w') as mybz2:
    mybz2.write(results_json)
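
# Sanity check (a minimal sketch, not part of the submission flow): the results
# file written above can be read back by mirroring the write path, e.g.
#   with bz2.BZ2File(results_file, 'r') as f:
#       loaded = json.loads(f.read().decode('latin1'))
#   assert loaded['scores'] == results['scores']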