run.py
import tensorflow as tf
import numpy as np
from mcqrnn import generate_example, train_step
from mcqrnn import (
    TiltedAbsoluteLoss,
    Mcqrnn,
    DataTransformer,
)
# Example settings
EPOCHS = 2000
LEARNING_RATE = 0.05
TAUS = [0.3, 0.4, 0.5, 0.6, 0.7]
N_SAMPLES = 500
OUT_FEATURES = 10
DENSE_FEATURES = 10
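# Draw synthetic training and test sets from the package's example generator.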
x_train, y_train = generate_example(N_SAMPLES)
x_test, y_test = generate_example(N_SAMPLES)
taus = np.array(TAUS)
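# Transform the training inputs and targets together with the quantile levels
# into the stacked form the model expects.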
data_transformer = DataTransformer(
    x=x_train,
    taus=taus,
    y=y_train,
)
x_train_transform, y_train_transform, taus_transform = data_transformer()
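# Build the MCQRNN regressor (monotone composite quantile regression neural
# network) with sigmoid hidden activations.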
mcqrnn_regressor = Mcqrnn(
    out_features=OUT_FEATURES,
    dense_features=DENSE_FEATURES,
    activation=tf.nn.sigmoid,
)
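# Adam optimizer and the tilted absolute (pinball) loss used for quantile regression.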
optimizer = tf.keras.optimizers.Adam(learning_rate=LEARNING_RATE)
tilted_absolute_loss = TiltedAbsoluteLoss(tau=taus_transform)
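# Full-batch training: one gradient step over the transformed training set per epoch.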
for epoch in range(EPOCHS):
    train_loss = train_step(
        model=mcqrnn_regressor,
        inputs=x_train_transform,
        output=y_train_transform,
        tau=taus_transform,
        loss_func=tilted_absolute_loss,
        optimizer=optimizer,
    )
    if epoch % 1000 == 0:
        print(epoch, train_loss)
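# Transform the test inputs with the same quantile levels and predict conditional quantiles.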
x_test_transform, taus_transform = data_transformer.transform(x=x_test, input_taus=taus)
y_test_predicted = mcqrnn_regressor(
    inputs=x_test_transform,
    tau=taus_transform,
)
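# Collect the flat predictions into a (len(TAUS), N_SAMPLES) array, one row per
# quantile level, assuming the transformer's sample-major ordering.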
y_test_predicted_reshaped = y_test_predicted.numpy().reshape(N_SAMPLES, len(TAUS)).T