demo-tf-native-lstm-lowmem.12ax.config
#!returnn.py
# kate: syntax python;
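# Usage sketch (assumes the usual RETURNN repo layout, with rnn.py at the repo root and
# this config under demos/): python3 rnn.py demos/demo-tf-native-lstm-lowmem.12ax.config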
use_tensorflow = True
import os
demo_name, _ = os.path.splitext(__file__)
print("Hello, experiment: %s" % demo_name)
task = "train"
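# Task12AXDataset generates 12AX toy sequences on the fly; num_seqs is the number of
# sequences per epoch, and fixed_random_seed keeps the dev set reproducible.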
train = {"class": "Task12AXDataset", "num_seqs": 1000}
dev = {"class": "Task12AXDataset", "num_seqs": 100, "fixed_random_seed": 1}
num_inputs = 9
num_outputs = 2
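# Batching: shuffle sequences, limit a batch to 5000 frames and at most 10 sequences;
# chunking "200:200" cuts sequences into chunks of size 200 with step 200 (non-overlapping).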
batching = "random"
batch_size = 5000
max_seqs = 10
chunking = "200:200"
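# Network: one recurrent layer with the NativeLstmLowMem unit (a native LSTM kernel variant
# that trades extra compute for lower memory usage), dropout 0.1 on the layer input,
# followed by a softmax output layer trained with cross-entropy loss.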
network = {
"fw0": {"class": "rec", "unit": "NativeLstmLowMem", "dropout": 0.1, "n_out": 10},
"output": {"class": "softmax", "loss": "ce", "from": ["fw0"]}
}
# training
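# Adam with a fixed learning rate of 0.01; train for up to 100 epochs, saving a checkpoint after every epoch.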
optimizer = {"class": "adam"}
learning_rate = 0.01
model = "/tmp/%s/returnn/%s/model" % (os.getlogin(), demo_name) # https://github.com/tensorflow/tensorflow/issues/6537
num_epochs = 100
save_interval = 1
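# Adds TF check-numerics ops to the graph so NaNs/Infs fail fast (slower; intended as a debugging aid).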
debug_add_check_numerics_ops = True
# log
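# verbosity 3 is a moderate level; 5 (commented out below) gives much more detailed output.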
log_verbosity = 3
#log_verbosity = 5
# example out:
"""
net params #: 822
net trainable params: [W_in_data_fw0, W_re_fw0, b_fw0, W_in_fw0_output, b_output]
start training at epoch 1 and batch 0
epoch 1 score: 0.23992684698 elapsed: 0:00:04 dev: score 0.0660622526516 error 0.024799694773
epoch 2 score: 0.0205483678894 elapsed: 0:00:04 dev: score 0.01160195778 error 0.00114460129721
epoch 3 score: 0.0103542867688 elapsed: 0:00:04 dev: score 0.013109387309 error 0.00343380389164
epoch 4 score: 0.00563815106516 elapsed: 0:00:04 dev: score 0.00576270765951 error 0.00171690194582
"""