-
Notifications
You must be signed in to change notification settings - Fork 9
/
Copy pathword_encoder.py
50 lines (43 loc) · 1.69 KB
/
word_encoder.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
#pylint: skip-file
import numpy as np
import theano
import theano.tensor as T
from gru import *
from lstm import *
from updates import *
class WordEncoderLayer(object):
    """Stack of recurrent layers (GRU or LSTM) that encodes a word sequence.

    Builds ``len(hidden_size)`` recurrent layers, feeding each layer's
    activation into the next.  After construction the following attributes
    are exposed for downstream layers:

      * ``self.activation``  -- activation of the top (last) layer
      * ``self.hidden_size`` -- output size of the top layer
      * ``self.params``      -- concatenated parameter list of all layers
    """

    def __init__(self, rng, X, in_size, out_size, hidden_size,
                 cell, optimizer, p, is_train, batch_size, mask):
        # rng:         random state shared by all sub-layers
        # X:           input tensor (shape depends on GRULayer/LSTMLayer
        #              contract -- presumably (time, batch, in_size);
        #              TODO confirm against gru.py / lstm.py)
        # hidden_size: list of per-layer hidden sizes, one entry per layer
        # cell:        recurrent cell type, "gru" or "lstm"
        # optimizer:   stored by callers elsewhere; unused in this class
        # p:           dropout rate forwarded to each sub-layer
        # is_train:    train/inference switch forwarded to each sub-layer
        # mask:        sequence mask forwarded to each sub-layer
        self.X = X
        self.in_size = in_size
        self.out_size = out_size
        self.hidden_size_list = hidden_size
        self.cell = cell
        self.drop_rate = p
        self.is_train = is_train
        self.batch_size = batch_size
        self.mask = mask
        self.rng = rng
        self.num_hds = len(hidden_size)
        self.define_layers()

    def define_layers(self):
        """Instantiate the recurrent stack and collect its parameters.

        Raises:
            ValueError: if ``hidden_size`` is empty, or ``cell`` is not
                one of ``"gru"`` / ``"lstm"``.
        """
        if self.num_hds == 0:
            # Without this guard, the attribute assignments after the loop
            # would fail with an obscure NameError on hidden_layer.
            raise ValueError("hidden_size must contain at least one layer size")
        self.layers = []
        self.params = []
        # hidden layers: layer 0 reads self.X, each later layer reads the
        # previous layer's activation.
        for i in range(self.num_hds):  # range (not xrange): works on Py2 and Py3
            if i == 0:
                layer_input = self.X
                shape = (self.in_size, self.hidden_size_list[0])
            else:
                layer_input = self.layers[i - 1].activation
                shape = (self.hidden_size_list[i - 1], self.hidden_size_list[i])
            if self.cell == "gru":
                hidden_layer = GRULayer(self.rng, str(i), shape, layer_input,
                                        self.mask, self.is_train, self.batch_size, self.drop_rate)
            elif self.cell == "lstm":
                hidden_layer = LSTMLayer(self.rng, str(i), shape, layer_input,
                                         self.mask, self.is_train, self.batch_size, self.drop_rate)
            else:
                # Fail fast: the original fell through and later raised a
                # confusing NameError on hidden_layer.
                raise ValueError("unknown cell type: %r (expected 'gru' or 'lstm')" % (self.cell,))
            self.layers.append(hidden_layer)
            self.params += hidden_layer.params
        # Expose the top layer's output and size for the next network stage.
        self.activation = hidden_layer.activation
        self.hidden_size = hidden_layer.out_size