This repository has been archived by the owner on Jan 2, 2021. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmodels.py
31 lines (28 loc) · 1.47 KB
/
models.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
from keras.layers import Dense, Flatten, Embedding, Conv2D, LSTM
from keras.models import Sequential
def build_dense_nn(word_vectors, train_embeddings=False, sequence_length=60, num_classes=5):
    """Build and compile a feed-forward (dense) text classifier.

    The model embeds token-id sequences using the supplied pretrained word
    vectors, flattens the embedded sequence, and classifies it through two
    ReLU hidden layers and a softmax output.

    Args:
        word_vectors: 2-D array of shape (vocab_size, embedding_dim) used to
            initialize the Embedding layer weights.
        train_embeddings: Whether the embedding weights are fine-tuned during
            training (False keeps the pretrained vectors frozen).
        sequence_length: Fixed length of the input token-id sequences.
        num_classes: Number of output classes for the softmax layer.

    Returns:
        A compiled keras Sequential model (adam / categorical_crossentropy).
    """
    model = Sequential()
    model.add(Embedding(word_vectors.shape[0], word_vectors.shape[1],
                        input_length=sequence_length,
                        weights=[word_vectors], trainable=train_embeddings))
    model.add(Flatten())
    model.add(Dense(512, activation='relu'))
    model.add(Dense(64, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    # summary() prints the table itself and returns None; wrapping it in
    # print() (as the original did) also emitted a spurious "None" line.
    model.summary()
    return model
def build_cnn(word_vectors, train_embeddings = False, sequence_length = 60, num_classes = 5):
    # Unimplemented stub: intended to mirror build_dense_nn / build_lstm but
    # with a convolutional architecture (Conv2D is already imported above).
    # Currently returns None; callers must not use this yet.
    # TODO: build CNN based model
    pass
def build_lstm(word_vectors, train_embeddings=False, sequence_length=60, num_classes=5):
    """Build and compile a stacked-LSTM text classifier.

    The model embeds token-id sequences using the supplied pretrained word
    vectors, runs them through two LSTM layers (the first returning the full
    sequence so the second can consume it), and classifies via softmax.

    Args:
        word_vectors: 2-D array of shape (vocab_size, embedding_dim) used to
            initialize the Embedding layer weights.
        train_embeddings: Whether the embedding weights are fine-tuned during
            training (False keeps the pretrained vectors frozen).
        sequence_length: Fixed length of the input token-id sequences.
        num_classes: Number of output classes for the softmax layer.

    Returns:
        A compiled keras Sequential model (adam / categorical_crossentropy).
    """
    model = Sequential()
    model.add(Embedding(word_vectors.shape[0], word_vectors.shape[1],
                        input_length=sequence_length,
                        weights=[word_vectors], trainable=train_embeddings))
    model.add(LSTM(128, dropout=0.1, recurrent_dropout=0.1, return_sequences=True))
    model.add(LSTM(32, dropout=0.1, recurrent_dropout=0.1))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    # summary() prints the table itself and returns None; wrapping it in
    # print() (as the original did) also emitted a spurious "None" line.
    model.summary()
    return model