-
Notifications
You must be signed in to change notification settings - Fork 227
/
Copy pathGRU.py
45 lines (34 loc) · 1.51 KB
/
GRU.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Import useful packages
import tensorflow as tf
def GRU(Input, max_time, n_input, gru_size, keep_prob, weights_1, biases_1, weights_2, biases_2):
    '''
    Single-layer GRU classifier head for EEG signals (TensorFlow 1.x graph mode).

    Args:
        Input: The flattened input EEG signals; reshaped here to [-1, max_time, n_input]
        max_time: The number of unfolded time slices of the GRU model
        n_input: The input signal size at one time step
        gru_size: The number of GRU units inside the GRU cell
        keep_prob: The keep probability of dropout
        weights_1: The weights of the first fully-connected layer
        biases_1: The biases of the first fully-connected layer
        weights_2: The weights of the second fully-connected layer
        biases_2: The biases of the second fully-connected layer
    Returns:
        FC_2: Final softmax prediction of the GRU model
        FC_1: Extracted features from the first fully-connected layer
    '''
    # Unfold the flat input into a [batch, time, feature] tensor for the RNN.
    sequence = tf.reshape(Input, [-1, max_time, n_input])

    # One-layer GRU with dropout applied to the cell inputs; only the final
    # hidden state is consumed downstream, so the per-step outputs are discarded.
    cell = tf.contrib.rnn.GRUCell(num_units=gru_size)
    dropped_cell = tf.contrib.rnn.DropoutWrapper(cell=cell, input_keep_prob=keep_prob)
    _, final_state = tf.nn.dynamic_rnn(cell=dropped_cell, inputs=sequence, dtype=tf.float32)

    # First fully-connected layer: affine -> batch norm -> softplus -> dropout.
    # NOTE(review): training=True is hard-coded, so batch statistics are used
    # even at evaluation time — confirm this is intentional for this model.
    FC_1 = tf.matmul(final_state, weights_1) + biases_1
    FC_1 = tf.layers.batch_normalization(FC_1, training=True)
    FC_1 = tf.nn.softplus(FC_1)
    FC_1 = tf.nn.dropout(FC_1, keep_prob)

    # Second fully-connected layer producing class probabilities.
    FC_2 = tf.nn.softmax(tf.matmul(FC_1, weights_2) + biases_2)

    return FC_2, FC_1