-
Notifications
You must be signed in to change notification settings - Fork 0
/
keras_train.py
38 lines (34 loc) · 1.09 KB
/
keras_train.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import Adam
def some_function_name(data):
    """Pass-through stub for a Keras MNIST-style training step.

    Currently an identity function: returns ``data`` unchanged (the same
    object, not a copy). The original dense-network training pipeline
    (reshape/normalize x_train, one-hot y_train, a 784->512->512->10
    Sequential model with dropout, Adam + categorical crossentropy, one
    epoch of fit) was commented out and has been removed as dead code;
    recover it from version control if training is to be restored.

    Parameters
    ----------
    data : Any
        Expected by the removed training code to be a dict with
        'x_train' and 'y_train' arrays — TODO confirm against callers.

    Returns
    -------
    Any
        The ``data`` argument, unmodified.
    """
    # NOTE(review): deliberate no-op — training is stubbed out. Keeping the
    # same return contract so callers see no behavior change.
    return data