-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.py
98 lines (84 loc) · 3.36 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
from src.params import Params
from src.optimizer import OptimizerGA
def generate_nmist_dataset():
    """Load MNIST and prepare it for a dense network.

    Images are flattened from 28x28 to 784-dim vectors, scaled to float32
    in [0, 1], and labels are one-hot encoded over the 10 digit classes.

    Returns:
        ((x_train, y_train), (x_test, y_test)) where x_* have shape
        (n_samples, 784) and y_* have shape (n_samples, 10).
    """
    import keras
    from keras.datasets import mnist

    outputs = 10  # number of classes (digits 0-9)

    # the data, split between train and test sets
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    # Flatten each 28x28 image; -1 lets NumPy infer the sample count
    # instead of hard-coding 60000/10000, so custom/truncated splits work.
    x_train = x_train.reshape(-1, 784).astype('float32') / 255
    x_test = x_test.reshape(-1, 784).astype('float32') / 255

    print(x_train.shape[0], 'train samples')
    print(x_test.shape[0], 'test samples')

    # convert class vectors to binary class matrices (one-hot)
    y_train = keras.utils.to_categorical(y_train, outputs)
    y_test = keras.utils.to_categorical(y_test, outputs)
    return (x_train, y_train), (x_test, y_test)
# Search space explored by the genetic optimizer: one entry per tunable
# hyperparameter, each mapping to its candidate values.
variable_params = dict(
    hidden_layers=range(1, 5),                      # 1 to 4 hidden layers
    neurons=[2 ** exp for exp in range(6, 12)],     # 64, 128, ..., 2048 units
    dropout=[tenth / 10 for tenth in range(1, 10)], # 0.1 to 0.9
    activation_functions=['relu', 'tanh', 'sigmoid'],
)
def generate_model_ann(params):
    """Build and compile a dense feed-forward MNIST classifier from *params*.

    *params* supplies 'neurons', 'activation_functions', 'dropout' and
    'hidden_layers'; the network stacks that many Dense+Dropout blocks
    between the 784-dim input and the 10-way softmax output.
    """
    import keras
    from keras.models import Sequential
    from keras.layers import Dense, Dropout
    from keras.optimizers import RMSprop

    n_inputs = 784   # 28x28 images, flattened
    n_outputs = 10   # digit classes

    # Hoist hyperparameter lookups for readability.
    units = params['neurons']
    activation = params['activation_functions']
    drop_rate = params['dropout']

    net = Sequential()
    # First hidden layer carries the input shape.
    net.add(Dense(units, activation=activation, input_shape=(n_inputs,)))
    net.add(Dropout(drop_rate))
    # Remaining hidden layers (hidden_layers - 1 of them).
    for _ in range(params['hidden_layers'] - 1):
        net.add(Dense(units, activation=activation))
        net.add(Dropout(drop_rate))
    net.add(Dense(n_outputs, activation='softmax'))

    net.compile(loss='categorical_crossentropy',
                optimizer=RMSprop(),
                metrics=['accuracy'])
    return net
def sample_nmist(size, iterations, epochs, path):
    """Example: run the GA parameter optimizer serially on MNIST.

    Evolves a population of *size* candidate networks for *iterations*
    generations, training each for *epochs* epochs, then saves the final
    population to *path*.
    """
    import numpy as np

    np.random.seed(1)  # reproducible GA runs

    train, test = generate_nmist_dataset()
    params = Params(variable_params)
    print(params.optimize_params)
    print(len(train))

    optimizer = OptimizerGA(train, test, params, generate_model_ann)
    optimizer.verbose_train = 0
    optimizer.epochs_train = epochs
    optimizer.generate_population(size)

    for generation in range(iterations):
        print("=> Generación ", generation)
        # First generation only evaluates; later ones evolve.
        optimizer.evolve(generation == 0)
        print(optimizer.population_score())

    print(optimizer.population)
    optimizer.population_save(path)
def sample_nmist_parallel(size, iterations, epochs, path):
    """Example: run the GA parameter optimizer on MNIST with MPI workers.

    Same as sample_nmist but evolution is distributed via evolve_mpi;
    only the master rank reports scores and saves the population.
    """
    import numpy as np

    np.random.seed(1)  # reproducible GA runs
    is_master = False

    train, test = generate_nmist_dataset()
    params = Params(variable_params)

    optimizer = OptimizerGA(train, test, params, generate_model_ann)
    optimizer.verbose_train = 0
    optimizer.epochs_train = epochs
    optimizer.generate_population(size)

    for generation in range(iterations):
        print("=> Generación ", generation)
        # evolve_mpi reports which MPI role this process played.
        role = optimizer.evolve_mpi(generation == 0, best_prune=0.5)
        is_master = role == "master"
        if is_master:
            print(optimizer.population_score())

    if is_master:
        print(optimizer.population)
        optimizer.population_save(path)
    else:
        print("Slave destroy.")