-
Notifications
You must be signed in to change notification settings - Fork 50
/
mlp_models.py
79 lines (62 loc) · 2.43 KB
/
mlp_models.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
from typing import Any, Dict, Optional
from keras.layers import Dense, Input
from keras.models import Model
def dynamic_classifier(
    hidden_layer_sizes=(10,),
    meta: Optional[Dict[str, Any]] = None,
    compile_kwargs: Optional[Dict[str, Any]] = None,
) -> Model:
    """Creates a basic MLP classifier dynamically choosing binary/multiclass
    classification loss and output activations.

    Parameters
    ----------
    hidden_layer_sizes : tuple of int
        Number of units in each hidden (ReLU) Dense layer.
    meta : dict
        Wrapper-provided meta parameters; must contain ``n_features_in_``,
        ``target_type_``, ``n_classes_`` and ``n_outputs_expected_``.
    compile_kwargs : dict
        Keyword arguments forwarded to ``model.compile``. If ``loss`` is
        missing or falsy, a default matching the target type is filled in
        (note: this dict is mutated in place).

    Returns
    -------
    Model
        A compiled Keras functional model.
    """
    # get parameters
    n_features_in_ = meta["n_features_in_"]
    target_type_ = meta["target_type_"]
    n_classes_ = meta["n_classes_"]
    n_outputs_expected_ = meta["n_outputs_expected_"]
    inp = Input(shape=(n_features_in_,))
    hidden = inp
    for layer_size in hidden_layer_sizes:
        hidden = Dense(layer_size, activation="relu")(hidden)
    if target_type_ == "binary":
        # .get() instead of [] so a compile_kwargs without a "loss" key
        # does not raise KeyError (same result whenever the key exists).
        compile_kwargs["loss"] = compile_kwargs.get("loss") or "binary_crossentropy"
        out = [Dense(1, activation="sigmoid")(hidden)]
    elif target_type_ == "multilabel-indicator":
        compile_kwargs["loss"] = compile_kwargs.get("loss") or "binary_crossentropy"
        if isinstance(n_classes_, list):
            out = [
                Dense(1, activation="sigmoid")(hidden)
                for _ in range(n_outputs_expected_)
            ]
        else:
            # BUGFIX: multilabel targets are independent binary labels, so a
            # single multi-unit head must use sigmoid. The original softmax
            # forced the outputs to sum to 1, contradicting both the target
            # type and the binary_crossentropy loss chosen above.
            out = Dense(n_classes_, activation="sigmoid")(hidden)
    elif target_type_ == "multiclass-multioutput":
        # NOTE(review): binary_crossentropy paired with softmax heads is an
        # unusual combination for multiclass outputs — confirm this matches
        # the wrapper's expectations before changing it.
        compile_kwargs["loss"] = compile_kwargs.get("loss") or "binary_crossentropy"
        out = [Dense(n, activation="softmax")(hidden) for n in n_classes_]
    else:
        # multiclass
        compile_kwargs["loss"] = (
            compile_kwargs.get("loss") or "sparse_categorical_crossentropy"
        )
        out = [Dense(n_classes_, activation="softmax")(hidden)]
    model = Model(inp, out)
    model.compile(**compile_kwargs)
    return model
def dynamic_regressor(
    hidden_layer_sizes=(10,),
    meta: Optional[Dict[str, Any]] = None,
    compile_kwargs: Optional[Dict[str, Any]] = None,
) -> Model:
    """Build and compile a simple MLP regressor.

    Parameters
    ----------
    hidden_layer_sizes : tuple of int
        Number of units in each hidden (ReLU) Dense layer.
    meta : dict
        Wrapper-provided meta parameters; must contain ``n_features_in_``
        and ``n_outputs_``.
    compile_kwargs : dict
        Keyword arguments forwarded to ``model.compile``; a falsy ``loss``
        entry is replaced with ``"mse"`` (the dict is mutated in place).

    Returns
    -------
    Model
        A compiled Keras functional model with a linear output layer.
    """
    # Meta parameters injected by the wrapper.
    n_features_in_ = meta["n_features_in_"]
    n_outputs_ = meta["n_outputs_"]
    # Default to mean squared error when no loss was supplied.
    compile_kwargs["loss"] = compile_kwargs["loss"] or "mse"
    x = inputs = Input(shape=(n_features_in_,))
    for units in hidden_layer_sizes:
        x = Dense(units, activation="relu")(x)
    # Linear (no activation) output head, one unit per regression target.
    outputs = Dense(n_outputs_)(x)
    model = Model(inputs, outputs)
    model.compile(**compile_kwargs)
    return model