CNN_denoiser.py
import matplotlib.pyplot as plt
import numpy as np

import autoencoder


class CNN_denoiser:
    def __init__(self, batch_size=10, nu_epochs=50, validation_split=0, img_height=64, img_width=64):
        self.batch_size = batch_size
        self.nu_epochs = nu_epochs
        self.validation_split = validation_split
        # Select the autoencoder architecture that matches the input resolution.
        if img_width == 128 and img_height == 128:
            self.model = autoencoder.get_autoencoder_model128(img_width=img_width, img_height=img_height)
        else:
            self.model = autoencoder.get_autoencoder_model(img_width=img_width, img_height=img_height)

    def model_plots(self, noise_prop, noise_mean, noise_std):
        # Summarize training (and, if enabled, validation) loss per epoch.
        plt.figure()
        plt.plot(np.arange(0, self.nu_epochs), self.model.history.history["loss"], label="train_loss")
        if self.validation_split != 0:
            plt.plot(np.arange(0, self.nu_epochs), self.model.history.history["val_loss"], label="val_loss")
        plt.title("Model Loss on Dataset\nNoise Proportion: {0} - Mean: {1} - Standard Deviation: {2}".format(
            noise_prop, noise_mean, noise_std))
        plt.xlabel("Epoch #")
        plt.ylabel("Loss")
        plt.legend(loc="lower left")
        plt.show()

    @staticmethod  # Split 2 datasets
    def train_test_split(set1, set2, train_split=0.9, shuffle_test_set=False, img_height=64, img_width=64):
        # The first train_split fraction of each set is used for training, the
        # remainder for testing; both halves are reshaped to
        # (N, img_width, img_height, 1) for the single-channel autoencoder.
        images_set = set1[:int(set1.shape[0] * train_split)]
        images_set = np.append(images_set, set2[:int(set2.shape[0] * train_split)], axis=0)
        images_set = np.append(images_set, set2[int(set2.shape[0] * train_split):], axis=0)
        images_set = np.append(images_set, set1[int(set1.shape[0] * train_split):], axis=0)
        train_size = int(set1.shape[0] * train_split + set2.shape[0] * train_split)
        input_train = images_set[0:train_size]
        input_test = images_set[train_size:]
        np.random.shuffle(input_train)  # Shuffle the training set in place
        if shuffle_test_set:
            np.random.shuffle(input_test)
        input_train = input_train.reshape(input_train.shape[0], img_width, img_height, 1)
        input_test = input_test.reshape(input_test.shape[0], img_width, img_height, 1)
        return input_train, input_test

    @staticmethod  # Split 1 dataset
    def train_test_split1(images_set, train_split=0.9, shuffle_test_set=False, img_height=64, img_width=64):
        train_size = int(images_set.shape[0] * train_split)
        input_train = images_set[0:train_size]
        input_test = images_set[train_size:]
        if shuffle_test_set:
            np.random.shuffle(input_test)
        input_train = input_train.reshape(input_train.shape[0], img_width, img_height, 1)
        input_test = input_test.reshape(input_test.shape[0], img_width, img_height, 1)
        return input_train, input_test

    @staticmethod  # Split 3 datasets
    def train_test_split3(set1, set2, set3, train_split=0.9, shuffle_test_set=False, img_height=64, img_width=64):
        images_set = set1[:int(set1.shape[0] * train_split)]
        images_set = np.append(images_set, set2[:int(set2.shape[0] * train_split)], axis=0)
        images_set = np.append(images_set, set3[:int(set3.shape[0] * train_split)], axis=0)
        images_set = np.append(images_set, set3[int(set3.shape[0] * train_split):], axis=0)
        images_set = np.append(images_set, set2[int(set2.shape[0] * train_split):], axis=0)
        images_set = np.append(images_set, set1[int(set1.shape[0] * train_split):], axis=0)
        train_size = int(set1.shape[0] * train_split + set2.shape[0] * train_split + set3.shape[0] * train_split)
        input_train = images_set[0:train_size]
        input_test = images_set[train_size:]
        np.random.shuffle(input_train)
        if shuffle_test_set:
            np.random.shuffle(input_test)
        input_train = input_train.reshape(input_train.shape[0], img_width, img_height, 1)
        input_test = input_test.reshape(input_test.shape[0], img_width, img_height, 1)
        return input_train, input_test

    @staticmethod  # Split 4 datasets
    def train_test_split4(set1, set2, set3, set4, train_split=0.9, shuffle_test_set=False, img_height=64, img_width=64):
        images_set = set1[:int(set1.shape[0] * train_split)]
        images_set = np.append(images_set, set2[:int(set2.shape[0] * train_split)], axis=0)
        images_set = np.append(images_set, set3[:int(set3.shape[0] * train_split)], axis=0)
        images_set = np.append(images_set, set4[:int(set4.shape[0] * train_split)], axis=0)
        images_set = np.append(images_set, set4[int(set4.shape[0] * train_split):], axis=0)
        images_set = np.append(images_set, set3[int(set3.shape[0] * train_split):], axis=0)
        images_set = np.append(images_set, set2[int(set2.shape[0] * train_split):], axis=0)
        images_set = np.append(images_set, set1[int(set1.shape[0] * train_split):], axis=0)
        train_size = int(set1.shape[0] * train_split + set2.shape[0] * train_split
                         + set3.shape[0] * train_split + set4.shape[0] * train_split)
        input_train = images_set[0:train_size]
        input_test = images_set[train_size:]
        np.random.shuffle(input_train)
        if shuffle_test_set:
            np.random.shuffle(input_test)
        input_train = input_train.reshape(input_train.shape[0], img_width, img_height, 1)
        input_test = input_test.reshape(input_test.shape[0], img_width, img_height, 1)
        return input_train, input_test
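
    # The four split helpers above repeat the same pattern. As an illustrative
    # generalisation (not part of the original API), a single variadic helper
    # could cover any number of datasets in the same way: the first train_split
    # fraction of every set is concatenated for training, the remainders for
    # testing, and both halves are reshaped to (N, img_width, img_height, 1).
    @staticmethod
    def train_test_split_n(*sets, train_split=0.9, shuffle_test_set=False, img_height=64, img_width=64):
        split_points = [int(s.shape[0] * train_split) for s in sets]
        input_train = np.concatenate([s[:p] for s, p in zip(sets, split_points)], axis=0)
        input_test = np.concatenate([s[p:] for s, p in zip(sets, split_points)], axis=0)
        np.random.shuffle(input_train)  # Shuffle the training set, as the helpers above do
        if shuffle_test_set:
            np.random.shuffle(input_test)
        input_train = input_train.reshape(input_train.shape[0], img_width, img_height, 1)
        input_test = input_test.reshape(input_test.shape[0], img_width, img_height, 1)
        return input_train, input_test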

    def train(self, noisy_input, pure, save=False, verbosity=0):
        # Fit the autoencoder to map noisy inputs to their clean counterparts.
        self.model.fit(noisy_input, pure,
                       epochs=self.nu_epochs,
                       batch_size=self.batch_size, validation_split=self.validation_split, verbose=verbosity)
        if save:
            self.model.save("trainedModel.h5")

    def evaluate(self, noisy_input_test, pure_test):
        # Assumes the model was compiled with an accuracy metric, so that
        # evaluate() returns [loss, accuracy].
        test_scores = self.model.evaluate(noisy_input_test, pure_test, verbose=2)
        print("[EVALUATION] Test loss:", test_scores[0])
        print("[EVALUATION] Test accuracy:", test_scores[1])
        return test_scores

    def predict(self, samples):
        return self.model.predict(samples)


if __name__ == "__main__":
    print("Please run main.py")
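
# Example usage (illustrative only: the actual entry point is main.py, and the
# array names below are placeholders for data prepared by the caller):
#
#     denoiser = CNN_denoiser(batch_size=10, nu_epochs=50, validation_split=0.1)
#     pure_train, pure_test = CNN_denoiser.train_test_split1(clean_images)
#     noisy_train, noisy_test = CNN_denoiser.train_test_split1(noisy_images)
#     denoiser.train(noisy_train, pure_train, save=True, verbosity=1)
#     denoiser.evaluate(noisy_test, pure_test)
#     denoised = denoiser.predict(noisy_test)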