gan.py
import os

import tensorflow as tf
from tqdm.auto import trange

# Module-level state, populated by init_gan() and used by train_step() and train_gan().
generator, generator_optimizer = None, None
discriminator, discriminator_optimizer = None, None
noise = None
gen_checkpoint, disc_checkpoint = None, None

# Both losses operate on raw logits: the discriminator's final Dense layer has
# no activation, and the loss applies the sigmoid via from_logits=True.
cross_entropy = tf.keras.losses.BinaryCrossentropy(from_logits=True)


class Generator(tf.keras.models.Sequential):
    def __init__(self):
        super().__init__()

    def create_model(self):
        # Project the 100-dim noise vector and reshape it into a 5x5x256 feature map.
        self.add(tf.keras.layers.Dense(5 * 5 * 256, use_bias=False, input_shape=(100,)))
        self.add(tf.keras.layers.BatchNormalization())
        self.add(tf.keras.layers.LeakyReLU())
        self.add(tf.keras.layers.Reshape((5, 5, 256)))

        self.add(tf.keras.layers.Conv2DTranspose(128, (5, 5), strides=(1, 1), padding='same', use_bias=False))
        assert self.output_shape == (None, 5, 5, 128)
        self.add(tf.keras.layers.BatchNormalization())
        self.add(tf.keras.layers.LeakyReLU())

        # The stride-(1, 2) transposed convolutions double the width only: 5x5 -> 5x10 -> 5x20.
        self.add(tf.keras.layers.Conv2DTranspose(64, (5, 5), strides=(1, 2), padding='same', use_bias=False))
        assert self.output_shape == (None, 5, 10, 64)
        self.add(tf.keras.layers.BatchNormalization())
        self.add(tf.keras.layers.LeakyReLU())

        # tanh constrains outputs to [-1, 1]; real inputs should be scaled to the same range.
        self.add(tf.keras.layers.Conv2DTranspose(8, (5, 5), strides=(1, 2), padding='same', use_bias=False,
                                                 activation='tanh'))
        assert self.output_shape == (None, 5, 20, 8)


class Discriminator(tf.keras.models.Sequential):
    def __init__(self):
        super().__init__()

    def create_model(self):
        # Downsample 5x20x8 inputs and emit a single real/fake logit
        # (no sigmoid here; cross_entropy applies it via from_logits=True).
        self.add(tf.keras.layers.Conv2D(64, (5, 5), strides=(2, 2), padding='same', input_shape=[5, 20, 8]))
        self.add(tf.keras.layers.LeakyReLU())
        self.add(tf.keras.layers.Dropout(0.3))

        self.add(tf.keras.layers.Conv2D(128, (5, 5), strides=(2, 2), padding='same'))
        self.add(tf.keras.layers.LeakyReLU())
        self.add(tf.keras.layers.Dropout(0.3))

        self.add(tf.keras.layers.Flatten())
        self.add(tf.keras.layers.Dense(1))


def discriminator_loss(real_output, fake_output):
    # The discriminator should score real samples as 1 and generated samples as 0.
    real_loss = cross_entropy(tf.ones_like(real_output), real_output)
    fake_loss = cross_entropy(tf.zeros_like(fake_output), fake_output)
    return real_loss + fake_loss


def generator_loss(fake_output):
    # The generator succeeds when the discriminator scores its samples as real.
    return cross_entropy(tf.ones_like(fake_output), fake_output)
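

# Note: the two losses pull the same logits in opposite directions. For
# example, if the discriminator emits a logit of +10 for every generated
# sample, generator_loss is near 0 while the fake half of discriminator_loss
# is roughly 10, and vice versa; this is the usual minimax dynamic.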


@tf.function
def train_step(images):
    # Draw a fresh noise batch each step (same shape as the template created in
    # init_gan); reusing one fixed batch would train the generator on a single
    # set of inputs instead of the noise distribution.
    step_noise = tf.random.normal(tf.shape(noise))
    with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
        generated_images = generator(step_noise, training=True)

        real_output = discriminator(images, training=True)
        fake_output = discriminator(generated_images, training=True)

        gen_loss = generator_loss(fake_output)
        disc_loss = discriminator_loss(real_output, fake_output)

    # Separate tapes let the two networks be updated independently.
    gradients_of_generator = gen_tape.gradient(gen_loss, generator.trainable_variables)
    gradients_of_discriminator = disc_tape.gradient(disc_loss, discriminator.trainable_variables)
    generator_optimizer.apply_gradients(zip(gradients_of_generator, generator.trainable_variables))
    discriminator_optimizer.apply_gradients(zip(gradients_of_discriminator, discriminator.trainable_variables))


def init_gan(gan_checkpoint_dir, batch_size, noise_dim):
    """Build both networks and their optimizers, and set the checkpoint paths."""
    global generator, generator_optimizer
    generator = Generator()
    generator.create_model()
    generator_optimizer = tf.keras.optimizers.Adam(1e-4)

    global discriminator, discriminator_optimizer
    discriminator = Discriminator()
    discriminator.create_model()
    discriminator_optimizer = tf.keras.optimizers.Adam(1e-4)

    global gen_checkpoint, disc_checkpoint
    gen_checkpoint = os.path.join(gan_checkpoint_dir, 'generator')
    disc_checkpoint = os.path.join(gan_checkpoint_dir, 'discriminator')

    # Template for the noise shape sampled in train_step(); noise_dim must
    # match the generator's input_shape (100).
    global noise
    noise = tf.random.normal([batch_size, noise_dim])

    return generator, discriminator


def train_gan(dataset, epochs):
    for _ in trange(epochs, desc='train GAN'):
        for x, _ in dataset:
            # Labels are ignored; the GAN only needs the image batches.
            train_step(x)
    generator.save_weights(gen_checkpoint)
    discriminator.save_weights(disc_checkpoint)
    return generator, discriminator
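

# --- Example usage: a minimal sketch, not part of the module above. ---
# Assumes a tf.data.Dataset of (image, label) batches where images have shape
# (5, 20, 8) and are scaled to [-1, 1] to match the generator's tanh output;
# the random data, checkpoint directory, and batch size are placeholders.
if __name__ == '__main__':
    import numpy as np

    batch_size, noise_dim = 32, 100  # noise_dim must match the generator's input_shape
    images = np.random.uniform(-1.0, 1.0, size=(128, 5, 20, 8)).astype('float32')
    labels = np.zeros(128, dtype='int32')  # ignored by train_gan
    dataset = tf.data.Dataset.from_tensor_slices((images, labels)).batch(batch_size, drop_remainder=True)

    os.makedirs('./gan_checkpoints', exist_ok=True)
    init_gan(gan_checkpoint_dir='./gan_checkpoints', batch_size=batch_size, noise_dim=noise_dim)
    train_gan(dataset, epochs=1)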