PADME Train Wiki
Train Selection
Search
Trains
rainGAN
421
rainGAN.py
Code fragments of rainGAN.py
import os
from os import path
import numpy as np
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import Input, Dense, Reshape, Flatten, Conv2D, UpSampling2D, BatchNormalization, MaxPooling2D, Activation
from tensorflow.keras.models import Sequential, Model
# Hyperparameters shared by every model in this module.
lr = 0.0002  # Adam learning rate
batch_size = 128  # samples per generator update; discriminator batches use half
# NOTE(review): newer tf.keras releases renamed Adam's `lr` argument to
# `learning_rate` and eventually removed `lr` — confirm against the pinned
# TensorFlow version. This single optimizer instance is reused by every
# compile() call in this file.
optimizer = Adam(lr=lr)
def generator():
    """Build the GAN generator: map a 100-dim noise vector to a 32x32x1 image.

    Upsampling path: Dense(1024) -> reshape to 2x2x256, then four
    UpSampling2D stages (2 -> 4 -> 8 -> 16 -> 32) interleaved with 5x5
    convolutions. The final tanh matches the [-1, 1] range produced by
    normalise_dataset.

    Returns:
        A functional Model taking (100,) noise and returning the image.
    """
    model = Sequential()
    model.add(Dense(1024, input_shape=(100,)))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Activation('tanh'))
    # 1024 == 2 * 2 * 256, so the dense output reshapes exactly.
    # FIX: dropped the redundant input_shape=(1024,) argument — Keras
    # ignores input_shape on non-first layers, so it was only misleading.
    model.add(Reshape((2, 2, 256)))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(512, (5, 5), padding="same"))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Activation('tanh'))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(256, (5, 5), padding="same"))
    model.add(Activation('tanh'))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(128, (5, 5), padding="same"))
    model.add(Activation('tanh'))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(1, (5, 5), padding='same'))
    model.add(Activation('tanh'))
    noise = Input((100,))
    generated = model(noise)
    return Model(noise, generated)
def discriminator():
    """Build the GAN discriminator: 32x32x1 image -> real/fake probability.

    Four conv/tanh/max-pool stages that double the filter count each time
    (64 -> 128 -> 256 -> 512), followed by a single sigmoid unit.
    """
    model = Sequential()
    # First stage carries the input_shape; the remaining three are identical
    # except for the filter count, so they are generated in a loop.
    model.add(Conv2D(64, (5, 5), padding='same', input_shape=(32, 32, 1)))
    model.add(Activation('tanh'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    for filters in (128, 256, 512):
        model.add(Conv2D(filters, (5, 5), padding='same'))
        model.add(Activation('tanh'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Flatten())
    model.add(Dense(1))
    model.add(Activation('sigmoid'))
    data = Input((32, 32, 1))
    validity = model(data)
    return Model(data, validity)
def get_avg_weights(array_weights):
    """Element-wise average of several models' weight lists (FedAvg-style).

    Args:
        array_weights: sequence of per-model weight lists; all lists must
            have the same length and pairwise-matching array shapes.

    Returns:
        list of np.ndarray — the per-layer mean across all models.
    """
    # zip(*...) groups the i-th weight array of every model together.
    # FIX: one vectorised np.mean per layer replaces the original
    # row-by-row Python loop (same result, O(rows) less interpreter work).
    return [np.mean(np.stack(layer_weights), axis=0)
            for layer_weights in zip(*array_weights)]
def build_models():
    """Create a fresh discriminator, generator and combined stacked model.

    The discriminator is compiled standalone first, then frozen while the
    generator->discriminator stack is compiled (so generator updates do not
    touch discriminator weights), and unfrozen again afterwards.

    Returns:
        (combined, dis, gen) — all three compiled/ready models.
    """
    dis = discriminator()
    dis.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    gen = generator()
    # Freeze only for the combined model's compile step.
    dis.trainable = False
    z = Input((100,))
    combined = Model(z, dis(gen(z)))
    combined.compile(loss='binary_crossentropy', optimizer=optimizer)
    dis.trainable = True
    return combined, dis, gen
def compile_models(dis, gen):
    """(Re)compile an existing discriminator/generator pair and build the
    combined stacked model — same wiring as build_models, but reusing the
    models passed in instead of constructing new ones.

    Returns:
        (combined, dis, gen).
    """
    dis.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    dis.trainable = False  # frozen only while the stacked model is compiled
    latent = Input((100,))
    fake = gen(latent)
    score = dis(fake)
    stacked = Model(latent, score)
    stacked.compile(loss='binary_crossentropy', optimizer=optimizer)
    dis.trainable = True
    return stacked, dis, gen
def ciil(combined, dis, gen, epochs_per_cycle, dataset, min_acc):
    """Run one training cycle: normalise the data in place, train for
    epochs_per_cycle epochs, then persist all three models.

    Args:
        combined, dis, gen: the compiled stacked/discriminator/generator models.
        epochs_per_cycle: number of epochs to run in this cycle.
        dataset: training samples; normalised in place to [-1, 1].
        min_acc: discriminator accuracy threshold per epoch (see epoch_procedure).
    """
    normalise_dataset(dataset)
    c_epoch = 0
    for epoch in range(epochs_per_cycle):
        c_epoch = epoch_procedure(c_epoch, combined, dataset, dis, gen, min_acc)
        print("Cycle: [" + str(epoch + 1) + "/" + str(epochs_per_cycle) + "] complete.")
    # BUG FIX: save() takes (dis, gen, combined); passing c_epoch as an
    # extra first argument raised TypeError at the end of every cycle.
    save(dis, gen, combined)
def normalise_dataset(dataset):
    """Rescale 12-bit pixel values (0..4095) to [-1, 1], in place, sample by sample."""
    for i, sample in enumerate(dataset):
        dataset[i] = (sample.astype(np.float32) - 2047.5) / 2047.5
def epoch_procedure(c_epoch, combined, dataset, dis, gen, min_acc):
    """Run one GAN epoch and return the incremented epoch counter.

    Repeatedly trains the discriminator on half-real/half-fake batches until
    its accuracy reaches min_acc, then performs a single generator update
    through the combined model (generated samples labelled as real).
    """
    c_epoch += 1
    half_batch = batch_size // 2
    d_loss = [0, 0]
    # d_loss[1] is the accuracy metric from compile(..., metrics=['accuracy']).
    while d_loss[1] < min_acc:
        idx = np.random.randint(0, dataset.shape[0], half_batch)
        real_batch = dataset[idx]
        fake_batch = gen.predict(np.random.normal(0, 1, (half_batch, 100)))
        loss_real = dis.train_on_batch(real_batch, np.ones((half_batch, 1)))
        loss_fake = dis.train_on_batch(fake_batch, np.zeros((half_batch, 1)))
        d_loss = 0.5 * np.add(loss_real, loss_fake)
    # One generator step on a full batch of noise.
    noise = np.random.normal(0, 1, (batch_size, 100))
    combined.train_on_batch(noise, np.array([1] * batch_size))
    return c_epoch
def save(dis, gen, combined):
    """Persist all three models as HDF5 files under ./models.

    FIX: create the target directory first so saving works on a fresh
    checkout — model.save() fails if ./models does not exist.
    """
    print("Saving Generator Instance...")
    os.makedirs("./models", exist_ok=True)
    combined.save("./models/modelCombined.h5")
    dis.save("./models/modelDis.h5")
    gen.save("./models/modelGen.h5")
Graph
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
undefined
rainGAN.py
import os
None
from os import path
import numpy as np
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import Input, Dense, Reshape, Flatten, Conv2D, UpSampling2D, BatchNormalization, MaxPooling2D, Activation
from tensorflow.keras.models import Sequential, Model
# Hyperparameters shared by every model in this module.
lr = 0.0002  # Adam learning rate
batch_size = 128  # samples per generator update; discriminator batches use half
# NOTE(review): newer tf.keras releases renamed Adam's `lr` argument to
# `learning_rate` and eventually removed `lr` — confirm against the pinned
# TensorFlow version. This single optimizer instance is reused by every
# compile() call in this file.
optimizer = Adam(lr=lr)
def generator():
    """Build the GAN generator: map a 100-dim noise vector to a 32x32x1 image.

    NOTE(review): this listing had lost the activations, batch-norm layers
    and three of the four UpSampling2D stages, leaving a 4x4 output that
    cannot feed the 32x32 discriminator input used below. Restored to the
    full 2 -> 4 -> 8 -> 16 -> 32 upsampling path from the complete listing
    earlier in this file.
    """
    model = Sequential()
    model.add(Dense(1024, input_shape=(100,)))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Activation('tanh'))
    # 1024 == 2 * 2 * 256, so the dense output reshapes exactly.
    model.add(Reshape((2, 2, 256)))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(512, (5, 5), padding="same"))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Activation('tanh'))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(256, (5, 5), padding="same"))
    model.add(Activation('tanh'))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(128, (5, 5), padding="same"))
    model.add(Activation('tanh'))
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(1, (5, 5), padding='same'))
    model.add(Activation('tanh'))
    noise = Input((100,))
    generated = model(noise)
    return Model(noise, generated)
def discriminator():
    """Build the GAN discriminator: 32x32x1 image -> real/fake probability.

    BUG FIX: this listing used `model` before creating it (NameError on
    first call) and had lost the tanh activations present in the complete
    listing earlier in this file — both restored.
    """
    model = Sequential()
    model.add(Conv2D(64, (5, 5), padding='same', input_shape=(32, 32, 1)))
    model.add(Activation('tanh'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    # Three more identical stages, doubling the filter count each time.
    for filters in (128, 256, 512):
        model.add(Conv2D(filters, (5, 5), padding='same'))
        model.add(Activation('tanh'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Flatten())
    model.add(Dense(1))
    model.add(Activation('sigmoid'))
    data = Input((32, 32, 1))
    validity = model(data)
    return Model(data, validity)
def get_avg_weights(array_weights):
    """Element-wise average of several models' weight lists (FedAvg-style).

    Args:
        array_weights: sequence of per-model weight lists; all lists must
            have the same length and pairwise-matching array shapes.

    Returns:
        list of np.ndarray — the per-layer mean across all models.
    """
    # zip(*...) groups the i-th weight array of every model together.
    # FIX: one vectorised np.mean per layer replaces the original
    # row-by-row Python loop (same result, less interpreter work).
    return [np.mean(np.stack(layer_weights), axis=0)
            for layer_weights in zip(*array_weights)]
def build_models():
    """Create a fresh discriminator, generator and combined stacked model.

    The discriminator is compiled standalone first, then frozen while the
    generator->discriminator stack is compiled (so generator updates do not
    touch discriminator weights), and unfrozen again afterwards.

    Returns:
        (combined, dis, gen) — all three compiled/ready models.
    """
    dis = discriminator()
    dis.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    gen = generator()
    # Freeze only for the combined model's compile step.
    dis.trainable = False
    z = Input((100,))
    combined = Model(z, dis(gen(z)))
    combined.compile(loss='binary_crossentropy', optimizer=optimizer)
    dis.trainable = True
    return combined, dis, gen
def compile_models(dis, gen):
    """(Re)compile an existing discriminator/generator pair and build the
    combined stacked model, reusing the models passed in.

    NOTE(review): the body of this function was missing from this listing
    (a bare def immediately followed by the next def); reconstructed from
    the complete fragment earlier in this file.

    Returns:
        (combined, dis, gen).
    """
    dis.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    dis.trainable = False  # frozen only while the stacked model is compiled
    z = Input((100,))
    img = gen(z)
    valid = dis(img)
    combined = Model(z, valid)
    combined.compile(loss='binary_crossentropy', optimizer=optimizer)
    dis.trainable = True
    return combined, dis, gen
def ciil(combined, dis, gen, epochs_per_cycle, dataset, min_acc):
    """Run one training cycle: normalise the data in place, train for
    epochs_per_cycle epochs, then persist all three models.

    Args:
        combined, dis, gen: the compiled stacked/discriminator/generator models.
        epochs_per_cycle: number of epochs to run in this cycle.
        dataset: training samples; normalised in place to [-1, 1].
        min_acc: discriminator accuracy threshold per epoch (see epoch_procedure).
    """
    normalise_dataset(dataset)
    c_epoch = 0
    for epoch in range(epochs_per_cycle):
        c_epoch = epoch_procedure(c_epoch, combined, dataset, dis, gen, min_acc)
        print("Cycle: [" + str(epoch + 1) + "/" + str(epochs_per_cycle) + "] complete.")
    # BUG FIX: save() takes (dis, gen, combined); passing c_epoch as an
    # extra first argument raised TypeError at the end of every cycle.
    save(dis, gen, combined)
def normalise_dataset(dataset):
    """Rescale 12-bit pixel values (0..4095) to [-1, 1], in place, sample by sample."""
    for i, sample in enumerate(dataset):
        dataset[i] = (sample.astype(np.float32) - 2047.5) / 2047.5
def epoch_procedure(c_epoch, combined, dataset, dis, gen, min_acc):
    """Run one GAN epoch and return the incremented epoch counter.

    Repeatedly trains the discriminator on half-real/half-fake batches until
    its accuracy reaches min_acc, then performs a single generator update
    through the combined model (generated samples labelled as real).
    """
    c_epoch += 1
    half_batch = batch_size // 2
    d_loss = [0, 0]
    # d_loss[1] is the accuracy metric from compile(..., metrics=['accuracy']).
    while d_loss[1] < min_acc:
        idx = np.random.randint(0, dataset.shape[0], half_batch)
        real_batch = dataset[idx]
        fake_batch = gen.predict(np.random.normal(0, 1, (half_batch, 100)))
        loss_real = dis.train_on_batch(real_batch, np.ones((half_batch, 1)))
        loss_fake = dis.train_on_batch(fake_batch, np.zeros((half_batch, 1)))
        d_loss = 0.5 * np.add(loss_real, loss_fake)
    # One generator step on a full batch of noise.
    noise = np.random.normal(0, 1, (batch_size, 100))
    combined.train_on_batch(noise, np.array([1] * batch_size))
    return c_epoch
def save(dis, gen, combined):
    """Persist all three models as HDF5 files under ./models.

    FIX: create the target directory first so saving works on a fresh
    checkout — model.save() fails if ./models does not exist.
    """
    print("Saving Generator Instance...")
    os.makedirs("./models", exist_ok=True)
    combined.save("./models/modelCombined.h5")
    dis.save("./models/modelDis.h5")
    gen.save("./models/modelGen.h5")
Search
Train Selection