author | nunzip <np.scarh@gmail.com> | 2019-03-04 21:53:18 +0000 |
---|---|---|
committer | nunzip <np.scarh@gmail.com> | 2019-03-04 21:53:18 +0000 |
commit | 34057507d6b7ae5cafd2b7b8cb2b69c20780ffd5 (patch) | |
tree | a6cfd5a1f624674e010f8ca897ef63ef8eb226bd /cgan.py | |
parent | 6529cc095c57e375f34d69fb6bfb36d058dd2192 (diff) | |
download | e4-gan-34057507d6b7ae5cafd2b7b8cb2b69c20780ffd5.tar.gz e4-gan-34057507d6b7ae5cafd2b7b8cb2b69c20780ffd5.tar.bz2 e4-gan-34057507d6b7ae5cafd2b7b8cb2b69c20780ffd5.zip |
Make single sided smoothing parameters accessible
Diffstat (limited to 'cgan.py')
-rwxr-xr-x | cgan.py | 10 |
1 file changed, 3 insertions, 7 deletions
@@ -107,7 +107,7 @@ class CGAN():
 
         return Model([img, label], validity)
 
-    def train(self, epochs, batch_size=128, sample_interval=50, graph=False, smooth=False):
+    def train(self, epochs, batch_size=128, sample_interval=50, graph=False, smooth_real=1, smooth_fake=0):
 
         # Load the dataset
         (X_train, y_train), (_, _) = mnist.load_data()
@@ -140,12 +140,8 @@ class CGAN():
             gen_imgs = self.generator.predict([noise, labels])
 
             # Train the discriminator
-            if smooth == True:
-                d_loss_real = self.discriminator.train_on_batch([imgs, labels], valid*0.9)
-                d_loss_fake = self.discriminator.train_on_batch([gen_imgs, labels], valid*0.1)
-            else:
-                d_loss_real = self.discriminator.train_on_batch([imgs, labels], valid)
-                d_loss_fake = self.discriminator.train_on_batch([gen_imgs, labels], fake)
+            d_loss_real = self.discriminator.train_on_batch([imgs, labels], valid*smooth_real)
+            d_loss_fake = self.discriminator.train_on_batch([gen_imgs, labels], valid*smooth_fake)
             d_loss = 0.5 * np.add(d_loss_real, d_loss_fake)
 
             # ---------------------
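For reference, a minimal usage sketch of the updated `train` signature, assuming `CGAN` is importable from cgan.py and its constructor takes no required arguments (as elsewhere in this repository); the epoch, batch-size, and interval values below are illustrative placeholders, not values from this commit.

```python
# Minimal usage sketch (assumptions: CGAN is importable from cgan.py and its
# constructor takes no required arguments; epochs/batch_size/sample_interval
# are illustrative placeholders).
from cgan import CGAN

cgan = CGAN()

# Reproduce the removed smooth=True branch: real targets become valid*0.9,
# fake targets become valid*0.1 (smoothing applied to both target vectors).
cgan.train(epochs=20000, batch_size=128, sample_interval=200,
           smooth_real=0.9, smooth_fake=0.1)

# The defaults smooth_real=1, smooth_fake=0 recover hard discriminator
# targets, i.e. the behaviour of the old smooth=False branch.
```

Exposing the two coefficients as parameters lets the smoothing strength be swept in experiments, rather than toggling a single hard-coded 0.9/0.1 pair via a boolean flag.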