Updated 10/Dec/2021 by Yoshihisa Nitta  

CycleGAN_VidTIMIT_Train2 をローカルのWindows上で動作するJupyterで実行する

https://nw.tsuda.ac.jp/lec/GoogleColab/pub/html/CycleGAN_VidTIMIT_Train2.html

[注意] Google Colab (tensorflow 2.7.0) 上では tf.keras.utils.load_img() であったが、本「生成ディープラーニング」のpython 仮想環境 generative (tensorflow 2.2.0) では tf.keras.preprocessing.image.load_img() である。

In [1]:
MAX_EPOCHS = 100     # Change this value and run this ipynb many times
                     # (training resumes from the checkpoint saved in save_path)

save_path = 'run'    # folder where parameter/weight checkpoints are stored
In [2]:
import tensorflow as tf
print(tf.__version__)
2.2.0
In [3]:
import numpy as np

np.random.seed(2022)  # fix the NumPy RNG so the train/test path split is reproducible

CycleGAN クラスの定義

In [4]:
import tensorflow as tf
import tensorflow_addons as tf_addons
import numpy as np

import matplotlib.pyplot as plt

from collections import deque

import os
import pickle as pkl
import random
import datetime


################################################################################
# Data Loader
################################################################################
class PairDataset():
    """Lazily loads image pairs (A, B) from two lists of file paths.

    Integer indexing returns one normalized pair; slice indexing returns an
    array of pairs. Pixels are rescaled from uint8 [0, 255] to float32
    [-1, 1], matching the tanh output range of the CycleGAN generators.
    """

    def __init__(self, paths_A, paths_B, batch_size= 1, target_size = None, unaligned=False):
        """
        Args:
            paths_A, paths_B: image file paths for domain A / domain B.
            batch_size: kept for API compatibility (not used internally).
            target_size: (rows, cols) forwarded to load_img, or None for original size.
            unaligned: if True, pair each A image with a random B image
                instead of the same index.
        """
        self.paths_A = np.array(paths_A)
        self.paths_B = np.array(paths_B)
        self.target_size = target_size
        self.batch_size = batch_size
        self.unaligned = unaligned

        self.lenA = len(paths_A)
        self.lenB = len(paths_B)
        self.index = 0  # cursor used by __next__

    def __len__(self):
        # The longer domain defines the length; the shorter one wraps (mod).
        return max(self.lenA, self.lenB)

    def __getitem__(self, index):
        if isinstance(index, slice):
            # slice.indices() already resolves None / negative bounds and
            # never yields a zero step, so the original None-checks were
            # dead code.
            start, stop, step = index.indices(len(self))
            return np.array([self.__getitemInt__(i) for i in range(start, stop, step)])
        return self.__getitemInt__(index)

    def __getitemInt__(self, index):
        path_A = self.paths_A[index % self.lenA]
        if self.unaligned:
            # BUGFIX: np.random.choice(n, 1) returns a 1-element array, and
            # indexing paths_B with it yields an array of paths that
            # load_img cannot open. randint returns a scalar index.
            path_B = self.paths_B[np.random.randint(self.lenB)]
        else:
            path_B = self.paths_B[index % self.lenB]
        img_A = np.array(tf.keras.preprocessing.image.load_img(path_A, target_size = self.target_size))
        img_B = np.array(tf.keras.preprocessing.image.load_img(path_B, target_size = self.target_size))
        # Rescale to [-1, 1].
        img_A = (img_A.astype('float32') - 127.5) / 127.5
        img_B = (img_B.astype('float32') - 127.5) / 127.5
        return np.array([img_A, img_B])

    def __next__(self):
        self.index += 1
        return self.__getitem__(self.index-1)



################################################################################
# Layer
################################################################################
class ReflectionPadding2D(tf.keras.layers.Layer):
    """Keras layer that reflection-pads the spatial dims of a 4-D tensor.

    Expects "channels_last" inputs of shape (batch, height, width, channels)
    and pads height/width by the amounts given in `padding`.
    """

    def __init__(self, padding=(1, 1), **kwargs):
        self.padding = tuple(padding)
        self.input_spec = [tf.keras.layers.InputSpec(ndim=4)]
        super().__init__(**kwargs)

    def compute_output_shape(self, s):
        """Output shape for a "channels_last" input shape `s`."""
        pad_w, pad_h = self.padding
        return (s[0], s[1] + 2 * pad_w, s[2] + 2 * pad_h, s[3])

    def call(self, x, mask=None):
        w_pad, h_pad = self.padding
        # Batch and channel axes are never padded.
        paddings = [[0, 0], [h_pad, h_pad], [w_pad, w_pad], [0, 0]]
        return tf.pad(x, paddings, 'REFLECT')


################################################################################
# Model
################################################################################
class CycleGAN():
    """CycleGAN for unpaired image-to-image translation between domains A and B.

    Builds two PatchGAN discriminators (d_A, d_B), two generators
    (g_AB: A->B, g_BA: B->A) and a combined model that trains the generators
    with a weighted sum of adversarial (mse), cycle-reconstruction (mae) and
    identity (mae) losses, following Zhu et al. (2017).
    """

    def __init__(
        self,
        input_dim,
        learning_rate,
        lambda_validation,
        lambda_reconstr,
        lambda_id,
        generator_type,
        gen_n_filters,
        disc_n_filters,
        buffer_max_length = 50,
        epoch = 0, 
        d_losses = None,
        g_losses = None
    ):
        """
        Args:
            input_dim: image shape as (rows, cols, channels).
            learning_rate: Adam learning rate used by all optimizers.
            lambda_validation, lambda_reconstr, lambda_id: loss weights for
                the adversarial, cycle and identity terms.
            generator_type: 'unet' selects the U-Net generator; any other
                value selects the ResNet generator.
            gen_n_filters, disc_n_filters: base filter counts.
            buffer_max_length: capacity of the fake-image replay buffers.
            epoch: starting epoch (used when resuming training).
            d_losses, g_losses: previous loss history; None means fresh lists.
        """
        self.input_dim = input_dim
        self.learning_rate = learning_rate
        self.buffer_max_length = buffer_max_length
        self.lambda_validation = lambda_validation
        self.lambda_reconstr = lambda_reconstr
        self.lambda_id = lambda_id
        self.generator_type = generator_type
        self.gen_n_filters = gen_n_filters
        self.disc_n_filters = disc_n_filters

        # Input shape
        self.img_rows = input_dim[0]
        self.img_cols = input_dim[1]
        self.channels = input_dim[2]
        self.img_shape = (self.img_rows, self.img_cols, self.channels)

        self.epoch = epoch
        # BUGFIX: the original used mutable default arguments (d_losses=[]),
        # which would share one history list across every CycleGAN instance.
        self.d_losses = [] if d_losses is None else d_losses
        self.g_losses = [] if g_losses is None else g_losses

        # Replay buffers of generated images so the discriminators train on a
        # history of translations, not only the most recent batch.
        self.buffer_A = deque(maxlen=self.buffer_max_length)
        self.buffer_B = deque(maxlen=self.buffer_max_length)

        # Calculate output shape of D (PatchGAN): three stride-2 convs
        # downsample the input by 2**3.
        patch = int(self.img_rows / 2**3)
        self.disc_patch = (patch, patch, 1)

        self.weight_init = tf.keras.initializers.RandomNormal(mean=0.0, stddev=0.02)

        self.compile_models()


    def compile_models(self):
        """Build and compile d_A, d_B, g_AB, g_BA and the combined model."""
        # Build and compile the discriminators
        self.d_A = self.build_discriminator()
        self.d_B = self.build_discriminator()

        self.d_A.compile(
            loss='mse',
            optimizer=tf.keras.optimizers.Adam(self.learning_rate, 0.5),
            metrics=['accuracy']
        )
        self.d_B.compile(
            loss='mse',
            optimizer=tf.keras.optimizers.Adam(self.learning_rate, 0.5),
            metrics=['accuracy']
        )

        # Build the generators
        if self.generator_type == 'unet':
            self.g_AB = self.build_generator_unet()
            self.g_BA = self.build_generator_unet()
        else:
            self.g_AB = self.build_generator_resnet()
            self.g_BA = self.build_generator_resnet()

        # For the combined model we will only train the generators
        self.d_A.trainable = False
        self.d_B.trainable = False

        # Input images from both domains
        img_A = tf.keras.layers.Input(shape=self.img_shape)
        img_B = tf.keras.layers.Input(shape=self.img_shape)

        # Translate images to the other domain
        fake_B = self.g_AB(img_A)
        fake_A = self.g_BA(img_B)

        # Translate images back to the original domain (cycle consistency)
        reconstr_A = self.g_BA(fake_B)
        reconstr_B = self.g_AB(fake_A)

        # Identity mapping: feeding an image already in the target domain
        # through the corresponding generator should leave it unchanged.
        img_A_id = self.g_BA(img_A)
        img_B_id = self.g_AB(img_B)

        # Discriminators determine validity of translated images
        valid_A = self.d_A(fake_A)
        valid_B = self.d_B(fake_B)

        # Combined model trains generators to fool discriminators
        self.combined = tf.keras.models.Model(
            inputs=[img_A, img_B],
            outputs=[valid_A, valid_B, reconstr_A, reconstr_B, img_A_id, img_B_id]
        )
        self.combined.compile(
            loss=['mse', 'mse', 'mae', 'mae', 'mae', 'mae'],  # Mean Squared Error, Mean Absolute Error
            loss_weights=[ self.lambda_validation, self.lambda_validation,
                         self.lambda_reconstr, self.lambda_reconstr,
                         self.lambda_id, self.lambda_id ],
            # BUGFIX: was hard-coded Adam(0.0002, 0.5), silently ignoring the
            # learning_rate used for both discriminators.
            optimizer=tf.keras.optimizers.Adam(self.learning_rate, 0.5)
        )
        self.d_A.trainable = True
        self.d_B.trainable = True


    def build_generator_unet(self):
        """Return a U-Net generator: 4 downsampling + 4 upsampling stages
        with skip connections and a tanh output."""
        def downsample(layer_input, filters, f_size=4):
            # stride-2 conv -> instance norm -> relu
            d = tf.keras.layers.Conv2D(
                filters,
                kernel_size=f_size,
                strides=2,
                padding='same',
                kernel_initializer = self.weight_init
            )(layer_input)
            d = tf_addons.layers.InstanceNormalization(axis=-1, center=False, scale=False)(d)
            d = tf.keras.layers.Activation('relu')(d)
            return d
        def upsample(layer_input, skip_input, filters, f_size=4, dropout_rate=0):
            # upsample -> conv -> instance norm -> relu -> (dropout) -> skip concat
            u = tf.keras.layers.UpSampling2D(size=2)(layer_input)
            u = tf.keras.layers.Conv2D(
                filters, 
                kernel_size=f_size, 
                strides=1, 
                padding='same',
                kernel_initializer = self.weight_init
            )(u)
            u = tf_addons.layers.InstanceNormalization(axis=-1, center=False, scale=False)(u)
            u = tf.keras.layers.Activation('relu')(u)
            if dropout_rate:
                u = tf.keras.layers.Dropout(dropout_rate)(u)
            u = tf.keras.layers.Concatenate()([u, skip_input])
            return u
        # Image input
        img = tf.keras.layers.Input(shape=self.img_shape)
        # Downsampling
        d1 = downsample(img, self.gen_n_filters)
        d2 = downsample(d1, self.gen_n_filters*2)
        d3 = downsample(d2, self.gen_n_filters*4)
        d4 = downsample(d3, self.gen_n_filters*8)

        # Upsampling
        u1 = upsample(d4, d3, self.gen_n_filters*4)
        u2 = upsample(u1, d2, self.gen_n_filters*2)
        u3 = upsample(u2, d1, self.gen_n_filters)

        u4 = tf.keras.layers.UpSampling2D(size=2)(u3)
        output_img = tf.keras.layers.Conv2D(
            self.channels, 
            kernel_size=4,
            strides=1, 
            padding='same',
            activation='tanh',   # outputs in [-1, 1], matching input scaling
            kernel_initializer = self.weight_init
        )(u4)

        return tf.keras.models.Model(img, output_img)


    def build_generator_resnet(self):
        """Return a ResNet generator: c7s1 -> 2x downsample -> 9 residual
        blocks -> 2x upsample -> c7s1 with tanh output."""
        def conv7s1(layer_input, filters, final):
            # 7x7 conv with reflection padding; tanh on the final layer.
            y = ReflectionPadding2D(padding=(3,3))(layer_input)
            y = tf.keras.layers.Conv2D(
                filters,
                kernel_size=(7,7),
                strides=1,
                padding='valid',
                kernel_initializer=self.weight_init
            )(y)
            if final:
                y = tf.keras.layers.Activation('tanh')(y)
            else:
                y = tf_addons.layers.InstanceNormalization(axis=-1, center=False, scale=False)(y)
                y = tf.keras.layers.Activation('relu')(y)
            return y

        def downsample(layer_input, filters):
            y = tf.keras.layers.Conv2D(
                filters, 
                kernel_size=(3,3), 
                strides=2, 
                padding='same',
                kernel_initializer = self.weight_init
            )(layer_input)
            y = tf_addons.layers.InstanceNormalization(axis=-1, center=False, scale=False)(y)
            y = tf.keras.layers.Activation('relu')(y)
            return y

        def residual(layer_input, filters):
            # Two reflection-padded 3x3 convs with an additive shortcut.
            shortcut = layer_input
            y = ReflectionPadding2D(padding=(1,1))(layer_input)
            y = tf.keras.layers.Conv2D(
                filters,
                kernel_size=(3,3),
                strides=1,
                padding='valid',
                kernel_initializer=self.weight_init
            )(y)
            y = tf_addons.layers.InstanceNormalization(axis=-1, center=False, scale=False)(y)
            y = tf.keras.layers.Activation('relu')(y)
            y = ReflectionPadding2D(padding=(1,1))(y)
            y = tf.keras.layers.Conv2D(
                filters,
                kernel_size=(3,3),
                strides=1,
                padding='valid',
                kernel_initializer=self.weight_init
            )(y)
            y = tf_addons.layers.InstanceNormalization(axis=-1, center=False, scale=False)(y)
            return tf.keras.layers.add([shortcut, y])

        def upsample(layer_input, filters):
            y = tf.keras.layers.Conv2DTranspose(
                filters, 
                kernel_size=(3,3), 
                strides=2,
                padding='same',
                kernel_initializer=self.weight_init
            )(layer_input)
            y = tf_addons.layers.InstanceNormalization(axis=-1, center=False, scale=False)(y)
            y = tf.keras.layers.Activation('relu')(y)
            return y

        # Image input
        img = tf.keras.layers.Input(shape=self.img_shape)

        y = img
        y = conv7s1(y, self.gen_n_filters, False)
        y = downsample(y, self.gen_n_filters * 2)
        y = downsample(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = residual(y, self.gen_n_filters * 4)
        y = upsample(y, self.gen_n_filters * 2)
        y = upsample(y, self.gen_n_filters)
        # GENERALIZATION: was hard-coded 3; use the configured channel count
        # so non-RGB input_dim values also work (3 for this notebook).
        y = conv7s1(y, self.channels, True)
        output = y

        return tf.keras.models.Model(img, output)


    def build_discriminator(self):
        """Return a 70x70-style PatchGAN discriminator producing a
        (rows/8, cols/8, 1) map of per-patch real/fake scores."""
        def conv4(layer_input, filters, stride=2, norm=True):
            y = tf.keras.layers.Conv2D(
                filters,
                kernel_size=(4,4),
                strides=stride,
                padding='same',
                kernel_initializer = self.weight_init
              )(layer_input)
            if norm:
                y = tf_addons.layers.InstanceNormalization(axis=-1, center=False, scale=False)(y)
            y = tf.keras.layers.LeakyReLU(0.2)(y)
            return y

        img = tf.keras.layers.Input(shape=self.img_shape)
        # No normalization on the first layer (standard PatchGAN choice).
        y = conv4(img, self.disc_n_filters, stride=2, norm=False)
        y = conv4(y, self.disc_n_filters*2, stride=2)
        y = conv4(y, self.disc_n_filters*4, stride=2)
        y = conv4(y, self.disc_n_filters*8, stride=1)
        output = tf.keras.layers.Conv2D(
            1,
            kernel_size=4,
            strides=1,
            padding='same',
            kernel_initializer=self.weight_init
        )(y)
        return tf.keras.models.Model(img, output)


    def train_discriminators(self, imgs_A, imgs_B, valid, fake):
        """Run one discriminator update on real batches and buffered fakes.

        Returns a 14-tuple: (total, A, A_real, A_fake, B, B_real, B_fake)
        losses followed by the same order of accuracies.
        """
        # Translate images to the opposite domain
        fake_B = self.g_AB.predict(imgs_A)
        fake_A = self.g_BA.predict(imgs_B)

        self.buffer_B.append(fake_B)
        self.buffer_A.append(fake_A)

        # Random sampling without replacement from the replay buffers.
        # NOTE(review): each buffer entry is a whole predicted batch; passing
        # a list of such arrays to train_on_batch only lines up with the
        # model input for batch_size=1 (as used in this notebook) — confirm
        # before training with larger batches.
        fake_A_rnd = random.sample(self.buffer_A, min(len(self.buffer_A), len(imgs_A)))
        fake_B_rnd = random.sample(self.buffer_B, min(len(self.buffer_B), len(imgs_B)))

        # Train the discriminators (original images=real / translated = fake)
        dA_loss_real = self.d_A.train_on_batch(imgs_A, valid)
        dA_loss_fake = self.d_A.train_on_batch(fake_A_rnd, fake)
        dA_loss = 0.5 * np.add(dA_loss_real, dA_loss_fake)

        dB_loss_real = self.d_B.train_on_batch(imgs_B, valid)
        dB_loss_fake = self.d_B.train_on_batch(fake_B_rnd, fake)
        dB_loss = 0.5 * np.add(dB_loss_real, dB_loss_fake)

        # Total discriminator loss
        d_loss_total = 0.5 * np.add(dA_loss, dB_loss)

        return (
            d_loss_total[0], 
            dA_loss[0], dA_loss_real[0], dA_loss_fake[0],
            dB_loss[0], dB_loss_real[0], dB_loss_fake[0],
            d_loss_total[1], 
            dA_loss[1], dA_loss_real[1], dA_loss_fake[1],
            dB_loss[1], dB_loss_real[1], dB_loss_fake[1]
        )


    def train_generators(self, imgs_A, imgs_B, valid):
        """Run one combined-model update; targets are (valid, valid,
        cycle targets, identity targets)."""
        return self.combined.train_on_batch(
          [imgs_A, imgs_B], 
          [valid, valid, imgs_A, imgs_B, imgs_A, imgs_B]
        )


    def train(self, data_loader, epochs, batch_size=1, run_folder='./run', print_step_interval=100, save_epoch_interval=100):
        """Train from self.epoch up to `epochs`, checkpointing periodically.

        Args:
            data_loader: indexable dataset yielding (img_A, img_B) pairs.
            epochs: absolute target epoch count (not additional epochs).
            batch_size: images per update step.
            run_folder: checkpoint directory.
            print_step_interval: steps between progress prints.
            save_epoch_interval: epochs between numbered checkpoints.
        """
        start_time = datetime.datetime.now()
        # Adversarial loss ground truths (per-patch labels for the PatchGAN)
        valid = np.ones((batch_size,) + self.disc_patch)
        fake = np.zeros((batch_size,) + self.disc_patch)

        steps = len(data_loader) // batch_size
        for epoch in range(self.epoch, epochs):
            step_d_losses = []
            step_g_losses = []
            for step in range(steps):
                start = step * batch_size
                end = start + batch_size
                pairs = data_loader[start:end]    # ((a,b), (a, b), ....)
                imgs_A, imgs_B = [], []
                for img_A, img_B in pairs:
                    imgs_A.append(img_A)
                    imgs_B.append(img_B)

                imgs_A = np.array(imgs_A)
                imgs_B = np.array(imgs_B)

                step_d_loss = self.train_discriminators(imgs_A, imgs_B, valid, fake)
                step_g_loss = self.train_generators(imgs_A, imgs_B, valid)

                step_d_losses.append(step_d_loss)
                step_g_losses.append(step_g_loss)

                elapsed_time = datetime.datetime.now() - start_time
                if (step+1) % print_step_interval == 0:
                    print(f'Epoch {epoch+1}/{epochs} {step+1}/{steps} [D loss: {step_d_loss[0]:.3f} acc: {step_d_loss[7]:.3f}][G loss: {step_g_loss[0]:.3f} adv: {np.sum(step_g_loss[1:3]):.3f} recon: {np.sum(step_g_loss[3:5]):.3f} id: {np.sum(step_g_loss[5:7]):.3f} time: {elapsed_time:}')

            # Per-epoch averages of the step losses
            d_loss = np.mean(step_d_losses, axis=0)
            g_loss = np.mean(step_g_losses, axis=0)

            elapsed_time = datetime.datetime.now() - start_time
            print(f'Epoch {epoch+1}/{epochs} [D loss: {d_loss[0]:.3f} acc: {d_loss[7]:.3f}][G loss: {g_loss[0]:.3f} adv: {np.sum(g_loss[1:3]):.3f} recon: {np.sum(g_loss[3:5]):.3f} id: {np.sum(g_loss[5:7]):.3f} time: {elapsed_time:}')

            self.d_losses.append(d_loss)
            self.g_losses.append(g_loss)

            self.epoch += 1
            if (self.epoch) % save_epoch_interval == 0:
                self.save(run_folder, self.epoch)   # numbered checkpoint
                self.save(run_folder)               # "latest" checkpoint

        self.save(run_folder, self.epoch)
        self.save(run_folder)


    def save(self, folder, epoch=None):
        """Save hyper-parameters and all model weights (epoch=None -> latest)."""
        self.save_params(folder, epoch)
        self.save_weights(folder,epoch)


    @staticmethod
    def load(folder, epoch=None):
        """Rebuild a CycleGAN from saved params and load its weights."""
        params = CycleGAN.load_params(folder, epoch)
        gan = CycleGAN(*params)
        gan.load_weights(folder, epoch)
        return gan


    def save_weights(self, run_folder, epoch=None):
        """Save every sub-model's weights; epoch suffixes numbered checkpoints."""
        if epoch is None:
            self.save_model_weights(self.combined, os.path.join(run_folder, 'weights/combined-weights.h5'))
            self.save_model_weights(self.d_A, os.path.join(run_folder, 'weights/d_A-weights.h5'))
            self.save_model_weights(self.d_B, os.path.join(run_folder, 'weights/d_B-weights.h5'))
            self.save_model_weights(self.g_AB, os.path.join(run_folder, 'weights/g_AB-weights.h5'))
            self.save_model_weights(self.g_BA, os.path.join(run_folder, 'weights/g_BA-weights.h5'))
        else:
            self.save_model_weights(self.combined, os.path.join(run_folder, f'weights/combined-weights_{epoch}.h5'))
            self.save_model_weights(self.d_A, os.path.join(run_folder, f'weights/d_A-weights_{epoch}.h5'))
            self.save_model_weights(self.d_B, os.path.join(run_folder, f'weights/d_B-weights_{epoch}.h5'))
            self.save_model_weights(self.g_AB, os.path.join(run_folder, f'weights/g_AB-weights_{epoch}.h5'))
            self.save_model_weights(self.g_BA, os.path.join(run_folder, f'weights/g_BA-weights_{epoch}.h5'))


    def load_weights(self, run_folder, epoch=None):
        """Load every sub-model's weights saved by save_weights()."""
        if epoch is None:
            self.load_model_weights(self.combined, os.path.join(run_folder, 'weights/combined-weights.h5'))
            self.load_model_weights(self.d_A, os.path.join(run_folder, 'weights/d_A-weights.h5'))
            self.load_model_weights(self.d_B, os.path.join(run_folder, 'weights/d_B-weights.h5'))
            self.load_model_weights(self.g_AB, os.path.join(run_folder, 'weights/g_AB-weights.h5'))
            self.load_model_weights(self.g_BA, os.path.join(run_folder, 'weights/g_BA-weights.h5'))
        else:
            self.load_model_weights(self.combined, os.path.join(run_folder, f'weights/combined-weights_{epoch}.h5'))
            self.load_model_weights(self.d_A, os.path.join(run_folder, f'weights/d_A-weights_{epoch}.h5'))
            self.load_model_weights(self.d_B, os.path.join(run_folder, f'weights/d_B-weights_{epoch}.h5'))
            self.load_model_weights(self.g_AB, os.path.join(run_folder, f'weights/g_AB-weights_{epoch}.h5'))
            self.load_model_weights(self.g_BA, os.path.join(run_folder, f'weights/g_BA-weights_{epoch}.h5'))


    def save_model_weights(self, model, filepath):
        """Save one model's weights, creating the directory if needed."""
        dpath, fname = os.path.split(filepath)
        if dpath != '' and not os.path.exists(dpath):
            os.makedirs(dpath)
        model.save_weights(filepath)


    def load_model_weights(self, model, filepath):
        """Load one model's weights from filepath."""
        model.load_weights(filepath)


    def save_params(self, folder, epoch=None):
        """Pickle the constructor arguments (+ epoch and loss history).

        The list order must match __init__'s positional parameters because
        load() calls CycleGAN(*params).
        """
        if epoch is None:
            filepath = os.path.join(folder, 'params.pkl')
        else:
            filepath = os.path.join(folder, f'params_{epoch}.pkl')

        dpath, fname = os.path.split(filepath)
        if dpath != '' and not os.path.exists(dpath):
            os.makedirs(dpath)

        with open(filepath, 'wb') as f:
            pkl.dump([
                self.input_dim,
                self.learning_rate,
                self.lambda_validation,
                self.lambda_reconstr,
                self.lambda_id,
                self.generator_type,
                self.gen_n_filters,
                self.disc_n_filters,
                self.buffer_max_length,
                self.epoch,
                self.d_losses,
                self.g_losses
              ], f)


    @staticmethod
    def load_params(folder, epoch=None):
        """Unpickle and return the parameter list written by save_params()."""
        if epoch is None:
            filepath = os.path.join(folder, 'params.pkl')
        else:
            filepath = os.path.join(folder, f'params_{epoch}.pkl')

        with open(filepath, 'rb') as f:
            params = pkl.load(f)
        return params


    def generate_image(self, img_A, img_B):
        """Return the A-side and B-side demo rows stacked along axis 0."""
        gen_A = self.generate_image_from_A(img_A)
        gen_B = self.generate_image_from_B(img_B)
        return np.concatenate([gen_A, gen_B], axis=0)


    def generate_image_from_A(self, img_A):
        """Return [original, translated, reconstructed, identity] for img_A."""
        fake_B = self.g_AB.predict(img_A)       # Translate images to the other domain
        reconstr_A = self.g_BA.predict(fake_B)  # Translate back to original domain
        id_A = self.g_BA.predict(img_A)         # Identity-map the images
        return np.concatenate([img_A, fake_B, reconstr_A, id_A])


    def generate_image_from_B(self, img_B):
        """Return [original, translated, reconstructed, identity] for img_B."""
        fake_A = self.g_BA.predict(img_B)
        reconstr_B = self.g_AB.predict(fake_A)
        id_B = self.g_AB.predict(img_B)
        return np.concatenate([img_B, fake_A, reconstr_B, id_B])


    @staticmethod
    def showImages(imgs, trans, recon, idimg, w=2.8, h=2.8, filepath=None):
        """Plot an N x M grid of images; save to filepath if given.

        NOTE(review): only `imgs` is read; trans/recon/idimg are kept for
        call-site compatibility. Column titles assume M <= 4.
        """
        N = len(imgs)
        M = len(imgs[0])
        titles = ['Original', 'Translated', 'Reconstructed', 'ID']

        # squeeze=False keeps ax 2-D even when N == 1 or M == 1.
        fig, ax = plt.subplots(N, M, figsize=(w*M, h*N), squeeze=False)
        for i in range(N):
            for j in range(M):
                ax[i][j].imshow(imgs[i][j])
                ax[i][j].set_title(titles[j])   # BUGFIX: was `title[j]` (NameError)
                ax[i][j].axis('off')

        if not filepath is None:
            dpath, fname = os.path.split(filepath)
            if dpath != '' and not os.path.exists(dpath):
                os.makedirs(dpath)
            fig.savefig(filepath, dpi=600)
            plt.close()
        else:
            plt.show()


    def showLoss(self, xlim=[], ylim=[]):
        """Plot the generator loss history for both translation directions."""
        print('loss AB')
        self.showLossAB(xlim, ylim)
        print('loss BA')
        self.showLossBA(xlim, ylim)


    def showLossAB(self, xlim=[], ylim=[]):
        """Plot total / adversarial / cycle / identity losses for A->B."""
        g = np.array(self.g_losses)
        g_loss = g[:, 0]
        g_adv = g[:, 1]
        g_recon = g[:, 3]
        g_id = g[:, 5]
        CycleGAN.plot_history(
            [g_loss, g_adv, g_recon, g_id],
            ['g_loss', 'AB discrim', 'AB cycle', 'AB id'],
            xlim,
            ylim)

    def showLossBA(self, xlim=[], ylim=[]):
        """Plot total / adversarial / cycle / identity losses for B->A."""
        g = np.array(self.g_losses)
        g_loss = g[:, 0]
        g_adv = g[:, 2]
        g_recon = g[:, 4]
        g_id = g[:, 6]
        CycleGAN.plot_history(
            [g_loss, g_adv, g_recon, g_id],
            ['g_loss', 'BA discrim', 'BA cycle', 'BA id'],
            xlim,
            ylim)


    @staticmethod
    def plot_history(vals, labels, xlim=[], ylim=[]):
        """Plot each series in vals against epoch index with a legend."""
        colors = ['red', 'blue', 'green', 'orange', 'black', 'pink']
        n = len(vals)
        fig, ax = plt.subplots(1, 1, figsize=(12,6))
        for i in range(n):
            ax.plot(vals[i], c=colors[i], label=labels[i])
        ax.legend(loc='upper right')
        ax.set_xlabel('epochs')

        if xlim != []:
            ax.set_xlim(xlim[0], xlim[1])
        if ylim != []:
            ax.set_ylim(ylim[0], ylim[1])

        plt.show()

[注意] 上記のfadg0.zip, faks0.zip をブラウザを使って手動でダウンロードして、以下のフォルダに解凍したものとする。

In [5]:
VidTIMIT_fnames = [ 'fadg0', 'faks0']  # speaker IDs: domain A, domain B
data_dir = 'D:\\data\\torch_book1\\ch06'  # local root holding the unzipped dataset
In [6]:
!dir {data_dir}
 ドライブ D のボリューム ラベルがありません。
 ボリューム シリアル番号は 606C-349E です

 D:\data\torch_book1\ch06 のディレクトリ

2021/12/10  16:23    <DIR>          .
2021/12/10  16:23    <DIR>          ..
2021/12/10  16:20    <DIR>          fadg0
2021/12/10  16:20    <DIR>          faks0
               0 個のファイル                   0 バイト
               4 個のディレクトリ  1,758,399,877,120 バイトの空き領域
In [7]:
IMAGE_SIZE = 128  # square side length every frame is resized to
In [8]:
import os
import glob

# Collect frame image paths for each speaker; frames are files whose names
# start with a digit under video/<clip>/.
imgA_paths = glob.glob(os.path.join(data_dir, VidTIMIT_fnames[0], 'video/*/[0-9]*'))
imgB_paths = glob.glob(os.path.join(data_dir, VidTIMIT_fnames[1], 'video/*/[0-9]*'))
In [9]:
import numpy as np

validation_split = 0.05   # hold out 5% of each domain for testing

nA, nB = len(imgA_paths), len(imgB_paths)
splitA = int(nA * (1 - validation_split))
splitB = int(nB * (1 - validation_split))

# Shuffle in place before splitting so train/test are drawn from all clips.
np.random.shuffle(imgA_paths)
np.random.shuffle(imgB_paths)

train_imgA_paths = imgA_paths[:splitA]
test_imgA_paths = imgA_paths[splitA:]
train_imgB_paths = imgB_paths[:splitB]
test_imgB_paths = imgB_paths[splitB:]
In [10]:
print(nA, nB)
2732 2138
In [11]:
# Image: [-1, 1] --> [0, 1]
def M1P1_ZeroP1(imgs):
    """Map images from the generator range [-1, 1] to display range [0, 1],
    clipping anything that falls outside."""
    return np.clip((imgs + 1) * 0.5, 0, 1)

# Image: [0, 1] --> [-1, 1]
def ZeroP1_M1P1(imgs):
    """Map images from display range [0, 1] back to generator range [-1, 1]."""
    doubled = imgs * 2
    return doubled - 1
In [12]:
# Paired loaders over the two domains; every frame is resized to IMAGE_SIZE x IMAGE_SIZE.
pair_flow = PairDataset(train_imgA_paths, train_imgB_paths, target_size=(IMAGE_SIZE, IMAGE_SIZE))
test_pair_flow = PairDataset(test_imgA_paths, test_imgB_paths, target_size=(IMAGE_SIZE, IMAGE_SIZE))

保存したニューラルネットワーク・モデルの重みをロードする

In [13]:
# Restore the CycleGAN (weights plus training state) saved by the previous run.
gan = CycleGAN.load(save_path)

print(gan.epoch)  # epochs already completed — training resumes from here
5

さらに訓練を進める

In [14]:
# Resume training until MAX_EPOCHS, logging progress every 1000 steps and
# checkpointing into `save_path` every 50 epochs.
gan.train(
    pair_flow,
    epochs = MAX_EPOCHS,
    batch_size=1,
    run_folder = save_path,
    print_step_interval = 1000,
    save_epoch_interval = 50
)
Epoch 6/100 1000/2595 [D loss: 0.126 acc: 0.890][G loss: 1.612 adv: 0.663 recon: 0.079 id: 0.078 time: 0:02:17.480424
Epoch 6/100 2000/2595 [D loss: 0.149 acc: 0.806][G loss: 2.594 adv: 1.439 recon: 0.096 id: 0.097 time: 0:04:23.299613
Epoch 6/100 [D loss: 0.080 acc: 0.925][G loss: 1.983 adv: 1.032 recon: 0.079 id: 0.078 time: 0:05:38.296418
Epoch 7/100 1000/2595 [D loss: 0.091 acc: 0.914][G loss: 1.371 adv: 0.452 recon: 0.077 id: 0.073 time: 0:07:44.114719
Epoch 7/100 2000/2595 [D loss: 0.164 acc: 0.815][G loss: 2.330 adv: 1.239 recon: 0.091 id: 0.092 time: 0:09:49.881456
Epoch 7/100 [D loss: 0.077 acc: 0.929][G loss: 1.953 adv: 1.023 recon: 0.078 id: 0.077 time: 0:11:04.861470
Epoch 8/100 1000/2595 [D loss: 0.074 acc: 0.955][G loss: 1.809 adv: 0.926 recon: 0.074 id: 0.074 time: 0:13:10.788274
Epoch 8/100 2000/2595 [D loss: 0.124 acc: 0.854][G loss: 2.447 adv: 1.334 recon: 0.093 id: 0.093 time: 0:15:16.748974
Epoch 8/100 [D loss: 0.073 acc: 0.937][G loss: 1.981 adv: 1.063 recon: 0.077 id: 0.076 time: 0:16:31.700854
Epoch 9/100 1000/2595 [D loss: 0.095 acc: 0.896][G loss: 1.683 adv: 0.772 recon: 0.076 id: 0.075 time: 0:18:37.517496
Epoch 9/100 2000/2595 [D loss: 0.188 acc: 0.654][G loss: 2.780 adv: 1.705 recon: 0.089 id: 0.090 time: 0:20:43.319625
Epoch 9/100 [D loss: 0.072 acc: 0.938][G loss: 2.044 adv: 1.124 recon: 0.077 id: 0.076 time: 0:21:58.470984
Epoch 10/100 1000/2595 [D loss: 0.222 acc: 0.712][G loss: 2.039 adv: 1.185 recon: 0.071 id: 0.070 time: 0:24:04.390568
Epoch 10/100 2000/2595 [D loss: 0.075 acc: 0.913][G loss: 1.993 adv: 0.947 recon: 0.087 id: 0.090 time: 0:26:10.521207
Epoch 10/100 [D loss: 0.067 acc: 0.946][G loss: 2.077 adv: 1.156 recon: 0.077 id: 0.076 time: 0:27:25.666085
Epoch 11/100 1000/2595 [D loss: 0.092 acc: 0.929][G loss: 1.728 adv: 0.858 recon: 0.073 id: 0.070 time: 0:29:31.481198
Epoch 11/100 2000/2595 [D loss: 0.036 acc: 0.993][G loss: 1.691 adv: 0.639 recon: 0.088 id: 0.088 time: 0:31:37.618813
Epoch 11/100 [D loss: 0.069 acc: 0.943][G loss: 2.026 adv: 1.120 recon: 0.076 id: 0.075 time: 0:32:54.435923
Epoch 12/100 1000/2595 [D loss: 0.180 acc: 0.672][G loss: 1.829 adv: 0.910 recon: 0.077 id: 0.074 time: 0:35:01.991873
Epoch 12/100 2000/2595 [D loss: 0.123 acc: 0.808][G loss: 2.113 adv: 1.001 recon: 0.093 id: 0.092 time: 0:37:08.170775
Epoch 12/100 [D loss: 0.064 acc: 0.950][G loss: 2.104 adv: 1.195 recon: 0.076 id: 0.075 time: 0:38:23.197936
Epoch 13/100 1000/2595 [D loss: 0.117 acc: 0.872][G loss: 1.692 adv: 0.843 recon: 0.071 id: 0.070 time: 0:40:29.159939
Epoch 13/100 2000/2595 [D loss: 0.074 acc: 0.968][G loss: 2.671 adv: 1.619 recon: 0.088 id: 0.087 time: 0:42:35.275433
Epoch 13/100 [D loss: 0.063 acc: 0.952][G loss: 2.067 adv: 1.172 recon: 0.075 id: 0.074 time: 0:43:50.382121
Epoch 14/100 1000/2595 [D loss: 0.033 acc: 1.000][G loss: 2.106 adv: 1.242 recon: 0.072 id: 0.070 time: 0:45:56.342980
Epoch 14/100 2000/2595 [D loss: 0.144 acc: 0.830][G loss: 2.498 adv: 1.438 recon: 0.088 id: 0.090 time: 0:48:02.502751
Epoch 14/100 [D loss: 0.060 acc: 0.956][G loss: 2.106 adv: 1.216 recon: 0.074 id: 0.074 time: 0:49:17.632856
Epoch 15/100 1000/2595 [D loss: 0.090 acc: 0.878][G loss: 2.331 adv: 1.456 recon: 0.074 id: 0.070 time: 0:51:23.405030
Epoch 15/100 2000/2595 [D loss: 0.099 acc: 0.911][G loss: 1.699 adv: 0.669 recon: 0.085 id: 0.088 time: 0:53:29.516467
Epoch 15/100 [D loss: 0.057 acc: 0.959][G loss: 2.161 adv: 1.270 recon: 0.074 id: 0.074 time: 0:54:44.280676
Epoch 16/100 1000/2595 [D loss: 0.064 acc: 0.982][G loss: 1.721 adv: 0.883 recon: 0.070 id: 0.069 time: 0:56:50.314372
Epoch 16/100 2000/2595 [D loss: 0.090 acc: 0.932][G loss: 2.093 adv: 1.070 recon: 0.085 id: 0.088 time: 0:58:56.500406
Epoch 16/100 [D loss: 0.059 acc: 0.958][G loss: 2.165 adv: 1.274 recon: 0.074 id: 0.073 time: 1:00:11.414739
Epoch 17/100 1000/2595 [D loss: 0.040 acc: 0.995][G loss: 2.062 adv: 1.201 recon: 0.072 id: 0.068 time: 1:02:17.441134
Epoch 17/100 2000/2595 [D loss: 0.086 acc: 0.915][G loss: 2.251 adv: 1.225 recon: 0.085 id: 0.087 time: 1:04:23.168061
Epoch 17/100 [D loss: 0.060 acc: 0.956][G loss: 2.154 adv: 1.270 recon: 0.074 id: 0.073 time: 1:05:38.065124
Epoch 18/100 1000/2595 [D loss: 0.181 acc: 0.729][G loss: 1.793 adv: 0.966 recon: 0.069 id: 0.069 time: 1:07:44.237796
Epoch 18/100 2000/2595 [D loss: 0.048 acc: 0.978][G loss: 2.334 adv: 1.305 recon: 0.086 id: 0.086 time: 1:09:50.597796
Epoch 18/100 [D loss: 0.064 acc: 0.950][G loss: 2.108 adv: 1.228 recon: 0.073 id: 0.073 time: 1:11:05.857441
Epoch 19/100 1000/2595 [D loss: 0.057 acc: 0.933][G loss: 1.766 adv: 0.838 recon: 0.078 id: 0.074 time: 1:13:12.228439
Epoch 19/100 2000/2595 [D loss: 0.047 acc: 0.979][G loss: 2.586 adv: 1.526 recon: 0.089 id: 0.087 time: 1:15:18.252587
Epoch 19/100 [D loss: 0.063 acc: 0.951][G loss: 2.120 adv: 1.235 recon: 0.074 id: 0.072 time: 1:16:33.130544
Epoch 20/100 1000/2595 [D loss: 0.092 acc: 0.860][G loss: 1.547 adv: 0.678 recon: 0.073 id: 0.070 time: 1:18:39.174305
Epoch 20/100 2000/2595 [D loss: 0.126 acc: 0.831][G loss: 2.199 adv: 1.168 recon: 0.086 id: 0.085 time: 1:20:45.150378
Epoch 20/100 [D loss: 0.058 acc: 0.959][G loss: 2.162 adv: 1.275 recon: 0.074 id: 0.073 time: 1:22:00.343212
Epoch 21/100 1000/2595 [D loss: 0.039 acc: 0.994][G loss: 1.995 adv: 1.152 recon: 0.071 id: 0.068 time: 1:24:06.653524
Epoch 21/100 2000/2595 [D loss: 0.096 acc: 0.927][G loss: 2.561 adv: 1.523 recon: 0.086 id: 0.088 time: 1:26:13.192659
Epoch 21/100 [D loss: 0.056 acc: 0.962][G loss: 2.188 adv: 1.307 recon: 0.074 id: 0.072 time: 1:27:28.374766
Epoch 22/100 1000/2595 [D loss: 0.062 acc: 0.978][G loss: 2.223 adv: 1.387 recon: 0.070 id: 0.067 time: 1:29:37.334135
Epoch 22/100 2000/2595 [D loss: 0.059 acc: 0.951][G loss: 1.821 adv: 0.770 recon: 0.088 id: 0.087 time: 1:31:50.046708
Epoch 22/100 [D loss: 0.057 acc: 0.961][G loss: 2.158 adv: 1.284 recon: 0.073 id: 0.072 time: 1:33:08.091678
Epoch 23/100 1000/2595 [D loss: 0.100 acc: 0.873][G loss: 1.949 adv: 1.101 recon: 0.071 id: 0.068 time: 1:35:16.054701
Epoch 23/100 2000/2595 [D loss: 0.040 acc: 0.993][G loss: 2.442 adv: 1.426 recon: 0.085 id: 0.084 time: 1:37:25.303585
Epoch 23/100 [D loss: 0.059 acc: 0.955][G loss: 2.149 adv: 1.282 recon: 0.072 id: 0.071 time: 1:38:41.059016
Epoch 24/100 1000/2595 [D loss: 0.026 acc: 0.999][G loss: 2.179 adv: 1.291 recon: 0.074 id: 0.072 time: 1:40:47.257601
Epoch 24/100 2000/2595 [D loss: 0.126 acc: 0.839][G loss: 3.005 adv: 1.982 recon: 0.086 id: 0.083 time: 1:42:53.985200
Epoch 24/100 [D loss: 0.057 acc: 0.959][G loss: 2.149 adv: 1.281 recon: 0.073 id: 0.071 time: 1:44:09.086595
Epoch 25/100 1000/2595 [D loss: 0.110 acc: 0.862][G loss: 2.058 adv: 1.235 recon: 0.069 id: 0.067 time: 1:46:17.302018
Epoch 25/100 2000/2595 [D loss: 0.049 acc: 0.983][G loss: 2.499 adv: 1.453 recon: 0.087 id: 0.086 time: 1:50:38.492704
Epoch 25/100 [D loss: 0.061 acc: 0.954][G loss: 2.131 adv: 1.262 recon: 0.073 id: 0.071 time: 1:52:24.898653
Epoch 26/100 1000/2595 [D loss: 0.090 acc: 0.930][G loss: 2.293 adv: 1.438 recon: 0.072 id: 0.069 time: 1:55:05.708655
Epoch 26/100 2000/2595 [D loss: 0.097 acc: 0.904][G loss: 2.196 adv: 1.155 recon: 0.087 id: 0.085 time: 1:57:14.492162
Epoch 26/100 [D loss: 0.060 acc: 0.956][G loss: 2.150 adv: 1.284 recon: 0.072 id: 0.071 time: 1:58:30.408293
Epoch 27/100 1000/2595 [D loss: 0.048 acc: 0.984][G loss: 1.845 adv: 1.021 recon: 0.069 id: 0.067 time: 2:00:38.129778
Epoch 27/100 2000/2595 [D loss: 0.046 acc: 0.993][G loss: 2.634 adv: 1.607 recon: 0.086 id: 0.084 time: 2:02:47.603935
Epoch 27/100 [D loss: 0.059 acc: 0.958][G loss: 2.136 adv: 1.273 recon: 0.072 id: 0.071 time: 2:04:05.542179
Epoch 28/100 1000/2595 [D loss: 0.044 acc: 0.992][G loss: 2.421 adv: 1.594 recon: 0.069 id: 0.067 time: 2:06:17.357949
Epoch 28/100 2000/2595 [D loss: 0.075 acc: 0.917][G loss: 2.422 adv: 1.401 recon: 0.085 id: 0.085 time: 2:08:29.405199
Epoch 28/100 [D loss: 0.055 acc: 0.962][G loss: 2.197 adv: 1.331 recon: 0.072 id: 0.071 time: 2:09:47.352098
Epoch 29/100 1000/2595 [D loss: 0.073 acc: 0.908][G loss: 1.422 adv: 0.610 recon: 0.068 id: 0.067 time: 2:11:59.102927
Epoch 29/100 2000/2595 [D loss: 0.060 acc: 0.973][G loss: 1.867 adv: 0.859 recon: 0.084 id: 0.084 time: 2:14:42.550865
Epoch 29/100 [D loss: 0.056 acc: 0.960][G loss: 2.187 adv: 1.324 recon: 0.072 id: 0.071 time: 2:16:01.432472
Epoch 30/100 1000/2595 [D loss: 0.015 acc: 1.000][G loss: 1.870 adv: 1.046 recon: 0.069 id: 0.067 time: 2:18:12.722900
Epoch 30/100 2000/2595 [D loss: 0.039 acc: 1.000][G loss: 2.276 adv: 1.248 recon: 0.086 id: 0.086 time: 2:20:23.746132
Epoch 30/100 [D loss: 0.057 acc: 0.961][G loss: 2.160 adv: 1.296 recon: 0.072 id: 0.071 time: 2:21:42.580237
Epoch 31/100 1000/2595 [D loss: 0.144 acc: 0.781][G loss: 1.574 adv: 0.744 recon: 0.070 id: 0.067 time: 2:23:59.434298
Epoch 31/100 2000/2595 [D loss: 0.015 acc: 1.000][G loss: 2.402 adv: 1.401 recon: 0.083 id: 0.084 time: 2:26:50.558590
Epoch 31/100 [D loss: 0.050 acc: 0.969][G loss: 2.254 adv: 1.391 recon: 0.072 id: 0.071 time: 2:28:08.797536
Epoch 32/100 1000/2595 [D loss: 0.041 acc: 1.000][G loss: 2.223 adv: 1.364 recon: 0.072 id: 0.069 time: 2:30:21.639369
Epoch 32/100 2000/2595 [D loss: 0.057 acc: 0.967][G loss: 2.191 adv: 1.207 recon: 0.082 id: 0.084 time: 2:32:32.489858
Epoch 32/100 [D loss: 0.052 acc: 0.967][G loss: 2.196 adv: 1.340 recon: 0.072 id: 0.070 time: 2:33:51.291398
Epoch 33/100 1000/2595 [D loss: 0.068 acc: 1.000][G loss: 1.625 adv: 0.815 recon: 0.068 id: 0.066 time: 2:36:02.078848
Epoch 33/100 2000/2595 [D loss: 0.094 acc: 0.949][G loss: 2.049 adv: 1.031 recon: 0.085 id: 0.084 time: 2:38:13.407591
Epoch 33/100 [D loss: 0.054 acc: 0.963][G loss: 2.179 adv: 1.323 recon: 0.072 id: 0.070 time: 2:39:31.202537
Epoch 34/100 1000/2595 [D loss: 0.058 acc: 0.932][G loss: 2.031 adv: 1.194 recon: 0.070 id: 0.068 time: 2:41:41.845368
Epoch 34/100 2000/2595 [D loss: 0.072 acc: 0.921][G loss: 2.499 adv: 1.328 recon: 0.097 id: 0.100 time: 2:43:52.167891
Epoch 34/100 [D loss: 0.053 acc: 0.964][G loss: 2.203 adv: 1.351 recon: 0.071 id: 0.070 time: 2:45:09.579784
Epoch 35/100 1000/2595 [D loss: 0.031 acc: 0.994][G loss: 2.248 adv: 1.438 recon: 0.068 id: 0.066 time: 2:47:20.670787
Epoch 35/100 2000/2595 [D loss: 0.044 acc: 0.987][G loss: 2.053 adv: 1.040 recon: 0.085 id: 0.082 time: 2:49:31.407495
Epoch 35/100 [D loss: 0.056 acc: 0.961][G loss: 2.165 adv: 1.317 recon: 0.071 id: 0.070 time: 2:50:49.205619
Epoch 36/100 1000/2595 [D loss: 0.031 acc: 0.994][G loss: 2.138 adv: 1.334 recon: 0.067 id: 0.066 time: 2:53:00.235424
Epoch 36/100 2000/2595 [D loss: 0.042 acc: 0.998][G loss: 2.515 adv: 1.511 recon: 0.084 id: 0.083 time: 2:55:11.639864
Epoch 36/100 [D loss: 0.057 acc: 0.958][G loss: 2.187 adv: 1.336 recon: 0.071 id: 0.070 time: 2:56:29.647993
Epoch 37/100 1000/2595 [D loss: 0.043 acc: 0.991][G loss: 2.109 adv: 1.314 recon: 0.067 id: 0.065 time: 2:58:40.796000
Epoch 37/100 2000/2595 [D loss: 0.027 acc: 0.988][G loss: 2.200 adv: 1.192 recon: 0.083 id: 0.088 time: 3:00:52.117822
Epoch 37/100 [D loss: 0.052 acc: 0.966][G loss: 2.231 adv: 1.380 recon: 0.071 id: 0.070 time: 3:02:10.400873
Epoch 38/100 1000/2595 [D loss: 0.048 acc: 0.981][G loss: 2.258 adv: 1.444 recon: 0.068 id: 0.067 time: 3:04:21.037274
Epoch 38/100 2000/2595 [D loss: 0.050 acc: 0.994][G loss: 2.600 adv: 1.597 recon: 0.084 id: 0.083 time: 3:06:31.870586
Epoch 38/100 [D loss: 0.050 acc: 0.968][G loss: 2.258 adv: 1.404 recon: 0.071 id: 0.070 time: 3:07:50.367234
Epoch 39/100 1000/2595 [D loss: 0.039 acc: 0.998][G loss: 2.577 adv: 1.688 recon: 0.075 id: 0.072 time: 3:10:03.716674
Epoch 39/100 2000/2595 [D loss: 0.039 acc: 0.998][G loss: 2.216 adv: 1.205 recon: 0.084 id: 0.084 time: 3:12:15.831895
Epoch 39/100 [D loss: 0.051 acc: 0.967][G loss: 2.227 adv: 1.378 recon: 0.071 id: 0.070 time: 3:13:33.588263
Epoch 40/100 1000/2595 [D loss: 0.042 acc: 0.998][G loss: 2.173 adv: 1.349 recon: 0.069 id: 0.068 time: 3:15:43.918253
Epoch 40/100 2000/2595 [D loss: 0.026 acc: 1.000][G loss: 2.384 adv: 1.416 recon: 0.080 id: 0.082 time: 3:17:54.232181
Epoch 40/100 [D loss: 0.049 acc: 0.969][G loss: 2.246 adv: 1.399 recon: 0.071 id: 0.070 time: 3:19:12.050144
Epoch 41/100 1000/2595 [D loss: 0.023 acc: 0.997][G loss: 2.067 adv: 1.264 recon: 0.067 id: 0.067 time: 3:21:22.036115
Epoch 41/100 2000/2595 [D loss: 0.079 acc: 0.941][G loss: 2.278 adv: 1.315 recon: 0.080 id: 0.082 time: 3:23:32.186535
Epoch 41/100 [D loss: 0.046 acc: 0.973][G loss: 2.285 adv: 1.432 recon: 0.071 id: 0.070 time: 3:24:49.504444
Epoch 42/100 1000/2595 [D loss: 0.026 acc: 1.000][G loss: 1.922 adv: 1.110 recon: 0.068 id: 0.066 time: 3:27:00.451917
Epoch 42/100 2000/2595 [D loss: 0.035 acc: 0.995][G loss: 2.041 adv: 1.053 recon: 0.082 id: 0.083 time: 3:29:10.293381
Epoch 42/100 [D loss: 0.052 acc: 0.965][G loss: 2.260 adv: 1.407 recon: 0.071 id: 0.070 time: 3:30:28.233587
Epoch 43/100 1000/2595 [D loss: 0.073 acc: 0.962][G loss: 2.393 adv: 1.591 recon: 0.067 id: 0.065 time: 3:32:39.330306
Epoch 43/100 2000/2595 [D loss: 0.062 acc: 0.931][G loss: 2.799 adv: 1.831 recon: 0.080 id: 0.082 time: 3:34:50.061014
Epoch 43/100 [D loss: 0.060 acc: 0.951][G loss: 2.208 adv: 1.365 recon: 0.070 id: 0.069 time: 3:36:07.226076
Epoch 44/100 1000/2595 [D loss: 0.127 acc: 0.804][G loss: 1.866 adv: 1.070 recon: 0.067 id: 0.064 time: 3:38:16.967716
Epoch 44/100 2000/2595 [D loss: 0.033 acc: 0.986][G loss: 2.144 adv: 1.108 recon: 0.086 id: 0.087 time: 3:40:28.071611
Epoch 44/100 [D loss: 0.054 acc: 0.962][G loss: 2.270 adv: 1.387 recon: 0.074 id: 0.072 time: 3:41:45.367629
Epoch 45/100 1000/2595 [D loss: 0.037 acc: 0.970][G loss: 1.953 adv: 1.155 recon: 0.067 id: 0.066 time: 3:43:55.338012
Epoch 45/100 2000/2595 [D loss: 0.029 acc: 0.997][G loss: 2.354 adv: 1.344 recon: 0.084 id: 0.086 time: 3:46:05.689724
Epoch 45/100 [D loss: 0.051 acc: 0.965][G loss: 2.245 adv: 1.394 recon: 0.071 id: 0.070 time: 3:47:23.033946
Epoch 46/100 1000/2595 [D loss: 0.020 acc: 1.000][G loss: 1.909 adv: 1.110 recon: 0.067 id: 0.067 time: 3:49:33.444818
Epoch 46/100 2000/2595 [D loss: 0.045 acc: 0.998][G loss: 2.553 adv: 1.561 recon: 0.082 id: 0.084 time: 3:51:44.167433
Epoch 46/100 [D loss: 0.047 acc: 0.970][G loss: 2.280 adv: 1.433 recon: 0.071 id: 0.070 time: 3:53:01.854187
Epoch 47/100 1000/2595 [D loss: 0.018 acc: 1.000][G loss: 2.711 adv: 1.906 recon: 0.067 id: 0.066 time: 3:55:11.316591
Epoch 47/100 2000/2595 [D loss: 0.021 acc: 0.999][G loss: 2.115 adv: 1.133 recon: 0.082 id: 0.083 time: 3:57:20.889812
Epoch 47/100 [D loss: 0.050 acc: 0.968][G loss: 2.231 adv: 1.391 recon: 0.070 id: 0.069 time: 3:58:37.745650
Epoch 48/100 1000/2595 [D loss: 0.023 acc: 0.999][G loss: 2.091 adv: 1.291 recon: 0.067 id: 0.065 time: 4:00:48.224212
Epoch 48/100 2000/2595 [D loss: 0.054 acc: 0.993][G loss: 2.374 adv: 1.405 recon: 0.081 id: 0.082 time: 4:02:58.261981
Epoch 48/100 [D loss: 0.049 acc: 0.968][G loss: 2.274 adv: 1.427 recon: 0.071 id: 0.069 time: 4:04:15.577561
Epoch 49/100 1000/2595 [D loss: 0.021 acc: 1.000][G loss: 2.168 adv: 1.371 recon: 0.066 id: 0.066 time: 4:06:26.261062
Epoch 49/100 2000/2595 [D loss: 0.034 acc: 0.991][G loss: 2.657 adv: 1.651 recon: 0.083 id: 0.086 time: 4:08:37.024003
Epoch 49/100 [D loss: 0.049 acc: 0.966][G loss: 2.290 adv: 1.449 recon: 0.070 id: 0.069 time: 4:09:55.107799
Epoch 50/100 1000/2595 [D loss: 0.016 acc: 1.000][G loss: 2.507 adv: 1.725 recon: 0.065 id: 0.066 time: 4:12:05.729378
Epoch 50/100 2000/2595 [D loss: 0.033 acc: 0.999][G loss: 2.016 adv: 1.028 recon: 0.082 id: 0.082 time: 4:14:16.579550
Epoch 50/100 [D loss: 0.047 acc: 0.970][G loss: 2.290 adv: 1.452 recon: 0.070 id: 0.069 time: 4:15:34.233755
Epoch 51/100 1000/2595 [D loss: 0.183 acc: 0.713][G loss: 1.581 adv: 0.756 recon: 0.069 id: 0.067 time: 4:17:46.795602
Epoch 51/100 2000/2595 [D loss: 0.040 acc: 0.999][G loss: 2.357 adv: 1.384 recon: 0.080 id: 0.084 time: 4:19:58.189954
Epoch 51/100 [D loss: 0.049 acc: 0.969][G loss: 2.281 adv: 1.437 recon: 0.071 id: 0.069 time: 4:21:16.300953
Epoch 52/100 1000/2595 [D loss: 0.024 acc: 1.000][G loss: 2.374 adv: 1.406 recon: 0.081 id: 0.078 time: 4:23:27.325010
Epoch 52/100 2000/2595 [D loss: 0.044 acc: 0.996][G loss: 2.628 adv: 1.590 recon: 0.087 id: 0.084 time: 4:25:37.238209
Epoch 52/100 [D loss: 0.044 acc: 0.977][G loss: 2.365 adv: 1.483 recon: 0.074 id: 0.072 time: 4:26:54.856516
Epoch 53/100 1000/2595 [D loss: 0.154 acc: 0.764][G loss: 1.843 adv: 1.039 recon: 0.067 id: 0.066 time: 4:29:05.009458
Epoch 53/100 2000/2595 [D loss: 0.040 acc: 1.000][G loss: 2.287 adv: 1.305 recon: 0.081 id: 0.085 time: 4:31:15.297868
Epoch 53/100 [D loss: 0.047 acc: 0.972][G loss: 2.298 adv: 1.434 recon: 0.072 id: 0.071 time: 4:32:33.402677
Epoch 54/100 1000/2595 [D loss: 0.077 acc: 0.934][G loss: 1.869 adv: 0.994 recon: 0.074 id: 0.069 time: 4:34:44.233659
Epoch 54/100 2000/2595 [D loss: 0.025 acc: 0.996][G loss: 2.347 adv: 1.356 recon: 0.082 id: 0.084 time: 4:36:55.668329
Epoch 54/100 [D loss: 0.048 acc: 0.969][G loss: 2.297 adv: 1.444 recon: 0.071 id: 0.070 time: 4:38:13.455376
Epoch 55/100 1000/2595 [D loss: 0.040 acc: 0.995][G loss: 1.855 adv: 1.040 recon: 0.068 id: 0.067 time: 4:40:24.096769
Epoch 55/100 2000/2595 [D loss: 0.056 acc: 0.964][G loss: 2.459 adv: 1.451 recon: 0.084 id: 0.085 time: 4:42:34.225253
Epoch 55/100 [D loss: 0.054 acc: 0.961][G loss: 2.243 adv: 1.399 recon: 0.071 id: 0.069 time: 4:44:02.586807
Epoch 56/100 1000/2595 [D loss: 0.078 acc: 0.869][G loss: 1.979 adv: 1.156 recon: 0.069 id: 0.066 time: 4:46:55.159813
Epoch 56/100 2000/2595 [D loss: 0.040 acc: 0.999][G loss: 2.301 adv: 1.307 recon: 0.082 id: 0.085 time: 4:49:06.613858
Epoch 56/100 [D loss: 0.054 acc: 0.960][G loss: 2.251 adv: 1.401 recon: 0.071 id: 0.069 time: 4:50:24.297872
Epoch 57/100 1000/2595 [D loss: 0.053 acc: 0.942][G loss: 2.322 adv: 1.528 recon: 0.066 id: 0.066 time: 4:52:41.659311
Epoch 57/100 2000/2595 [D loss: 0.026 acc: 1.000][G loss: 2.488 adv: 1.486 recon: 0.083 id: 0.084 time: 4:54:56.209085
Epoch 57/100 [D loss: 0.054 acc: 0.958][G loss: 2.263 adv: 1.418 recon: 0.071 id: 0.069 time: 4:56:12.272670
Epoch 58/100 1000/2595 [D loss: 0.135 acc: 0.799][G loss: 2.157 adv: 1.352 recon: 0.067 id: 0.066 time: 4:58:22.576463
Epoch 58/100 2000/2595 [D loss: 0.049 acc: 1.000][G loss: 2.740 adv: 1.777 recon: 0.080 id: 0.084 time: 5:00:37.124883
Epoch 58/100 [D loss: 0.052 acc: 0.963][G loss: 2.268 adv: 1.425 recon: 0.071 id: 0.069 time: 5:01:55.885453
Epoch 59/100 1000/2595 [D loss: 0.149 acc: 0.815][G loss: 2.180 adv: 1.370 recon: 0.068 id: 0.065 time: 5:04:08.189007
Epoch 59/100 2000/2595 [D loss: 0.041 acc: 1.000][G loss: 2.179 adv: 1.201 recon: 0.081 id: 0.083 time: 5:06:19.667389
Epoch 59/100 [D loss: 0.053 acc: 0.962][G loss: 2.237 adv: 1.390 recon: 0.071 id: 0.069 time: 5:07:38.509009
Epoch 60/100 1000/2595 [D loss: 0.146 acc: 0.793][G loss: 1.801 adv: 0.981 recon: 0.069 id: 0.065 time: 5:09:48.814649
Epoch 60/100 2000/2595 [D loss: 0.041 acc: 1.000][G loss: 2.503 adv: 1.523 recon: 0.081 id: 0.084 time: 5:11:59.361344
Epoch 60/100 [D loss: 0.049 acc: 0.968][G loss: 2.263 adv: 1.420 recon: 0.071 id: 0.069 time: 5:13:16.818201
Epoch 61/100 1000/2595 [D loss: 0.043 acc: 0.998][G loss: 2.887 adv: 2.070 recon: 0.069 id: 0.065 time: 5:15:28.012492
Epoch 61/100 2000/2595 [D loss: 0.048 acc: 0.994][G loss: 2.647 adv: 1.658 recon: 0.082 id: 0.083 time: 5:17:39.091994
Epoch 61/100 [D loss: 0.051 acc: 0.965][G loss: 2.273 adv: 1.430 recon: 0.071 id: 0.069 time: 5:18:56.424654
Epoch 62/100 1000/2595 [D loss: 0.094 acc: 0.902][G loss: 2.170 adv: 1.281 recon: 0.075 id: 0.068 time: 5:21:07.258216
Epoch 62/100 2000/2595 [D loss: 0.017 acc: 1.000][G loss: 2.660 adv: 1.690 recon: 0.080 id: 0.083 time: 5:23:18.857768
Epoch 62/100 [D loss: 0.050 acc: 0.967][G loss: 2.265 adv: 1.400 recon: 0.073 id: 0.070 time: 5:24:38.741933
Epoch 63/100 1000/2595 [D loss: 0.034 acc: 0.988][G loss: 1.733 adv: 0.897 recon: 0.071 id: 0.066 time: 5:26:49.804619
Epoch 63/100 2000/2595 [D loss: 0.019 acc: 0.992][G loss: 2.418 adv: 1.443 recon: 0.081 id: 0.082 time: 5:29:00.590119
Epoch 63/100 [D loss: 0.051 acc: 0.965][G loss: 2.240 adv: 1.388 recon: 0.071 id: 0.069 time: 5:30:19.559142
Epoch 64/100 1000/2595 [D loss: 0.063 acc: 0.976][G loss: 1.927 adv: 1.107 recon: 0.069 id: 0.066 time: 5:32:30.469117
Epoch 64/100 2000/2595 [D loss: 0.048 acc: 0.995][G loss: 2.344 adv: 1.369 recon: 0.081 id: 0.083 time: 5:34:41.610185
Epoch 64/100 [D loss: 0.056 acc: 0.958][G loss: 2.230 adv: 1.375 recon: 0.072 id: 0.069 time: 5:35:59.406164
Epoch 65/100 1000/2595 [D loss: 0.038 acc: 0.998][G loss: 1.873 adv: 1.019 recon: 0.072 id: 0.066 time: 5:38:09.779377
Epoch 65/100 2000/2595 [D loss: 0.029 acc: 1.000][G loss: 2.610 adv: 1.557 recon: 0.088 id: 0.087 time: 5:40:20.094309
Epoch 65/100 [D loss: 0.053 acc: 0.963][G loss: 2.235 adv: 1.379 recon: 0.072 id: 0.069 time: 5:41:38.244567
Epoch 66/100 1000/2595 [D loss: 0.136 acc: 0.901][G loss: 1.599 adv: 0.739 recon: 0.072 id: 0.068 time: 5:43:49.024513
Epoch 66/100 2000/2595 [D loss: 0.061 acc: 0.968][G loss: 2.521 adv: 1.552 recon: 0.081 id: 0.082 time: 5:45:59.617495
Epoch 66/100 [D loss: 0.049 acc: 0.968][G loss: 2.279 adv: 1.424 recon: 0.072 id: 0.069 time: 5:47:17.607509
Epoch 67/100 1000/2595 [D loss: 0.028 acc: 1.000][G loss: 2.304 adv: 1.481 recon: 0.069 id: 0.066 time: 5:49:28.460112
Epoch 67/100 2000/2595 [D loss: 0.055 acc: 0.978][G loss: 2.110 adv: 1.140 recon: 0.080 id: 0.083 time: 5:51:39.148919
Epoch 67/100 [D loss: 0.045 acc: 0.974][G loss: 2.339 adv: 1.486 recon: 0.072 id: 0.069 time: 5:52:56.971599
Epoch 68/100 1000/2595 [D loss: 0.285 acc: 0.531][G loss: 1.985 adv: 1.171 recon: 0.068 id: 0.065 time: 5:55:07.470893
Epoch 68/100 2000/2595 [D loss: 0.053 acc: 0.999][G loss: 2.462 adv: 1.471 recon: 0.082 id: 0.085 time: 5:57:17.845156
Epoch 68/100 [D loss: 0.045 acc: 0.973][G loss: 2.362 adv: 1.508 recon: 0.072 id: 0.069 time: 5:58:36.169341
Epoch 69/100 1000/2595 [D loss: 0.041 acc: 1.000][G loss: 1.556 adv: 0.739 recon: 0.069 id: 0.065 time: 6:00:46.768758
Epoch 69/100 2000/2595 [D loss: 0.084 acc: 0.990][G loss: 2.125 adv: 1.142 recon: 0.081 id: 0.084 time: 6:02:57.454009
Epoch 69/100 [D loss: 0.043 acc: 0.976][G loss: 2.372 adv: 1.519 recon: 0.071 id: 0.069 time: 6:04:15.345366
Epoch 70/100 1000/2595 [D loss: 0.015 acc: 1.000][G loss: 2.208 adv: 1.372 recon: 0.071 id: 0.065 time: 6:06:26.313539
Epoch 70/100 2000/2595 [D loss: 0.031 acc: 0.999][G loss: 2.858 adv: 1.877 recon: 0.082 id: 0.083 time: 6:08:36.921700
Epoch 70/100 [D loss: 0.048 acc: 0.969][G loss: 2.303 adv: 1.454 recon: 0.071 id: 0.069 time: 6:09:54.931623
Epoch 71/100 1000/2595 [D loss: 0.118 acc: 0.898][G loss: 2.009 adv: 1.196 recon: 0.068 id: 0.066 time: 6:12:05.850059
Epoch 71/100 2000/2595 [D loss: 0.039 acc: 0.982][G loss: 2.464 adv: 1.439 recon: 0.085 id: 0.085 time: 6:14:16.705159
Epoch 71/100 [D loss: 0.048 acc: 0.968][G loss: 2.316 adv: 1.467 recon: 0.071 id: 0.069 time: 6:15:32.203887
Epoch 72/100 1000/2595 [D loss: 0.029 acc: 0.998][G loss: 1.948 adv: 1.143 recon: 0.068 id: 0.064 time: 6:17:38.083172
Epoch 72/100 2000/2595 [D loss: 0.020 acc: 1.000][G loss: 2.567 adv: 1.586 recon: 0.082 id: 0.082 time: 6:19:44.108238
Epoch 72/100 [D loss: 0.044 acc: 0.973][G loss: 2.358 adv: 1.513 recon: 0.071 id: 0.069 time: 6:20:59.256672
Epoch 73/100 1000/2595 [D loss: 0.016 acc: 0.999][G loss: 1.973 adv: 1.145 recon: 0.070 id: 0.066 time: 6:23:06.194228
Epoch 73/100 2000/2595 [D loss: 0.024 acc: 0.999][G loss: 2.474 adv: 1.334 recon: 0.095 id: 0.096 time: 6:25:13.395295
Epoch 73/100 [D loss: 0.039 acc: 0.979][G loss: 2.461 adv: 1.546 recon: 0.077 id: 0.075 time: 6:26:28.130140
Epoch 74/100 1000/2595 [D loss: 0.038 acc: 0.982][G loss: 1.887 adv: 1.056 recon: 0.070 id: 0.068 time: 6:28:34.180436
Epoch 74/100 2000/2595 [D loss: 0.084 acc: 0.996][G loss: 2.504 adv: 1.490 recon: 0.084 id: 0.087 time: 6:30:39.857284
Epoch 74/100 [D loss: 0.041 acc: 0.978][G loss: 2.407 adv: 1.526 recon: 0.074 id: 0.072 time: 6:31:54.859789
Epoch 75/100 1000/2595 [D loss: 0.025 acc: 0.999][G loss: 2.445 adv: 1.620 recon: 0.070 id: 0.065 time: 6:34:00.968684
Epoch 75/100 2000/2595 [D loss: 0.052 acc: 0.989][G loss: 2.560 adv: 1.580 recon: 0.081 id: 0.085 time: 6:36:06.954036
Epoch 75/100 [D loss: 0.046 acc: 0.970][G loss: 2.382 adv: 1.518 recon: 0.072 id: 0.070 time: 6:37:21.889048
Epoch 76/100 1000/2595 [D loss: 0.019 acc: 1.000][G loss: 1.726 adv: 0.927 recon: 0.067 id: 0.066 time: 6:39:27.905234
Epoch 76/100 2000/2595 [D loss: 0.038 acc: 0.991][G loss: 2.446 adv: 1.467 recon: 0.081 id: 0.083 time: 6:41:33.732043
Epoch 76/100 [D loss: 0.047 acc: 0.968][G loss: 2.361 adv: 1.508 recon: 0.071 id: 0.069 time: 6:42:48.500886
Epoch 77/100 1000/2595 [D loss: 0.041 acc: 0.998][G loss: 1.944 adv: 1.156 recon: 0.066 id: 0.066 time: 6:44:54.622539
Epoch 77/100 2000/2595 [D loss: 0.014 acc: 1.000][G loss: 2.412 adv: 1.434 recon: 0.081 id: 0.086 time: 6:47:00.783900
Epoch 77/100 [D loss: 0.046 acc: 0.969][G loss: 2.381 adv: 1.527 recon: 0.071 id: 0.070 time: 6:48:16.027652
Epoch 78/100 1000/2595 [D loss: 0.023 acc: 1.000][G loss: 2.255 adv: 1.465 recon: 0.066 id: 0.065 time: 6:50:21.707878
Epoch 78/100 2000/2595 [D loss: 0.026 acc: 1.000][G loss: 2.649 adv: 1.640 recon: 0.084 id: 0.085 time: 6:52:27.941017
Epoch 78/100 [D loss: 0.043 acc: 0.975][G loss: 2.395 adv: 1.527 recon: 0.073 id: 0.071 time: 6:53:42.948422
Epoch 79/100 1000/2595 [D loss: 0.024 acc: 1.000][G loss: 2.828 adv: 1.970 recon: 0.072 id: 0.069 time: 6:55:48.973489
Epoch 79/100 2000/2595 [D loss: 0.046 acc: 0.998][G loss: 2.459 adv: 1.478 recon: 0.082 id: 0.083 time: 6:57:55.126943
Epoch 79/100 [D loss: 0.045 acc: 0.971][G loss: 2.370 adv: 1.521 recon: 0.071 id: 0.069 time: 6:59:10.011177
Epoch 80/100 1000/2595 [D loss: 0.018 acc: 1.000][G loss: 2.351 adv: 1.556 recon: 0.066 id: 0.065 time: 7:01:15.942085
Epoch 80/100 2000/2595 [D loss: 0.090 acc: 1.000][G loss: 2.422 adv: 1.448 recon: 0.081 id: 0.083 time: 7:03:21.911580
Epoch 80/100 [D loss: 0.047 acc: 0.969][G loss: 2.330 adv: 1.492 recon: 0.070 id: 0.069 time: 7:04:36.756829
Epoch 81/100 1000/2595 [D loss: 0.019 acc: 1.000][G loss: 2.042 adv: 1.248 recon: 0.066 id: 0.065 time: 7:06:42.778701
Epoch 81/100 2000/2595 [D loss: 0.021 acc: 1.000][G loss: 2.603 adv: 1.587 recon: 0.084 id: 0.086 time: 7:08:48.668659
Epoch 81/100 [D loss: 0.046 acc: 0.970][G loss: 2.314 adv: 1.474 recon: 0.070 id: 0.069 time: 7:10:03.616205
Epoch 82/100 1000/2595 [D loss: 0.024 acc: 1.000][G loss: 1.872 adv: 1.083 recon: 0.066 id: 0.065 time: 7:12:09.781408
Epoch 82/100 2000/2595 [D loss: 0.010 acc: 1.000][G loss: 2.774 adv: 1.767 recon: 0.084 id: 0.084 time: 7:14:15.756441
Epoch 82/100 [D loss: 0.047 acc: 0.971][G loss: 2.327 adv: 1.474 recon: 0.071 id: 0.070 time: 7:15:30.599255
Epoch 83/100 1000/2595 [D loss: 0.076 acc: 0.970][G loss: 2.453 adv: 1.650 recon: 0.067 id: 0.064 time: 7:17:36.640171
Epoch 83/100 2000/2595 [D loss: 0.069 acc: 0.982][G loss: 2.796 adv: 1.798 recon: 0.083 id: 0.084 time: 7:19:42.598794
Epoch 83/100 [D loss: 0.043 acc: 0.974][G loss: 2.376 adv: 1.507 recon: 0.073 id: 0.071 time: 7:20:57.629536
Epoch 84/100 1000/2595 [D loss: 0.014 acc: 1.000][G loss: 2.000 adv: 1.206 recon: 0.066 id: 0.065 time: 7:23:03.572633
Epoch 84/100 2000/2595 [D loss: 0.012 acc: 0.999][G loss: 2.569 adv: 1.541 recon: 0.086 id: 0.086 time: 7:25:09.670789
Epoch 84/100 [D loss: 0.040 acc: 0.979][G loss: 2.413 adv: 1.542 recon: 0.073 id: 0.071 time: 7:26:24.644033
Epoch 85/100 1000/2595 [D loss: 0.044 acc: 0.985][G loss: 2.412 adv: 1.599 recon: 0.068 id: 0.065 time: 7:28:30.687103
Epoch 85/100 2000/2595 [D loss: 0.031 acc: 1.000][G loss: 2.326 adv: 1.313 recon: 0.085 id: 0.084 time: 7:30:36.713322
Epoch 85/100 [D loss: 0.041 acc: 0.978][G loss: 2.384 adv: 1.536 recon: 0.071 id: 0.070 time: 7:31:52.045968
Epoch 86/100 1000/2595 [D loss: 0.095 acc: 0.853][G loss: 1.643 adv: 0.841 recon: 0.067 id: 0.065 time: 7:33:58.491365
Epoch 86/100 2000/2595 [D loss: 0.034 acc: 1.000][G loss: 3.177 adv: 2.206 recon: 0.081 id: 0.082 time: 7:36:04.740721
Epoch 86/100 [D loss: 0.042 acc: 0.976][G loss: 2.399 adv: 1.557 recon: 0.070 id: 0.069 time: 7:37:19.737005
Epoch 87/100 1000/2595 [D loss: 0.020 acc: 1.000][G loss: 1.945 adv: 1.149 recon: 0.067 id: 0.064 time: 7:39:25.580969
Epoch 87/100 2000/2595 [D loss: 0.036 acc: 0.998][G loss: 2.418 adv: 1.439 recon: 0.081 id: 0.083 time: 7:41:31.790201
Epoch 87/100 [D loss: 0.043 acc: 0.976][G loss: 2.439 adv: 1.569 recon: 0.073 id: 0.071 time: 7:42:46.872887
Epoch 88/100 1000/2595 [D loss: 0.022 acc: 1.000][G loss: 2.442 adv: 1.591 recon: 0.071 id: 0.070 time: 7:44:52.989534
Epoch 88/100 2000/2595 [D loss: 0.038 acc: 0.999][G loss: 2.640 adv: 1.609 recon: 0.086 id: 0.087 time: 7:46:59.107604
Epoch 88/100 [D loss: 0.036 acc: 0.983][G loss: 2.554 adv: 1.620 recon: 0.078 id: 0.076 time: 7:48:13.925348
Epoch 89/100 1000/2595 [D loss: 0.044 acc: 0.974][G loss: 2.231 adv: 1.427 recon: 0.067 id: 0.066 time: 7:50:20.216596
Epoch 89/100 2000/2595 [D loss: 0.045 acc: 0.980][G loss: 2.503 adv: 1.493 recon: 0.084 id: 0.085 time: 7:52:26.316514
Epoch 89/100 [D loss: 0.041 acc: 0.978][G loss: 2.438 adv: 1.559 recon: 0.074 id: 0.072 time: 7:53:41.467869
Epoch 90/100 1000/2595 [D loss: 0.032 acc: 1.000][G loss: 1.911 adv: 1.129 recon: 0.065 id: 0.065 time: 7:55:47.432220
Epoch 90/100 2000/2595 [D loss: 0.015 acc: 1.000][G loss: 2.668 adv: 1.550 recon: 0.093 id: 0.096 time: 7:57:53.593828
Epoch 90/100 [D loss: 0.039 acc: 0.982][G loss: 2.486 adv: 1.566 recon: 0.077 id: 0.075 time: 7:59:08.682223
Epoch 91/100 1000/2595 [D loss: 0.075 acc: 0.921][G loss: 1.688 adv: 0.846 recon: 0.071 id: 0.067 time: 8:01:14.551279
Epoch 91/100 2000/2595 [D loss: 0.023 acc: 1.000][G loss: 2.890 adv: 1.874 recon: 0.084 id: 0.088 time: 8:03:20.463020
Epoch 91/100 [D loss: 0.044 acc: 0.976][G loss: 2.373 adv: 1.499 recon: 0.073 id: 0.071 time: 8:04:35.565477
Epoch 92/100 1000/2595 [D loss: 0.212 acc: 0.622][G loss: 1.677 adv: 0.880 recon: 0.067 id: 0.066 time: 8:06:41.739667
Epoch 92/100 2000/2595 [D loss: 0.020 acc: 1.000][G loss: 2.152 adv: 1.162 recon: 0.082 id: 0.084 time: 8:08:50.070288
Epoch 92/100 [D loss: 0.046 acc: 0.974][G loss: 2.382 adv: 1.520 recon: 0.072 id: 0.070 time: 8:10:09.950060
Epoch 93/100 1000/2595 [D loss: 0.047 acc: 1.000][G loss: 1.774 adv: 0.949 recon: 0.069 id: 0.067 time: 8:12:23.229219
Epoch 93/100 2000/2595 [D loss: 0.060 acc: 0.964][G loss: 2.462 adv: 1.469 recon: 0.082 id: 0.085 time: 8:14:36.112687
Epoch 93/100 [D loss: 0.050 acc: 0.966][G loss: 2.362 adv: 1.505 recon: 0.072 id: 0.070 time: 8:15:51.148497
Epoch 94/100 1000/2595 [D loss: 0.016 acc: 1.000][G loss: 1.795 adv: 0.982 recon: 0.068 id: 0.067 time: 8:17:57.425299
Epoch 94/100 2000/2595 [D loss: 0.107 acc: 0.852][G loss: 2.635 adv: 1.661 recon: 0.081 id: 0.083 time: 8:20:03.422247
Epoch 94/100 [D loss: 0.048 acc: 0.970][G loss: 2.393 adv: 1.532 recon: 0.072 id: 0.070 time: 8:21:18.605721
Epoch 95/100 1000/2595 [D loss: 0.024 acc: 1.000][G loss: 2.598 adv: 1.769 recon: 0.070 id: 0.066 time: 8:23:25.061468
Epoch 95/100 2000/2595 [D loss: 0.038 acc: 0.996][G loss: 2.570 adv: 1.583 recon: 0.082 id: 0.086 time: 8:25:31.375608
Epoch 95/100 [D loss: 0.039 acc: 0.979][G loss: 2.452 adv: 1.567 recon: 0.074 id: 0.073 time: 8:26:46.475000
Epoch 96/100 1000/2595 [D loss: 0.012 acc: 1.000][G loss: 2.148 adv: 1.336 recon: 0.068 id: 0.066 time: 8:28:52.656914
Epoch 96/100 2000/2595 [D loss: 0.066 acc: 0.991][G loss: 2.895 adv: 1.823 recon: 0.090 id: 0.087 time: 8:30:59.032808
Epoch 96/100 [D loss: 0.045 acc: 0.972][G loss: 2.374 adv: 1.526 recon: 0.071 id: 0.070 time: 8:32:14.550867
Epoch 97/100 1000/2595 [D loss: 0.047 acc: 0.987][G loss: 2.640 adv: 1.809 recon: 0.069 id: 0.068 time: 8:34:20.975896
Epoch 97/100 2000/2595 [D loss: 0.060 acc: 0.968][G loss: 2.446 adv: 1.471 recon: 0.081 id: 0.083 time: 8:36:27.107720
Epoch 97/100 [D loss: 0.048 acc: 0.967][G loss: 2.315 adv: 1.478 recon: 0.070 id: 0.069 time: 8:37:42.322299
Epoch 98/100 1000/2595 [D loss: 0.099 acc: 0.878][G loss: 1.973 adv: 1.202 recon: 0.064 id: 0.064 time: 8:39:48.451174
Epoch 98/100 2000/2595 [D loss: 0.036 acc: 0.991][G loss: 2.331 adv: 1.277 recon: 0.088 id: 0.088 time: 8:41:54.263399
Epoch 98/100 [D loss: 0.045 acc: 0.970][G loss: 2.386 adv: 1.534 recon: 0.071 id: 0.070 time: 8:43:09.398922
Epoch 99/100 1000/2595 [D loss: 0.158 acc: 0.752][G loss: 2.230 adv: 1.459 recon: 0.064 id: 0.064 time: 8:45:15.342800
Epoch 99/100 2000/2595 [D loss: 0.023 acc: 1.000][G loss: 2.488 adv: 1.477 recon: 0.084 id: 0.084 time: 8:47:21.534950
Epoch 99/100 [D loss: 0.047 acc: 0.969][G loss: 2.353 adv: 1.516 recon: 0.070 id: 0.069 time: 8:48:36.593430
Epoch 100/100 1000/2595 [D loss: 0.012 acc: 1.000][G loss: 2.265 adv: 1.490 recon: 0.065 id: 0.063 time: 8:50:42.707034
Epoch 100/100 2000/2595 [D loss: 0.019 acc: 1.000][G loss: 2.582 adv: 1.616 recon: 0.080 id: 0.082 time: 8:52:49.130128
Epoch 100/100 [D loss: 0.047 acc: 0.969][G loss: 2.346 adv: 1.514 recon: 0.070 id: 0.068 time: 8:54:04.198194

画像を生成する

In [15]:
# Display images
# 画像を表示する。
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np

def showImages(imgs, rows=-1, cols=-1, w=2, h=2):
    """Show *imgs* in a rows x cols grid; grid cells beyond len(imgs) stay blank.

    Negative rows/cols mean "auto": rows defaults to 1, cols to the count of
    images divided (ceiling) by rows.  Each cell is w x h inches.
    """
    count = len(imgs)
    if rows < 0:
        rows = 1
    if cols < 0:
        cols = (count + rows - 1) // rows
    # squeeze=False keeps `ax` a 2-D array even for a single row/column,
    # so one indexing form covers every grid shape.
    fig, ax = plt.subplots(rows, cols, figsize=(w * cols, h * rows), squeeze=False)
    for pos in range(rows * cols):
        axis = ax[pos // cols][pos % cols]
        if pos < count:
            axis.imshow(imgs[pos])
        axis.axis('off')
    plt.show()
In [16]:
# Display generated and cycle images for a few held-out test pairs.

# Take the first five aligned (A, B) path pairs from the test loader.
test_pairs = test_pair_flow[:5]

# Column 0 of each pair is the domain-A frame, column 1 the domain-B frame.
test_imgsA = test_pairs[:,0]
test_imgsB = test_pairs[:,1]

# NOTE(review): judging by the row labels printed below, each generate call
# appears to return input / translated / cycled / identity images per sample —
# confirm against CycleGAN.generate_image_from_A/B.
imgsAB = gan.generate_image_from_A(test_imgsA)
imgsBA = gan.generate_image_from_B(test_imgsB)

print('A-->B-->A, ID')
showImages(M1P1_ZeroP1(imgsAB), 4)

print('B-->A-->B, ID')
showImages(M1P1_ZeroP1(imgsBA), 4)
A-->B-->A, ID
B-->A-->B, ID