使用 PyTorch 实现变分自编码器（VAE）

可以结合这篇 VAE 讲解文章来阅读本文中的代码。

# Import necessary packages.
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
from torchvision import transforms
from torchvision.utils import save_image

# Device configuration: use the GPU when available, otherwise fall back to CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
if torch.cuda.is_available():
    # Only query the GPU name when CUDA is actually present; calling
    # get_device_name() on a CPU-only machine raises a RuntimeError.
    print(torch.cuda.get_device_name())

# Create the output directory for generated images if it does not exist.
# exist_ok=True makes this race-free and idiomatic.
sample_dir = 'samples'
os.makedirs(sample_dir, exist_ok=True)

# Hyper-parameters.
image_size = 784   # 28 * 28 flattened MNIST image
h_dim = 400        # hidden-layer width
z_dim = 20         # latent-space dimensionality
num_epochs = 15
batch_size = 128
learn_rate = 1e-3

# Load the MNIST training set (downloaded on first run).
dataset = torchvision.datasets.MNIST(root='../../data/',
                                     train=True,
                                     transform=transforms.ToTensor(),
                                     download=True)

# Define the Data Loader.
data_loader = torch.utils.data.DataLoader(dataset=dataset,
                                          batch_size=batch_size,
                                          shuffle=True)
# Devine the VAE model.
# Define the VAE model.
class VAE(nn.Module):
    """Variational Autoencoder with fully-connected encoder and decoder.

    Maps a flattened image of `image_size` pixels to a diagonal Gaussian
    posterior over a `z_dim`-dimensional latent space and decodes samples
    back to pixel probabilities in [0, 1].
    """

    def __init__(self, image_size=784, h_dim=400, z_dim=20):
        super(VAE, self).__init__()
        self.fc1 = nn.Linear(image_size, h_dim)  # encoder hidden layer
        self.fc2 = nn.Linear(h_dim, z_dim)       # posterior mean head
        self.fc3 = nn.Linear(h_dim, z_dim)       # posterior log-variance head
        self.fc4 = nn.Linear(z_dim, h_dim)       # decoder hidden layer
        self.fc5 = nn.Linear(h_dim, image_size)  # reconstruction head

    def encode(self, x):
        """Encode `x` into posterior parameters (mu, log_var)."""
        h = F.relu(self.fc1(x))
        return self.fc2(h), self.fc3(h)

    def reparameterize(self, mu, log_var):
        """Reparameterization trick: z = mu + std * eps with eps ~ N(0, I).

        Sampling this way keeps the graph differentiable w.r.t. mu/log_var.
        """
        std = torch.exp(log_var / 2)
        eps = torch.randn_like(std)
        return mu + eps * std

    def decode(self, z):
        """Decode latent `z` into per-pixel Bernoulli probabilities."""
        h = F.relu(self.fc4(z))
        # torch.sigmoid replaces the deprecated F.sigmoid.
        return torch.sigmoid(self.fc5(h))

    def forward(self, x):
        """Return (x_reconst, mu, log_var) for input batch `x`."""
        mu, log_var = self.encode(x)
        z = self.reparameterize(mu, log_var)
        x_reconst = self.decode(z)
        return x_reconst, mu, log_var

model = VAE().to(device)
# Adam optimizer over all VAE parameters.
optimizer = torch.optim.Adam(model.parameters(), lr=learn_rate)

# Start training.
for epoch in range(num_epochs):
    for i, (x, _) in enumerate(data_loader):
        # Flatten images to (batch, image_size) and move to the training device.
        x = x.to(device).view(-1, image_size)
        x_reconst, mu, log_var = model(x)

        # Reconstruction loss (summed over batch and pixels) + KL divergence.
        # For the closed-form KL term, see Appendix B in the VAE paper
        # (Kingma & Welling, 2013) or http://yunjey47.tistory.com/43
        # reduction='sum' replaces the deprecated size_average=False argument
        # (they are equivalent; the old spelling emits a UserWarning).
        reconst_loss = F.binary_cross_entropy(x_reconst, x, reduction='sum')
        kl_div = - 0.5 * torch.sum(1 + log_var - mu.pow(2) - log_var.exp())

        # Backprop and optimize.
        loss = reconst_loss + kl_div
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # Log training progress every 10 steps.
        if (i+1) % 10 == 0:
            print('Epoch[{}/{}], Step [{}/{}], Reconst Loss: {:.4f}, KL Div: {:.4f}'
                  .format(epoch+1, num_epochs, i+1, len(data_loader), reconst_loss.item(), kl_div.item()))

    with torch.no_grad():
        # Save images decoded from prior samples z ~ N(0, I).
        z = torch.randn(batch_size, z_dim).to(device)
        out = model.decode(z).view(-1, 1, 28, 28)
        save_image(out, os.path.join(sample_dir, 'sample-{}.png'.format(epoch+1)))

        # Save original/reconstruction pairs side by side (concatenated on width)
        # for the last batch of this epoch.
        out, _, _ = model(x)
        x_reconst = torch.cat([x.view(-1, 1, 28, 28), out.view(-1, 1, 28, 28)], dim=3)
        save_image(x_reconst, os.path.join(sample_dir, 'reconst-{}.png'.format(epoch+1)))
/home/wsl_ubuntu/anaconda3/envs/xy_trans/lib/python3.8/site-packages/torch/nn/_reduction.py:42: UserWarning: size_average and reduce args will be deprecated, please use reduction='sum' instead.
  warnings.warn(warning.format(ret))


Epoch[1/15], Step [10/469], Reconst Loss: 36616.9141, KL Div: 3740.0823
Epoch[1/15], Step [20/469], Reconst Loss: 29773.6797, KL Div: 975.3514
Epoch[1/15], Step [30/469], Reconst Loss: 27105.4824, KL Div: 1213.4338
Epoch[1/15], Step [40/469], Reconst Loss: 26462.3633, KL Div: 635.6080
Epoch[1/15], Step [50/469], Reconst Loss: 25826.6094, KL Div: 663.0458
Epoch[1/15], Step [60/469], Reconst Loss: 25826.2461, KL Div: 789.0715
Epoch[1/15], Step [70/469], Reconst Loss: 24164.5078, KL Div: 921.9595
Epoch[1/15], Step [80/469], Reconst Loss: 24587.4492, KL Div: 1041.5199
Epoch[1/15], Step [90/469], Reconst Loss: 22938.7578, KL Div: 1195.8701
Epoch[1/15], Step [100/469], Reconst Loss: 22055.6328, KL Div: 1312.9821
Epoch[1/15], Step [110/469], Reconst Loss: 21499.7246, KL Div: 1558.9216
Epoch[1/15], Step [120/469], Reconst Loss: 19639.1953, KL Div: 1837.0837
Epoch[1/15], Step [130/469], Reconst Loss: 19362.3203, KL Div: 1719.7855
Epoch[1/15], Step [140/469], Reconst Loss: 18975.9766, KL Div: 1942.4675
Epoch[1/15], Step [150/469], Reconst Loss: 18912.7930, KL Div: 1858.0500
Epoch[1/15], Step [160/469], Reconst Loss: 18639.9473, KL Div: 1861.7341
Epoch[1/15], Step [170/469], Reconst Loss: 18130.4434, KL Div: 1787.8860
Epoch[1/15], Step [180/469], Reconst Loss: 18130.5273, KL Div: 2044.2058
Epoch[1/15], Step [190/469], Reconst Loss: 17782.9336, KL Div: 1964.3074
Epoch[1/15], Step [200/469], Reconst Loss: 17909.9258, KL Div: 1933.2725
Epoch[1/15], Step [210/469], Reconst Loss: 16536.2812, KL Div: 2063.9639
Epoch[1/15], Step [220/469], Reconst Loss: 17814.0332, KL Div: 2057.8086
Epoch[1/15], Step [230/469], Reconst Loss: 17407.0820, KL Div: 2108.3965
Epoch[1/15], Step [240/469], Reconst Loss: 16596.4531, KL Div: 2106.2700
Epoch[1/15], Step [250/469], Reconst Loss: 16938.6289, KL Div: 2231.6538
Epoch[1/15], Step [260/469], Reconst Loss: 16497.2539, KL Div: 2105.1924
Epoch[1/15], Step [270/469], Reconst Loss: 15581.2100, KL Div: 2217.1523
Epoch[1/15], Step [280/469], Reconst Loss: 16118.3672, KL Div: 2281.6221
Epoch[1/15], Step [290/469], Reconst Loss: 15486.8379, KL Div: 2341.7874
Epoch[1/15], Step [300/469], Reconst Loss: 16615.8828, KL Div: 2368.1182
Epoch[1/15], Step [310/469], Reconst Loss: 15342.2422, KL Div: 2366.4150
Epoch[1/15], Step [320/469], Reconst Loss: 15661.7598, KL Div: 2309.4153
Epoch[1/15], Step [330/469], Reconst Loss: 15456.1113, KL Div: 2324.4585
Epoch[1/15], Step [340/469], Reconst Loss: 15182.8945, KL Div: 2390.0042
Epoch[1/15], Step [350/469], Reconst Loss: 15453.9336, KL Div: 2552.0754
Epoch[1/15], Step [360/469], Reconst Loss: 15737.4033, KL Div: 2574.0132
Epoch[1/15], Step [370/469], Reconst Loss: 15173.9482, KL Div: 2714.6523
Epoch[1/15], Step [380/469], Reconst Loss: 14593.1543, KL Div: 2465.7563
Epoch[1/15], Step [390/469], Reconst Loss: 14496.3486, KL Div: 2471.1484
Epoch[1/15], Step [400/469], Reconst Loss: 15301.4873, KL Div: 2655.8091
Epoch[1/15], Step [410/469], Reconst Loss: 14106.9092, KL Div: 2595.3081
Epoch[1/15], Step [420/469], Reconst Loss: 13720.7979, KL Div: 2626.6914
Epoch[1/15], Step [430/469], Reconst Loss: 14111.7471, KL Div: 2679.8662
Epoch[1/15], Step [440/469], Reconst Loss: 14438.3672, KL Div: 2592.5044
Epoch[1/15], Step [450/469], Reconst Loss: 13193.9785, KL Div: 2707.9358
Epoch[1/15], Step [460/469], Reconst Loss: 14041.0977, KL Div: 2559.0266
Epoch[2/15], Step [10/469], Reconst Loss: 13620.3125, KL Div: 2716.8257
Epoch[2/15], Step [20/469], Reconst Loss: 13833.0693, KL Div: 2752.4922
Epoch[2/15], Step [30/469], Reconst Loss: 13145.0605, KL Div: 2830.2148
Epoch[2/15], Step [40/469], Reconst Loss: 13201.6924, KL Div: 2690.5637
Epoch[2/15], Step [50/469], Reconst Loss: 13244.9805, KL Div: 2750.5166
Epoch[2/15], Step [60/469], Reconst Loss: 13621.4131, KL Div: 2782.6863
Epoch[2/15], Step [70/469], Reconst Loss: 13467.2559, KL Div: 2725.6943
Epoch[2/15], Step [80/469], Reconst Loss: 13888.3320, KL Div: 2820.3740
Epoch[2/15], Step [90/469], Reconst Loss: 13611.9551, KL Div: 2758.0264
Epoch[2/15], Step [100/469], Reconst Loss: 13340.2197, KL Div: 2882.6914
Epoch[2/15], Step [110/469], Reconst Loss: 13055.8193, KL Div: 2964.9287
Epoch[2/15], Step [120/469], Reconst Loss: 13338.4805, KL Div: 2900.1157
Epoch[2/15], Step [130/469], Reconst Loss: 12345.2256, KL Div: 2874.3140
Epoch[2/15], Step [140/469], Reconst Loss: 13200.5859, KL Div: 2832.7090
Epoch[2/15], Step [150/469], Reconst Loss: 12849.0508, KL Div: 2767.7114
Epoch[2/15], Step [160/469], Reconst Loss: 13169.3955, KL Div: 2982.5615
Epoch[2/15], Step [170/469], Reconst Loss: 13079.1592, KL Div: 2904.1270
Epoch[2/15], Step [180/469], Reconst Loss: 12614.8232, KL Div: 2821.8394
Epoch[2/15], Step [190/469], Reconst Loss: 12650.0430, KL Div: 2924.8147
Epoch[2/15], Step [200/469], Reconst Loss: 12591.2227, KL Div: 2943.8188
Epoch[2/15], Step [210/469], Reconst Loss: 12165.0840, KL Div: 2887.9551
Epoch[2/15], Step [220/469], Reconst Loss: 12595.1963, KL Div: 2930.0762
Epoch[2/15], Step [230/469], Reconst Loss: 12445.6689, KL Div: 2921.4336
Epoch[2/15], Step [240/469], Reconst Loss: 12692.6660, KL Div: 2989.3516
Epoch[2/15], Step [250/469], Reconst Loss: 12472.9375, KL Div: 3079.8313
Epoch[2/15], Step [260/469], Reconst Loss: 12889.5625, KL Div: 2909.4849
Epoch[2/15], Step [270/469], Reconst Loss: 12022.7754, KL Div: 2885.7964
Epoch[2/15], Step [280/469], Reconst Loss: 12365.2314, KL Div: 2887.3723
Epoch[2/15], Step [290/469], Reconst Loss: 12088.4805, KL Div: 2919.9077
Epoch[2/15], Step [300/469], Reconst Loss: 11855.0879, KL Div: 3038.1257
Epoch[2/15], Step [310/469], Reconst Loss: 12560.5947, KL Div: 2883.4067
Epoch[2/15], Step [320/469], Reconst Loss: 11956.1133, KL Div: 2991.7896
Epoch[2/15], Step [330/469], Reconst Loss: 12317.2031, KL Div: 2924.3845
Epoch[2/15], Step [340/469], Reconst Loss: 12212.5273, KL Div: 3035.3462
Epoch[2/15], Step [350/469], Reconst Loss: 12334.0469, KL Div: 3019.7427
Epoch[2/15], Step [360/469], Reconst Loss: 12050.0527, KL Div: 3079.3311
Epoch[2/15], Step [370/469], Reconst Loss: 11681.4121, KL Div: 2893.3408
Epoch[2/15], Step [380/469], Reconst Loss: 13003.4150, KL Div: 3099.3091
Epoch[2/15], Step [390/469], Reconst Loss: 11820.4883, KL Div: 2967.7417
Epoch[2/15], Step [400/469], Reconst Loss: 12099.2207, KL Div: 3034.6660
Epoch[2/15], Step [410/469], Reconst Loss: 12138.3145, KL Div: 3041.2932
Epoch[2/15], Step [420/469], Reconst Loss: 12329.2314, KL Div: 3047.4082
Epoch[2/15], Step [430/469], Reconst Loss: 11969.9053, KL Div: 2886.3508
Epoch[2/15], Step [440/469], Reconst Loss: 12390.2568, KL Div: 3109.6245
Epoch[2/15], Step [450/469], Reconst Loss: 12120.2139, KL Div: 2969.8428
Epoch[2/15], Step [460/469], Reconst Loss: 11519.6641, KL Div: 3061.2007
Epoch[3/15], Step [10/469], Reconst Loss: 11936.5957, KL Div: 2964.1772
Epoch[3/15], Step [20/469], Reconst Loss: 11800.0039, KL Div: 2978.1863
Epoch[3/15], Step [30/469], Reconst Loss: 11555.3711, KL Div: 2970.8296
Epoch[3/15], Step [40/469], Reconst Loss: 11624.9033, KL Div: 3148.7576
Epoch[3/15], Step [50/469], Reconst Loss: 11968.5918, KL Div: 3108.2646
Epoch[3/15], Step [60/469], Reconst Loss: 12142.9434, KL Div: 2936.3755
Epoch[3/15], Step [70/469], Reconst Loss: 11720.2529, KL Div: 3075.7925
Epoch[3/15], Step [80/469], Reconst Loss: 12009.3633, KL Div: 3025.7983
Epoch[3/15], Step [90/469], Reconst Loss: 11264.7637, KL Div: 2961.6201
Epoch[3/15], Step [100/469], Reconst Loss: 11657.8027, KL Div: 3210.4092
Epoch[3/15], Step [110/469], Reconst Loss: 11906.7383, KL Div: 2999.9087
Epoch[3/15], Step [120/469], Reconst Loss: 11462.5273, KL Div: 3038.7141
Epoch[3/15], Step [130/469], Reconst Loss: 11444.2969, KL Div: 3084.1289
Epoch[3/15], Step [140/469], Reconst Loss: 11838.1748, KL Div: 3135.4673
Epoch[3/15], Step [150/469], Reconst Loss: 11309.9902, KL Div: 3102.3538
Epoch[3/15], Step [160/469], Reconst Loss: 11665.4375, KL Div: 3129.4873
Epoch[3/15], Step [170/469], Reconst Loss: 11889.5469, KL Div: 3134.8652
Epoch[3/15], Step [180/469], Reconst Loss: 11875.4521, KL Div: 3102.3916
Epoch[3/15], Step [190/469], Reconst Loss: 11439.8613, KL Div: 3043.8538
Epoch[3/15], Step [200/469], Reconst Loss: 10776.7715, KL Div: 3075.7236
Epoch[3/15], Step [210/469], Reconst Loss: 11236.3750, KL Div: 3091.6584
Epoch[3/15], Step [220/469], Reconst Loss: 11596.7461, KL Div: 3196.4287
Epoch[3/15], Step [230/469], Reconst Loss: 12306.6367, KL Div: 3172.8916
Epoch[3/15], Step [240/469], Reconst Loss: 11366.2051, KL Div: 3156.8931
Epoch[3/15], Step [250/469], Reconst Loss: 11073.8887, KL Div: 3051.8367
Epoch[3/15], Step [260/469], Reconst Loss: 11458.4199, KL Div: 3104.0569
Epoch[3/15], Step [270/469], Reconst Loss: 11336.3613, KL Div: 3068.1055
Epoch[3/15], Step [280/469], Reconst Loss: 11858.4570, KL Div: 3093.3188
Epoch[3/15], Step [290/469], Reconst Loss: 11563.0645, KL Div: 3206.7769
Epoch[3/15], Step [300/469], Reconst Loss: 11524.7051, KL Div: 3143.6553
Epoch[3/15], Step [310/469], Reconst Loss: 11095.1602, KL Div: 3078.8447
Epoch[3/15], Step [320/469], Reconst Loss: 11435.6738, KL Div: 3125.8306
Epoch[3/15], Step [330/469], Reconst Loss: 11313.6299, KL Div: 3047.7759
Epoch[3/15], Step [340/469], Reconst Loss: 11694.4961, KL Div: 3085.8320
Epoch[3/15], Step [350/469], Reconst Loss: 10993.4961, KL Div: 3107.8262
Epoch[3/15], Step [360/469], Reconst Loss: 11378.9541, KL Div: 3155.8989
Epoch[3/15], Step [370/469], Reconst Loss: 10975.0156, KL Div: 3118.0667
Epoch[3/15], Step [380/469], Reconst Loss: 11323.3516, KL Div: 3320.9897
Epoch[3/15], Step [390/469], Reconst Loss: 11334.2871, KL Div: 3027.3093
Epoch[3/15], Step [400/469], Reconst Loss: 11099.5430, KL Div: 3088.1921
Epoch[3/15], Step [410/469], Reconst Loss: 11220.4287, KL Div: 3087.4292
Epoch[3/15], Step [420/469], Reconst Loss: 10945.1504, KL Div: 3013.8545
Epoch[3/15], Step [430/469], Reconst Loss: 11275.8730, KL Div: 3135.9106
Epoch[3/15], Step [440/469], Reconst Loss: 11453.5879, KL Div: 3100.9600
Epoch[3/15], Step [450/469], Reconst Loss: 11319.0762, KL Div: 3123.6206
Epoch[3/15], Step [460/469], Reconst Loss: 11407.2090, KL Div: 3193.6418
Epoch[4/15], Step [10/469], Reconst Loss: 11718.5234, KL Div: 3280.7061
Epoch[4/15], Step [20/469], Reconst Loss: 11384.2910, KL Div: 3232.1904
Epoch[4/15], Step [30/469], Reconst Loss: 11241.3145, KL Div: 3071.8535
Epoch[4/15], Step [40/469], Reconst Loss: 11590.3398, KL Div: 3278.6108
Epoch[4/15], Step [50/469], Reconst Loss: 10744.5977, KL Div: 3076.8062
Epoch[4/15], Step [60/469], Reconst Loss: 11124.6836, KL Div: 3106.5952
Epoch[4/15], Step [70/469], Reconst Loss: 10991.4355, KL Div: 3190.8896
Epoch[4/15], Step [80/469], Reconst Loss: 11381.7734, KL Div: 3210.6592
Epoch[4/15], Step [90/469], Reconst Loss: 11081.2568, KL Div: 3187.8215
Epoch[4/15], Step [100/469], Reconst Loss: 11215.9980, KL Div: 3133.1924
Epoch[4/15], Step [110/469], Reconst Loss: 11682.7969, KL Div: 3114.1912
Epoch[4/15], Step [120/469], Reconst Loss: 11027.0684, KL Div: 3305.7222
Epoch[4/15], Step [130/469], Reconst Loss: 11924.5801, KL Div: 3055.8384
Epoch[4/15], Step [140/469], Reconst Loss: 11066.5186, KL Div: 3323.8628
Epoch[4/15], Step [150/469], Reconst Loss: 11265.1006, KL Div: 3147.9014
Epoch[4/15], Step [160/469], Reconst Loss: 11057.5137, KL Div: 3200.9863
Epoch[4/15], Step [170/469], Reconst Loss: 11322.3828, KL Div: 3177.7881
Epoch[4/15], Step [180/469], Reconst Loss: 11383.3877, KL Div: 3196.4248
Epoch[4/15], Step [190/469], Reconst Loss: 11155.8496, KL Div: 3159.9341
Epoch[4/15], Step [200/469], Reconst Loss: 11340.9756, KL Div: 3213.3047
Epoch[4/15], Step [210/469], Reconst Loss: 11284.0410, KL Div: 3209.7910
Epoch[4/15], Step [220/469], Reconst Loss: 11212.4189, KL Div: 3142.0354
Epoch[4/15], Step [230/469], Reconst Loss: 11293.5273, KL Div: 3137.4514
Epoch[4/15], Step [240/469], Reconst Loss: 11118.7383, KL Div: 3134.6753
Epoch[4/15], Step [250/469], Reconst Loss: 11266.2871, KL Div: 3206.3857
Epoch[4/15], Step [260/469], Reconst Loss: 11535.9268, KL Div: 3239.4302
Epoch[4/15], Step [270/469], Reconst Loss: 11310.2090, KL Div: 3230.6040
Epoch[4/15], Step [280/469], Reconst Loss: 11058.5039, KL Div: 3128.0918
Epoch[4/15], Step [290/469], Reconst Loss: 11320.7031, KL Div: 3206.9297
Epoch[4/15], Step [300/469], Reconst Loss: 11003.5762, KL Div: 3232.1726
Epoch[4/15], Step [310/469], Reconst Loss: 10903.9902, KL Div: 3178.8320
Epoch[4/15], Step [320/469], Reconst Loss: 11044.6514, KL Div: 3210.5181
Epoch[4/15], Step [330/469], Reconst Loss: 11566.7451, KL Div: 3146.3750
Epoch[4/15], Step [340/469], Reconst Loss: 11291.4971, KL Div: 3237.8911
Epoch[4/15], Step [350/469], Reconst Loss: 11393.1807, KL Div: 3211.4863
Epoch[4/15], Step [360/469], Reconst Loss: 11014.6289, KL Div: 3185.3413
Epoch[4/15], Step [370/469], Reconst Loss: 11069.9414, KL Div: 3051.4692
Epoch[4/15], Step [380/469], Reconst Loss: 11403.6025, KL Div: 3289.3984
Epoch[4/15], Step [390/469], Reconst Loss: 11195.8242, KL Div: 3116.9404
Epoch[4/15], Step [400/469], Reconst Loss: 11109.6582, KL Div: 3200.8376
Epoch[4/15], Step [410/469], Reconst Loss: 11192.2852, KL Div: 3221.7708
Epoch[4/15], Step [420/469], Reconst Loss: 11117.5352, KL Div: 3156.1177
Epoch[4/15], Step [430/469], Reconst Loss: 10460.2891, KL Div: 3070.9087
Epoch[4/15], Step [440/469], Reconst Loss: 10897.3438, KL Div: 3154.3621
Epoch[4/15], Step [450/469], Reconst Loss: 10343.1992, KL Div: 3069.1011
Epoch[4/15], Step [460/469], Reconst Loss: 10866.3574, KL Div: 3209.5576
Epoch[5/15], Step [10/469], Reconst Loss: 11480.0293, KL Div: 3253.2832
Epoch[5/15], Step [20/469], Reconst Loss: 10760.3984, KL Div: 3284.6387
Epoch[5/15], Step [30/469], Reconst Loss: 10960.1895, KL Div: 3139.7441
Epoch[5/15], Step [40/469], Reconst Loss: 11209.6641, KL Div: 3130.0898
Epoch[5/15], Step [50/469], Reconst Loss: 11632.1553, KL Div: 3199.1096
Epoch[5/15], Step [60/469], Reconst Loss: 10749.2109, KL Div: 3290.9502
Epoch[5/15], Step [70/469], Reconst Loss: 11001.4668, KL Div: 3180.0503
Epoch[5/15], Step [80/469], Reconst Loss: 11122.5449, KL Div: 3209.7297
Epoch[5/15], Step [90/469], Reconst Loss: 11061.4551, KL Div: 3175.9792
Epoch[5/15], Step [100/469], Reconst Loss: 10843.1787, KL Div: 3233.2974
Epoch[5/15], Step [110/469], Reconst Loss: 10777.2500, KL Div: 3237.2104
Epoch[5/15], Step [120/469], Reconst Loss: 11261.6592, KL Div: 3149.5342
Epoch[5/15], Step [130/469], Reconst Loss: 11682.9150, KL Div: 3226.8076
Epoch[5/15], Step [140/469], Reconst Loss: 10805.0527, KL Div: 3311.6199
Epoch[5/15], Step [150/469], Reconst Loss: 11067.4648, KL Div: 3264.8203
Epoch[5/15], Step [160/469], Reconst Loss: 11352.6855, KL Div: 3211.7300
Epoch[5/15], Step [170/469], Reconst Loss: 11213.7803, KL Div: 3198.3235
Epoch[5/15], Step [180/469], Reconst Loss: 10904.0723, KL Div: 3136.8364
Epoch[5/15], Step [190/469], Reconst Loss: 11041.4141, KL Div: 3146.3555
Epoch[5/15], Step [200/469], Reconst Loss: 11130.6670, KL Div: 3171.1870
Epoch[5/15], Step [210/469], Reconst Loss: 10444.7500, KL Div: 3255.3108
Epoch[5/15], Step [220/469], Reconst Loss: 11035.2383, KL Div: 3164.2109
Epoch[5/15], Step [230/469], Reconst Loss: 10684.4805, KL Div: 3097.4158
Epoch[5/15], Step [240/469], Reconst Loss: 10984.2705, KL Div: 3253.7698
Epoch[5/15], Step [250/469], Reconst Loss: 10203.0811, KL Div: 3159.4170
Epoch[5/15], Step [260/469], Reconst Loss: 10921.9287, KL Div: 3142.5562
Epoch[5/15], Step [270/469], Reconst Loss: 10854.9141, KL Div: 3187.5186
Epoch[5/15], Step [280/469], Reconst Loss: 11198.7490, KL Div: 3142.6655
Epoch[5/15], Step [290/469], Reconst Loss: 10405.9004, KL Div: 3163.6431
Epoch[5/15], Step [300/469], Reconst Loss: 10949.9443, KL Div: 3213.2842
Epoch[5/15], Step [310/469], Reconst Loss: 10765.2363, KL Div: 3084.9729
Epoch[5/15], Step [320/469], Reconst Loss: 11036.6035, KL Div: 3230.5505
Epoch[5/15], Step [330/469], Reconst Loss: 10910.4170, KL Div: 3276.6008
Epoch[5/15], Step [340/469], Reconst Loss: 11100.9385, KL Div: 3233.5229
Epoch[5/15], Step [350/469], Reconst Loss: 10757.4463, KL Div: 3188.2432
Epoch[5/15], Step [360/469], Reconst Loss: 10551.0918, KL Div: 3201.9780
Epoch[5/15], Step [370/469], Reconst Loss: 10979.7646, KL Div: 3148.2729
Epoch[5/15], Step [380/469], Reconst Loss: 10873.8457, KL Div: 3190.8938
Epoch[5/15], Step [390/469], Reconst Loss: 10719.6416, KL Div: 3152.4390
Epoch[5/15], Step [400/469], Reconst Loss: 10831.0283, KL Div: 3280.8018
Epoch[5/15], Step [410/469], Reconst Loss: 10744.4414, KL Div: 3183.4048
Epoch[5/15], Step [420/469], Reconst Loss: 10661.5996, KL Div: 3195.2507
Epoch[5/15], Step [430/469], Reconst Loss: 10606.8096, KL Div: 3155.7129
Epoch[5/15], Step [440/469], Reconst Loss: 10266.9805, KL Div: 3145.1309
Epoch[5/15], Step [450/469], Reconst Loss: 10697.7949, KL Div: 3306.5874
Epoch[5/15], Step [460/469], Reconst Loss: 10815.0723, KL Div: 3061.5244
Epoch[6/15], Step [10/469], Reconst Loss: 10750.5186, KL Div: 3091.4343
Epoch[6/15], Step [20/469], Reconst Loss: 10767.5156, KL Div: 3187.1230
Epoch[6/15], Step [30/469], Reconst Loss: 10963.2441, KL Div: 3315.2053
Epoch[6/15], Step [40/469], Reconst Loss: 11402.0723, KL Div: 3183.5483
Epoch[6/15], Step [50/469], Reconst Loss: 10817.4199, KL Div: 3227.4023
Epoch[6/15], Step [60/469], Reconst Loss: 10604.6230, KL Div: 3223.0859
Epoch[6/15], Step [70/469], Reconst Loss: 10482.5723, KL Div: 3136.7629
Epoch[6/15], Step [80/469], Reconst Loss: 10381.5547, KL Div: 3229.3076
Epoch[6/15], Step [90/469], Reconst Loss: 10782.4814, KL Div: 3115.7874
Epoch[6/15], Step [100/469], Reconst Loss: 10853.3516, KL Div: 3206.6821
Epoch[6/15], Step [110/469], Reconst Loss: 11187.8594, KL Div: 3233.2971
Epoch[6/15], Step [120/469], Reconst Loss: 10842.4863, KL Div: 3170.5535
Epoch[6/15], Step [130/469], Reconst Loss: 10651.2285, KL Div: 3164.5842
Epoch[6/15], Step [140/469], Reconst Loss: 10928.7861, KL Div: 3257.0200
Epoch[6/15], Step [150/469], Reconst Loss: 10734.4785, KL Div: 3161.8726
Epoch[6/15], Step [160/469], Reconst Loss: 11339.3066, KL Div: 3271.0107
Epoch[6/15], Step [170/469], Reconst Loss: 10852.5449, KL Div: 3174.1211
Epoch[6/15], Step [180/469], Reconst Loss: 10806.9805, KL Div: 3301.3025
Epoch[6/15], Step [190/469], Reconst Loss: 10688.3838, KL Div: 3111.3535
Epoch[6/15], Step [200/469], Reconst Loss: 10767.8203, KL Div: 3238.5581
Epoch[6/15], Step [210/469], Reconst Loss: 10700.6094, KL Div: 3260.3083
Epoch[6/15], Step [220/469], Reconst Loss: 10570.7383, KL Div: 3153.7571
Epoch[6/15], Step [230/469], Reconst Loss: 10500.3398, KL Div: 3198.9004
Epoch[6/15], Step [240/469], Reconst Loss: 10538.9473, KL Div: 3140.9490
Epoch[6/15], Step [250/469], Reconst Loss: 10707.0234, KL Div: 3160.1277
Epoch[6/15], Step [260/469], Reconst Loss: 10768.5127, KL Div: 3251.2061
Epoch[6/15], Step [270/469], Reconst Loss: 10869.8545, KL Div: 3130.7891
Epoch[6/15], Step [280/469], Reconst Loss: 10487.0332, KL Div: 3383.9670
Epoch[6/15], Step [290/469], Reconst Loss: 10755.4492, KL Div: 3143.0466
Epoch[6/15], Step [300/469], Reconst Loss: 11032.5059, KL Div: 3187.1768
Epoch[6/15], Step [310/469], Reconst Loss: 10592.7334, KL Div: 3156.4619
Epoch[6/15], Step [320/469], Reconst Loss: 10994.3047, KL Div: 3293.8271
Epoch[6/15], Step [330/469], Reconst Loss: 10492.3770, KL Div: 3268.3318
Epoch[6/15], Step [340/469], Reconst Loss: 10874.0693, KL Div: 3213.5630
Epoch[6/15], Step [350/469], Reconst Loss: 10822.4336, KL Div: 3177.2644
Epoch[6/15], Step [360/469], Reconst Loss: 11128.2539, KL Div: 3267.9502
Epoch[6/15], Step [370/469], Reconst Loss: 10538.5957, KL Div: 3191.8623
Epoch[6/15], Step [380/469], Reconst Loss: 10708.6680, KL Div: 3110.2969
Epoch[6/15], Step [390/469], Reconst Loss: 10597.6621, KL Div: 3220.0962
Epoch[6/15], Step [400/469], Reconst Loss: 10775.5996, KL Div: 3050.2397
Epoch[6/15], Step [410/469], Reconst Loss: 10507.6465, KL Div: 3243.5991
Epoch[6/15], Step [420/469], Reconst Loss: 10911.7246, KL Div: 3177.3521
Epoch[6/15], Step [430/469], Reconst Loss: 10863.1387, KL Div: 3133.7156
Epoch[6/15], Step [440/469], Reconst Loss: 10646.1699, KL Div: 3330.3628
Epoch[6/15], Step [450/469], Reconst Loss: 10464.0215, KL Div: 3142.5591
Epoch[6/15], Step [460/469], Reconst Loss: 10707.5957, KL Div: 3225.0454
Epoch[7/15], Step [10/469], Reconst Loss: 10632.2939, KL Div: 3291.3938
Epoch[7/15], Step [20/469], Reconst Loss: 10564.2461, KL Div: 3145.9785
Epoch[7/15], Step [30/469], Reconst Loss: 11033.7441, KL Div: 3239.8870
Epoch[7/15], Step [40/469], Reconst Loss: 10435.2979, KL Div: 3213.6694
Epoch[7/15], Step [50/469], Reconst Loss: 10564.0449, KL Div: 3205.0608
Epoch[7/15], Step [60/469], Reconst Loss: 10644.0605, KL Div: 3379.3101
Epoch[7/15], Step [70/469], Reconst Loss: 10556.4902, KL Div: 3134.3267
Epoch[7/15], Step [80/469], Reconst Loss: 10741.8203, KL Div: 3274.3567
Epoch[7/15], Step [90/469], Reconst Loss: 10343.9502, KL Div: 3112.3765
Epoch[7/15], Step [100/469], Reconst Loss: 10495.3262, KL Div: 3284.4951
Epoch[7/15], Step [110/469], Reconst Loss: 10845.0107, KL Div: 3286.0674
Epoch[7/15], Step [120/469], Reconst Loss: 10187.9238, KL Div: 3110.1138
Epoch[7/15], Step [130/469], Reconst Loss: 10058.6816, KL Div: 3183.2754
Epoch[7/15], Step [140/469], Reconst Loss: 11230.1758, KL Div: 3380.3442
Epoch[7/15], Step [150/469], Reconst Loss: 10640.2695, KL Div: 3025.5276
Epoch[7/15], Step [160/469], Reconst Loss: 10565.2725, KL Div: 3219.5183
Epoch[7/15], Step [170/469], Reconst Loss: 10699.0156, KL Div: 3276.2739
Epoch[7/15], Step [180/469], Reconst Loss: 10708.7988, KL Div: 3280.9546
Epoch[7/15], Step [190/469], Reconst Loss: 10057.8047, KL Div: 3156.7397
Epoch[7/15], Step [200/469], Reconst Loss: 10406.2402, KL Div: 3208.5554
Epoch[7/15], Step [210/469], Reconst Loss: 10224.6797, KL Div: 3243.7461
Epoch[7/15], Step [220/469], Reconst Loss: 11324.8223, KL Div: 3251.8716
Epoch[7/15], Step [230/469], Reconst Loss: 10306.9883, KL Div: 3193.4456
Epoch[7/15], Step [240/469], Reconst Loss: 10637.6348, KL Div: 3223.8604
Epoch[7/15], Step [250/469], Reconst Loss: 10435.1318, KL Div: 3253.7817
Epoch[7/15], Step [260/469], Reconst Loss: 10495.0957, KL Div: 3175.7251
Epoch[7/15], Step [270/469], Reconst Loss: 10537.0811, KL Div: 3325.2441
Epoch[7/15], Step [280/469], Reconst Loss: 10602.4043, KL Div: 3133.4087
Epoch[7/15], Step [290/469], Reconst Loss: 10619.4512, KL Div: 3237.4973
Epoch[7/15], Step [300/469], Reconst Loss: 11278.1426, KL Div: 3288.4448
Epoch[7/15], Step [310/469], Reconst Loss: 10482.0605, KL Div: 3117.4426
Epoch[7/15], Step [320/469], Reconst Loss: 10585.0801, KL Div: 3187.9077
Epoch[7/15], Step [330/469], Reconst Loss: 10699.5273, KL Div: 3266.8779
Epoch[7/15], Step [340/469], Reconst Loss: 10207.9219, KL Div: 3136.3557
Epoch[7/15], Step [350/469], Reconst Loss: 9954.6777, KL Div: 3164.3784
Epoch[7/15], Step [360/469], Reconst Loss: 10057.3262, KL Div: 3137.8042
Epoch[7/15], Step [370/469], Reconst Loss: 10427.5879, KL Div: 3268.8921
Epoch[7/15], Step [380/469], Reconst Loss: 11133.9805, KL Div: 3244.0298
Epoch[7/15], Step [390/469], Reconst Loss: 10745.1523, KL Div: 3357.5542
Epoch[7/15], Step [400/469], Reconst Loss: 10222.7441, KL Div: 3226.5781
Epoch[7/15], Step [410/469], Reconst Loss: 10608.1641, KL Div: 3265.4609
Epoch[7/15], Step [420/469], Reconst Loss: 10226.9512, KL Div: 3064.1594
Epoch[7/15], Step [430/469], Reconst Loss: 10613.5264, KL Div: 3273.1016
Epoch[7/15], Step [440/469], Reconst Loss: 11280.3701, KL Div: 3273.0532
Epoch[7/15], Step [450/469], Reconst Loss: 10453.7852, KL Div: 3309.5806
Epoch[7/15], Step [460/469], Reconst Loss: 10734.3438, KL Div: 3255.2981
Epoch[8/15], Step [10/469], Reconst Loss: 10222.9199, KL Div: 3162.2825
Epoch[8/15], Step [20/469], Reconst Loss: 10011.3857, KL Div: 3290.9229
Epoch[8/15], Step [30/469], Reconst Loss: 10320.5039, KL Div: 3266.3218
Epoch[8/15], Step [40/469], Reconst Loss: 10001.5527, KL Div: 3128.0627
Epoch[8/15], Step [50/469], Reconst Loss: 10245.0957, KL Div: 3185.4368
Epoch[8/15], Step [60/469], Reconst Loss: 10446.2246, KL Div: 3247.0127
Epoch[8/15], Step [70/469], Reconst Loss: 10673.6357, KL Div: 3304.0286
Epoch[8/15], Step [80/469], Reconst Loss: 10669.7314, KL Div: 3189.3696
Epoch[8/15], Step [90/469], Reconst Loss: 10385.3584, KL Div: 3212.2327
Epoch[8/15], Step [100/469], Reconst Loss: 10694.5918, KL Div: 3266.3623
Epoch[8/15], Step [110/469], Reconst Loss: 10549.9199, KL Div: 3290.3311
Epoch[8/15], Step [120/469], Reconst Loss: 10919.4326, KL Div: 3275.3154
Epoch[8/15], Step [130/469], Reconst Loss: 10440.5811, KL Div: 3252.5164
Epoch[8/15], Step [140/469], Reconst Loss: 10981.5742, KL Div: 3278.2183
Epoch[8/15], Step [150/469], Reconst Loss: 10981.8848, KL Div: 3280.6777
Epoch[8/15], Step [160/469], Reconst Loss: 10529.8359, KL Div: 3185.7778
Epoch[8/15], Step [170/469], Reconst Loss: 10671.3975, KL Div: 3207.3862
Epoch[8/15], Step [180/469], Reconst Loss: 10648.8613, KL Div: 3257.8528
Epoch[8/15], Step [190/469], Reconst Loss: 10642.4365, KL Div: 3295.2463
Epoch[8/15], Step [200/469], Reconst Loss: 10848.7617, KL Div: 3363.5200
Epoch[8/15], Step [210/469], Reconst Loss: 10579.8320, KL Div: 3176.8962
Epoch[8/15], Step [220/469], Reconst Loss: 10305.0020, KL Div: 3200.8408
Epoch[8/15], Step [230/469], Reconst Loss: 10259.6992, KL Div: 3216.2588
Epoch[8/15], Step [240/469], Reconst Loss: 10210.6631, KL Div: 3158.8115
Epoch[8/15], Step [250/469], Reconst Loss: 10581.1133, KL Div: 3098.4463
Epoch[8/15], Step [260/469], Reconst Loss: 10548.2168, KL Div: 3215.6663
Epoch[8/15], Step [270/469], Reconst Loss: 10532.7578, KL Div: 3263.5186
Epoch[8/15], Step [280/469], Reconst Loss: 10147.6152, KL Div: 3119.3459
Epoch[8/15], Step [290/469], Reconst Loss: 10594.5195, KL Div: 3292.4692
Epoch[8/15], Step [300/469], Reconst Loss: 10922.3145, KL Div: 3330.0107
Epoch[8/15], Step [310/469], Reconst Loss: 10477.1426, KL Div: 3282.3530
Epoch[8/15], Step [320/469], Reconst Loss: 10249.3867, KL Div: 3326.6125
Epoch[8/15], Step [330/469], Reconst Loss: 10236.3457, KL Div: 3217.5508
Epoch[8/15], Step [340/469], Reconst Loss: 10678.8818, KL Div: 3249.8584
Epoch[8/15], Step [350/469], Reconst Loss: 10571.1328, KL Div: 3218.7478
Epoch[8/15], Step [360/469], Reconst Loss: 10866.8477, KL Div: 3267.1755
Epoch[8/15], Step [370/469], Reconst Loss: 10645.7363, KL Div: 3110.9197
Epoch[8/15], Step [380/469], Reconst Loss: 10385.8838, KL Div: 3174.8872
Epoch[8/15], Step [390/469], Reconst Loss: 10650.8047, KL Div: 3213.5557
Epoch[8/15], Step [400/469], Reconst Loss: 10668.5947, KL Div: 3285.7410
Epoch[8/15], Step [410/469], Reconst Loss: 10591.2236, KL Div: 3222.0774
Epoch[8/15], Step [420/469], Reconst Loss: 10296.6787, KL Div: 3215.7456
Epoch[8/15], Step [430/469], Reconst Loss: 10314.2236, KL Div: 3188.1904
Epoch[8/15], Step [440/469], Reconst Loss: 10467.1367, KL Div: 3241.2246
Epoch[8/15], Step [450/469], Reconst Loss: 10422.1367, KL Div: 3248.3264
Epoch[8/15], Step [460/469], Reconst Loss: 10665.0215, KL Div: 3266.1738
Epoch[9/15], Step [10/469], Reconst Loss: 10196.8691, KL Div: 3178.3267
Epoch[9/15], Step [20/469], Reconst Loss: 10098.2773, KL Div: 3135.8237
Epoch[9/15], Step [30/469], Reconst Loss: 10363.6641, KL Div: 3224.7319
Epoch[9/15], Step [40/469], Reconst Loss: 10555.9258, KL Div: 3179.8088
Epoch[9/15], Step [50/469], Reconst Loss: 10878.3027, KL Div: 3282.8516
Epoch[9/15], Step [60/469], Reconst Loss: 10450.6465, KL Div: 3172.1494
Epoch[9/15], Step [70/469], Reconst Loss: 10261.3652, KL Div: 3217.4451
Epoch[9/15], Step [80/469], Reconst Loss: 10151.3027, KL Div: 3225.1514
Epoch[9/15], Step [90/469], Reconst Loss: 10402.9609, KL Div: 3146.9385
Epoch[9/15], Step [100/469], Reconst Loss: 10744.1914, KL Div: 3264.0083
Epoch[9/15], Step [110/469], Reconst Loss: 10059.4062, KL Div: 3043.5938
Epoch[9/15], Step [120/469], Reconst Loss: 10452.4492, KL Div: 3272.4497
Epoch[9/15], Step [130/469], Reconst Loss: 10109.1318, KL Div: 3111.4502
Epoch[9/15], Step [140/469], Reconst Loss: 10361.4355, KL Div: 3193.3979
Epoch[9/15], Step [150/469], Reconst Loss: 10786.0176, KL Div: 3227.8223
Epoch[9/15], Step [160/469], Reconst Loss: 10772.4268, KL Div: 3175.7617
Epoch[9/15], Step [170/469], Reconst Loss: 10644.6094, KL Div: 3376.1594
Epoch[9/15], Step [180/469], Reconst Loss: 10415.0000, KL Div: 3223.3853
Epoch[9/15], Step [190/469], Reconst Loss: 10796.9951, KL Div: 3289.1919
Epoch[9/15], Step [200/469], Reconst Loss: 10783.0713, KL Div: 3260.4243
Epoch[9/15], Step [210/469], Reconst Loss: 10893.0830, KL Div: 3373.7468
Epoch[9/15], Step [220/469], Reconst Loss: 10836.8115, KL Div: 3204.9941
Epoch[9/15], Step [230/469], Reconst Loss: 10456.2773, KL Div: 3275.3662
Epoch[9/15], Step [240/469], Reconst Loss: 10204.2227, KL Div: 3193.3169
Epoch[9/15], Step [250/469], Reconst Loss: 10149.0850, KL Div: 3222.0654
Epoch[9/15], Step [260/469], Reconst Loss: 10439.5312, KL Div: 3267.2422
Epoch[9/15], Step [270/469], Reconst Loss: 10568.4404, KL Div: 3184.8958
Epoch[9/15], Step [280/469], Reconst Loss: 10789.6836, KL Div: 3138.2075
Epoch[9/15], Step [290/469], Reconst Loss: 10422.8340, KL Div: 3260.5234
Epoch[9/15], Step [300/469], Reconst Loss: 10229.7598, KL Div: 3207.5962
Epoch[9/15], Step [310/469], Reconst Loss: 10687.4180, KL Div: 3161.0713
Epoch[9/15], Step [320/469], Reconst Loss: 10733.6777, KL Div: 3452.2300
Epoch[9/15], Step [330/469], Reconst Loss: 10646.9961, KL Div: 3243.2581
Epoch[9/15], Step [340/469], Reconst Loss: 9829.2861, KL Div: 3151.9353
Epoch[9/15], Step [350/469], Reconst Loss: 10592.4092, KL Div: 3316.3369
Epoch[9/15], Step [360/469], Reconst Loss: 10463.0449, KL Div: 3251.4734
Epoch[9/15], Step [370/469], Reconst Loss: 10664.1172, KL Div: 3201.3977
Epoch[9/15], Step [380/469], Reconst Loss: 10219.7070, KL Div: 3243.8945
Epoch[9/15], Step [390/469], Reconst Loss: 10187.8633, KL Div: 3214.5801
Epoch[9/15], Step [400/469], Reconst Loss: 9953.3008, KL Div: 3193.5020
Epoch[9/15], Step [410/469], Reconst Loss: 10499.8320, KL Div: 3195.8511
Epoch[9/15], Step [420/469], Reconst Loss: 10737.6045, KL Div: 3270.7095
Epoch[9/15], Step [430/469], Reconst Loss: 10496.1660, KL Div: 3289.7112
Epoch[9/15], Step [440/469], Reconst Loss: 10412.8184, KL Div: 3193.3955
Epoch[9/15], Step [450/469], Reconst Loss: 10529.5479, KL Div: 3377.8003
Epoch[9/15], Step [460/469], Reconst Loss: 10154.2305, KL Div: 3239.6172
Epoch[10/15], Step [10/469], Reconst Loss: 10236.5410, KL Div: 3213.1616
Epoch[10/15], Step [20/469], Reconst Loss: 10612.2803, KL Div: 3262.2549
Epoch[10/15], Step [30/469], Reconst Loss: 10679.3330, KL Div: 3247.2385
Epoch[10/15], Step [40/469], Reconst Loss: 10614.4746, KL Div: 3249.0859
Epoch[10/15], Step [50/469], Reconst Loss: 10106.0898, KL Div: 3193.5972
Epoch[10/15], Step [60/469], Reconst Loss: 10295.7402, KL Div: 3189.0188
Epoch[10/15], Step [70/469], Reconst Loss: 10105.2402, KL Div: 3285.5776
Epoch[10/15], Step [80/469], Reconst Loss: 10795.5781, KL Div: 3159.6721
Epoch[10/15], Step [90/469], Reconst Loss: 10204.5869, KL Div: 3290.6047
Epoch[10/15], Step [100/469], Reconst Loss: 10498.1865, KL Div: 3360.2505
Epoch[10/15], Step [110/469], Reconst Loss: 10509.6484, KL Div: 3240.7234
Epoch[10/15], Step [120/469], Reconst Loss: 10551.1484, KL Div: 3296.1765
Epoch[10/15], Step [130/469], Reconst Loss: 10696.8984, KL Div: 3312.8765
Epoch[10/15], Step [140/469], Reconst Loss: 9975.9434, KL Div: 3204.0149
Epoch[10/15], Step [150/469], Reconst Loss: 10749.1562, KL Div: 3280.9580
Epoch[10/15], Step [160/469], Reconst Loss: 10558.5303, KL Div: 3276.9092
Epoch[10/15], Step [170/469], Reconst Loss: 10976.7295, KL Div: 3353.9478
Epoch[10/15], Step [180/469], Reconst Loss: 10085.6924, KL Div: 3207.1685
Epoch[10/15], Step [190/469], Reconst Loss: 10486.7246, KL Div: 3159.7339
Epoch[10/15], Step [200/469], Reconst Loss: 10099.4775, KL Div: 3174.8127
Epoch[10/15], Step [210/469], Reconst Loss: 10209.8496, KL Div: 3233.4775
Epoch[10/15], Step [220/469], Reconst Loss: 10603.9912, KL Div: 3309.9707
Epoch[10/15], Step [230/469], Reconst Loss: 10298.6387, KL Div: 3270.9316
Epoch[10/15], Step [240/469], Reconst Loss: 10173.3301, KL Div: 3170.9163
Epoch[10/15], Step [250/469], Reconst Loss: 10026.7910, KL Div: 3236.4868
Epoch[10/15], Step [260/469], Reconst Loss: 10553.7070, KL Div: 3179.6345
Epoch[10/15], Step [270/469], Reconst Loss: 10175.8242, KL Div: 3330.2834
Epoch[10/15], Step [280/469], Reconst Loss: 10591.2900, KL Div: 3219.5210
Epoch[10/15], Step [290/469], Reconst Loss: 10519.7139, KL Div: 3270.1125
Epoch[10/15], Step [300/469], Reconst Loss: 10444.9531, KL Div: 3203.0942
Epoch[10/15], Step [310/469], Reconst Loss: 10812.2871, KL Div: 3274.4155
Epoch[10/15], Step [320/469], Reconst Loss: 10295.7109, KL Div: 3139.6843
Epoch[10/15], Step [330/469], Reconst Loss: 10694.4297, KL Div: 3165.6001
Epoch[10/15], Step [340/469], Reconst Loss: 10554.6348, KL Div: 3321.5742
Epoch[10/15], Step [350/469], Reconst Loss: 10688.0059, KL Div: 3252.3789
Epoch[10/15], Step [360/469], Reconst Loss: 10486.1543, KL Div: 3178.7944
Epoch[10/15], Step [370/469], Reconst Loss: 10239.1787, KL Div: 3316.7739
Epoch[10/15], Step [380/469], Reconst Loss: 10374.2188, KL Div: 3132.5322
Epoch[10/15], Step [390/469], Reconst Loss: 10329.8125, KL Div: 3208.3450
Epoch[10/15], Step [400/469], Reconst Loss: 10409.2012, KL Div: 3332.0391
Epoch[10/15], Step [410/469], Reconst Loss: 10529.1855, KL Div: 3274.3516
Epoch[10/15], Step [420/469], Reconst Loss: 11063.1924, KL Div: 3268.7520
Epoch[10/15], Step [430/469], Reconst Loss: 10256.4453, KL Div: 3231.0925
Epoch[10/15], Step [440/469], Reconst Loss: 10324.7969, KL Div: 3219.4368
Epoch[10/15], Step [450/469], Reconst Loss: 10317.6650, KL Div: 3239.9331
Epoch[10/15], Step [460/469], Reconst Loss: 10360.7959, KL Div: 3189.5708
Epoch[11/15], Step [10/469], Reconst Loss: 10053.4727, KL Div: 3250.8550
Epoch[11/15], Step [20/469], Reconst Loss: 10236.5088, KL Div: 3255.9810
Epoch[11/15], Step [30/469], Reconst Loss: 10176.1611, KL Div: 3121.9722
Epoch[11/15], Step [40/469], Reconst Loss: 10437.1982, KL Div: 3250.6987
Epoch[11/15], Step [50/469], Reconst Loss: 10708.3945, KL Div: 3278.7578
Epoch[11/15], Step [60/469], Reconst Loss: 10547.8115, KL Div: 3267.8232
Epoch[11/15], Step [70/469], Reconst Loss: 10649.3711, KL Div: 3313.7485
Epoch[11/15], Step [80/469], Reconst Loss: 10084.4355, KL Div: 3141.8008
Epoch[11/15], Step [90/469], Reconst Loss: 10295.0977, KL Div: 3230.8088
Epoch[11/15], Step [100/469], Reconst Loss: 10348.7480, KL Div: 3272.9365
Epoch[11/15], Step [110/469], Reconst Loss: 10151.3496, KL Div: 3310.7529
Epoch[11/15], Step [120/469], Reconst Loss: 10194.9375, KL Div: 3173.0044
Epoch[11/15], Step [130/469], Reconst Loss: 10214.9697, KL Div: 3293.3711
Epoch[11/15], Step [140/469], Reconst Loss: 10298.1816, KL Div: 3218.0227
Epoch[11/15], Step [150/469], Reconst Loss: 10152.0449, KL Div: 3367.3867
Epoch[11/15], Step [160/469], Reconst Loss: 10273.2090, KL Div: 3232.4277
Epoch[11/15], Step [170/469], Reconst Loss: 10385.1719, KL Div: 3256.7419
Epoch[11/15], Step [180/469], Reconst Loss: 10500.4834, KL Div: 3294.9968
Epoch[11/15], Step [190/469], Reconst Loss: 10245.0488, KL Div: 3300.9277
Epoch[11/15], Step [200/469], Reconst Loss: 10301.4082, KL Div: 3216.1733
Epoch[11/15], Step [210/469], Reconst Loss: 10016.0576, KL Div: 3248.4077
Epoch[11/15], Step [220/469], Reconst Loss: 10252.1484, KL Div: 3350.2583
Epoch[11/15], Step [230/469], Reconst Loss: 10751.9082, KL Div: 3199.2019
Epoch[11/15], Step [240/469], Reconst Loss: 10531.6982, KL Div: 3364.4631
Epoch[11/15], Step [250/469], Reconst Loss: 10466.8740, KL Div: 3298.8354
Epoch[11/15], Step [260/469], Reconst Loss: 10577.1758, KL Div: 3266.2664
Epoch[11/15], Step [270/469], Reconst Loss: 10609.9297, KL Div: 3205.9143
Epoch[11/15], Step [280/469], Reconst Loss: 10435.7598, KL Div: 3367.1514
Epoch[11/15], Step [290/469], Reconst Loss: 10704.1250, KL Div: 3236.8464
Epoch[11/15], Step [300/469], Reconst Loss: 10193.8691, KL Div: 3176.0522
Epoch[11/15], Step [310/469], Reconst Loss: 10056.8320, KL Div: 3177.2244
Epoch[11/15], Step [320/469], Reconst Loss: 10633.1270, KL Div: 3276.2898
Epoch[11/15], Step [330/469], Reconst Loss: 9991.3809, KL Div: 3107.3315
Epoch[11/15], Step [340/469], Reconst Loss: 10530.6885, KL Div: 3171.1392
Epoch[11/15], Step [350/469], Reconst Loss: 10466.3594, KL Div: 3269.9668
Epoch[11/15], Step [360/469], Reconst Loss: 10428.1621, KL Div: 3304.3184
Epoch[11/15], Step [370/469], Reconst Loss: 10165.8691, KL Div: 3153.9817
Epoch[11/15], Step [380/469], Reconst Loss: 10641.2578, KL Div: 3152.3586
Epoch[11/15], Step [390/469], Reconst Loss: 10268.7070, KL Div: 3264.2173
Epoch[11/15], Step [400/469], Reconst Loss: 10582.3369, KL Div: 3223.6660
Epoch[11/15], Step [410/469], Reconst Loss: 9774.7012, KL Div: 3183.2505
Epoch[11/15], Step [420/469], Reconst Loss: 10321.9609, KL Div: 3184.8350
Epoch[11/15], Step [430/469], Reconst Loss: 10422.4727, KL Div: 3186.4854
Epoch[11/15], Step [440/469], Reconst Loss: 10658.8877, KL Div: 3203.9243
Epoch[11/15], Step [450/469], Reconst Loss: 10263.1055, KL Div: 3264.3481
Epoch[11/15], Step [460/469], Reconst Loss: 10241.3320, KL Div: 3161.2917
Epoch[12/15], Step [10/469], Reconst Loss: 10100.4707, KL Div: 3252.5620
Epoch[12/15], Step [20/469], Reconst Loss: 10362.4365, KL Div: 3182.6909
Epoch[12/15], Step [30/469], Reconst Loss: 10143.4043, KL Div: 3187.7324
Epoch[12/15], Step [40/469], Reconst Loss: 10882.9590, KL Div: 3299.7056
Epoch[12/15], Step [50/469], Reconst Loss: 10397.6016, KL Div: 3282.6797
Epoch[12/15], Step [60/469], Reconst Loss: 10318.3145, KL Div: 3248.0708
Epoch[12/15], Step [70/469], Reconst Loss: 9727.1230, KL Div: 3104.6587
Epoch[12/15], Step [80/469], Reconst Loss: 10144.8438, KL Div: 3335.3586
Epoch[12/15], Step [90/469], Reconst Loss: 10765.9844, KL Div: 3157.8489
Epoch[12/15], Step [100/469], Reconst Loss: 10360.7236, KL Div: 3243.4824
Epoch[12/15], Step [110/469], Reconst Loss: 10216.3037, KL Div: 3199.0112
Epoch[12/15], Step [120/469], Reconst Loss: 10396.5215, KL Div: 3330.6196
Epoch[12/15], Step [130/469], Reconst Loss: 10355.6631, KL Div: 3222.9419
Epoch[12/15], Step [140/469], Reconst Loss: 9937.6602, KL Div: 3146.4893
Epoch[12/15], Step [150/469], Reconst Loss: 9942.9316, KL Div: 3222.0889
Epoch[12/15], Step [160/469], Reconst Loss: 10471.5215, KL Div: 3199.5005
Epoch[12/15], Step [170/469], Reconst Loss: 10320.1836, KL Div: 3271.3301
Epoch[12/15], Step [180/469], Reconst Loss: 9788.8047, KL Div: 3104.6367
Epoch[12/15], Step [190/469], Reconst Loss: 11003.0684, KL Div: 3323.4395
Epoch[12/15], Step [200/469], Reconst Loss: 10676.1084, KL Div: 3284.1753
Epoch[12/15], Step [210/469], Reconst Loss: 10107.3662, KL Div: 3209.1809
Epoch[12/15], Step [220/469], Reconst Loss: 10156.5889, KL Div: 3244.6765
Epoch[12/15], Step [230/469], Reconst Loss: 10257.4248, KL Div: 3279.8970
Epoch[12/15], Step [240/469], Reconst Loss: 10590.3086, KL Div: 3294.7998
Epoch[12/15], Step [250/469], Reconst Loss: 10258.4570, KL Div: 3237.8452
Epoch[12/15], Step [260/469], Reconst Loss: 10488.2148, KL Div: 3423.0193
Epoch[12/15], Step [270/469], Reconst Loss: 10744.1660, KL Div: 3183.3455
Epoch[12/15], Step [280/469], Reconst Loss: 10914.4561, KL Div: 3288.6992
Epoch[12/15], Step [290/469], Reconst Loss: 9729.5820, KL Div: 3215.9641
Epoch[12/15], Step [300/469], Reconst Loss: 10665.6982, KL Div: 3264.9512
Epoch[12/15], Step [310/469], Reconst Loss: 9803.0322, KL Div: 3138.8916
Epoch[12/15], Step [320/469], Reconst Loss: 9962.8525, KL Div: 3285.4980
Epoch[12/15], Step [330/469], Reconst Loss: 10311.7148, KL Div: 3220.7627
Epoch[12/15], Step [340/469], Reconst Loss: 10059.0391, KL Div: 3220.6550
Epoch[12/15], Step [350/469], Reconst Loss: 10087.7500, KL Div: 3218.1709
Epoch[12/15], Step [360/469], Reconst Loss: 10213.5684, KL Div: 3224.1826
Epoch[12/15], Step [370/469], Reconst Loss: 9900.9277, KL Div: 3133.0320
Epoch[12/15], Step [380/469], Reconst Loss: 10240.7793, KL Div: 3247.9814
Epoch[12/15], Step [390/469], Reconst Loss: 10284.7070, KL Div: 3239.2866
Epoch[12/15], Step [400/469], Reconst Loss: 10419.7158, KL Div: 3317.0000
Epoch[12/15], Step [410/469], Reconst Loss: 10537.9961, KL Div: 3295.1472
Epoch[12/15], Step [420/469], Reconst Loss: 10414.0420, KL Div: 3268.8992
Epoch[12/15], Step [430/469], Reconst Loss: 10457.7754, KL Div: 3189.6006
Epoch[12/15], Step [440/469], Reconst Loss: 10238.2861, KL Div: 3341.2368
Epoch[12/15], Step [450/469], Reconst Loss: 10144.2783, KL Div: 3146.1230
Epoch[12/15], Step [460/469], Reconst Loss: 9952.5000, KL Div: 3254.5134
Epoch[13/15], Step [10/469], Reconst Loss: 10561.1523, KL Div: 3278.7744
Epoch[13/15], Step [20/469], Reconst Loss: 10404.3721, KL Div: 3176.3213
Epoch[13/15], Step [30/469], Reconst Loss: 10624.2871, KL Div: 3327.5386
Epoch[13/15], Step [40/469], Reconst Loss: 10325.8320, KL Div: 3344.9980
Epoch[13/15], Step [50/469], Reconst Loss: 10723.1426, KL Div: 3287.0120
Epoch[13/15], Step [60/469], Reconst Loss: 10437.0645, KL Div: 3357.3662
Epoch[13/15], Step [70/469], Reconst Loss: 10004.9355, KL Div: 3170.4160
Epoch[13/15], Step [80/469], Reconst Loss: 10082.7598, KL Div: 3213.1504
Epoch[13/15], Step [90/469], Reconst Loss: 10512.5586, KL Div: 3321.3447
Epoch[13/15], Step [100/469], Reconst Loss: 10059.7617, KL Div: 3234.3960
Epoch[13/15], Step [110/469], Reconst Loss: 10556.2148, KL Div: 3303.7673
Epoch[13/15], Step [120/469], Reconst Loss: 10303.7070, KL Div: 3234.6082
Epoch[13/15], Step [130/469], Reconst Loss: 10065.9121, KL Div: 3103.7578
Epoch[13/15], Step [140/469], Reconst Loss: 10536.8574, KL Div: 3366.4834
Epoch[13/15], Step [150/469], Reconst Loss: 10153.4932, KL Div: 3125.0923
Epoch[13/15], Step [160/469], Reconst Loss: 9987.3643, KL Div: 3284.4302
Epoch[13/15], Step [170/469], Reconst Loss: 10263.2109, KL Div: 3213.4697
Epoch[13/15], Step [180/469], Reconst Loss: 10746.4902, KL Div: 3392.5688
Epoch[13/15], Step [190/469], Reconst Loss: 10265.0459, KL Div: 3304.5281
Epoch[13/15], Step [200/469], Reconst Loss: 10162.6172, KL Div: 3181.8455
Epoch[13/15], Step [210/469], Reconst Loss: 10058.0820, KL Div: 3228.5020
Epoch[13/15], Step [220/469], Reconst Loss: 10106.4180, KL Div: 3194.3140
Epoch[13/15], Step [230/469], Reconst Loss: 9975.6738, KL Div: 3194.1494
Epoch[13/15], Step [240/469], Reconst Loss: 10023.4424, KL Div: 3229.2405
Epoch[13/15], Step [250/469], Reconst Loss: 10043.7246, KL Div: 3207.5303
Epoch[13/15], Step [260/469], Reconst Loss: 9962.7070, KL Div: 3155.2725
Epoch[13/15], Step [270/469], Reconst Loss: 10077.5498, KL Div: 3176.8755
Epoch[13/15], Step [280/469], Reconst Loss: 10064.2910, KL Div: 3234.5818
Epoch[13/15], Step [290/469], Reconst Loss: 10216.9326, KL Div: 3207.9661
Epoch[13/15], Step [300/469], Reconst Loss: 10207.9873, KL Div: 3253.5781
Epoch[13/15], Step [310/469], Reconst Loss: 10112.1582, KL Div: 3152.4810
Epoch[13/15], Step [320/469], Reconst Loss: 10220.6309, KL Div: 3243.7036
Epoch[13/15], Step [330/469], Reconst Loss: 10113.3047, KL Div: 3277.7246
Epoch[13/15], Step [340/469], Reconst Loss: 10360.3691, KL Div: 3235.7180
Epoch[13/15], Step [350/469], Reconst Loss: 10314.1572, KL Div: 3298.3831
Epoch[13/15], Step [360/469], Reconst Loss: 9738.2354, KL Div: 3156.9714
Epoch[13/15], Step [370/469], Reconst Loss: 10193.6719, KL Div: 3227.9404
Epoch[13/15], Step [380/469], Reconst Loss: 10099.7305, KL Div: 3256.4912
Epoch[13/15], Step [390/469], Reconst Loss: 9898.5684, KL Div: 3212.1011
Epoch[13/15], Step [400/469], Reconst Loss: 10243.5762, KL Div: 3353.5847
Epoch[13/15], Step [410/469], Reconst Loss: 10356.5869, KL Div: 3183.2246
Epoch[13/15], Step [420/469], Reconst Loss: 10546.1289, KL Div: 3293.1250
Epoch[13/15], Step [430/469], Reconst Loss: 9828.0391, KL Div: 3263.3047
Epoch[13/15], Step [440/469], Reconst Loss: 9905.3340, KL Div: 3148.6338
Epoch[13/15], Step [450/469], Reconst Loss: 10301.7832, KL Div: 3251.5398
Epoch[13/15], Step [460/469], Reconst Loss: 10545.7549, KL Div: 3337.8765
Epoch[14/15], Step [10/469], Reconst Loss: 10069.1475, KL Div: 3265.8403
Epoch[14/15], Step [20/469], Reconst Loss: 10634.7285, KL Div: 3417.9934
Epoch[14/15], Step [30/469], Reconst Loss: 10381.9365, KL Div: 3184.7178
Epoch[14/15], Step [40/469], Reconst Loss: 10487.7070, KL Div: 3267.9077
Epoch[14/15], Step [50/469], Reconst Loss: 10231.0342, KL Div: 3363.9539
Epoch[14/15], Step [60/469], Reconst Loss: 10475.6465, KL Div: 3279.8052
Epoch[14/15], Step [70/469], Reconst Loss: 10026.4590, KL Div: 3185.8557
Epoch[14/15], Step [80/469], Reconst Loss: 10289.2051, KL Div: 3390.5732
Epoch[14/15], Step [90/469], Reconst Loss: 10010.5078, KL Div: 3207.6255
Epoch[14/15], Step [100/469], Reconst Loss: 10236.8457, KL Div: 3321.0464
Epoch[14/15], Step [110/469], Reconst Loss: 10333.8789, KL Div: 3246.1592
Epoch[14/15], Step [120/469], Reconst Loss: 10373.4316, KL Div: 3179.2629
Epoch[14/15], Step [130/469], Reconst Loss: 9921.7148, KL Div: 3174.9546
Epoch[14/15], Step [140/469], Reconst Loss: 10403.0566, KL Div: 3209.5942
Epoch[14/15], Step [150/469], Reconst Loss: 10434.0850, KL Div: 3237.9197
Epoch[14/15], Step [160/469], Reconst Loss: 9863.9453, KL Div: 3282.2021
Epoch[14/15], Step [170/469], Reconst Loss: 10278.5703, KL Div: 3328.7625
Epoch[14/15], Step [180/469], Reconst Loss: 9605.9189, KL Div: 3219.9016
Epoch[14/15], Step [190/469], Reconst Loss: 9991.4883, KL Div: 3135.2844
Epoch[14/15], Step [200/469], Reconst Loss: 9956.5195, KL Div: 3196.9170
Epoch[14/15], Step [210/469], Reconst Loss: 10533.9990, KL Div: 3343.3950
Epoch[14/15], Step [220/469], Reconst Loss: 10042.6260, KL Div: 3177.3831
Epoch[14/15], Step [230/469], Reconst Loss: 9880.4521, KL Div: 3222.9128
Epoch[14/15], Step [240/469], Reconst Loss: 10170.3877, KL Div: 3273.3689
Epoch[14/15], Step [250/469], Reconst Loss: 10437.9170, KL Div: 3242.1714
Epoch[14/15], Step [260/469], Reconst Loss: 10217.8145, KL Div: 3159.4246
Epoch[14/15], Step [270/469], Reconst Loss: 9837.6758, KL Div: 3148.0342
Epoch[14/15], Step [280/469], Reconst Loss: 10033.9102, KL Div: 3285.6162
Epoch[14/15], Step [290/469], Reconst Loss: 10050.0527, KL Div: 3267.7573
Epoch[14/15], Step [300/469], Reconst Loss: 10737.7852, KL Div: 3276.8811
Epoch[14/15], Step [310/469], Reconst Loss: 10654.3281, KL Div: 3195.3047
Epoch[14/15], Step [320/469], Reconst Loss: 9751.8867, KL Div: 3234.4192
Epoch[14/15], Step [330/469], Reconst Loss: 9862.9922, KL Div: 3285.5381
Epoch[14/15], Step [340/469], Reconst Loss: 9958.1113, KL Div: 3158.3511
Epoch[14/15], Step [350/469], Reconst Loss: 10145.1914, KL Div: 3199.6873
Epoch[14/15], Step [360/469], Reconst Loss: 10368.6338, KL Div: 3395.5317
Epoch[14/15], Step [370/469], Reconst Loss: 10013.0508, KL Div: 3153.7812
Epoch[14/15], Step [380/469], Reconst Loss: 10253.8887, KL Div: 3274.3643
Epoch[14/15], Step [390/469], Reconst Loss: 10283.4805, KL Div: 3308.3999
Epoch[14/15], Step [400/469], Reconst Loss: 10379.4180, KL Div: 3318.8638
Epoch[14/15], Step [410/469], Reconst Loss: 9914.1719, KL Div: 3175.6028
Epoch[14/15], Step [420/469], Reconst Loss: 10168.4199, KL Div: 3187.2297
Epoch[14/15], Step [430/469], Reconst Loss: 10245.3184, KL Div: 3236.4622
Epoch[14/15], Step [440/469], Reconst Loss: 10100.7812, KL Div: 3267.4075
Epoch[14/15], Step [450/469], Reconst Loss: 9914.7559, KL Div: 3134.0366
Epoch[14/15], Step [460/469], Reconst Loss: 10017.5098, KL Div: 3430.6792
Epoch[15/15], Step [10/469], Reconst Loss: 10327.4453, KL Div: 3295.9519
Epoch[15/15], Step [20/469], Reconst Loss: 10625.3418, KL Div: 3216.5332
Epoch[15/15], Step [30/469], Reconst Loss: 10300.7256, KL Div: 3204.3267
Epoch[15/15], Step [40/469], Reconst Loss: 10053.3760, KL Div: 3213.3494
Epoch[15/15], Step [50/469], Reconst Loss: 9802.7109, KL Div: 3277.8635
Epoch[15/15], Step [60/469], Reconst Loss: 10406.4102, KL Div: 3174.1592
Epoch[15/15], Step [70/469], Reconst Loss: 10141.3604, KL Div: 3269.6350
Epoch[15/15], Step [80/469], Reconst Loss: 10166.9277, KL Div: 3256.5972
Epoch[15/15], Step [90/469], Reconst Loss: 10143.2266, KL Div: 3141.7861
Epoch[15/15], Step [100/469], Reconst Loss: 10453.3926, KL Div: 3279.7358
Epoch[15/15], Step [110/469], Reconst Loss: 10315.9658, KL Div: 3261.2327
Epoch[15/15], Step [120/469], Reconst Loss: 9827.2764, KL Div: 3202.5688
Epoch[15/15], Step [130/469], Reconst Loss: 10371.4766, KL Div: 3302.3269
Epoch[15/15], Step [140/469], Reconst Loss: 10176.9668, KL Div: 3277.7471
Epoch[15/15], Step [150/469], Reconst Loss: 10535.4043, KL Div: 3319.8499
Epoch[15/15], Step [160/469], Reconst Loss: 10384.1865, KL Div: 3258.4556
Epoch[15/15], Step [170/469], Reconst Loss: 10176.8857, KL Div: 3313.0811
Epoch[15/15], Step [180/469], Reconst Loss: 9736.1553, KL Div: 3179.9526
Epoch[15/15], Step [190/469], Reconst Loss: 10388.7783, KL Div: 3210.8306
Epoch[15/15], Step [200/469], Reconst Loss: 10706.0146, KL Div: 3314.2937
Epoch[15/15], Step [210/469], Reconst Loss: 10436.8965, KL Div: 3201.7385
Epoch[15/15], Step [220/469], Reconst Loss: 10383.0273, KL Div: 3267.3047
Epoch[15/15], Step [230/469], Reconst Loss: 10396.6680, KL Div: 3188.7212
Epoch[15/15], Step [240/469], Reconst Loss: 10093.4375, KL Div: 3226.1409
Epoch[15/15], Step [250/469], Reconst Loss: 9976.1230, KL Div: 3331.6909
Epoch[15/15], Step [260/469], Reconst Loss: 9936.5430, KL Div: 3166.5913
Epoch[15/15], Step [270/469], Reconst Loss: 10188.4463, KL Div: 3264.0513
Epoch[15/15], Step [280/469], Reconst Loss: 9759.2363, KL Div: 3212.6204
Epoch[15/15], Step [290/469], Reconst Loss: 10251.6484, KL Div: 3285.7480
Epoch[15/15], Step [300/469], Reconst Loss: 10107.9277, KL Div: 3225.7773
Epoch[15/15], Step [310/469], Reconst Loss: 10231.5732, KL Div: 3285.9448
Epoch[15/15], Step [320/469], Reconst Loss: 10227.3945, KL Div: 3287.7163
Epoch[15/15], Step [330/469], Reconst Loss: 10169.8535, KL Div: 3291.9043
Epoch[15/15], Step [340/469], Reconst Loss: 10114.7881, KL Div: 3198.2019
Epoch[15/15], Step [350/469], Reconst Loss: 9916.6758, KL Div: 3298.3027
Epoch[15/15], Step [360/469], Reconst Loss: 10149.1543, KL Div: 3334.2546
Epoch[15/15], Step [370/469], Reconst Loss: 10391.2930, KL Div: 3154.2920
Epoch[15/15], Step [380/469], Reconst Loss: 10285.0098, KL Div: 3314.5752
Epoch[15/15], Step [390/469], Reconst Loss: 10037.2598, KL Div: 3348.3552
Epoch[15/15], Step [400/469], Reconst Loss: 10347.6201, KL Div: 3230.4548
Epoch[15/15], Step [410/469], Reconst Loss: 10050.3828, KL Div: 3257.5193
Epoch[15/15], Step [420/469], Reconst Loss: 10073.8594, KL Div: 3217.1846
Epoch[15/15], Step [430/469], Reconst Loss: 9558.9961, KL Div: 3181.1758
Epoch[15/15], Step [440/469], Reconst Loss: 9885.7969, KL Div: 3207.2212
Epoch[15/15], Step [450/469], Reconst Loss: 10039.3613, KL Div: 3216.7864
Epoch[15/15], Step [460/469], Reconst Loss: 10072.1973, KL Div: 3270.6738

在这里插入图片描述
这里分别是采样数据和重建数据实例。

在这里插入图片描述

# save and load the model checkpoint.
# Persist only the learnable parameters (state_dict) rather than the whole
# module object, so the checkpoint stays portable across code refactors.
torch.save(model.state_dict(), 'model_param.ckpt')
# Reload in place; `model` must be an instance of the same architecture so
# every key in the checkpoint matches a parameter (hence the
# "<All keys matched successfully>" confirmation printed below).
model.load_state_dict(torch.load('model_param.ckpt'))
<All keys matched successfully>
  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
以下是使用PyTorch实现变分自动编码器的步骤:

1. 导入所需的库和模块,包括torch、torch.nn、torch.nn.functional、torchvision等。
2. 定义设备配置,判断是否有可用的GPU,如果有则使用GPU,否则使用CPU。
3. 定义变分自动编码器的编码器和解码器。编码器由两个全连接层和一个输出层组成,解码器由一个全连接层和一个输出层组成。
4. 定义变分自动编码器的前向传播函数forward(),其中包括编码器和解码器的前向传播过程。
5. 定义变分自动编码器的损失函数,包括重构误差和KL散度。
6. 定义优化器,使用Adam优化器。
7. 训练模型,包括前向传播、计算损失、反向传播和优化器更新参数。
8. 保存模型和生成样本图片。

下面是完整的代码实现:

```python
import os

import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
from torchvision import transforms
from torchvision.utils import save_image

# Device configuration: prefer GPU when available.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


class VAE(nn.Module):
    """Variational auto-encoder for 28x28 MNIST digits (784-d input, 20-d latent)."""

    def __init__(self):
        super(VAE, self).__init__()
        self.fc1 = nn.Linear(784, 400)   # encoder hidden layer
        self.fc21 = nn.Linear(400, 20)   # latent mean head
        self.fc22 = nn.Linear(400, 20)   # latent log-variance head
        self.fc3 = nn.Linear(20, 400)    # decoder hidden layer
        self.fc4 = nn.Linear(400, 784)   # decoder output layer

    def encode(self, x):
        """Map a flattened image to the (mu, logvar) of q(z|x)."""
        h1 = F.relu(self.fc1(x))
        return self.fc21(h1), self.fc22(h1)

    def reparameterize(self, mu, logvar):
        """Sample z = mu + sigma * eps; keeps sampling differentiable w.r.t. mu/logvar."""
        std = torch.exp(0.5 * logvar)
        eps = torch.randn_like(std)
        return mu + eps * std

    def decode(self, z):
        """Map a latent code back to pixel probabilities in [0, 1]."""
        h3 = F.relu(self.fc3(z))
        return torch.sigmoid(self.fc4(h3))

    def forward(self, x):
        mu, logvar = self.encode(x.view(-1, 784))
        z = self.reparameterize(mu, logvar)
        return self.decode(z), mu, logvar


def loss_function(recon_x, x, mu, logvar):
    """ELBO loss = reconstruction BCE + KL(q(z|x) || N(0, I)), both summed over the batch."""
    # binary_cross_entropy requires targets in [0, 1] — see the dataset note below.
    BCE = F.binary_cross_entropy(recon_x, x.view(-1, 784), reduction='sum')
    # Closed-form KL divergence between N(mu, sigma^2) and the standard normal prior.
    KLD = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
    return BCE + KLD


vae = VAE().to(device)
optimizer = torch.optim.Adam(vae.parameters(), lr=1e-3)


def train(epoch):
    """Run one training epoch over train_loader and print running losses."""
    vae.train()
    train_loss = 0
    for batch_idx, (data, _) in enumerate(train_loader):
        data = data.to(device)
        optimizer.zero_grad()
        recon_batch, mu, logvar = vae(data)
        loss = loss_function(recon_batch, data, mu, logvar)
        loss.backward()
        train_loss += loss.item()
        optimizer.step()
        if batch_idx % 100 == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, batch_idx * len(data), len(train_loader.dataset),
                100. * batch_idx / len(train_loader), loss.item() / len(data)))
    print('====> Epoch: {} Average loss: {:.4f}'.format(
        epoch, train_loss / len(train_loader.dataset)))


# Output directory for generated samples.
if not os.path.exists('./vae_samples'):
    os.mkdir('./vae_samples')


def save_samples(epoch):
    """Decode 64 random latent codes and save them as one image grid."""
    with torch.no_grad():
        sample = torch.randn(64, 20).to(device)
        sample = vae.decode(sample).cpu()
        save_image(sample.view(64, 1, 28, 28),
                   './vae_samples/sample_' + str(epoch) + '.png')


# Load MNIST.
# FIX: the original pipeline applied Normalize((0.1307,), (0.3081,)) after
# ToTensor, which pushes pixel values outside [0, 1] and makes
# F.binary_cross_entropy raise ("all elements of input should be between
# 0 and 1") at runtime. ToTensor alone keeps targets in the valid range,
# matching the BCE reconstruction loss above.
batch_size = 128
transform = transforms.ToTensor()
train_dataset = torchvision.datasets.MNIST(root='./data', train=True,
                                           transform=transform, download=True)
train_loader = torch.utils.data.DataLoader(train_dataset,
                                           batch_size=batch_size, shuffle=True)

# Train and save a sample grid after every epoch.
for epoch in range(1, 21):
    train(epoch)
    save_samples(epoch)
```

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值