Contents
1. A rough intuition for autoencoders
The encoder compresses the original data down to a compact representation that keeps its essence. A small neural network can then learn from this compressed data instead of the raw input, which lightens the network's workload while still achieving good results.
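To make that concrete, here is a minimal sketch of the idea (illustrative only: the stand-in `encoder` and the dummy batch are assumptions, not the tutorial's trained model). Once an encoder maps 28*28 = 784 pixels down to 3 features, a tiny classifier can learn from those 3 features instead of the raw pixels:

import torch
import torch.nn as nn

# Stand-in for a trained encoder: 784 pixels -> 3 features.
encoder = nn.Sequential(nn.Linear(28 * 28, 3))

# A small downstream network that learns from the 3 features only.
classifier = nn.Sequential(
    nn.Linear(3, 16),
    nn.ReLU(),
    nn.Linear(16, 10),   # 10 digit classes
)

x = torch.rand(64, 28 * 28)   # dummy batch of flattened images
with torch.no_grad():         # freeze the encoder; only the classifier would train
    codes = encoder(x)
logits = classifier(codes)    # shape: (64, 10)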
2. Code
import torch
import torch.nn as nn
import torch.utils.data as Data
import torchvision
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
import numpy as np
# torch.manual_seed(1) # reproducible
# Hyper Parameters
EPOCH = 10              # train over the full dataset 10 times
BATCH_SIZE = 64         # number of samples per mini-batch
LR = 0.005              # learning rate
DOWNLOAD_MNIST = False  # set to False if MNIST is already downloaded
N_TEST_IMG = 5          # number of images to visualize during training
# Mnist digits dataset
train_data = torchvision.datasets.MNIST(
    root='./mnist/',    # where to save / load the data
    train=True,         # use the training split
    transform=torchvision.transforms.ToTensor(),    # converts a PIL.Image or numpy.ndarray to a
                                                    # torch.FloatTensor of shape (C x H x W), normalized to [0.0, 1.0]
    download=DOWNLOAD_MNIST,    # download if needed
)
# plot one example
print(train_data.data.size()) # (60000, 28, 28)
print(train_data.targets.size()) # (60000)
plt.imshow(train_data.data[2].numpy(), cmap='gray')
plt.title('%i' % train_data.targets[2])
plt.show()
# Data Loader for easy mini-batch return in training; the image batch shape will be (64, 1, 28, 28)
train_loader = Data.DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True)
class AutoEncoder(nn.Module):
    def __init__(self):
        super(AutoEncoder, self).__init__()
        self.encoder = nn.Sequential(       # nn.Sequential keeps the layer stack concise
            nn.Linear(28*28, 128),          # input layer: 28*28 flattened pixels
            nn.Tanh(),
            nn.Linear(128, 64),             # compress
            nn.Tanh(),
            nn.Linear(64, 12),              # compress further
            nn.Tanh(),
            nn.Linear(12, 3),               # compress to 3 features, which can be visualized in plt
        )
        self.decoder = nn.Sequential(       # decoder: mirror of the encoder
            nn.Linear(3, 12),               # expand back toward the original size
            nn.Tanh(),
            nn.Linear(12, 64),
            nn.Tanh(),
            nn.Linear(64, 128),
            nn.Tanh(),
            nn.Linear(128, 28*28),
            nn.Sigmoid(),                   # squash outputs into (0, 1), matching ToTensor's pixel range
        )

    def forward(self, x):
        encoded = self.encoder(x)
        decoded = self.decoder(encoded)
        return encoded, decoded
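# Shape sanity check (a quick sketch, assuming BATCH_SIZE = 64):
#   input  b_x : (64, 784)
#   encoded    : (64, 3)    -> the 3 features plotted in 3D later
#   decoded    : (64, 784)  -> reconstruction compared against b_x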
# instantiate the autoencoder
autoencoder = AutoEncoder()
# optimizer
optimizer = torch.optim.Adam(autoencoder.parameters(), lr=LR)
loss_func = nn.MSELoss()    # mean squared error between reconstruction and target
# initialize figure
f, a = plt.subplots(2, N_TEST_IMG, figsize=(5, 2))
plt.ion() # continuously plot
# original data (first row) for viewing
view_data = train_data.data[:N_TEST_IMG].view(-1, 28*28).type(torch.FloatTensor)/255.
for i in range(N_TEST_IMG):
    a[0][i].imshow(np.reshape(view_data.data.numpy()[i], (28, 28)), cmap='gray'); a[0][i].set_xticks(()); a[0][i].set_yticks(())
# start training
for epoch in range(EPOCH):
    for step, (x, b_label) in enumerate(train_loader):
        b_x = x.view(-1, 28*28)   # batch x, shape (batch, 28*28)
        b_y = x.view(-1, 28*28)   # batch y, shape (batch, 28*28); the target is the input itself

        encoded, decoded = autoencoder(b_x)

        loss = loss_func(decoded, b_y)      # mean squared reconstruction error
        optimizer.zero_grad()               # clear gradients for this training step
        loss.backward()                     # backpropagation, compute gradients
        optimizer.step()                    # apply gradients

        if step % 100 == 0:
            print('Epoch: ', epoch, '| train loss: %.4f' % loss.data.numpy())
            # plot decoded images (second row)
            _, decoded_data = autoencoder(view_data)
            for i in range(N_TEST_IMG):
                a[1][i].clear()
                a[1][i].imshow(np.reshape(decoded_data.data.numpy()[i], (28, 28)), cmap='gray')
                a[1][i].set_xticks(()); a[1][i].set_yticks(())
            plt.draw(); plt.pause(0.05)
plt.ioff()
plt.show()
# visualize the 3-feature codes in a 3D plot
view_data = train_data.data[:200].view(-1, 28*28).type(torch.FloatTensor)/255.
encoded_data, _ = autoencoder(view_data)
fig = plt.figure(2); ax = fig.add_subplot(111, projection='3d')   # Axes3D(fig) is deprecated in recent matplotlib
X, Y, Z = encoded_data.data[:, 0].numpy(), encoded_data.data[:, 1].numpy(), encoded_data.data[:, 2].numpy()
values = train_data.targets[:200].numpy()   # note: the old attribute train_labels has been renamed to targets
for x, y, z, s in zip(X, Y, Z, values):
    c = cm.rainbow(int(255*s/9)); ax.text(x, y, z, s, backgroundcolor=c)   # color each digit label by its class
ax.set_xlim(X.min(), X.max()); ax.set_ylim(Y.min(), Y.max()); ax.set_zlim(Z.min(), Z.max())
plt.show()
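The script above does not persist the trained model. As a minimal follow-up sketch (the file name autoencoder_mnist.pth is an assumption, not from the original), the standard PyTorch state_dict APIs can save the weights and reload them later for feature extraction:

# Hypothetical follow-up, not part of the original script:
# save the trained weights, then reload them for inference.
torch.save(autoencoder.state_dict(), './autoencoder_mnist.pth')  # path is an assumption

restored = AutoEncoder()
restored.load_state_dict(torch.load('./autoencoder_mnist.pth'))
restored.eval()                      # switch to inference mode
with torch.no_grad():                # no gradients needed for feature extraction
    codes, _ = restored(view_data)   # 3-feature codes for the 200 sample images
print(codes.shape)                   # torch.Size([200, 3])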
3. Output
torch.Size([60000, 28, 28])
torch.Size([60000])
Epoch: 0 | train loss: 0.2335
Epoch: 0 | train loss: 0.0676
Epoch: 0 | train loss: 0.0648
Epoch: 0 | train loss: 0.0628
Epoch: 0 | train loss: 0.0570
Epoch: 0 | train loss: 0.0577
Epoch: 0 | train loss: 0.0558
Epoch: 0 | train loss: 0.0496
Epoch: 0 | train loss: 0.0477
Epoch: 0 | train loss: 0.0531
Epoch: 1 | train loss: 0.0462
Epoch: 1 | train loss: 0.0507
Epoch: 1 | train loss: 0.0484
Epoch: 1 | train loss: 0.0460
Epoch: 1 | train loss: 0.0460
Epoch: 1 | train loss: 0.0456
Epoch: 1 | train loss: 0.0422
Epoch: 1 | train loss: 0.0454
Epoch: 1 | train loss: 0.0447
Epoch: 1 | train loss: 0.0408
Epoch: 2 | train loss: 0.0419
Epoch: 2 | train loss: 0.0425
Epoch: 2 | train loss: 0.0411
Epoch: 2 | train loss: 0.0434
Epoch: 2 | train loss: 0.0391
Epoch: 2 | train loss: 0.0426
Epoch: 2 | train loss: 0.0449
Epoch: 2 | train loss: 0.0414
Epoch: 2 | train loss: 0.0420
Epoch: 2 | train loss: 0.0394
Epoch: 3 | train loss: 0.0401
Epoch: 3 | train loss: 0.0406
Epoch: 3 | train loss: 0.0382
Epoch: 3 | train loss: 0.0424
Epoch: 3 | train loss: 0.0431
Epoch: 3 | train loss: 0.0429
Epoch: 3 | train loss: 0.0453
Epoch: 3 | train loss: 0.0388
Epoch: 3 | train loss: 0.0410
Epoch: 3 | train loss: 0.0442
Epoch: 4 | train loss: 0.0371
Epoch: 4 | train loss: 0.0388
Epoch: 4 | train loss: 0.0401
Epoch: 4 | train loss: 0.0376
Epoch: 4 | train loss: 0.0375
Epoch: 4 | train loss: 0.0390
Epoch: 4 | train loss: 0.0405
Epoch: 4 | train loss: 0.0376
Epoch: 4 | train loss: 0.0376
Epoch: 4 | train loss: 0.0351
Epoch: 5 | train loss: 0.0368
Epoch: 5 | train loss: 0.0350
Epoch: 5 | train loss: 0.0383
Epoch: 5 | train loss: 0.0374
Epoch: 5 | train loss: 0.0394
Epoch: 5 | train loss: 0.0356
Epoch: 5 | train loss: 0.0337
Epoch: 5 | train loss: 0.0387
Epoch: 5 | train loss: 0.0391
Epoch: 5 | train loss: 0.0369
Epoch: 6 | train loss: 0.0365
Epoch: 6 | train loss: 0.0347
Epoch: 6 | train loss: 0.0376
Epoch: 6 | train loss: 0.0385
Epoch: 6 | train loss: 0.0367
Epoch: 6 | train loss: 0.0362
Epoch: 6 | train loss: 0.0376
Epoch: 6 | train loss: 0.0369
Epoch: 6 | train loss: 0.0366
Epoch: 6 | train loss: 0.0372
Epoch: 7 | train loss: 0.0354
Epoch: 7 | train loss: 0.0349
Epoch: 7 | train loss: 0.0346
Epoch: 7 | train loss: 0.0389
Epoch: 7 | train loss: 0.0394
Epoch: 7 | train loss: 0.0360
Epoch: 7 | train loss: 0.0356
Epoch: 7 | train loss: 0.0377
Epoch: 7 | train loss: 0.0364
Epoch: 7 | train loss: 0.0337
Epoch: 8 | train loss: 0.0369
Epoch: 8 | train loss: 0.0398
Epoch: 8 | train loss: 0.0349
Epoch: 8 | train loss: 0.0368
Epoch: 8 | train loss: 0.0333
Epoch: 8 | train loss: 0.0346
Epoch: 8 | train loss: 0.0378
Epoch: 8 | train loss: 0.0361
Epoch: 8 | train loss: 0.0343
Epoch: 8 | train loss: 0.0342
Epoch: 9 | train loss: 0.0346
Epoch: 9 | train loss: 0.0379
Epoch: 9 | train loss: 0.0343
Epoch: 9 | train loss: 0.0349
Epoch: 9 | train loss: 0.0355
Epoch: 9 | train loss: 0.0380
Epoch: 9 | train loss: 0.0329
Epoch: 9 | train loss: 0.0364
Epoch: 9 | train loss: 0.0373
Epoch: 9 | train loss: 0.0347