1. Principle
Single sample (stochastic gradient descent): good randomness, which helps the optimizer cross saddle points, but training is slow.
Full batch: makes the most of vectorized computation, so it is the most efficient per epoch, but the resulting model tends to perform worse.
Mini-batch: a balance between model performance and computation speed (see the sketch below).
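A minimal sketch of how the three regimes map onto DataLoader's batch_size argument; the toy tensors here are made up just so the loaders are runnable.

import torch
from torch.utils.data import TensorDataset, DataLoader
# hypothetical toy data: 100 samples, 8 features, one binary label each
X = torch.randn(100, 8)
y = torch.randint(0, 2, (100, 1)).float()
dataset = TensorDataset(X, y)
sgd_loader = DataLoader(dataset, batch_size=1, shuffle=True)  # single sample: noisy updates, good at crossing saddle points
batch_loader = DataLoader(dataset, batch_size=len(dataset))  # full batch: best use of vectorized computation
mini_loader = DataLoader(dataset, batch_size=32, shuffle=True)  # mini-batch: the usual compromise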
2. New concepts
Dataset: an abstract class that cannot be instantiated directly; subclass it and implement __getitem__ and __len__.
DataLoader: loads samples from a Dataset, handling shuffling, batching, and parallel workers (a minimal sketch follows).
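A minimal sketch of the contract, using a hypothetical toy dataset named SquaresDataset: a subclass only has to provide __getitem__ and __len__, and DataLoader takes care of shuffling and batching.

from torch.utils.data import Dataset, DataLoader
class SquaresDataset(Dataset):
    def __len__(self):
        return 10
    def __getitem__(self, index):
        return index, index ** 2  # one (feature, label) pair at a time
loader = DataLoader(SquaresDataset(), batch_size=4, shuffle=True)
for xs, ys in loader:
    print(xs, ys)  # each iteration yields a collated mini-batch of tensors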
3. Code implementation
import torch
import numpy as np
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
# prepare dataset
class DiabetesDataset(Dataset):
    def __init__(self, filepath):
        xy = np.loadtxt(filepath, delimiter=',', dtype=np.float32)
        self.len = xy.shape[0]
        self.x_data = torch.from_numpy(xy[:, :-1])   # every column except the last is a feature
        self.y_data = torch.from_numpy(xy[:, [-1]])  # the last column is the label, kept 2-D
    def __getitem__(self, index):
        return self.x_data[index], self.y_data[index]
    def __len__(self):
        return self.len  # length of the dataset; samples are fetched one index at a time, so not everything has to be pulled into memory at once
'''
DataLoader(dataset=dataset,  # the Dataset to draw samples from
           batch_size=32,    # number of samples per mini-batch
           shuffle=True,     # whether to reshuffle at every epoch
           num_workers=2)    # number of parallel worker processes
'''
dataset = DiabetesDataset('diabetes.csv')
train_loader = DataLoader(dataset=dataset, batch_size=32, shuffle=True, num_workers=2)
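# Shape check (illustrative): with 8 feature columns and batch_size=32, each
# iteration of train_loader yields inputs of shape [32, 8] and labels of
# shape [32, 1]; the last batch of an epoch may be smaller.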
# design model using class
class Model(torch.nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        self.linear1 = torch.nn.Linear(8, 6)   # 8 input features
        self.linear2 = torch.nn.Linear(6, 4)
        self.linear3 = torch.nn.Linear(4, 1)   # 1 output probability
        self.sigmoid = torch.nn.Sigmoid()
    def forward(self, x):
        x = self.sigmoid(self.linear1(x))
        x = self.sigmoid(self.linear2(x))
        x = self.sigmoid(self.linear3(x))
        return x
model = Model()
# construct loss and optimizer
criterion = torch.nn.BCELoss(reduction='mean')  # size_average is deprecated; reduction='mean' is the current equivalent
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
'''
On Windows, multiprocessing starts DataLoader workers with spawn, which
re-imports this script; without the `if __name__ == '__main__'` guard below,
that re-import re-runs the loading code and raises an error.
'''
# training cycle forward, backward, update
if __name__ == '__main__':
    for epoch in range(100):
        for i, data in enumerate(train_loader, 0):  # train_loader shuffles first, then splits into mini-batches
            inputs, labels = data
            y_pred = model(inputs)
            loss = criterion(y_pred, labels)
            print(epoch, i, loss.item())
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
4. Printed results
0 0 0.686749279499054
0 1 0.6498836278915405
0 2 0.6486786603927612
0 3 0.6604384183883667
0 4 0.659893274307251
0 5 0.6793041825294495
0 6 0.6526774764060974
0 7 0.6324378252029419
0 8 0.6785942912101746
0 9 0.6788842678070068
...
99 17 0.6630704402923584
99 18 0.6430260539054871
99 19 0.764205813407898
99 20 0.5634077787399292
99 21 0.7027015089988708
99 22 0.6431108713150024
99 23 0.7569079995155334
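Once training finishes, the trained model can be used for prediction. A minimal inference sketch follows; the 0.5 threshold and the reuse of a training batch are illustrative choices, not part of the original code, and on Windows this must also sit under the if __name__ == '__main__' guard.

with torch.no_grad():  # gradients are not needed for prediction
    inputs, labels = next(iter(train_loader))
    probs = model(inputs)  # sigmoid outputs in (0, 1)
    preds = (probs >= 0.5).float()  # illustrative 0.5 decision threshold
    accuracy = (preds == labels).float().mean().item()
    print('batch accuracy:', round(accuracy, 3))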