# Course: 《PyTorch深度学习实践》 ("PyTorch Deep Learning Practice")
# Dataset download: https://pan.baidu.com/s/1koPyBGQ7ORaH1OHGH9IO8Q
# Extraction code: jph5
# PyTorch
import torch
from torch import nn
from torch import optim
from torch.utils import data
# NumPy
import numpy as np
class DiabetesDataset(data.Dataset):
    """Map-style Dataset over a comma-separated numeric file.

    Each row of the file is one sample: every column except the last is a
    feature, and the last column is the label, exposed with shape (N, 1)
    so it lines up with the model's single sigmoid output.
    """

    def __init__(self, path):
        # Load the whole file eagerly as float32 (what nn.Linear expects).
        raw = np.loadtxt(path, delimiter=',', dtype=np.float32)
        self.cor = raw
        self.len = raw.shape[0]
        # Split into feature matrix and label column; from_numpy shares
        # memory with the loaded array (no copy).
        self.x_data = torch.from_numpy(raw[:, :-1])
        self.y_data = torch.from_numpy(raw[:, -1]).reshape(-1, 1)

    def __getitem__(self, index):
        """Return the (features, label) pair at `index`."""
        return self.x_data[index], self.y_data[index]

    def __len__(self):
        """Return the number of samples (rows) in the file."""
        return self.len
class Model(nn.Module):
    """Three-layer fully-connected net for binary classification.

    Architecture: input_dim -> 6 -> 4 -> out_dim, with ReLU between the
    hidden layers and a sigmoid on the output so it can feed nn.BCELoss.

    Args:
        input_dim: number of input features per sample.
        out_dim: number of output units (default 1 — a single probability).
    """

    def __init__(self, input_dim, out_dim=1):
        super().__init__()
        self.linear1 = nn.Linear(input_dim, 6)
        self.linear2 = nn.Linear(6, 4)
        # Bug fix: `out_dim` was accepted but silently ignored (the last
        # layer hard-coded an output size of 1). Wiring it through is
        # backward-compatible because the default is still 1.
        self.linear3 = nn.Linear(4, out_dim)
        self.activate = nn.ReLU()

    def forward(self, x):
        """Map a (batch, input_dim) tensor to (batch, out_dim) probabilities."""
        _y1 = self.activate(self.linear1(x))
        _y2 = self.activate(self.linear2(_y1))
        # Sigmoid squashes logits into (0, 1) as required by BCELoss.
        _y3 = torch.sigmoid(self.linear3(_y2))
        return _y3
# ---- Training configuration (module level) ----
batch_size = 62

# Dataset path is relative to this script's working directory.
train_data = DiabetesDataset('../data/diabetes.csv')
train_iter = data.DataLoader(
    dataset=train_data,
    batch_size=batch_size,
    shuffle=True,      # reshuffle samples every epoch
    num_workers=1
)
net = Model(8)                    # the diabetes data has 8 feature columns
criterion = nn.BCELoss()          # expects sigmoid-activated outputs in (0, 1)
optimizer = optim.SGD(net.parameters(), lr=0.03)
num_epochs = 100
# Number of mini-batches per epoch, used to average the epoch loss.
# Bug fix: the original hand-rolled ceiling division tested
# `len(train_iter) % batch_size` (batch *count* modulo batch size) instead
# of `len(train_data) % batch_size`, which gave a wrong count whenever the
# dataset size divided evenly. len(DataLoader) already is the batch count.
num_iter = len(train_iter)
if __name__ == '__main__':
    # Standard supervised training loop: forward pass, loss, backward
    # pass, parameter step — repeated for num_epochs full passes.
    for epoch in range(num_epochs):
        running_loss = 0.0
        for features, labels in train_iter:
            preds = net(features)
            batch_loss = criterion(preds, labels)
            running_loss += batch_loss.item()
            # Clear stale gradients before backprop, then update weights.
            optimizer.zero_grad()
            batch_loss.backward()
            optimizer.step()
        # Report the mean per-batch loss for this epoch.
        print('第{}轮,loss为{:.8f}'.format(epoch + 1, running_loss / num_iter))