class LogisticRegression(nn.Module):
    """Single-layer logistic regression: a 2 -> 1 linear map followed by a sigmoid.

    The sigmoid output lies in (0, 1) and is read as P(class=1), so the model
    pairs with ``nn.BCELoss`` for binary classification.
    """

    def __init__(self):
        super().__init__()
        # Linear transform y = x @ A.T + b supplies the learnable weights/bias.
        self.lr = nn.Linear(2, 1)
        # Sigmoid activation squashes the linear output into (0, 1).
        self.sm = nn.Sigmoid()

    def forward(self, x):
        """Map a batch of 2-feature rows (N, 2) to probabilities (N, 1)."""
        x = self.lr(x)
        x = self.sm(x)
        return x
# Define the loss function and the optimizer
# Instantiate the model, the loss criterion, and the optimizer.
net = LogisticRegression()
# Binary cross-entropy — the standard loss for 2-class problems with sigmoid outputs.
criterion = nn.BCELoss()
# Stochastic gradient descent with momentum 0.9 and learning rate 1e-3.
optimizer = torch.optim.SGD(net.parameters(), lr=1e-3, momentum=0.9)
# Define the training procedure
def train(net, X_data, y_data, batch_size, epoch):
    """Train ``net`` on (X_data, y_data) for ``epoch`` epochs.

    Uses the module-level ``criterion`` and ``optimizer``, and calls
    ``evaluate`` on the full data after every epoch.

    Returns a history dict: {"loss": [per-epoch loss], "acc": [per-epoch acc]}.
    """
    # Move model and data to the GPU when one is available.
    # NOTE: the deprecated Variable wrapper was dropped — tensors carry
    # autograd state directly since PyTorch 0.4.
    if torch.cuda.is_available():
        net = net.cuda()
        X_data = X_data.cuda()
        y_data = y_data.cuda()
    # Build a mini-batch loader over the full dataset.
    dataset = Data.TensorDataset(X_data, y_data)
    train_iter = Data.DataLoader(dataset, batch_size=batch_size)
    net.train()
    history = {"loss": [], "acc": []}
    for i in range(epoch):
        all_loss = 0  # running sum of batch losses (diagnostic only)
        for X, y in train_iter:
            # Forward pass: predicted probabilities for this batch.
            y_pred = net(X)
            # Reshape targets to (batch, 1) to match the model output.
            loss = criterion(y_pred, y.view(len(y), -1))
            # Standard step: clear old grads, backprop, update parameters.
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            all_loss += loss.item()  # .item() extracts a Python float
        print('epoch {}'.format(i + 1), end=' ')
        # Evaluate on the full training data; record the epoch-level metrics.
        loss, acc = evaluate(net, X_data, y_data, batch_size)
        history['loss'].append(loss)
        history['acc'].append(acc)
    return history
# Define the model evaluation
def evaluate(net, X_data, y_data, batch_size):
    """Evaluate ``net`` on (X_data, y_data); print and return (loss, acc).

    Uses the module-level ``criterion``. Returns the summed batch loss as a
    Python float and the accuracy as a fraction in [0, 1].
    """
    if torch.cuda.is_available():
        net = net.cuda()
        X_data = X_data.cuda()
        y_data = y_data.cuda()
    dataset = Data.TensorDataset(X_data, y_data)
    test_iter = Data.DataLoader(dataset, batch_size=batch_size)
    net.eval()
    correct = 0
    all_loss = 0.0
    # No gradients are needed for evaluation; no_grad avoids building the
    # autograd graph (the original accumulated live loss tensors instead).
    with torch.no_grad():
        for X, y in test_iter:
            y_pred = net(X)
            loss = criterion(y_pred, y.view(len(y), -1))
            # Threshold the probabilities at 0.5 to get hard 0/1 predictions.
            correct += ((y_pred >= 0.5).view(len(y_pred),) == y).sum().item()
            # BUG FIX: accumulate a float, not a 0-dim tensor — the original
            # returned a tensor as "loss", which train() then stored in history.
            all_loss += loss.item()
    acc = correct / len(y_data)
    print("loss is {:.4f} acc is {:.4f}".format(all_loss, acc))
    return all_loss, acc
# Training
# Fit on the first 800 samples (batch size 256, 100 epochs), then plot the
# recorded loss/accuracy curves.
history = train(net, X[:800], y[:800], 256, 100)
plot_history(history)