import torch
from torch.autograd import Variable
# Logistic regression on a tiny 1-D dataset, trained with plain gradient
# descent via autograd. Decision rule: predict class 1 when w*x + b > 0.
torch.manual_seed(2)  # no-op here (all init is deterministic); kept for reproducibility
x_data = torch.tensor([[1.0], [2.0], [3.0], [4.0]])
y_data = torch.tensor([[0.0], [0.0], [1.0], [1.0]])

# Parameter initialization: leaf tensors tracked by autograd.
# (Replaces the deprecated torch.autograd.Variable wrapper.)
w = torch.tensor([-1.0], requires_grad=True)
b = torch.tensor([0.0], requires_grad=True)
epochs = 100
costs = []  # per-epoch loss history (plain floats)
lr = 0.1    # learning rate

print("before training, predict of x = 1.5 is:")
print("y_pred =", float(w.data * 1.5 + b.data > 0))

# Model training: full-batch gradient descent on binary cross-entropy.
for epoch in range(epochs):
    # Forward pass: logistic (sigmoid) activation.
    A = torch.sigmoid(w * x_data + b)
    # Binary cross-entropy loss.
    # An L2-regularized variant would add + alpha * w**2 here.
    J = -torch.mean(y_data * torch.log(A) + (1 - y_data) * torch.log(1 - A))
    costs.append(J.item())
    J.backward()  # autograd computes dJ/dw and dJ/db
    # Parameter update; no_grad so the update itself is not tracked.
    with torch.no_grad():
        w -= lr * w.grad
        b -= lr * b.grad
    w.grad.zero_()
    b.grad.zero_()

print("after training, predict of x = 1.5 is:")
print("y_pred =", float(w.data * 1.5 + b.data > 0))
print(w.data, b.data)
# Sample output from a previous run (was pasted as bare text, which made the
# file a SyntaxError; preserved here as a comment):
#   before training, predict of x = 1.5 is:
#   y_pred = 0.0
#   after training, predict of x = 1.5 is:
#   y_pred = 0.0
#   tensor([ 0.6075]) tensor([-0.9949])
#   [Finished in 0.4s]