# UP: B站-刘二大人 (Bilibili tutorial author)
# 原视频链接: (original video link)
import torch
import matplotlib.pyplot as plt
import numpy as np
# 准备数据集
x_data = torch.tensor([[1.0], [2.0], [3.0], [4.0]])
y_data = torch.tensor([[0.0], [0.0], [1.0], [1.0]])
# 设计模型
class LogisticRegressionModel(torch.nn.Module):
    """Logistic regression: one linear layer followed by a sigmoid.

    Maps a single input feature to a probability in (0, 1).
    """

    def __init__(self):
        super(LogisticRegressionModel, self).__init__()
        # One input feature -> one output logit.
        self.linear = torch.nn.Linear(1, 1)

    def forward(self, x):
        """Return sigmoid(w*x + b) for input tensor x of shape (N, 1)."""
        # torch.sigmoid replaces the deprecated torch.nn.functional.sigmoid.
        y_pred = torch.sigmoid(self.linear(x))
        return y_pred
# Instantiate the model
model = LogisticRegressionModel()
# Binary cross-entropy loss, summed over the batch
criterion = torch.nn.BCELoss(reduction='sum')
# Plain stochastic gradient descent
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
# Buffers recording the training trajectory for the plots below
w_list, b_list, loss_list, epoch_list = [], [], [], []
# Training loop
for epoch in range(1000):
    # Forward pass: predicted probabilities for the whole (tiny) dataset.
    y_pred = model(x_data)
    loss = criterion(y_pred, y_data)
    # .item() is the supported way to read a scalar tensor; the old
    # .data.item() goes through the deprecated .data attribute.
    print("epoch:", epoch, "loss:", loss.item())
    # Clear accumulated gradients before backprop.
    optimizer.zero_grad()
    # Backward pass
    loss.backward()
    # Parameter update
    optimizer.step()
    # Record state for the plots below.
    epoch_list.append(epoch)
    loss_list.append(loss.item())
    w_list.append(model.linear.weight.item())
    b_list.append(model.linear.bias.item())
print("w =", model.linear.weight.item())
print("b =", model.linear.bias.item())
# Inference: estimated pass probability after 5 hours of study.
x_test = torch.tensor([[5.0]])
y_test = model(x_test)
print("y_pred =", y_test.data.item())
# 2-D curve: loss versus epoch
plt.plot(epoch_list, loss_list)
plt.xlabel("epoch")
plt.ylabel("loss")
plt.show()
# 3-D scatter: loss over the recorded (weight, bias) trajectory
figure = plt.figure()
axes3d = figure.add_subplot(111, projection="3d")
axes3d.scatter(w_list, b_list, loss_list, c="r")
axes3d.set_xlabel("weight")
axes3d.set_ylabel("bias")
axes3d.set_zlabel("loss")
plt.show()
# Validate the fitted model on a denser grid of inputs
x = np.linspace(0, 10, 200)
# torch.tensor copies the numpy data as float32; view(-1, 1) reshapes it to
# a column vector (one sample per row) without hard-coding the sample count,
# unlike the original view((200, 1)).
x_t = torch.tensor(x, dtype=torch.float32).view(-1, 1)
y_t = model(x_t)
y = y_t.data.numpy()  # pull the predictions out as a numpy array
plt.plot(x, y, c="blue")
# Horizontal line at the 0.5 decision threshold
plt.plot([0, 10], [0.5, 0.5], c='r')
plt.xlabel("Hours")
plt.ylabel("Probability of Pass")
plt.grid()  # show grid lines
plt.show()