一、PyTorch线性模型的建立
PyTorch构建模型一般分为四块:1、数据集准备;2、设计模型;3、构建loss函数和优化器;4、训练
1.准备数据
在后面讲
2.构造模型 (源自于nn.Module)
class LinearModel(torch.nn.Module):
    """Linear regression model y = w*x + b built on nn.Linear.

    A model subclass must define both __init__() and forward().
    """

    def __init__(self):
        # Calling the parent constructor is mandatory before registering
        # any submodules.
        super(LinearModel, self).__init__()
        # nn.Linear(in_features, out_features, bias=True) performs the
        # affine transform automatically.
        self.linear = torch.nn.Linear(1, 1)

    def forward(self, x):
        # Forward pass: a single linear layer produces the prediction.
        y_pred = self.linear(x)
        return y_pred


# A module instance is callable: model(x) invokes forward(x).
model = LinearModel()
3.构造loss函数和优化器(using PyTorch API)
# MSELoss subclasses nn.Module. The size_average/reduce keyword args are
# deprecated; use reduction='sum' (or 'mean') instead.
criterion = torch.nn.MSELoss(reduction='sum')  # the criterion takes (y_hat, y)
# BCELoss: binary cross-entropy for classification. Note the capitalization
# (BCELoss, not BCEloss). This reassignment overwrites the MSE criterion —
# pick one depending on the task.
criterion = torch.nn.BCELoss(reduction='sum')
# Optimizer: stochastic gradient descent over the model's parameters.
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
4.训练过程 (forward, backward, update)
# Training loop: forward, backward, update.
for epoch in range(100):
    y_pred = model(x_data)            # forward: compute y_hat
    loss = criterion(y_pred, y_data)  # loss between prediction and target
    print(epoch, loss.item())         # .item() extracts the Python scalar
    optimizer.zero_grad()             # clear accumulated gradients
    loss.backward()                   # backward: compute gradients
    optimizer.step()                  # update the parameters
5.最后打印出结果
# Output the learned weight and bias; .item() converts a one-element
# tensor to a plain Python number.
# Fixed typos from the original: liear -> linear, bais -> bias (either
# would have raised AttributeError).
print('w=', model.linear.weight.item())
print('b = ', model.linear.bias.item())
6.测试阶段
# Inference: feed a fresh sample through the trained model and
# print its prediction.
x_test = torch.Tensor([[4.0]])
print('y_pred = ', model(x_test).data)
7.添加sigmoid函数后的模型
# The module is torch.nn.functional (not "function"); it hosts many
# stateless ops, including activation functions.
import torch.nn.functional as F


class LogisticRegressionModel(torch.nn.Module):
    """Logistic regression: a linear layer followed by a sigmoid."""

    def __init__(self):
        super(LogisticRegressionModel, self).__init__()
        # Lowercase attribute name so forward() references the same
        # attribute assigned here (the original mixed Linear/linear,
        # which would raise AttributeError).
        self.linear = torch.nn.Linear(1, 1)

    def forward(self, x):
        # Apply sigmoid after the linear transform so the output lies
        # in (0, 1). torch.sigmoid is preferred; F.sigmoid is deprecated.
        y_pred = torch.sigmoid(self.linear(x))
        return y_pred
二、处理多维特征的输入
import torch


class Model(torch.nn.Module):
    """8 -> 6 -> 4 -> 1 MLP with sigmoid activations for multi-feature input."""

    def __init__(self):
        # Fixed: the original wrote ___init__ (three underscores), which
        # raises AttributeError; the dunder is __init__.
        super(Model, self).__init__()
        self.linear1 = torch.nn.Linear(8, 6)
        self.linear2 = torch.nn.Linear(6, 4)
        self.linear3 = torch.nn.Linear(4, 1)
        # nn.Sigmoid is a module, so one instance can be reused per layer.
        self.sigmoid = torch.nn.Sigmoid()

    def forward(self, x):
        # Reuse x as the running activation through each layer.
        x = self.sigmoid(self.linear1(x))
        x = self.sigmoid(self.linear2(x))
        x = self.sigmoid(self.linear3(x))
        return x


model = Model()
# Loss and optimizer: binary cross-entropy (BCELoss — note the
# capitalization; the original BCEloss raises AttributeError) with
# summed reduction; size_average is deprecated in favor of reduction=.
criterion = torch.nn.BCELoss(reduction='sum')
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
# Training loop: forward, backward, update.
for epoch in range(100):
    y_pred = model(x_data)            # forward: compute y_hat
    loss = criterion(y_pred, y_data)
    print(epoch, loss.item())
    # Backward pass: clear old gradients first, then backpropagate.
    optimizer.zero_grad()
    loss.backward()
    # Update step.
    optimizer.step()
采用relu激活函数
import torch


class Model(torch.nn.Module):
    """8 -> 6 -> 4 -> 1 MLP using ReLU in hidden layers, sigmoid at the output."""

    def __init__(self):
        # Fixed: ___init__ (three underscores) -> __init__.
        super(Model, self).__init__()
        self.linear1 = torch.nn.Linear(8, 6)
        self.linear2 = torch.nn.Linear(6, 4)
        self.linear3 = torch.nn.Linear(4, 1)
        self.activate = torch.nn.ReLU()
        self.sigmoid = torch.nn.Sigmoid()

    def forward(self, x):
        x = self.activate(self.linear1(x))
        x = self.activate(self.linear2(x))
        # Keep sigmoid on the output layer so predictions stay in (0, 1)
        # and the output curve is smooth.
        x = self.sigmoid(self.linear3(x))
        return x


model = Model()