# Linear regression mini-project
# House price prediction
import numpy as np
import random
import matplotlib_inline.backend_inline
import matplotlib.pyplot as plt
import torch
# Build a synthetic house-price dataset: `samples` rows of `features`
# standard-normal inputs, labeled by the ground-truth linear model
# w_0 . x + b_0 plus small Gaussian observation noise.
samples = 1000
features = 2
w_0 = [2, -3.4]
b_0 = 4.2
data = torch.from_numpy(np.random.normal(0, 1, (samples, features))).float()
labels = data[:, 0] * w_0[0] + data[:, 1] * w_0[1] + b_0
noise = torch.from_numpy(np.random.normal(0, 0.1, labels.size())).float()
labels = labels + noise
print(data.size(), labels.size())
# Visual sanity check: scatter-plot the labels against one feature column.
def get_scatter(figure_size, x, y):
    """Render a red scatter plot of y vs. x at the given figure size (SVG)."""
    plt.rcParams['figure.figsize'] = figure_size
    matplotlib_inline.backend_inline.set_matplotlib_formats('svg')
    plt.scatter(x, y, s=1, c='r')
    plt.show()


get_scatter([3.5, 2.5], data[:, 1], labels)
# Minibatch loader for the dataset.
def iter_data(batchsize, data, labels):
    """Yield shuffled minibatches of (data, labels).

    Args:
        batchsize: number of rows per batch.
        data: feature tensor of shape (N, num_features).
        labels: target tensor of shape (N,).

    Yields:
        (batch_data, batch_labels) tensor pairs. A trailing remainder of
        fewer than ``batchsize`` rows is dropped (same as the original).
    """
    # Fix: derive the row count from the data itself instead of the
    # module-level ``samples`` global, so the loader works for any dataset.
    n = len(data)
    index = list(range(n))
    random.shuffle(index)
    for batch in range(n // batchsize):
        # The slice end never exceeds n here, so no min() clamp is needed.
        j = torch.LongTensor(index[batchsize * batch:batchsize * (batch + 1)])
        yield data.index_select(0, j), labels.index_select(0, j)
# Parameter initialization.
def init(num_features=None):
    """Initialize the linear model's parameters.

    Args:
        num_features: number of input features. Defaults to the
            module-level ``features`` global for backward compatibility.

    Returns:
        (w, b): weight tensor of shape (num_features, 1) drawn from
        N(0, 0.1) and a scalar bias initialized to zero, both created
        with ``requires_grad=True`` so autograd tracks them.
    """
    if num_features is None:
        # Generalization: callers may now pass an explicit feature count;
        # the old no-argument call keeps using the script-level global.
        num_features = features
    w = torch.tensor(np.random.normal(0, 0.1, (num_features, 1)),
                     requires_grad=True, dtype=torch.float32)
    b = torch.zeros(1, requires_grad=True, dtype=torch.float32)
    return w, b
# Forward pass of the linear regression model.
def forward(x, w, b):
    """Return the affine prediction ``x @ w + b`` (shape (N, 1))."""
    return torch.matmul(x, w) + b
def loss(y_hat, y):
    """Elementwise halved squared error: (y_hat - y)^2 / 2.

    ``y`` is reshaped to ``y_hat``'s size so (N,) targets broadcast
    correctly against (N, 1) predictions instead of producing an (N, N)
    matrix.
    """
    diff = y_hat - y.view(y_hat.size())
    return diff ** 2 / 2
def Parms_grad(Parms, lr, batch_size):
    """Vanilla minibatch SGD step: p <- p - lr * p.grad / batch_size.

    Writes through ``param.data`` so the update itself is not tracked by
    autograd. NOTE: ``param.grad`` is a tensor attribute, not a method —
    calling ``grad()`` raises ``TypeError: 'Tensor' object is not callable``.
    """
    scale = lr / batch_size
    for param in Parms:
        param.data -= scale * param.grad
if __name__ == '__main__':
    # Training hyper-parameters.
    epochs = 10
    lr = 0.01
    batchsize = 10
    # Initialize the parameters w, b.
    w, b = init()
    print(w, b)
    loss_epoch = 0
    for epoch in range(epochs):
        for (X, y) in iter_data(batchsize, data, labels):
            # Forward pass.
            y_hat = forward(X, w, b)
            # Summed squared-error loss over the minibatch.
            l = loss(y_hat, y).sum()
            # Backward pass: populates w.grad and b.grad.
            l.backward()
            Parms_grad([w, b], lr, batchsize)
            # Zero the gradients so the next minibatch starts fresh.
            w.grad.data.zero_()
            b.grad.data.zero_()
            # Fix: accumulate a detached Python float; the old
            # ``loss_epoch += l`` kept every minibatch's autograd graph
            # alive for the whole epoch.
            loss_epoch += float(l)
        # Fix: report the mean per-sample loss. The old code divided the
        # epoch total by the batch size, which mislabels the metric.
        train_loss = loss_epoch / samples
        loss_epoch = 0
        print("epoch{} train_loss:{}".format(epoch, train_loss))
    print('w,b:', w, b)