import numpy as np
import matplotlib.pyplot as plt
# Configure matplotlib to render CJK (SimHei) glyphs in axis labels.
plt.rcParams['font.sans-serif'] = ['SimHei']

# Training data: 20 observed (x, y) pairs.
input_x = np.array(
    [0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50,
     2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75, 5.00, 5.50],
    dtype=float,
)
input_y = np.array(
    [10, 26, 23, 43, 20, 22, 43, 50, 62, 50,
     55, 75, 62, 78, 87, 76, 64, 85, 90, 98],
    dtype=float,
)

# Initial guesses for the slope (a) and intercept (b).
a = 0
b = 0
class linear_regression:
    """Univariate linear regression (y = a*x + b) fit by batch gradient descent.

    NOTE(review): class name kept lowercase for backward compatibility with
    existing callers, despite PEP 8 preferring PascalCase.
    """

    def __init__(self, a_init, b_init, x, y):
        """Store initial parameters and the training set.

        a_init, b_init: starting slope and intercept.
        x, y: 1-D numpy arrays of equal length (inputs and targets).
        """
        self.a = a_init                # slope parameter
        self.b = b_init                # intercept parameter
        self.x = x                     # training inputs
        self.y = y                     # training targets
        self.shape = self.x.shape[0]   # number of samples m

    def model(self):
        """Return predictions a*x + b over the training inputs."""
        return self.a * self.x + self.b

    def cost_function(self):
        """Half mean squared error: J = 1/(2m) * sum((y - y_hat)^2)."""
        residual = self.y - self.model()
        return 0.5 / self.shape * np.square(residual).sum()

    def optimize(self, alpha=1e-1):
        """Take one gradient-descent step and return the updated (a, b).

        alpha: learning rate; default 0.1 matches the original hard-coded
        value, so existing callers see identical behavior.
        """
        y_hat = self.model()
        da = (1.0 / self.shape) * ((y_hat - self.y) * self.x).sum()  # dJ/da
        db = (1.0 / self.shape) * (y_hat - self.y).sum()             # dJ/db
        self.a = self.a - alpha * da
        self.b = self.b - alpha * db
        return self.a, self.b

    def iterate(self, tol=1e-8, max_iters=100000):
        """Run gradient descent until the loss change drops below ``tol``,
        then plot the data and the fitted line.

        Fixes vs. the original:
          * the loss is computed once per step (it was computed twice);
          * ``max_iters`` caps the loop — if training diverged to NaN the
            original ``while True`` never terminated, because a NaN loss
            difference can never compare < tol;
          * removed an unused ``loss`` local after the loop.
        """
        prev_loss = None
        for _ in range(max_iters):
            self.a, self.b = self.optimize()
            loss = self.cost_function()
            print("a:", self.a, " b:", self.b)
            print("loss:", loss)
            if prev_loss is not None and np.abs(loss - prev_loss) < tol:
                break
            prev_loss = loss
        y_hat = self.model()
        plt.scatter(self.x, self.y, color='red')
        plt.plot(self.x, y_hat, color="blue")
        plt.xlabel('x')
        plt.ylabel('y')
        plt.show()
if __name__ == '__main__':
    # Build the regressor from the module-level data and initial
    # parameters, then train it (plots the fitted line when done).
    regressor = linear_regression(a, b, input_x, input_y)
    regressor.iterate()
# Linear regression implemented in Python
# (article footer — latest recommended article published 2023-07-13 14:01:30)