- 单特征线性回归Linear Regression With One Feature
- 多特征线性回归Linear Regression With Multiple Features
线性回归LinearRegression假设函数(One feature): h(x) = w * x + b
损失函数(平方差损失函数MSE): J(w, b) = 1 / (2m) * Σ(h(xᵢ) - yᵢ)²
优化器(梯度下降Gradient descent): w := w - α * ∂J/∂w, b := b - α * ∂J/∂b
"""
@Title: linear_regression_with_one_feature
@Time: 2024/2/21
@Author: Michael Jie
"""
import random
import numpy as np
from sklearn.linear_model import LinearRegression
print("----------手动实现LinearRegression----------")
# Training data sampled from the target line h = 2.5 * x + 1.7
x = [2, 4.3, 2.7, 9, 0.5, 17, 26, 14.9, 20, 24.8]
y = [6.7, 12.45, 8.45, 24.2, 2.95, 44.2, 66.7, 38.95, 51.7, 63.7]
# Random initial guess for slope w and intercept b, each drawn from [0, 1)
w, b = random.random(), random.random()
print("初始化训练参数为:({w}, {b})".format(w=w, b=b))
x = np.array(x)
y = np.array(y)
m = len(x)  # number of training samples
# Convergence threshold on the loss
epsilon = 1e-5
# Learning rate
alpha = 5e-3
# Maximum number of gradient-descent updates
max_iter = 3000
num = 0  # updates performed so far
while True:
    # Hypothesis: predictions of the current model, h(x) = w * x + b
    h = w * x + b
    # MSE loss: J(w, b) = 1/(2m) * sum((h - y)^2)
    j = 1 / (2 * m) * np.sum((h - y) ** 2)
    # Stop once converged or the iteration budget is exhausted.
    # FIX: the original condition `num > max_iter` performed 3001 updates;
    # `>=` caps the loop at exactly max_iter updates.
    if j < epsilon or num >= max_iter:
        break
    # Gradient-descent step; both partial derivatives use the same (pre-update) h,
    # so w and b are updated simultaneously.
    w -= alpha * (1 / m * np.sum((h - y) * x))
    b -= alpha * (1 / m * np.sum(h - y))
    num += 1
    if num % 500 == 0:
        print("第{num}次训练,损失为:{j}".format(num=num, j=j))
# Report the manually trained parameters and a sample prediction at x = 10.
print("训练后参数为:({w}, {b})".format(w=w, b=b))
print("输入10的预测值为:{y}".format(y=w * 10 + b))
print("----------使用sklearn实现LinearRegression----------")
# sklearn expects a 2-D design matrix of shape (n_samples, n_features)
x = x.reshape(-1, 1)
linear = LinearRegression().fit(x, y)
print("训练后参数为:({w}, {b})".format(w=linear.coef_, b=linear.intercept_))
print("输入10的预测值为:{y}".format(y=linear.predict(np.array([[10]]))))
# Sample run transcript (exact values vary with the random initialisation):
"""
----------手动实现LinearRegression----------
初始化训练参数为:(0.402435804175055, 0.33747677958871636)
第500次训练,损失为:0.04622723094172337
第1000次训练,损失为:0.0074798593995082825
第1500次训练,损失为:0.001210288730184651
第2000次训练,损失为:0.00019583239900314027
第2500次训练,损失为:3.168692522937181e-05
训练后参数为:(2.500388231382052, 1.6926023323602641)
输入10的预测值为:26.69648464618078
----------使用sklearn实现LinearRegression----------
训练后参数为:([2.5], 1.6999999999999957)
输入10的预测值为:[26.7]
"""