y = w*x + b (single feature; more generally y = Σ w_i x_i + b). Linear regression via least squares: find parameters w and b, fitted on the training data so that the mean squared error of the predictions is minimized; the fitted model is then evaluated by comparing its predictions y_pred_data on x_test_data against the true regression targets y_test_data.
Formula derivation:
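This is the standard closed form that the handwritten code below implements. Minimize the squared-error cost over the n training samples,

E(w, b) = \sum_{i=1}^{n} (y_i - w x_i - b)^2,

set \partial E / \partial w = 0 and \partial E / \partial b = 0, and solve:

w = \frac{\sum_{i=1}^{n} y_i (x_i - \bar{x})}{\sum_{i=1}^{n} x_i^2 - \frac{1}{n} \left( \sum_{i=1}^{n} x_i \right)^2}, \qquad b = \frac{1}{n} \sum_{i=1}^{n} (y_i - w x_i)

where \bar{x} is the mean of the training inputs.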
Building a least-squares linear_model takes 5 steps:
1. Import packages.
from sklearn import linear_model
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import mean_squared_error, r2_score, mean_absolute_error
sklearn provides a dedicated linear-model package, linear_model; numpy is used to generate data and matplotlib to plot. We also import the three evaluation metrics: MSE, R-squared, and MAE.
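For reference, with \hat{y}_i the predictions, y_i the true targets, and \bar{y} the mean of the true targets, the three metrics are defined as:

\mathrm{MSE} = \frac{1}{n} \sum_{i=1}^{n} (\hat{y}_i - y_i)^2, \qquad \mathrm{MAE} = \frac{1}{n} \sum_{i=1}^{n} \lvert \hat{y}_i - y_i \rvert, \qquad R^2 = 1 - \frac{\sum_{i=1}^{n} (\hat{y}_i - y_i)^2}{\sum_{i=1}^{n} (y_i - \bar{y})^2}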
2. Build the dataset. You can generate data automatically (a sketch follows) or use an existing dataset; the data below comes from the homework assignment, and each sample has a single feature.
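If you would rather generate data than load a CSV, here is a minimal sketch using sklearn's make_regression; the sample count and noise level are arbitrary illustrative choices:

from sklearn.datasets import make_regression

# 30 samples with one feature and Gaussian noise; coef=True also returns the true weight
X, y, true_w = make_regression(n_samples=30, n_features=1, noise=10.0, coef=True, random_state=0)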
3. Train the model.
4. Print the coefficient w and intercept b, and run predictions on the test set.
5. Plot the results (a minimal sketch of steps 3 to 5 follows).
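Steps 3 to 5 in miniature, as a sketch on synthetic data (the full homework version follows below):

import matplotlib.pyplot as plt
from sklearn import linear_model
from sklearn.datasets import make_regression

X, y = make_regression(n_samples=30, n_features=1, noise=10.0, random_state=0)
lr = linear_model.LinearRegression()
lr.fit(X, y)  # step 3: train
print('w:', lr.coef_, 'b:', lr.intercept_)  # step 4: coefficient and intercept
order = X[:, 0].argsort()  # sort so the fitted line plots left to right
plt.scatter(X, y, color='black')  # step 5: plot
plt.plot(X[order], lr.predict(X)[order], color='blue', linewidth=3)
plt.show()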
Full code:
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import linear_model
import numpy as np
from sklearn.metrics import mean_squared_error, r2_score, mean_absolute_error
def load_data():
    data = pd.read_csv('Salary_Data.csv', encoding='gbk')
    data = data.values.tolist()
    train_x = []
    train_y = []
    test_x = []
    test_y = []
    # First half of the rows is the training set, second half the test set
    for i in range(len(data)):
        if i < len(data) / 2:
            train_x.append(data[i][0])
            train_y.append(data[i][1])
        else:
            test_x.append(data[i][0])
            test_y.append(data[i][1])
    return train_x, train_y, test_x, test_y
def model():
    print('Handwritten:')
    train_x, train_y, test_x, test_y = load_data()
    # Closed-form least-squares parameters (see the derivation above);
    # sum_x avoids shadowing the built-in sum()
    sum_x = 0.0
    sum_square = 0.0
    sum_2 = 0.0
    sum_b = 0.0
    for i in range(len(train_x)):
        sum_x = sum_x + train_x[i]
        sum_square = sum_square + train_x[i] ** 2
    ave_x = sum_x / len(train_x)
    # w = sum(y_i * (x_i - mean_x)) / (sum(x_i^2) - (sum(x_i))^2 / n)
    for i in range(len(train_x)):
        sum_2 = sum_2 + (train_y[i] * (train_x[i] - ave_x))
    w = sum_2 / (sum_square - sum_x ** 2 / len(train_x))
    # b = mean(y_i - w * x_i)
    for i in range(len(train_x)):
        sum_b = sum_b + (train_y[i] - w * train_x[i])
    b = sum_b / len(train_x)
    print('w=', w, 'b=', b)
    # Predict on the test set
    pred_y = []
    for i in range(len(test_x)):
        pred_y.append(w * test_x[i] + b)
    # Compute MSE, MAE and R^2 by hand
    sum_mse = 0.0
    sum_mae = 0.0
    for i in range(len(pred_y)):
        sum_mae = sum_mae + np.abs(pred_y[i] - test_y[i])
        sum_mse = sum_mse + (pred_y[i] - test_y[i]) ** 2
    sum_y = 0.0
    for i in range(len(test_y)):
        sum_y = sum_y + test_y[i]
    ave_y = sum_y / len(test_y)
    # R^2 = 1 - SS_res / SS_tot; SS_res is the same squared-error sum used for MSE
    sum_tot = 0.0
    for i in range(len(test_y)):
        sum_tot = sum_tot + (ave_y - test_y[i]) ** 2
    print('MSE:', sum_mse / len(pred_y))
    print('MAE:', sum_mae / len(pred_y))
    print('R2_Squared:', 1 - sum_mse / sum_tot)
    # Plot the test points and the fitted line
    plt.scatter(test_x, test_y, color='black')
    plt.plot(test_x, pred_y, color='blue', linewidth=3)
    plt.show()
    print('\n')
# Using sklearn
def sklearn_linearmodel():
    print('sklearn:')
    train_x, train_y, test_x, test_y = load_data()
    train_x = np.array(train_x).reshape(-1, 1)
    train_y = np.array(train_y).reshape(-1, 1)
    test_x = np.array(test_x).reshape(-1, 1)
    test_y = np.array(test_y).reshape(-1, 1)
    # Train and predict
    lr = linear_model.LinearRegression()
    lr.fit(train_x, train_y)
    y_pred = lr.predict(test_x)
    # Print the coefficient and intercept
    print('w:', lr.coef_, 'b:', lr.intercept_)
    # Print the evaluation metrics
    print('MSE:', mean_squared_error(test_y, y_pred))
    print('MAE:', mean_absolute_error(test_y, y_pred))
    print('R2_Squared:', r2_score(test_y, y_pred))
    # Plot the test points and the fitted line
    plt.scatter(test_x, test_y, color='black')
    plt.plot(test_x, y_pred, color='blue', linewidth=3)
    plt.show()
if __name__ == '__main__':
    model()
    sklearn_linearmodel()
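One caveat: load_data splits the rows sequentially, so if Salary_Data.csv happens to be sorted (for example by years of experience, which this sketch assumes), the training and test sets cover different value ranges. sklearn's train_test_split draws a random split instead; a minimal sketch, assuming the feature is in the first column and the target in the second as above:

import pandas as pd
from sklearn.model_selection import train_test_split

data = pd.read_csv('Salary_Data.csv', encoding='gbk')
x = data.iloc[:, 0].values.reshape(-1, 1)  # feature column
y = data.iloc[:, 1].values                 # target column
# Random 50/50 split instead of first-half/second-half; random_state fixes the shuffle
train_x, test_x, train_y, test_y = train_test_split(x, y, test_size=0.5, random_state=0)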