import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
import matplotlib.pyplot as plt
# Sample points generated exactly from y = 0.8*x^2 + 0.8*x + 1,
# so a degree-2 fit should recover intercept 1 and coefficients (0.8, 0.8).
x = np.array([1, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9])
y = np.array([2.6, 2.848, 3.112, 3.392, 3.688, 4, 4.328, 4.672, 5.032, 5.408])
# scikit-learn expects 2-D (n_samples, n_features) arrays; reshape each
# 1-D vector into a single-feature column. Using the ndarray.reshape
# method instead of np.reshape(..., newshape=...): the `newshape` keyword
# is deprecated since NumPy 2.1 (renamed to `shape`).
x = x.reshape(-1, 1)
y = y.reshape(-1, 1)
# degree=2 expands each sample x into the feature row [1, x, x^2]
# (the constant bias column is included by default).
poly_reg = PolynomialFeatures(degree=2)
X_ploy = poly_reg.fit_transform(x)
lr = LinearRegression()
# Ordinary least-squares fit on the expanded polynomial features.
lr.fit(X_ploy, y)
print('截距为:', lr.intercept_, '\n')
print('系数为:', lr.coef_, '\n')
# R^2 (coefficient of determination) on the training data;
# expected to be 1.0 here since the data lie exactly on the quadratic.
print(lr.score(X_ploy, y))
# Fitted values for plotting the regression curve.
y_hat = lr.predict(X_ploy)
# Plot the raw samples as dots and the fitted curve as a line.
plt.scatter(x, y)
plt.plot(x, y_hat)
plt.show()
# Linear regression (single-variable polynomial curve fitting) in Python
# (original post last updated 2023-01-31 17:09:23)