1. In WPS
1. Select the data, then choose Insert at the top of the ribbon to insert a chart.
2. Right-click any data point and choose Add Trendline.
3. Right-click the trendline, open its format settings, and check the boxes to display the equation and the R-squared value.
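The WPS trendline, the hand-written code in section 2, and sklearn's LinearRegression in section 3 all fit the same ordinary least-squares line y = ax + b. For reference, the standard single-variable formulas (which the code below implements directly) are:
a = Σ(xᵢ − x̄)(yᵢ − ȳ) / Σ(xᵢ − x̄)²
b = ȳ − a·x̄
R² = [Σ(xᵢ − x̄)(yᵢ − ȳ)]² / [Σ(xᵢ − x̄)² · Σ(yᵢ − ȳ)²]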
2. Least squares method
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import math

# Read the first num (Height, Weight) rows from the CSV file
def get_date(file_name, num):
    data = pd.read_csv(file_name)
    X_parameter = []
    Y_parameter = []
    i = 1
    # iterate over the rows
    for x, y in zip(data['Height'], data['Weight']):
        X_parameter.append([float(x)])
        Y_parameter.append([float(y)])
        i = i + 1
        if i > num:
            break
    return X_parameter, Y_parameter

# arguments: file path and the number of rows to read
x, y = get_date("all2.csv", 20)
x1, y1 = get_date("all2.csv", 200)
x2, y2 = get_date("all2.csv", 2000)
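# An equivalent, loop-free way to read the first num rows with pandas
# (an optional sketch; the helper name get_data_head is new and only for
# illustration, assuming the same 'Height' and 'Weight' columns):
def get_data_head(file_name, num):
    head = pd.read_csv(file_name).head(num)        # first num rows of the CSV
    X = head['Height'].to_numpy().reshape(-1, 1)   # column vectors, same shape as get_date
    Y = head['Weight'].to_numpy().reshape(-1, 1)
    return X, Y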
plt.scatter(x, y)
plt.axis()      # set the axis limits
plt.show()      # the points that were read (first 20 rows)
plt.scatter(x1, y1)
plt.axis()      # set the axis limits
plt.show()      # the points that were read (first 200 rows)
plt.scatter(x2, y2)
plt.axis()      # set the axis limits
plt.show()      # the points that were read (first 2000 rows)
# Return R squared (the coefficient of determination) for a simple linear fit
def computeCorrelation(X, Y):
    X = np.ravel(X)            # flatten the [[value]] lists into 1-D arrays
    Y = np.ravel(Y)
    xBar = np.mean(X)
    yBar = np.mean(Y)
    SSR = 0
    varX = 0
    varY = 0
    for i in range(0, len(X)):
        diffXXBar = X[i] - xBar
        diffYYBar = Y[i] - yBar
        SSR += (diffXXBar * diffYYBar)
        varX += diffXXBar ** 2
        varY += diffYYBar ** 2
    SST = math.sqrt(varX * varY)
    return math.pow(SSR / SST, 2)
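# Optional cross-check: for a single predictor, the squared Pearson correlation
# from np.corrcoef equals the R² computed above (a small sketch, not in the original):
def r_squared_np(X, Y):
    r = np.corrcoef(np.ravel(X), np.ravel(Y))[0, 1]   # Pearson correlation coefficient
    return r ** 2
print("numpy check R² =", r_squared_np(x, y))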
# Return the regression equation y = a*x + b
def regression(x, y):
    x = np.ravel(x)            # flatten the [[value]] lists into 1-D arrays
    y = np.ravel(y)
    x_mean = np.mean(x)        # mean of x
    y_mean = np.mean(y)        # mean of y
    num = 0.0                  # numerator sum
    d = 0.0                    # denominator sum
    for x_i, y_i in zip(x, y):
        num += (x_i - x_mean) * (y_i - y_mean)
        d += (x_i - x_mean) ** 2
    a = num / d                # slope from the least-squares formula
    b = y_mean - a * x_mean    # intercept from the least-squares formula
    fangcheng = "y=" + str(a) + "x+" + str(b)
    return fangcheng, a, b
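# Optional cross-check: np.polyfit with degree 1 fits the same least-squares
# line and should return the same slope and intercept (a sketch, not in the original):
a_np, b_np = np.polyfit(np.ravel(x), np.ravel(y), 1)
print("np.polyfit check: y =", a_np, "x +", b_np)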
fangcheng, a, b = regression(x, y)
fangcheng1, a1, b1 = regression(x1, y1)
fangcheng2, a2, b2 = regression(x2, y2)
print("Equation 1:", fangcheng, " R² =", computeCorrelation(x, y))
print("Equation 2:", fangcheng1, " R² =", computeCorrelation(x1, y1))
print("Equation 3:", fangcheng2, " R² =", computeCorrelation(x2, y2))
y_hat = a * np.ravel(x) + b                # values on the regression line
plt.scatter(x, y)                          # plot the data points
plt.plot(np.ravel(x), y_hat, color='r')    # draw the fitted line
plt.axis()                                 # set the axis limits
plt.show()                                 # show the figure
y1_hat = a1 * np.ravel(x1) + b1            # values on the regression line
plt.scatter(x1, y1)                        # plot the data points
plt.plot(np.ravel(x1), y1_hat, color='r')  # draw the fitted line
plt.axis()                                 # set the axis limits
plt.show()                                 # show the figure
y2_hat = a2 * np.ravel(x2) + b2            # values on the regression line
plt.scatter(x2, y2)                        # plot the data points
plt.plot(np.ravel(x2), y2_hat, color='r')  # draw the fitted line
plt.axis()                                 # set the axis limits
plt.show()                                 # show the figure
3. sklearn
import pandas as pd
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
import numpy as np

# Read the first num (Height, Weight) rows from the CSV file
def get_date(file_name, num):
    data = pd.read_csv(file_name)
    X_parameter = []
    Y_parameter = []
    i = 1
    # iterate over the rows
    for x, y in zip(data['Height'], data['Weight']):
        X_parameter.append([float(x)])
        Y_parameter.append([float(y)])
        i = i + 1
        if i > num:
            break
    return X_parameter, Y_parameter
dataSet_x, dataSet_y = get_date("all2.csv", 20)
dataSet_x1, dataSet_y1 = get_date("all2.csv", 200)
dataSet_x2, dataSet_y2 = get_date("all2.csv", 2000)
# regr is the regression model; fit(x, y) performs the regression
regr = LinearRegression().fit(dataSet_x, dataSet_y)
# print the fitted equation and the R² score on the same data it was fit on
print('y=', regr.coef_, 'x+', regr.intercept_)
print(regr.score(dataSet_x, dataSet_y))
plt.scatter(dataSet_x, dataSet_y, color='black')
# use predict on the input x values to draw the fitted line
plt.plot(dataSet_x, regr.predict(dataSet_x), color='red', linewidth=1)
plt.show()
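# predict() expects a 2-D array of inputs; for example, the prediction for the
# first height in the training data (a small optional check, not in the original):
print("prediction for the first sample:", regr.predict([dataSet_x[0]]))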
# regr is the regression model; fit(x, y) performs the regression
regr = LinearRegression().fit(dataSet_x1, dataSet_y1)
# print the fitted equation and the R² score
print('y=', regr.coef_, 'x+', regr.intercept_)
print(regr.score(dataSet_x1, dataSet_y1))
plt.scatter(dataSet_x1, dataSet_y1, color='black')
# use predict on the input x values to draw the fitted line
plt.plot(dataSet_x1, regr.predict(dataSet_x1), color='red', linewidth=1)
plt.show()
# regr is the regression model; fit(x, y) performs the regression
regr = LinearRegression().fit(dataSet_x2, dataSet_y2)
# print the fitted equation and the R² score
print('y=', regr.coef_, 'x+', regr.intercept_)
print(regr.score(dataSet_x2, dataSet_y2))
plt.scatter(dataSet_x2, dataSet_y2, color='black')
# use predict on the input x values to draw the fitted line
plt.plot(dataSet_x2, regr.predict(dataSet_x2), color='red', linewidth=1)
plt.show()
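Because get_date returns each value wrapped in a one-element list, regr.coef_ and regr.intercept_ come back as arrays, so the equations printed above contain brackets. A small optional sketch (using the last regr fitted above) that prints plain numbers instead:
a = regr.coef_.ravel()[0]        # slope as a plain float
b = regr.intercept_.ravel()[0]   # intercept as a plain float
print("y = %.4f x + %.4f" % (a, b))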
4. WPS result
5. Least squares result
6. sklearn linear regression result
Reference: https://blog.csdn.net/playgoon2/article/details/77162219
Conclusion: all three methods give the same result.