# 基于梯度下降法的线性回归模型

3 篇文章 0 订阅

#!/usr/bin/env  python
from numpy import *
def createDataSet():
    """Return the toy training set for the linear-regression demo.

    Returns:
        dataSetX: list of feature rows. The leading 1 in each row is the
            bias feature x0 (the constant/intercept term of the linear
            model), added so the intercept can be learned as a weight.
        dataSetY: list of target values, one per row of dataSetX.
    """
    dataSetX = [[1, 150, 3], [1, 200, 5], [1, 80, 2], [1, 90, 2]]
    dataSetY = [200, 300, 150, 180]
    return dataSetX, dataSetY

def analisis(dataSetX, dataSetY):
    """Fit linear-regression weights by batch gradient descent.

    Args:
        dataSetX: list of feature rows; column 0 is the bias feature x0.
        dataSetY: list of target values, one per row of dataSetX.

    Returns:
        numpy.matrix of shape (n_features, 1) holding the learned
        parameter vector theta after a fixed 100 iterations.
    """
    # Initialise the parameter vector theta to ones (float32).
    thita = ones((len(dataSetX[0]), 1), dtype='f4')
    alpha = 0.00001  # learning rate; small because the features are unscaled
    # Convert the input lists to numpy matrices for vectorised math.
    arr_x = mat(dataSetX)
    arr_y = mat(dataSetY).transpose()
    # Fixed number of gradient-descent iterations.
    for i in range(100):
        err = arr_x * thita - arr_y          # residuals: X*theta - y
        print(err)                           # debug trace (Python 3 call syntax; was `print err`)
        diff = (err.T * arr_x).transpose()   # gradient: X^T * (X*theta - y)
        thita -= alpha * diff                # descent step along the negative gradient
    return thita


>>> granddes.analisis(datax,datay)
[[-46.]
[-94.]
[-67.]
[-87.]]
[[ 12.36542642]
[-16.17125142]
[-35.86673653]
[-51.97773612]]
[[ 25.76308608]
[  1.69418383]
[-28.72001255]
[-43.93833482]]
[[ 28.83845687]
[  5.7951231 ]
[-27.07936561]
[-42.09281242]]
[[ 29.54435444]
[  6.73643327]
[-26.70263433]
[-41.66909003]]
[[ 29.7063446 ]
[  6.95245743]
[-26.61603212]
[-41.57173872]]
[[ 29.74347699]
[  7.0019871 ]
[-26.59603107]
[-41.54930818]]
[[ 29.75194883]
[  7.01329875]
[-26.59131801]
[-41.54407537]]
[[ 29.75385475]
[  7.01585484]
[-26.59010732]
[-41.54278243]]
[[ 29.75423992]
[  7.01638305]
[-26.58970785]
[-41.54240203]]
[[ 29.75428534]
[  7.01645827]
[-26.58948958]
[-41.54222548]]
[[ 29.75424135]
[  7.01641428]
[-26.58931899]
[-41.54210258]]
[[ 29.75417948]
[  7.01634645]
[-26.58915794]
[-41.5419904 ]]
[[ 29.75411761]
[  7.01627862]
[-26.58899689]
[-41.54187822]]
[[ 29.75405574]
[  7.01621079]
[-26.58883584]
[-41.54176605]]
[[ 29.75399387]
[  7.01614296]
[-26.58867478]
[-41.54165387]]
[[ 29.753932  ]
[  7.01607513]
[-26.58851373]
[-41.5415417 ]]


• 1
点赞
• 0
收藏
觉得还不错? 一键收藏
• 0
评论
09-12 2538
03-04 1076
11-22 908
07-15 446
03-10 853
06-06
02-15
02-15 1204
10-04 1570

1.余额是钱包充值的虚拟货币，按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载，可以购买VIP、付费专栏及课程。