# Formula 1: dE/db = -(2/n) * sum(y_i - (m*x_i + b))
# Formula 2: dE/dm = -(2/n) * sum(x_i * (y_i - (m*x_i + b)))
# -*- coding: utf-8 -*-
import numpy as np
import pylab
def comput(x, y, time, rate):
    """Fit y ~ m*x + b by batch gradient descent and plot the result.

    Parameters
    ----------
    x, y : 1-D numpy arrays of equal length — sample coordinates.
    time : int — number of gradient-descent iterations.
    rate : float — learning rate (step size).

    Returns
    -------
    (m, b) : the fitted slope and intercept.  Also opens a matplotlib
    window showing the data points and the fitted line.
    """
    b = 0.0
    m = 0.0
    n = float(len(x))
    for _ in range(time):  # range, not Py2-only xrange
        # Shared residual for both partial derivatives (was computed twice).
        residual = y - (m * x + b)
        # Formula 1: dE/db = -(2/n) * sum(residual)
        b_gradient = np.sum(-2.0 / n * residual)
        # Formula 2: dE/dm = -(2/n) * sum(x * residual)
        m_gradient = np.sum(-2.0 / n * x * residual)
        b = b - (b_gradient * rate)
        m = m - (m_gradient * rate)
    y_predict = m * x + b
    pylab.plot(x, y, 'o')
    pylab.plot(x, y_predict, 'k-')
    pylab.show()
    return m, b
def randomData(amount=100, slope=1.0, spread=10.0):
    """Generate a random noisy straight-line sample.

    Parameters
    ----------
    amount : int — number of points (defaults match the original 100).
    slope  : float — true slope of the underlying line (originally 1).
    spread : float — full width of the uniform noise band; the noise is
             drawn uniformly from [-spread/2, spread/2) (originally 10).

    Returns
    -------
    A (2, amount) float array: row 0 is x = 0..amount-1, row 1 is
    y = slope*x + noise.
    """
    x = np.arange(amount)  # idiomatic replacement for np.array([i for i in range(amount)])
    noise = (np.random.rand(amount) - 0.5) * spread
    y = slope * x + noise
    return np.array([x, y])
def main():
    """Demo: fit a line to 20 noisy points via gradient descent."""
    # Keep the sample small — with many points the un-normalised gradient
    # magnitudes grow and the updates can overflow (original note: more
    # than ~50 points may fail with this learning rate).
    sample = randomData(20)
    xs, ys = sample[0], sample[1]
    iterations = 100000  # number of gradient-descent steps
    step = 0.001         # learning rate / step size
    comput(xs, ys, iterations, step)
# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    main()