梯度下降算法 - Python 实现
曲线拟合:
数据集
from numpy import *
import numpy as np

# Number of sample points in the data set.
m = 20

# Design matrix: a bias column x0 (all ones) stacked beside the
# feature column x1 = 1..m, giving an m x 2 sample matrix.
X0 = np.ones((m, 1))
X1 = np.arange(1, m + 1).reshape(m, 1)
X = np.hstack((X0, X1))

# Observed targets, one per sample, as an m x 1 column vector.
y = np.array([
    3, 4, 5, 5, 2, 4, 7, 8, 11, 8, 12,
    11, 13, 13, 16, 17, 18, 17, 19, 21,
]).reshape(m, 1)

# Learning rate for gradient descent.
alpha = 0.01
注:hstack() 按列把数组合并成样本矩阵;X 的每一列以及 y 都是列向量。
求代价函数和代价函数的梯度
def cost_function(theta, X, Y):
    """Least-squares cost J(theta) = ||X·theta - Y||^2 / (2m).

    theta: (n, 1) parameter column vector.
    X:     (m, n) design matrix.
    Y:     (m, 1) target column vector.
    Returns a 1x1 ndarray (column-vector convention kept from the original).
    """
    # Derive the sample count from the data instead of relying on the
    # module-level global `m`, so the function works for any data set size.
    m = X.shape[0]
    residual = np.dot(X, theta) - Y
    return np.dot(residual.transpose(), residual) / (2 * m)
def gradient(theta, X, Y):
    """Gradient of the least-squares cost: X^T · (X·theta - Y) / m.

    theta: (n, 1) parameter column vector.
    X:     (m, n) design matrix.
    Y:     (m, 1) target column vector.
    Returns an (n, 1) gradient column vector.
    """
    # Use the actual number of rows rather than the module-level global `m`,
    # so the gradient is correct for any data set size.
    m = X.shape[0]
    residual = np.dot(X, theta) - Y
    return np.dot(X.transpose(), residual) / m
梯度下降求解
def gradient_descent(X, Y, theta, alpha=0.01, tol=1e-5):
    """Run batch gradient descent on the least-squares cost.

    X:     (m, n) design matrix.
    Y:     (m, 1) target column vector.
    theta: (n, 1) initial parameter guess.
    alpha: learning rate (default matches the module-level setting of 0.01,
           so existing callers see identical behavior).
    tol:   stop once every gradient component has absolute value <= tol.
    Returns the fitted (n, 1) parameter vector.

    Note: with a fixed step size this loop does not terminate if alpha is
    too large for the problem's curvature; the defaults converge for the
    small data set in this tutorial.
    """
    m = X.shape[0]
    # Gradient computed inline (X^T (X·theta - Y) / m) so the function does
    # not depend on module-level globals.
    grad = np.dot(X.transpose(), np.dot(X, theta) - Y) / m
    while not (np.abs(grad) <= tol).all():
        theta = theta - alpha * grad
        grad = np.dot(X.transpose(), np.dot(X, theta) - Y) / m
    return theta
# Initial guess: both parameters start at 1, shaped as a column vector.
theta_start = np.array([1, 1]).reshape(-1, 1)
theta = gradient_descent(X, y, theta_start)
# Evaluate the final cost at the fitted parameters.
cost_function(theta, X, y)
绘制图像
from matplotlib import pyplot as plt

# Dense x grid covering the data range, and the fitted line on that grid.
grid = np.arange(0, 21, 0.2)
fitted = theta[0] + theta[1] * grid

plt.plot(grid, fitted)
plt.scatter(X1, y, c='#00CED1')
plt.show()