import numpy as np
import random
# Gradient descent
def gradientDescent(x, y, theta, alpha, m, numIterations):
    # alpha: learning rate; m: number of instances; numIterations: number of update steps
    xTrans = x.transpose()  # transpose the design matrix for the gradient computation
    for i in range(0, numIterations):
        hypothesis = np.dot(x, theta)        # predictions for all instances
        loss = hypothesis - y                # residuals
        cost = np.sum(loss ** 2) / (2 * m)   # mean squared error cost
        print("Iteration %d / Cost: %f" % (i, cost))
        gradient = np.dot(xTrans, loss) / m  # gradient of the cost w.r.t. theta
        # update theta
        theta = theta - alpha * gradient
    return theta
# Build a synthetic data set
def genData(numPoints, bias, variance):
    x = np.zeros(shape=(numPoints, 2))
    y = np.zeros(shape=numPoints)
    for i in range(0, numPoints):
        x[i][0] = 1  # intercept (bias) column
        x[i][1] = i  # single feature
        # target variable: linear in i plus uniform noise scaled by variance
        y[i] = (i + bias) + random.uniform(0, 1) * variance
    return x, y
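
# A minimal usage sketch, not part of the original listing: the values below
# (100 points, bias 25, variance 10, alpha = 0.0005, 1000 iterations) are
# illustrative assumptions, not prescribed hyperparameters.
if __name__ == "__main__":
    x, y = genData(100, 25, 10)
    m, n = np.shape(x)  # m instances, n features (including the intercept column)
    theta = np.ones(n)  # initialize all parameters to 1
    theta = gradientDescent(x, y, theta, 0.0005, m, 1000)
    print("Learned theta:", theta)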