# -*- coding:utf-8 -*-
# @Time:4/18/19 9:42 PM
# @Author:CIGA
# @Filename:04_mini_batch_gradient_descent.py
# @Software:PyCharm
"""
mini-batch
“Mini-Batch”梯度下降:指的是每下降一步,使用一部分的训练集来计算梯度值
"""
import numpy as np
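
# For linear regression with MSE loss, the mini-batch gradient is
#   (2 / batch_size) * X_batch.T @ (X_batch @ theta - y_batch).
# The code below drops the constant factor 2; that only rescales the
# effective learning rate and does not change the minimizer.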
# Synthetic data: y = 4 + 3x + Gaussian noise, so the true parameters
# are intercept 4 and slope 3.
X = 2 * np.random.rand(100, 1)
y = 4 + 3 * X + np.random.randn(100, 1)
# Prepend a bias column of ones: X_b has shape (100, 2).
X_b = np.c_[np.ones((100, 1)), X]
# print(X_b)
n_epochs = 500
t0, t1 = 5, 50  # hyperparameters of the learning-rate schedule
m = 100  # number of training samples
def learning_schedule(t):
    return float(t0) / (t + t1)
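# Worked example of the decay (illustrative values, not in the original):
# learning_schedule(0) = 5 / 50 = 0.1 at the first update, and
# learning_schedule(950) = 5 / 1000 = 0.005, so the step size shrinks
# smoothly as the update count t grows.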
theta = np.random.randn(2, 1)  # random initialization of [intercept, slope]
batch_num = 5
batch_size = m // batch_num  # integer division (20 samples per batch); m / 5 would be a float and break slicing
# An epoch is one full pass over all m training samples.
for epoch in range(n_epochs):
    # Shuffle the sample order each epoch (100 non-repeating random indices),
    # so successive mini-batches are not always the same slices.
    shuffled = np.random.permutation(m)
    X_shuffled = X_b[shuffled]
    y_shuffled = y[shuffled]
    for i in range(batch_num):
        start = i * batch_size
        end = (i + 1) * batch_size
        xi = X_shuffled[start:end]
        yi = y_shuffled[start:end]
        # Gradient of the MSE loss over this mini-batch (constant 2 dropped).
        gradients = 1.0 / batch_size * xi.T.dot(xi.dot(theta) - yi)
        # t counts gradient updates performed so far, so the rate decays
        # steadily across epochs.
        learning_rate = learning_schedule(epoch * batch_num + i)
        theta = theta - learning_rate * gradients
print(theta)
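
# Sanity check (an addition, not part of the original script): solve the
# same least-squares problem in closed form with the normal equation.
# The mini-batch estimate above should land close to this solution, and
# both should be near the true parameters [4, 3].
theta_exact = np.linalg.inv(X_b.T.dot(X_b)).dot(X_b.T).dot(y)
print(theta_exact)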