Learning PyTorch from Scratch (2)
Note: for now I skip the theory and start directly from code; if possible, I will fill in the theory later.
Gradient Descent
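The code below fits the linear model ŷ = x · w by batch gradient descent. With mean squared error as the cost, the gradient with respect to w (the formula the `gradient` function below implements) is

$\frac{\partial \text{cost}}{\partial w} = \frac{1}{N}\sum_{n=1}^{N} 2\,x_n\,(x_n w - y_n)$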
import matplotlib.pyplot as plt
# prepare data
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
# initialize weight
w = 1.0
def forward(x):
    """
    Linear model.
    :param x: input value
    :return: predicted value
    """
    return x * w
def cost(x_s, y_s):
    """
    Mean squared error over the whole dataset.
    :param x_s: input values
    :param y_s: labels
    :return: mean cost
    """
    cost = 0
    for x, y in zip(x_s, y_s):
        y_pred = forward(x)
        cost += (y_pred - y) ** 2
    return cost / len(x_s)
def gradient(x_s, y_s):
    """
    Gradient of the cost with respect to w, averaged over the dataset.
    :param x_s: input values
    :param y_s: labels
    :return: mean gradient
    """
    grad = 0
    for x, y in zip(x_s, y_s):
        grad += 2 * x * (x * w - y)
    return grad / len(x_s)
# define some parameters
epoch_list = []
cost_list = []
learning_rate = 0.01
print('predict (before training)', 4, forward(4))
for epoch in range(100):
    cost_val = cost(x_data, y_data)
    grad_val = gradient(x_data, y_data)
    w -= learning_rate * grad_val
    print('Epoch:', epoch, 'W=', w, 'loss=', cost_val)
    epoch_list.append(epoch)
    cost_list.append(cost_val)
print('predict (after training)', 4, forward(4))
# visualize the results
plt.title('Gradient descent model')
plt.plot(epoch_list, cost_list)
plt.ylabel('cost')
plt.xlabel('epoch')
plt.show()
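Since this series is about PyTorch, here is a minimal sketch of the same batch gradient descent written with torch tensors and autograd (my own addition, not part of the original script), which is useful for checking the hand-derived gradient against PyTorch's:

import torch

x_t = torch.tensor([1.0, 2.0, 3.0])
y_t = torch.tensor([2.0, 4.0, 6.0])
w_t = torch.tensor(1.0, requires_grad=True)   # ask autograd to track w
for epoch in range(100):
    cost_t = ((x_t * w_t - y_t) ** 2).mean()  # MSE over the whole batch
    cost_t.backward()                          # autograd fills w_t.grad
    with torch.no_grad():                      # update w outside the graph
        w_t -= 0.01 * w_t.grad
    w_t.grad.zero_()                           # clear the accumulated gradient
print('w after training:', w_t.item())         # should approach 2.0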
Stochastic Gradient Descent Implementation
Unlike batch gradient descent above, which averages the gradient over the whole dataset before each update, SGD updates w from the gradient of a single randomly chosen sample.
import matplotlib.pyplot as plt
import random
# prepare data
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
# randomly initialize the weight
w = random.randint(0, 10)
def forward(x):
    """
    Linear model.
    :param x: input value
    :return: predicted value
    """
    return x * w
def loss(x, y):
    """
    Squared-error loss for a single sample.
    :param x: input value
    :param y: label
    :return: loss value
    """
    y_pred = forward(x)
    return (y_pred - y) ** 2
def gradient(x, y):
    """
    Gradient of the single-sample loss with respect to w.
    :param x: input value
    :param y: label
    :return: gradient value
    """
    return 2 * x * (x * w - y)
# define some parameters
epoch_list = []
loss_list = []
learning_rate = 0.01
print('predict (before training)', 4, forward(4))
for epoch in range(100):
    # loss of the most recently processed sample
    l = 0
    for i in range(len(x_data)):
        # pick a random training sample for this update
        random_data = random.randint(0, len(x_data) - 1)
        x = x_data[random_data]
        y = y_data[random_data]
        grad = gradient(x, y)
        w -= learning_rate * grad
        print("\tgrad:", x, y, grad)
        l = loss(x, y)
    print("progress:", epoch, "w=", w, "loss=", l)
    epoch_list.append(epoch)
    loss_list.append(l)
print('predict (after training)', 4, forward(4))
# visualize the results
plt.title('Stochastic gradient descent model')
plt.plot(epoch_list, loss_list)
plt.ylabel('loss')
plt.xlabel('epoch')
plt.show()
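As a follow-up sketch (my own addition, with batch_size as a hypothetical choice, not from the lesson), a middle ground between the two scripts is mini-batch SGD: each update averages the gradient over a small random subset of the data.

import random

x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
w = 1.0
learning_rate = 0.01
batch_size = 2  # hypothetical choice, anything up to len(x_data)
for epoch in range(100):
    # sample a mini-batch of indices without replacement
    batch = random.sample(range(len(x_data)), batch_size)
    grad = sum(2 * x_data[i] * (x_data[i] * w - y_data[i]) for i in batch) / batch_size
    w -= learning_rate * grad
print('w after training:', w)  # should approach 2.0

Batch gradient descent gives a smooth cost curve but touches every sample per step; per-sample SGD is noisy but cheap per step; mini-batches interpolate between the two.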