# Gradient Descent (梯度下降)
# -- coding = utf-8 --
# @Time :2021/4/12 20:25
# @Author:sueong
# @File:03.py
# @Software:PyCharm
import numpy as np
import matplotlib.pyplot as plt
# Toy training data: y = 2x exactly, so the optimal weight is w = 2.0.
x_data=[1.0,2.0,3.0]
y_data=[2.0,4.0,6.0]
w=1.0# initial guess; updated via w = w - lr * d(cost)/dw
cost_list=[]  # cost recorded once per epoch, for plotting
epoch_list=[]  # epoch indices, x-axis of the training curve
def forword(x):
    """Linear model prediction: y_hat = w * x, using the global weight w."""
    return w * x
def cost(xs, ys):
    """Mean squared error of the linear model over a dataset.

    Args:
        xs: input values.
        ys: target values, same length as xs.

    Returns:
        Average of (forword(x) - y)**2 over all (x, y) pairs.
    """
    # Bug fixes: the accumulator must start at 0 (it was 1, inflating every
    # reported cost), and the loop must iterate over the xs/ys arguments
    # rather than the module-level x_data/y_data it previously hard-coded.
    l_sum = 0.0
    for x, y in zip(xs, ys):
        y_pre = forword(x)
        l_sum += (y_pre - y) ** 2
    return l_sum / len(xs)
def gradient(xs, ys):
    """Average gradient of the MSE cost with respect to the global weight w.

    Uses d/dw (w*x - y)^2 = 2*x*(w*x - y), averaged over all samples.

    Args:
        xs: input values.
        ys: target values, same length as xs.

    Returns:
        Mean gradient over the (x, y) pairs.
    """
    grad = 0.0
    # Bug fix: iterate the xs/ys arguments (it previously walked the global
    # x_data/y_data while still dividing by len(xs), so the function ignored
    # the data it was given).
    for x, y in zip(xs, ys):
        y_pre = forword(x)
        grad += 2 * x * (y_pre - y)
    return grad / len(xs)
# Evaluate the untrained model on x=4 (true answer is 8.0).
print('predict before traing', 4, forword(4))

# Batch gradient descent: one weight update per epoch, computed from the
# full dataset each time.
for epoch in range(100):
    epoch_list.append(epoch)
    cost_val = cost(x_data, y_data)
    cost_list.append(cost_val)
    grad_val = gradient(x_data, y_data)
    w -= 0.01 * grad_val  # learning rate 0.01
    print('Epoch=', epoch, 'w=', w, 'loss', cost_val)

print('predict(after training)', 4, forword(4))

# Training curve: cost on the y-axis against epoch on the x-axis.
# Bug fix: the axis labels were swapped (ylabel said 'epoch' and xlabel
# said 'cost', the opposite of what plot(epoch_list, cost_list) draws).
plt.plot(epoch_list, cost_list)
plt.ylabel('cost')
plt.xlabel('epoch')
plt.show()
# Stochastic Gradient Descent (随机梯度下降)
# Second version of the same script: the weight is updated after every
# individual sample instead of once per epoch over the whole dataset.
# -*- codeing = utf-8 -*-
# @Time :2021/4/12 20:58
# @Author:sueong
# @File:Stochastic Gradient Descent.py
# @Software:PyCharm
# -*- codeing = utf-8 -*-
# @Time :2021/4/12 20:25
# @Author:sueong
# @File:03.py
# @Software:PyCharm
import numpy as np
import matplotlib.pyplot as plt
# Same toy dataset: y = 2x, so the optimal weight is w = 2.0.
x_data=[1.0,2.0,3.0]
y_data=[2.0,4.0,6.0]
w=1.0# initial guess; updated per sample: w = w - lr * d(loss)/dw
cost_list=[]  # one loss value recorded per epoch, for plotting
epoch_list=[]  # epoch indices, x-axis of the training curve
def forword(x):
    """Predict y for input x with the current global weight w."""
    prediction = w * x
    return prediction
def loss(x, y):
    """Squared error of the model's prediction for one sample (x, y)."""
    residual = forword(x) - y
    return residual ** 2
def gradient(x, y):
    """Gradient of the single-sample squared loss w.r.t. w: 2*x*(w*x - y)."""
    residual = x * w - y
    return 2 * x * residual
# Evaluate the untrained model on x=4 (true answer is 8.0).
print('predict before traing', 4, forword(4))

# Stochastic gradient descent: update w immediately after each sample.
for epoch in range(100):
    epoch_list.append(epoch)
    for x, y in zip(x_data, y_data):
        grad = gradient(x, y)  # per-sample gradient drives each update
        w = w - 0.01 * grad  # learning rate 0.01
        print('\tgrad:', x, y, grad)
        l = loss(x, y)
    print('Epoch=', epoch, 'w=', w, 'loss', l)
    cost_list.append(l)  # record the last sample's loss for this epoch

print('predict(after training)', 4, forword(4))

# Training curve: loss on the y-axis against epoch on the x-axis.
# Bug fix: the axis labels were swapped (ylabel said 'epoch' and xlabel
# said 'cost', the opposite of what plot(epoch_list, cost_list) draws).
plt.plot(epoch_list, cost_list)
plt.ylabel('cost')
plt.xlabel('epoch')
plt.show()