import matplotlib.pyplot as plt
# Training data for the linear model y = w * x (true relation here: y = 2x).
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
# Initial guess for the weight; refined by batch gradient descent below.
w = 1.0
def forward(x):
    """Predict the target for input ``x`` using the current global weight ``w``."""
    return w * x
def cost(xs, ys):
    """Mean squared error of ``forward`` over the dataset.

    Args:
        xs: input values.
        ys: corresponding target values.

    Returns:
        The average of (y - forward(x)) ** 2 over all pairs.
        ``xs`` must be non-empty (no guard against division by zero).
    """
    # Accumulator renamed from ``cost`` to avoid shadowing the function name.
    total = 0.0
    for x, y in zip(xs, ys):
        # Reuse the single prediction: the original computed ``y_pred`` and
        # then ignored it, calling forward(x) a second time.
        y_pred = forward(x)
        total += (y - y_pred) ** 2
    return total / len(xs)
def gradient(xs, ys):
    """Average analytic gradient of the MSE cost with respect to ``w``.

    d/dw mean((x*w - y)^2) = mean(2 * x * (x*w - y)).
    """
    return sum(2 * x * (x * w - y) for x, y in zip(xs, ys)) / len(xs)
epoch_list = []  # epoch indices, for plotting
cost_list = []   # cost recorded at each epoch

# Fixed: the original printed "beforo" (typo).
print('predict (before training)', 4, forward(4))

# Batch gradient descent: one update of w per epoch over the full dataset.
for epoch in range(100):
    cost_val = cost(x_data, y_data)
    grad_val = gradient(x_data, y_data)
    w -= 0.01 * grad_val  # learning rate 0.01
    print('epoch', epoch, 'w=', w, 'loss', cost_val)
    epoch_list.append(epoch)
    cost_list.append(cost_val)

print('w=', w)
# Fixed: this line runs after the loop, but the original labeled it
# "beforo training".
print('predict (after training)', 4, forward(4))

# Plot the cost curve over training epochs.
plt.plot(epoch_list, cost_list)
plt.ylabel('cost')
plt.xlabel('epoch')
plt.show()
这里的线性模型是 y = wx。如果换成 y = wx + b,上述计算过程会变吗?会影响参数更新吗?
(提示:损失对 w 的梯度表达式中会多出 b,并且每一步还需要额外计算并更新 b 的梯度。)
请写出对应的代码,使用下面的数据:
- x_data = [1.0, 2.0, 3.0]
- y_data = [4.5, 7.5, 10.5]
import matplotlib.pyplot as plt  # common plotting library

# Training set as specified by the exercise (true relation: y = 3x + 1.5).
# The original answer reused [2.0, 4.0, 6.0], which contradicts the data
# given in the prompt and makes the bias b train toward 0.
x_data = [1.0, 2.0, 3.0]    # input features
y_data = [4.5, 7.5, 10.5]   # corresponding targets
w = 1.0  # initial weight
b = 0.0  # initial bias
# Affine model (note: this is y = w*x + b, not y = w*x).
def forward(x):
    """Return the model prediction w * x + b for input ``x``.

    ``w`` and ``b`` are module-level parameters updated during training.
    """
    return b + w * x
# Per-sample loss: loss = (y_pred - y) ** 2 = (forward(x) - y) ** 2.
def loss(x, y):
    """Return the squared error between the prediction for ``x`` and target ``y``."""
    return (forward(x) - y) ** 2
# Per-sample gradient for SGD.
def gradient(x, y):
    """Gradient of the squared-error loss with respect to ``w``.

    d/dw (w*x + b - y)^2 = 2 * x * (w*x + b - y).

    Fixed: the original omitted ``b`` from the residual (used ``x*w - y``),
    which makes the w-gradient wrong whenever b != 0 in the y = w*x + b model.
    """
    return 2 * x * (x * w + b - y)
epoch_list = []  # epoch indices, for plotting
loss_list = []   # last per-sample loss seen in each epoch

# Fixed: this runs before the loop, but the original label said
# "after training" (and was missing its closing parenthesis).
print("predict (before training)", 4, forward(4))
# Stochastic gradient descent: update w AND b after every sample.
# Fixed: the original never updated b, so the model stayed y = w*x.
for epoch in range(100):
    for x, y in zip(x_data, y_data):
        # Residual of the affine model; both gradients derive from it:
        #   dL/dw = 2 * x * err,   dL/db = 2 * err
        # (computed from forward() so the bias is included in the residual).
        err = forward(x) - y
        grad_w = 2 * x * err
        grad_b = 2 * err
        w = w - 0.01 * grad_w
        b = b - 0.01 * grad_b
        print("\tgrad:", x, y, grad_w, grad_b)
        l = loss(x, y)
    print("epoch", epoch, "w=", w, "b=", b, "loss=", l)
    epoch_list.append(epoch)
    loss_list.append(l)
# Plot the loss curve recorded during training.
# Fixed: the original print string was missing its closing parenthesis.
print("predict (after training)", 4, forward(4))
plt.plot(epoch_list, loss_list)
plt.ylabel('Loss')   # y-axis label
plt.xlabel('epoch')  # x-axis label
plt.show()