import numpy as np
import matplotlib.pyplot as plt
# Training samples drawn from y = 2x + 1 (the bias-free model below cannot fit them exactly).
x_data = [1.0,2.0,3.0]
y_data = [3.0,5.0,7.0]
def forward(x):
    """Linear model without bias: y_hat = w * x.

    Relies on the module-level global ``w`` being assigned before the call
    (it is set by the sweep loop below).
    """
    return x * w


def loss(x, y):
    """Squared error of the model's prediction for one sample (x, y)."""
    y_pred = forward(x)  # model prediction for this x
    return (y_pred - y) ** 2
w_list = []    # candidate weight values tried during the sweep
mse_list = []  # mean squared error measured for each candidate weight
# Brute-force sweep: evaluate the MSE of y_hat = w * x for w in [0.0, 4.0].
for w in np.arange(0.0, 4.1, 0.1):
    print("w", w)
    l_sum = 0
    for x, y in zip(x_data, y_data):
        y_pred = forward(x)
        loss_val = loss(x, y)
        l_sum += loss_val
        print("\t", x, y, y_pred, loss_val)
    # Divide by the dataset size (was hard-coded 3) to get the mean.
    print("MSE=", l_sum / len(x_data))
    w_list.append(w)
    mse_list.append(l_sum / len(x_data))
plt.plot(w_list, mse_list)
plt.xlabel("w")
plt.ylabel("loss")
plt.show()
import numpy as np
import matplotlib.pyplot as plt
# Same training data as above, re-declared for the (w, b) grid-search section.
x_data = [1.0,2.0,3.0]
y_data = [3.0,5.0,7.0]
def forward(x):
    """Affine model: y_hat = w * x + b (w and b are module-level globals)."""
    return x * w + b


def loss(x, y):
    """Squared error for one sample: (y_hat - y) ** 2."""
    y_pred = forward(x)  # model prediction for this x
    return (y_pred - y) ** 2
w_list = []    # weight value at each (w, b) grid point
mse_list = []  # MSE at each (w, b) grid point
b_list = []    # bias value at each (w, b) grid point
# Exhaustive grid search over (w, b); records the full MSE surface.
for w in np.arange(0.0, 4.1, 0.1):
    for b in np.arange(0, 4.1, 0.1):
        print("b=", b)
        print("w=", w)
        l_sum = 0
        for x, y in zip(x_data, y_data):
            y_pred = forward(x)
            loss_val = loss(x, y)
            l_sum += loss_val
            print("\t", x, y, y_pred, loss_val)
        print("MSE=", l_sum / 3)
        w_list.append(w)
        mse_list.append(l_sum / 3)
        b_list.append(b)

# Plot the loss surface. Constructing Axes3D(fig) directly stopped attaching
# the axes to the figure (matplotlib 3.7 removed auto_add_to_figure), which
# yields a blank window; add_subplot(projection="3d") is the supported API.
ax = plt.figure().add_subplot(projection="3d")
ax.plot_trisurf(w_list, b_list, mse_list)
ax.set_xlabel("w")
ax.set_ylabel('b')
ax.set_zlabel("loss")
plt.show()
# 2. Gradient descent (梯度下降)
import matplotlib.pyplot as plt
# Training data for y = 2x — exactly representable by the bias-free model.
x_data = [1.0,2.0,3.0]
y_data = [2.0,4.0,6.0]
w = 0.5  # initial guess for the weight


def forward(x):
    """Linear model y_hat = w * x (w is the module-level weight)."""
    return x * w


def cost(xs, ys):
    """Mean squared error of the model over the whole dataset."""
    # Renamed the accumulator (was `cost`) so it no longer shadows this function.
    total = 0
    for x, y in zip(xs, ys):
        y_pred = forward(x)
        total += (y_pred - y) ** 2
    return total / len(xs)


def gradient(xs, ys):
    """Average gradient d(MSE)/dw = mean(2 * x * (x * w - y)) over the dataset."""
    grad = 0
    for x, y in zip(xs, ys):
        grad += 2 * x * (x * w - y)
    return grad / len(xs)
epoch_list = []  # epoch index (x-axis of the loss curve)
loss_list = []   # cost recorded at each epoch
print("Predict(before training)", 4, forward(4))
# Batch gradient descent: one weight update per full pass over the dataset.
for epoch in range(100):
    cost_val = cost(x_data, y_data)
    grad = gradient(x_data, y_data)
    w -= 0.01 * grad  # learning rate 0.01
    print("Epoch:", epoch, "w=", w, "loss=", cost_val)
    epoch_list.append(epoch)
    loss_list.append(cost_val)
print("Predict (after training)", 4, forward(4))
plt.plot(epoch_list, loss_list)
plt.ylabel("loss")
plt.xlabel("epoch")
plt.show()
import matplotlib.pyplot as plt
import numpy as np
# Data for y = 2x + 1 again; the following section runs per-sample SGD on w.
x_data = [1.0,2.0,3.0]
y_data = [3.0,5.0,7.0]
w = 3  # initial weight guess


def forward(x, b=0.0):
    """Affine prediction y_hat = w * x + b.

    Bug fix: the original body looped ``for b in np.arange(0, 4.1, 0.1)`` and
    returned on the first iteration, so it always used b == 0.0. The bias is
    now an explicit parameter whose default (0.0) preserves that behavior.
    """
    return x * w + b


def loss(xs, ys):
    """Squared error for one sample (xs, ys are a single x/y pair here).

    Bug fix: the original read the globals ``x`` and ``y`` instead of its
    parameters; it only worked because callers passed those same globals.
    """
    y_pred = forward(xs)
    return (y_pred - ys) ** 2


def gradient(xs, ys):
    """Per-sample gradient d(loss)/dw = 2 * x * (x * w - y).

    Bug fix: same as loss() — uses the parameters, not the loop globals.
    """
    return 2 * xs * (xs * w - ys)
epoch_list = []  # epoch index for each recorded point
loss_list = []   # last per-sample loss seen in each epoch
b_list = []      # candidate bias active when the point was recorded
print("Predict(before training)", 4, forward(4))
# Per-sample SGD on w, repeated for each candidate bias b.
# NOTE(review): b is swept but never used by the update (gradient ignores it),
# and w is not reset between b values — presumably intentional demo code; the
# per-epoch vs per-sample nesting below is reconstructed from the Epoch print.
for b in np.arange(0, 4.1, 0.1):
    for epoch in range(100):
        for x, y in zip(x_data, y_data):
            grad = gradient(x, y)
            w -= 0.1 * grad  # per-sample update, learning rate 0.1
            print("\t grad:", x, y, grad)
            l = loss(x, y)
        print("Epoch:", epoch, "w=", w, "loss=", l, "b=", b)
        epoch_list.append(epoch)
        loss_list.append(l)
        b_list.append(b)
print("Predict (after training)", 4, forward(4))
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401  kept: registers the 3d projection on old matplotlib
# Bug fix: Axes3D(plt.figure()) no longer attaches the axes to the figure
# (auto_add_to_figure was removed in matplotlib 3.7), producing a blank plot;
# add_subplot(projection="3d") is the supported construction.
ax = plt.figure().add_subplot(projection="3d")
ax.set_ylabel("b")
ax.set_xlabel('epoch')
ax.set_zlabel("loss")
ax.plot_trisurf(epoch_list, b_list, loss_list)
plt.show()
# 线性回归+梯度下降代码复习 — linear regression + gradient descent review notes
# 于 2023-08-09 18:35:49 首次发布 (first published 2023-08-09 18:35:49)