TF2实战-回归问题
一、回归算法模型
找到参数w’与b’
//linear regression 模型
(wX+b-Y)^2
loss = sum((w*xi + b - yi)^2)
minimize loss:
w’*x+b’-> y
//梯度下降
x’ = x-0.005*(dy/dx) //x沿着函数值下降的方向移动
w’ = w-lr*(dy/dw)
//loss下降的方向就是y’->y的方向
loss = sum((w*xi + b - yi)^2)
w’ = w - lr*(dloss/dw) // dL/dw = 2*sum((w*xi + b - yi)*xi)
b’ = b - lr*(dloss/db) // dL/db = 2*sum((w*xi + b - yi))
w’*x+b’->y
二、利用numpy实战
import numpy as np
import csv
def calculate_loss(points, w0, b0):
    """Return the mean squared error of the line y = w0*x + b0 over *points*.

    Args:
        points: 2-D NumPy array of shape (N, 2); column 0 holds x values,
            column 1 holds the corresponding y values.
        w0: current slope.
        b0: current intercept.

    Returns:
        Mean of (w0*xi + b0 - yi)**2 over all N points.

    Raises:
        ZeroDivisionError: if *points* is empty (same as the original loop form).
    """
    # Vectorized residuals replace the per-row Python loop; one C-level pass.
    residuals = w0 * points[:, 0] + b0 - points[:, 1]
    # Keep the explicit division (rather than np.mean) so an empty input
    # still raises ZeroDivisionError exactly like the loop version did.
    return np.sum(residuals ** 2) / float(len(points))
def calculate_tidu(points, w1, b1, lr):
    """Perform one gradient-descent step on the MSE loss and return (w, b).

    Gradients of loss = (1/N) * sum((w*xi + b - yi)**2):
        dL/dw = (2/N) * sum((w*xi + b - yi) * xi)
        dL/db = (2/N) * sum((w*xi + b - yi))

    Args:
        points: 2-D NumPy array of shape (N, 2); column 0 is x, column 1 is y.
        w1: current slope.
        b1: current intercept.
        lr: learning rate used for the update step.

    Returns:
        Tuple (w_new, b_new) after one step of gradient descent.
    """
    n = len(points)
    xs = points[:, 0]
    # Vectorized gradient accumulation replaces the per-row Python loop.
    residuals = w1 * xs + b1 - points[:, 1]
    dw = 2.0 / n * np.sum(residuals * xs)
    db = 2.0 / n * np.sum(residuals)
    return w1 - lr * dw, b1 - lr * db
def run():
    """Fit y = w*x + b to the points in data.csv by gradient descent.

    Loads a two-column CSV (x, y), starts from w = b = 0, runs 100000
    fixed-learning-rate gradient steps, and prints the loss before and after.
    """
    points = np.genfromtxt('data.csv', delimiter=',')
    lr = 0.0001
    w_original = 0
    b_original = 0
    loss0 = calculate_loss(points, w_original, b_original)
    # Typo fix: message previously read "orignal".
    print('the original is:w = {0},b = {1}, loss = {2}'.format(w_original, b_original, loss0))
    w_new = w_original
    b_new = b_original
    for _ in range(100000):
        w_new, b_new = calculate_tidu(points, w_new, b_new, lr)
    loss_new = calculate_loss(points, w_new, b_new)
    print('the running is:w = {0},b = {1}, loss = {2}'.format(w_new, b_new, loss_new))
# Script entry point: only train when executed directly, not on import.
if __name__ =='__main__':
    run()
三、手写数字体的识别实验
基础理论: