Table of Contents
Demo Model: manually build a model
Eliminate manually computed gradient
Replace loss function and gradient update process
Machine Learning Pipeline
Step 1: Design model (input, output size, forward pass)
Step 2: Construct loss and optimizer
Step 3: Training Loop
— forward pass: compute prediction
— backward pass: gradients
— update the weights
Demo Model: manually build a model
# Manually built a model
import numpy as np
# True function: f = 2*x
# using linear regression model: y = w*x
X = np.array([1, 2, 3, 4], dtype = np.float32)
Y = np.array([2, 4, 6, 8], dtype = np.float32)
w = 0.0
# model prediction
def forward(x):
return w*x
# loss function: MSE = 1/N * (w*x - y)**2
def loss(y, y_pre):
return (y_pre - y)**2.mean()
# gradient
# dJ/dw = 1/N * 2*(w*x - y) * x
def gradient( x, y, y_pre):
return np.dot(2*x, (y_pre - y)).mean()
print(f"prediction before training {forward(5):.3f}")
# Training loop
learning_rate = 0.01
n_iters = 10
for epoch in range(n_iters):
# prediction
y_pre = forward(X)
l = loss(Y, y_pre)
dw = gradient(X, Y, y_pre)
# update the weights
w -= learning_rate * gradient
if epoch % 2 == 0:
print(f"epoch {epoch+1}: w = {w:.3f}, loss = {l:.8f}")
print(f"prediction after training {forward(5):.3f}")