# Linear regression with PyTorch's nn module
# (original title "线性回归从0实现" = "linear regression from scratch" — a misnomer,
#  since this script uses the high-level nn/optim APIs; also made a comment so the
#  bare text no longer raises a NameError at import)
import torch
from matplotlib import pyplot as plt
import numpy as np
import random
import torch.utils.data as Data
import torch.nn as nn
from torch.nn import init
import torch.optim as optim
# ---- Synthetic dataset: y = X @ true_w + true_b + gaussian noise ----
num_inputs = 2        # features per example
num_examples = 1000   # dataset size
true_w = [2, -3.4]    # ground-truth weights the model should recover
true_b = 4.2          # ground-truth bias

features = torch.randn(num_examples, num_inputs, dtype=torch.float32)
labels = features[:, 0] * true_w[0] + features[:, 1] * true_w[1] + true_b
# Perturb the targets slightly so the data is not perfectly linear.
noise = np.random.normal(0, 0.01, size=labels.size())
labels += torch.tensor(noise, dtype=torch.float32)

# Wrap the tensors in a DataLoader that yields shuffled mini-batches.
batch_size = 10
dataset = Data.TensorDataset(features, labels)
data_iter = Data.DataLoader(dataset, batch_size, shuffle=True)

# Peek at a single mini-batch to sanity-check the pipeline.
for X, y in data_iter:
    print(X, y)
    break
class LinearNet(nn.Module):
    """Single-layer linear model mapping n_feature inputs to one output."""

    def __init__(self, n_feature):
        super(LinearNet, self).__init__()
        # One fully-connected layer computes the affine map w·x + b.
        self.linear = nn.Linear(n_feature, 1)

    def forward(self, x):
        """Return the linear layer's output for input batch x."""
        return self.linear(x)
# Build the model and print its structure and initial (random) parameters.
net = LinearNet(num_inputs)
print(net)
for param in net.parameters():
    print(param)

# Re-initialize: small gaussian weights, zero bias.
init.normal_(net.linear.weight, mean=0, std=0.01)
init.constant_(net.linear.bias, val=0)

# Mean-squared-error loss with plain SGD over all model parameters.
loss = nn.MSELoss()
optimizer = optim.SGD(net.parameters(), lr=0.03)
print(optimizer)
# ---- Training: SGD over mini-batches for a fixed number of epochs ----
num_epochs = 3
for epoch in range(1, num_epochs + 1):
    for X, y in data_iter:
        # Targets reshaped to (batch, 1) so they match the model output.
        batch_loss = loss(net(X), y.view(-1, 1))
        optimizer.zero_grad()    # clear gradients from the previous step
        batch_loss.backward()    # backprop through this mini-batch's loss
        optimizer.step()         # apply the parameter update
    # Report the loss of the last mini-batch seen in this epoch.
    print('epoch %d, loss %f' % (epoch, batch_loss.item()))
# Compare the learned parameters against the ground truth.
# BUG FIX: `net[0]` raised TypeError — LinearNet is a plain nn.Module
# subclass, not an nn.Sequential, so it is not indexable. Access the
# layer through its attribute instead.
dense = net.linear
print(true_w, dense.weight)
print(true_b, dense.bias)