import numpy as np
import torch
from torch.utils import data
from torch import nn
from torch.utils.tensorboard import SummaryWriter
# Build a synthetic dataset: y = Xw + b + Gaussian noise.
def synthetic_data(w, b, num_examples):
    """Generate `num_examples` points from a noisy linear model.

    Returns (X, y) where X has shape (num_examples, len(w)) and
    y has shape (num_examples, 1); noise is N(0, 0.01).
    """
    inputs = torch.normal(0, 1, (num_examples, len(w)))
    targets = torch.matmul(inputs, w) + b
    targets += torch.normal(0, 0.01, targets.shape)
    return inputs, targets.reshape((-1, 1))
# Ground-truth parameters the trained model should recover.
true_w=torch.tensor([2,-3.4])
true_b=4.2
# 1000 noisy samples drawn from y = X @ true_w + true_b.
features,labels=synthetic_data(true_w,true_b,1000)
def load_array(data_arrays, batch_size, is_train=True):
    """Wrap tensors in a PyTorch DataLoader.

    Batches are reshuffled every epoch when `is_train` is True.
    """
    dataset = data.TensorDataset(*data_arrays)
    return data.DataLoader(dataset, batch_size, shuffle=is_train)
# Mini-batch size used for SGD.
batch_size=10
# Iterator yielding shuffled (features, labels) mini-batches each epoch.
data_iter=load_array((features,labels),batch_size)
# Define the model: a single linear layer (2 inputs -> 1 output).
class Net(nn.Module):
    """Linear regression expressed as a one-layer network."""

    def __init__(self):
        super().__init__()
        self.linear = nn.Linear(2, 1)

    def forward(self, x):
        return self.linear(x)
net=Net()
# Loss function: mean squared error (averaged over the batch).
loss_fn= nn.MSELoss()
# Optimizer: plain SGD over all model parameters, lr = 0.03.
optimizer = torch.optim.SGD(net.parameters(),lr=0.03)
# TensorBoard writer; event files go to the "keshihua" directory.
writer=SummaryWriter("keshihua")
# Global step counter used as the TensorBoard x-axis.
step=0
# Train: 3 epochs of mini-batch SGD, logging the per-step loss to TensorBoard.
num_epochs = 3
for epoch in range(num_epochs):
    for x, y in data_iter:
        output = net(x)                 # forward pass
        loss = loss_fn(output, y)
        optimizer.zero_grad()           # clear gradients from the previous step
        loss.backward()
        optimizer.step()
        step += 1
        # .item() detaches the scalar from the autograd graph so a plain
        # Python float is logged instead of a live tensor.
        writer.add_scalar("linear", loss.item(), step)
# Flush and close the writer so all pending events reach disk.
writer.close()
# Li Mu: concise linear-regression implementation (PyTorch).
# (Webpage residue from the original blog post, published 2024-05-21 19:48:43.)