# - Softmax 回归是一个多类分类模型
# - 使用softmax操作得到每个类的预测置信度
# - 使用交叉熵来衡量预测和标号的区别
import torch
import random
import torchvision
from torch import nn
from torch.utils.data import Dataset,DataLoader
from torch.utils.tensorboard import SummaryWriter
from torchvision import transforms
from d2l import torch as d2l
import matplotlib.pyplot as plt
# Load the Fashion-MNIST train/test splits as tensors (downloads on first run).
minst_train = torchvision.datasets.FashionMNIST(
    root='./data', train=True, transform=transforms.ToTensor(), download=True)
minst_test = torchvision.datasets.FashionMNIST(
    root='./data', train=False, transform=transforms.ToTensor(), download=True)

minst_train_len = len(minst_train)
minst_test_len = len(minst_test)
print('训练集的长度:{} ,测试集的长度:{}'.format(minst_train_len, minst_test_len))

# Minibatches of 256, reshuffled each epoch.
dataloader = DataLoader(dataset=minst_train, batch_size=256, shuffle=True)

# Peek at a single batch to confirm tensor shapes and dtypes.
X, y = next(iter(dataloader))
print(X.shape, X.dtype, y.shape, y.dtype)
# Softmax regression: flatten each 28x28 image into 784 features,
# then a single linear layer maps them to 10 class logits.
net = nn.Sequential(
    nn.Flatten(),
    nn.Linear(784, 10),
)
def init_weights(m):
    """Initialize Linear layers in-place with N(0, 0.01) weights.

    Intended to be passed to ``net.apply``; any non-Linear module is
    left untouched.
    """
    # isinstance is the idiomatic type check and, unlike type(m) == nn.Linear,
    # also matches subclasses of nn.Linear.
    if isinstance(m, nn.Linear):
        nn.init.normal_(m.weight, std=0.01)
# Run the custom initializer over every submodule of the model.
net.apply(init_weights)

# Per-sample cross-entropy: reduction='none' keeps one loss value per example.
loss = nn.CrossEntropyLoss(reduction='none')

# Plain minibatch SGD over all model parameters.
optimer = torch.optim.SGD(params=net.parameters(), lr=0.1)
epoches = 10
for epoch in range(epoches):
    # Accumulate the summed per-sample loss so we can report a scalar
    # epoch average instead of printing the raw 256-element loss tensor
    # of the last batch (the original behavior).
    total_loss = 0.0
    n_samples = 0
    for X, y in dataloader:
        y1 = net(X)
        l = loss(y1, y)           # per-sample losses, shape (batch,)
        optimer.zero_grad()
        l.mean().backward()       # average over the batch for the gradient
        optimer.step()
        total_loss += l.sum().item()
        n_samples += y.numel()
    print("epoch:{} loss:{}".format(epoch + 1, total_loss / n_samples))