1. Build a fully connected neural network (Section 3).
Extends the simple neural-network framework by adding a ReLU activation function.
import torch.nn as nn
########################## Step 1: Build the fully connected network ##########################
class NeuralNet(nn.Module):
    """Two-layer fully connected network: Linear -> ReLU -> Linear.

    Args:
        in_features: size of each input sample.
        hidden_features: width of the single hidden layer.
        out_features: size of each output sample.
    """

    def __init__(self, in_features, hidden_features, out_features):
        super().__init__()
        # Input layer -> hidden layer.
        self.layer1 = nn.Linear(in_features, hidden_features)
        # Hidden layer -> output layer.
        self.layer2 = nn.Linear(hidden_features, out_features)

    def forward(self, x):
        """Apply layer1, a ReLU non-linearity, then layer2; return raw logits."""
        hidden = nn.functional.relu(self.layer1(x))
        return self.layer2(hidden)
2. Download the dataset (Section 6).
from torchvision.transforms import ToTensor
from torchvision.datasets import MNIST
############################ Step 2: Dataset download ##################################
# Training split: download MNIST into ./ (if absent) and convert images to tensors.
trainData = MNIST(root="./", train=True, transform=ToTensor(), download=True)
# Test split of the same dataset, with the same transform.
testData = MNIST(root="./", train=False, transform=ToTensor(), download=True)
3. Load the dataset with DataLoader (Section 6).
from torch.utils.data import DataLoader
############################ Step 3: Dataset loading ##################################
# Mini-batch size shared by the data loaders.
batch_size = 64

# Wrap the training set in a DataLoader; reshuffle samples every epoch.
trainData_loader = DataLoader(dataset=trainData, batch_size=batch_size, shuffle=True)
testData_loader = DataLoader(dataset = testData,
batch_size = batch_size,