跟之前的卷积层类似。
- 激活函数
ReLU
激活函数,上代码:
import torch.nn as nn
import torchvision
from torchvision import transforms
from torch.utils.tensorboard import SummaryWriter
from torch.utils.data import DataLoader
class MyActivation(nn.Module):
    """Tiny module that wraps a single ReLU activation.

    Forward applies ReLU element-wise: max(0, x).
    """

    def __init__(self):
        super(MyActivation, self).__init__()
        # inplace=False: leave the input tensor untouched and return a new one.
        self.activation1 = nn.ReLU(inplace=False)

    def forward(self, x):
        # Delegate straight to the stored activation.
        return self.activation1(x)
# Visualize CIFAR-10 test images before and after ReLU in TensorBoard.
writer = SummaryWriter("logs")
tran_tensor = transforms.ToTensor()
dataset = torchvision.datasets.CIFAR10(root="../dataset", transform=tran_tensor, train=False, download=True)
dataloader = DataLoader(dataset=dataset, batch_size=64, shuffle=True, num_workers=0, drop_last=False)

# Create the module once, outside the loop, instead of once per batch.
activation = MyActivation()
step = 0
for data in dataloader:
    imgs, targets = data
    print("imgs.shape", imgs.shape)
    writer.add_images("original_imgs", imgs, step)
    outputs = activation(imgs)
    print("outputs.shape", outputs.shape)
    writer.add_images("outputs", outputs, step)
    # BUG FIX: advance the global step so each batch gets its own slider
    # position in TensorBoard instead of every batch overwriting step 0.
    step += 1
writer.close()
run
之后,输入命令行:
tensorboard --logdir=logs
结果:
sigmoid
激活函数,上代码:
import torch.nn as nn
import torchvision
from torchvision import transforms
from torch.utils.tensorboard import SummaryWriter
from torch.utils.data import DataLoader
class MyActivation(nn.Module):
    """Tiny module that wraps a single Sigmoid activation.

    Forward applies sigmoid element-wise: 1 / (1 + exp(-x)).
    """

    def __init__(self):
        super(MyActivation, self).__init__()
        # Sigmoid squashes every value into the open interval (0, 1).
        self.activation1 = nn.Sigmoid()

    def forward(self, x):
        # Delegate straight to the stored activation.
        return self.activation1(x)
# Visualize CIFAR-10 test images before and after Sigmoid in TensorBoard.
writer = SummaryWriter("logs")
tran_tensor = transforms.ToTensor()
dataset = torchvision.datasets.CIFAR10(root="../dataset", transform=tran_tensor, train=False, download=True)
dataloader = DataLoader(dataset=dataset, batch_size=64, shuffle=True, num_workers=0, drop_last=False)

# Create the module once, outside the loop, instead of once per batch.
activation = MyActivation()
step = 0
for data in dataloader:
    imgs, targets = data
    print("imgs.shape", imgs.shape)
    writer.add_images("original_imgs", imgs, step)
    outputs = activation(imgs)
    print("outputs.shape", outputs.shape)
    writer.add_images("outputs", outputs, step)
    # BUG FIX: advance the global step so each batch gets its own slider
    # position in TensorBoard instead of every batch overwriting step 0.
    step += 1
writer.close()
run
之后,输入命令行:
tensorboard --logdir=logs
结果:
- 线性层
nn.Linear
线性层,上代码:
import torch
import torchvision
import torch.nn as nn
from torchvision import transforms
from torch.utils.data import DataLoader
# 线性层类
class MyLinear(nn.Module):
    """Single fully-connected layer mapping a flattened CIFAR-10 batch
    (64 * 3 * 32 * 32 = 196608 values) down to 10 output features.
    """

    def __init__(self):
        super(MyLinear, self).__init__()
        # 196608 = 64 * 3 * 32 * 32: one whole flattened batch of images.
        self.linear1 = nn.Linear(196608, 10)

    def forward(self, x):
        # Affine transform: y = x @ W.T + b.
        return self.linear1(x)
# Flatten each CIFAR-10 batch into one long vector and push it through the
# linear layer, printing the tensor shape at every stage.
tran_tensor = transforms.ToTensor()
dataset = torchvision.datasets.CIFAR10(root="../dataset", train=False, transform=tran_tensor, download=True)
# drop_last=True: a final partial batch would not flatten to 196608 values.
dataloder = DataLoader(dataset=dataset, batch_size=64, shuffle=True, num_workers=0, drop_last=True)
mylinear = MyLinear()
for imgs, labels in dataloder:
    # torch.Size([64, 3, 32, 32])
    print(imgs.shape)
    flat = torch.reshape(imgs, (1, 1, 1, -1))
    # torch.Size([1, 1, 1, 196608])
    print(flat.shape)
    # torch.Size([1, 1, 1, 10])
    out = mylinear(flat)
    print(out.shape)
结果:
Batch Normalization
(不负责任且片面地讲:它既可以加速训练收敛,也可以提高精度)
nn.Conv2d
,nn.BatchNorm2d
上代码:
import torch
from torch.utils.data import DataLoader
from torchvision import transforms
import torch.nn as nn
import torchvision
from torch.utils.tensorboard import SummaryWriter
class MyBatchNormalization(nn.Module):
    """Conv2d followed by BatchNorm2d over the conv's feature maps."""

    def __init__(self):
        super(MyBatchNormalization, self).__init__()
        # 3x3 conv, no padding: a 32x32 input shrinks to 30x30, 6 channels.
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=6, kernel_size=3, stride=1, padding=0)
        # Normalizes each of the 6 channels across the batch.
        self.batch_norm = nn.BatchNorm2d(6)

    def forward(self, x):
        # Convolve, then batch-normalize the resulting feature maps.
        return self.batch_norm(self.conv1(x))
# Visualize CIFAR-10 batches before and after conv + batch-norm in TensorBoard.
writer = SummaryWriter("logs")
tran_tensor = transforms.ToTensor()
dataset = torchvision.datasets.CIFAR10(root='../dataset', train=False, transform=tran_tensor, download=True)
dataloader = DataLoader(dataset=dataset, batch_size=64, shuffle=True, num_workers=0, drop_last=True)
my_batch_norm = MyBatchNormalization()
step = 0
for data in dataloader:
    imgs, targets = data
    print("imgs.shape", imgs.shape)
    writer.add_images("input", imgs, step)
    outputs = my_batch_norm(imgs)
    # add_images expects 3-channel images; fold the 6 output channels into
    # the batch dimension: (64, 6, 30, 30) -> (128, 3, 30, 30) for display.
    outputs = torch.reshape(outputs, (-1, 3, 30, 30))
    print("outputs.shape", outputs.shape)
    writer.add_images("outputs", outputs, step)
    step = step + 1
# BUG FIX: close the writer so buffered events are flushed to disk;
# without this the last batches may never appear in TensorBoard.
writer.close()
run
之后,运行命令:
tensorboard --logdir=logs
结果
上一章 10.初识Pytorch使用池化层并对其进行可视化
下一章 12.初识Pytorch搭建网络 LeNet-5复现(含nn.Sequential用法)