ReLU
import torch
from torch import nn
from torch.nn import ReLU
# Demo tensor: 2x2 values with mixed signs (mixed int/float literals -> float32).
# NOTE: `input` shadows the builtin; kept because later tutorial code uses the name.
input = torch.tensor([[1, -0.5],
                      [-1, 3]])
# Reshape to (N, C, H, W) = (1, 1, 2, 2) so nn modules accept it as a batch.
output = input.reshape(-1, 1, 2, 2)
print(output.shape)
class Lixinyu(nn.Module):
    """Minimal module wrapping a single ReLU activation."""

    def __init__(self):
        super().__init__()
        self.relu1 = ReLU()

    def forward(self, input):
        # Element-wise max(0, x): negatives become 0, positives pass through.
        return self.relu1(input)
# Apply the ReLU module to the demo tensor and print the activation:
# negatives are clamped to 0, positive values pass through unchanged.
lixinyu = Lixinyu()
output = lixinyu(input)
print(output)
D:\anaconda\python.exe C:/Users/ASUS/Desktop/tudui/nn_relu.py
torch.Size([1, 1, 2, 2])
tensor([[1., 0.],
[0., 3.]])
Process finished with exit code 0
图像展示
import torch
import torchvision.datasets
from torch import nn, sigmoid
from torch.nn import ReLU
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
# CIFAR-10 test split converted to tensors; downloaded on first run.
dataset = torchvision.datasets.CIFAR10(
    "./dataset",
    train=False,
    transform=torchvision.transforms.ToTensor(),
    download=True,
)
# Serve the images in batches of 64 for logging.
dataloader = DataLoader(dataset, batch_size=64)
class Lixinyu(nn.Module):
    """Wraps nn.ReLU so the activation can be applied as a standalone module."""

    def __init__(self):
        super().__init__()
        self.relu1 = ReLU()

    def forward(self, input):
        # ReLU keeps the input shape; only values below zero are changed (to 0).
        return self.relu1(input)
# Log every batch before and after ReLU so the two image grids can be
# compared side by side in TensorBoard (`tensorboard --logdir=p20`).
# NOTE(review): ToTensor() yields values in [0, 1], so ReLU is likely the
# identity on these images — confirm if a visible difference is expected.
lixinyu = Lixinyu()
writer = SummaryWriter("p20")
step = 0
for imgs, targets in dataloader:
    writer.add_images("origin", imgs, step)
    output = lixinyu(imgs)
    writer.add_images("relu", output, step)
    step += 1
writer.close()
引入非线性变换（如 ReLU）是为了增强模型对非线性关系的拟合能力——若没有非线性激活，多层网络只相当于一个线性变换。