非线性激活
padding层不进行介绍,原因:
- 用不到
- 卷积层有相关参数
batch_size是什么?
输入张量最外层的维度,也就是 shape 中的第一个元素,表示一次送入网络的样本数量。
举例👇
import torch

# A 2x2 example that contains negative values.
input = torch.tensor([[1, -0.5],
                      [-1, 3]])
# -1 lets reshape infer the batch dimension on its own;
# we know the input is a single 2x2 matrix, so it resolves to 1.
output = torch.reshape(input, (-1, 1, 2, 2))
以下是 ReLU 函数的使用示例👇
nn.relu.py
# Standalone ReLU example (nn.relu.py): imports added so the snippet runs on its own.
import torch
from torch import nn

# 2x2 example with negative entries; ReLU should zero them out.
input = torch.tensor([[1, -0.5],
                      [-1, 3]
                      ])
# -1 lets reshape infer the batch dimension (here 1): final shape (1, 1, 2, 2).
input = torch.reshape(input, (-1, 1, 2, 2))
print(f"input = {input}")


class Tudui(nn.Module):
    """Minimal module that applies an element-wise ReLU."""

    def __init__(self):
        super(Tudui, self).__init__()
        # Fixed: nn.Relu does not exist — the correct class name is nn.ReLU.
        self.relu1 = nn.ReLU(inplace=False)

    def forward(self, input):
        # Fixed: the original called self.relu1() with no argument, which
        # raises a TypeError; the activation must receive the input tensor.
        output = self.relu1(input)
        return output


tudui = Tudui()
output = tudui(input)
print(f"output = {output}")
这里 ReLU 函数中 inplace 参数的含义:
inplace 表示是否用处理后的值取代原值。若设置为 True,则取代原值;若设置为 False,则不取代原值。
in_1 = -1
out_1 = nn.ReLU(inplace = True)
#则
in_1 ==> 0
in_2=-1
out_2 = nn.ReLU(inplace = False)
#则
in_2 ==> -1
out_2 ==> 0
import torch
import torchvision
import torch.nn
import tensorboardX.summary
from tensorboardX import SummaryWriter
from torch import nn
from torch.utils.data import DataLoader
# 2x2 example with both positive and negative values,
# reshaped to (batch, channel, H, W) = (1, 1, 2, 2) for the module.
input = torch.reshape(torch.tensor([[1, -0.5],
                                    [-1, 3]]),
                      (-1, 1, 2, 2))
class Tudui(nn.Module):
    """Minimal module that applies an element-wise sigmoid to its input."""

    def __init__(self):
        super().__init__()
        self.sigmoid1 = nn.Sigmoid()

    def forward(self, input):
        # Element-wise 1 / (1 + exp(-x)).
        return self.sigmoid1(input)
tudui = Tudui()
output = tudui(input)
print(f" output = {output}")
print("="*111)

# CIFAR10 test split, converted to tensors so it can be batched.
data_set = torchvision.datasets.CIFAR10(root="hymenoptera_data/val/CIFAR10",
                                        train=False,
                                        transform=torchvision.transforms.ToTensor())
data_loader = DataLoader(dataset=data_set, batch_size=64)

writer = SummaryWriter("tb_logs")
step = 0
# Log each batch before and after the sigmoid so the effect
# can be compared side by side in TensorBoard.
for imgs, targets in data_loader:
    print(f"imgs = {imgs}")
    print("\n")
    writer.add_images("orignal_sigmoid", imgs, global_step=step)
    output = tudui(imgs)
    print(f"output = {output}")
    print("\n")
    print("="*111)
    writer.add_images("final_sigmoid", output, global_step=step)
    step += 1
writer.close()
运行结果