1.ReLU
处理:大于等于0的保留原始数值,小于0的置0
参数 inplace 默认为 False,即不在原张量上就地修改,而是新建一个张量保存结果。
实现:
"""Demonstrate nn.ReLU: values >= 0 are kept, negative values become 0."""
import torch
from torch import nn
from torch.nn import ReLU


class Model(nn.Module):
    """Minimal module wrapping a single ReLU activation."""

    def __init__(self):
        super(Model, self).__init__()
        # inplace=False (the default): ReLU returns a new tensor and leaves
        # its input unchanged.
        self.relu1 = ReLU()

    def forward(self, input):
        # NOTE: `input` shadows the builtin of the same name, kept to
        # preserve the original call interface.
        output = self.relu1(input)
        return output


# Renamed from `input` so the module-level name no longer shadows the builtin.
x = torch.tensor([[1, -0.5],
                  [-1, 3]])

# Reshape to (batch, channel, H, W) = (1, 1, 2, 2). Only its shape is
# printed — as in the original, the model below receives the raw 2x2 tensor,
# which works because ReLU is applied element-wise regardless of shape.
reshaped = torch.reshape(x, (-1, 1, 2, 2))
print(reshaped.shape)

model = Model()
output = model(x)
print(output)
3.sigmoid
实现:
"""Demonstrate nn.Sigmoid on CIFAR-10 images, logging to TensorBoard."""
import torch
from torch import nn
from torch.nn import Sigmoid


class Model(nn.Module):
    """Minimal module applying a Sigmoid activation element-wise."""

    def __init__(self):
        super(Model, self).__init__()
        # The original also constructed a ReLU layer here that forward()
        # never used; removed as dead code.
        self.sigmoid1 = Sigmoid()

    def forward(self, input):
        # NOTE: `input` shadows the builtin of the same name, kept to
        # preserve the original call interface.
        output = self.sigmoid1(input)
        return output


def main():
    """Run the demo: download CIFAR-10 and log sigmoid input/output images."""
    # Heavy / I/O-only imports live inside main() so that importing this
    # module for the Model class does not trigger a dataset download.
    import torchvision
    from torch.utils.data import DataLoader
    from torch.utils.tensorboard import SummaryWriter

    # Shape demo only (matches the original script's first print): reshape a
    # 2x2 tensor to (batch=1, channel=1, 2, 2) and show the resulting shape.
    x = torch.tensor([[1, -0.5],
                      [-1, 3]])
    print(torch.reshape(x, (-1, 1, 2, 2)).shape)

    dataset = torchvision.datasets.CIFAR10(
        "dataset", train=False, download=True,
        transform=torchvision.transforms.ToTensor())
    dataloader = DataLoader(dataset, batch_size=6)

    model = Model()
    writer = SummaryWriter("logs")
    try:
        for step, (imgs, targets) in enumerate(dataloader):
            writer.add_images("input", imgs, global_step=step)
            writer.add_images("output", model(imgs), step)
    finally:
        # Always flush and close the event file, even if iteration fails.
        writer.close()


if __name__ == "__main__":
    main()