1.常见的激活函数
1.1 RELU
- ReLU(x) = max(0, x)
1.2 LeakyRELU
- LeakyReLU(x)=max(0,x)+negative_slope∗min(0,x)
1.3 Sigmoid
- Sigmoid(x) = σ(x) = (1 + e^{-x})^{-1} = 1 / (1 + e^{-x})
2.代码实战
from torch import nn
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
from torchvision import datasets, transforms
class My_module(nn.Module):
    """Minimal wrapper module that applies an element-wise sigmoid.

    Sigmoid(x) = 1 / (1 + e^{-x}); every output value lies in (0, 1).
    """

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # Registered as a submodule so it shows up in the module tree.
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        """Return the input tensor squashed element-wise into (0, 1)."""
        return self.sigmoid(x)
# Script: log CIFAR-10 image batches to TensorBoard before and after
# applying an element-wise sigmoid, to visualize the activation's effect.
my_module = My_module()

# Event files are written under ./sigmoid (view with `tensorboard --logdir sigmoid`).
writer = SummaryWriter(log_dir="sigmoid")

# Fix: request the evaluation split explicitly. The original call omitted
# `train=False`, so the variable named `test_dataset` actually held the
# 50k-image TRAINING split (CIFAR10 defaults to train=True).
test_dataset = datasets.CIFAR10(
    root="datasets",
    train=False,
    transform=transforms.ToTensor(),
    download=True,
)
test_dataloader = DataLoader(dataset=test_dataset, batch_size=64, shuffle=True, drop_last=False)

step = 0
for data in test_dataloader:
    imgs, labels = data  # labels are unused; only the images are visualized
    imgs_sigmoid = my_module(imgs)  # pixel values squashed into (0, 1)
    writer.add_images(tag="Before_Sigmoid", img_tensor=imgs, global_step=step)
    writer.add_images(tag="After_Sigmoid", img_tensor=imgs_sigmoid, global_step=step)
    step += 1
writer.close()