激活函数
Sigmoid / Logistic:
# Sigmoid / logistic activation demo.
import torch

# 10 evenly spaced sample points spanning -100 .. 100.
a = torch.linspace(-100, 100, steps=10)
print("a:\t", a)

# Equivalent functional-API spelling (not used here):
#   from torch.nn import functional as F
#   F.sigmoid(a)
# Large negative inputs saturate to 0, large positive inputs to 1.
b = torch.sigmoid(a)
print("b:\t", b)
a: tensor([-100.0000, -77.7778, -55.5556, -33.3333, -11.1111, 11.1111,
33.3333, 55.5556, 77.7778, 100.0000])
b: tensor([0.0000e+00, 1.6655e-34, 7.4564e-25, 3.3382e-15, 1.4945e-05, 9.9999e-01,
1.0000e+00, 1.0000e+00, 1.0000e+00, 1.0000e+00])
Tanh:
# Tanh activation demo.
import torch

# 10 evenly spaced sample points spanning -1 .. 1.
c = torch.linspace(-1, 1, 10)
# Fix: label separator was "\d" — not a valid escape, so it printed a
# literal backslash-d; use "\t" for consistency with the other examples.
print("c:\t", c)
# torch.tanh() squashes inputs into the open interval (-1, 1).
d = torch.tanh(c)
print("d:\t", d)
c: tensor([-1.0000, -0.7778, -0.5556, -0.3333, -0.1111, 0.1111, 0.3333, 0.5556,
0.7778, 1.0000])
d: tensor([-0.7616, -0.6514, -0.5047, -0.3215, -0.1107, 0.1107, 0.3215, 0.5047,
0.6514, 0.7616])
ReLU:
# ReLU activation demo: torch.relu and F.relu produce the same result.
import torch
from torch.nn import functional as F

# 10 evenly spaced sample points spanning -1 .. 1.
a = torch.linspace(-1, 1, steps=10)
print("a:\t", a)

# Top-level torch API: negatives clamp to 0, positives pass through.
b = torch.relu(a)
print("b:\t", b)

# Functional API spelling of the same operation.
c = F.relu(a)
print("c:\t", c)
a: tensor([-1.0000, -0.7778, -0.5556, -0.3333, -0.1111, 0.1111, 0.3333, 0.5556,
0.7778, 1.0000])
b: tensor([0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.1111, 0.3333, 0.5556, 0.7778,
1.0000])
c: tensor([0.0000, 0.0000, 0.0000, 0.0000, 0.0000, 0.1111, 0.3333, 0.5556, 0.7778,
1.0000])