What is the difference between nn.ReLU and nn.functional.relu?
nn.ReLU is a layer (an nn.Module subclass), so it must be instantiated and attached to an nn.Module before it can be used:
```python
import torch.nn as nn
import torch.nn.functional as F


class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.conv2 = nn.Conv2d(6, 16, 5)
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 40)
        self.fc4 = nn.Linear(40, 10)
        self.relu = nn.ReLU()  # the ReLU layer is instantiated and registered here

    def forward(self, x):
        x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
        x = x.view(x.size()[0], -1)
        x = self.relu(self.fc1(x))  # the stored module is called like any other layer
        x = self.relu(self.fc2(x))
        x = self.relu(self.fc3(x))
        x = self.fc4(x)
        return x
```
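One practical consequence of the module form: because nn.ReLU is a module, it can also be placed inside containers such as nn.Sequential, where every element must be a module. A minimal sketch (the layer sizes here are arbitrary, chosen only for illustration):

```python
import torch.nn as nn

# Minimal sketch: nn.ReLU can be listed inside nn.Sequential like any layer.
model = nn.Sequential(
    nn.Linear(84, 40),
    nn.ReLU(),          # a module object, so it fits in the container
    nn.Linear(40, 10),
)
print(model)  # the ReLU shows up in the printed module structure
```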
F.relu, on the other hand, is a plain function call, which looks more convenient and concise:
```python
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.conv2 = nn.Conv2d(6, 16, 5)
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 40)
        self.fc4 = nn.Linear(40, 10)

    def forward(self, x):
        x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
        x = x.view(x.size()[0], -1)
        x = F.relu(self.fc1(x))  # F.relu is called directly; no module is registered in __init__
        x = F.relu(self.fc2(x))
        x = F.relu(self.fc3(x))
        x = self.fc4(x)
        return x
```
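The two definitions above compute the same network: ReLU has no learnable parameters, so the module and functional forms are numerically identical. A quick sanity check on the activation alone (the input shape is arbitrary):

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

x = torch.randn(2, 8)                            # arbitrary example input
module_out = nn.ReLU()(x)                        # module form
functional_out = F.relu(x)                       # functional form
print(torch.equal(module_out, functional_out))   # True
```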
Which form you use largely comes down to coding style: the module form integrates with containers such as nn.Sequential and appears when the model is printed, while the functional form keeps forward() shorter.
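Whichever form you pick, both expose the same inplace option, which overwrites the input tensor instead of allocating a new one; a small sketch:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

x = torch.randn(4)
y = F.relu(x, inplace=True)       # functional form: modifies x in place
layer = nn.ReLU(inplace=True)     # the module form takes the same flag
z = layer(torch.randn(4))
```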