本文整理汇总了Python中torch.nn.ReLU方法的典型用法代码示例。如果您正苦于以下问题:Python nn.ReLU方法的具体用法?Python nn.ReLU怎么用?Python nn.ReLU使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在模块torch.nn的用法示例。
在下文中一共展示了nn.ReLU方法的18个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: __init__
点赞 7
# 需要导入模块: from torch import nn [as 别名]
# 或者: from torch.nn import ReLU [as 别名]
def __init__(self, input_size, n_channels, ngf, n_layers, activation='tanh'):
    """Build a DCGAN-style transposed-convolution image decoder.

    Args:
        input_size: channel count of the 1x1 latent input fed to the
            first ConvTranspose2d.
        n_channels: channel count of the output image.
        ngf: feature-map width of the *last* hidden layer; earlier
            layers are progressively wider.
        n_layers: total number of ConvTranspose2d layers.
        activation: output nonlinearity, ``'tanh'`` or ``'sigmoid'``.

    Raises:
        NotImplementedError: if ``activation`` is not one of the two
            supported names.
    """
    super(ImageDecoder, self).__init__()
    # Widest layer first: the stem outputs ngf * 2^(n_layers - 2) maps,
    # halved after every upsampling block below.
    ngf = ngf * (2 ** (n_layers - 2))
    # 1x1 -> 4x4 projection from the latent vector.
    layers = [nn.ConvTranspose2d(input_size, ngf, 4, 1, 0, bias=False),
              nn.BatchNorm2d(ngf),
              nn.ReLU(True)]
    # Each block doubles the spatial resolution and halves the width.
    for i in range(1, n_layers - 1):
        layers += [nn.ConvTranspose2d(ngf, ngf // 2, 4, 2, 1, bias=False),
                   nn.BatchNorm2d(ngf // 2),
                   nn.ReLU(True)]
        ngf = ngf // 2
    # Final upsampling to the output image; no batch norm before the
    # output nonlinearity.
    layers += [nn.ConvTranspose2d(ngf, n_channels, 4, 2, 1, bias=False)]
    if activation == 'tanh':
        layers += [nn.Tanh()]
    elif activation == 'sigmoid':
        layers += [nn.Sigmoid()]
    else:
        # Fix: attach a message so the failure names the bad argument
        # instead of raising a bare NotImplementedError.
        raise NotImplementedError(
            'Unsupported activation: {!r}'.format(activation))
    self.main = nn.Sequential(*layers)
开发者ID:jthsieh,项目名称:DDPAE-video-prediction,代码行数:25,
示例2: __init__
点赞 7
# 需要导入模块: from torch import nn [as 别名]
# 或者: from torch.nn import ReLU [as 别名]
def __init__(self):
    """Assemble the GoogLeNet variant: conv stem, nine Inception
    blocks with a single max-pool between stages 3 and 4, then
    average pooling and a 10-way linear classifier."""
    super(GoogLeNet, self).__init__()

    # Stem: one 3x3 conv (in place of the original 7x7 stem).
    self.pre_layers = nn.Sequential(
        nn.Conv2d(3, 192, kernel_size=3, padding=1),
        nn.BatchNorm2d(192),
        nn.ReLU(True),
    )

    # Stage 3 Inception blocks. Tuples are the positional branch
    # widths passed straight to Inception(...). setattr goes through
    # nn.Module.__setattr__, so registration order matches the
    # original attribute-by-attribute assignments exactly.
    stage3 = [
        ('a3', (192, 64, 96, 128, 16, 32, 32)),
        ('b3', (256, 128, 128, 192, 32, 96, 64)),
    ]
    for attr, widths in stage3:
        setattr(self, attr, Inception(*widths))

    self.maxpool = nn.MaxPool2d(3, stride=2, padding=1)

    # Stages 4 and 5.
    later_stages = [
        ('a4', (480, 192, 96, 208, 16, 48, 64)),
        ('b4', (512, 160, 112, 224, 24, 64, 64)),
        ('c4', (512, 128, 128, 256, 24, 64, 64)),
        ('d4', (512, 112, 144, 288, 32, 64, 64)),
        ('e4', (528, 256, 160, 320, 32, 128, 128)),
        ('a5', (832, 256, 160, 320, 32, 128, 128)),
        ('b5', (832, 384, 192, 384, 48, 128, 128)),
    ]
    for attr, widths in later_stages:
        setattr(self, attr, Inception(*widths))

    # Head: global 8x8 average pool + linear layer for 10 classes.
    self.avgpool = nn.AvgPool2d(8, stride=1)
    self.linear = nn.Linear(1024, 10)
开发者ID:StephanZheng,项目名称:neural-fingerprinting,代码行数:26,
示例3: __init__
点赞 6
# 需要导入模块: from torch import nn [as 别名]
# 或者: from torch.nn import ReLU [as 别名]
def __init__(self, block, layers, num_classes=1000):
    """Dilated ResNet backbone.

    Args:
        block: residual block class; must expose an ``expansion``
            attribute (used to size ``self.fc`` below).
        layers: sequence of four ints — number of blocks per stage,
            forwarded to ``self._make_layer``.
        num_classes: output size of the final linear layer.
    """
    self.inplanes = 64
    super(MyResNet, self).__init__()
    # 7x7 stride-2 stem conv, then BN/ReLU and a stride-2 max pool.
    self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
    bias=False)
    self.bn1 = nn.BatchNorm2d(64)
    self.relu = nn.ReLU(inplace=True)
    self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
    # note the increasing dilation: layer3/layer4 keep stride=1 and use
    # dilation 2/4 instead, preserving spatial resolution in the deep
    # stages (layer2 still downsamples with stride=2).
    self.layer1 = self._make_layer(block, 64, layers[0])
    self.layer2 = self._make_layer(block, 128, layers[1], stride=2, dilation=1)
    self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2)
    self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4)
    # these layers will not be used — presumably kept so pretrained
    # classifier checkpoints still load; TODO confirm against callers.
    self.avgpool = nn.AvgPool2d(7)
    self.fc = nn.Linear(512 * block.expansion, num_classes)
    # He-style init for conv weights: N(0, sqrt(2 / fan_out)).
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            m.weight.data.normal_(0, math.sqrt(2. / n))
            # NOTE(review): this init loop may continue past the visible
            # chunk (e.g. a BatchNorm branch) — view may be truncated.