import numpy as np
import torch
from torch import nn
from torch.autograd import Variable
# Basic building block: convolution + batch norm + ReLU activation
def conv_relu(in_channel, out_channel, kernel, stride=1, padding=0):
    """Return a Conv2d -> BatchNorm2d -> ReLU(inplace) sequential block.

    Args:
        in_channel: number of input feature maps.
        out_channel: number of output feature maps.
        kernel: convolution kernel size.
        stride: convolution stride (default 1).
        padding: zero-padding added to each spatial side (default 0).

    Returns:
        nn.Sequential applying the three layers in order.
    """
    return nn.Sequential(
        nn.Conv2d(in_channel, out_channel, kernel, stride, padding),
        nn.BatchNorm2d(out_channel, eps=1e-3),
        nn.ReLU(True),
    )
# Inception module: parallel 1x1 / 3x3 / 5x5 / pool branches concatenated on channels
class inception(nn.Module):
def __init__(self, in_channel, out1_1,out2_1, out2_3, out3_1, out3_5,out4_1):
super(inception, self).__init__()
self.branch1x1 = conv_relu(in_channel,out1_1, 1)
self.branch3x3 = nn.Sequential(
conv_relu(in_channel, out2_1, 1),
conv_relu(out2_1, out2_3, 3, padding=1)
)
self.branch5x5 = nn.Sequential(
conv_relu(in_channel, out3_1, 1),
conv_relu(out3_1, out3_5, 5, padding=2)
)
self