手动搭建Inception V1模型(pytorch)

手动搭建Inception V1模型(pytorch)


一、Inception V1模型结构

  • Inception V1 module

(图 1:Inception V1 module 结构示意图)

  • Inception V1完整结构
    (图 2:Inception V1 完整网络结构图)

二、代码示例

import torchvision
import torch
import torch.nn as nn

# iv1 = torchvision.models.googlenet(pretrained=False)
#
# print (iv1)

def ConvBNRelu(in_channels, out_channels, kernel_size):
    """Basic Conv2d -> BatchNorm2d -> ReLU6 unit used by every Inception branch.

    Stride is fixed at 1 and padding is ``kernel_size // 2``, so for the odd
    kernel sizes used in this file (1/3/5) the spatial size is preserved
    ("same" padding).
    """
    conv = nn.Conv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=1,
        padding=kernel_size // 2,
    )
    return nn.Sequential(conv, nn.BatchNorm2d(out_channels), nn.ReLU6(inplace=True))

class InceptionV1Module(nn.Module):
    """One GoogLeNet (Inception v1) block: four parallel branches whose
    outputs are concatenated along the channel dimension.

    Branches:
      1. 1x1 conv                   -> out_channels1 maps
      2. 1x1 reduce, then 3x3 conv  -> out_channels2 maps
      3. 1x1 reduce, then 5x5 conv  -> out_channels3 maps
      4. 3x3 max-pool, then 1x1 conv-> out_channels4 maps
    Total output channels = out_channels1 + out_channels2 + out_channels3 + out_channels4;
    spatial size is unchanged (all branches use "same"-style padding).
    """

    def __init__(self, in_channels, out_channels1, out_channels2reduce, out_channels2,
                 out_channels3reduce, out_channels3, out_channels4):
        super(InceptionV1Module, self).__init__()

        # Branch 1: plain 1x1 convolution.
        self.branch1_conv = ConvBNRelu(in_channels, out_channels1, kernel_size=1)

        # Branch 2: 1x1 bottleneck followed by a 3x3 convolution.
        self.branch2_conv1 = ConvBNRelu(in_channels, out_channels2reduce, kernel_size=1)
        self.branch2_conv2 = ConvBNRelu(out_channels2reduce, out_channels2, kernel_size=3)

        # Branch 3: 1x1 bottleneck followed by a 5x5 convolution.
        self.branch3_conv1 = ConvBNRelu(in_channels, out_channels3reduce, kernel_size=1)
        self.branch3_conv2 = ConvBNRelu(out_channels3reduce, out_channels3, kernel_size=5)

        # Branch 4: 3x3 max-pool (stride 1 keeps spatial size), then 1x1 conv.
        self.branch4_pool = nn.MaxPool2d(kernel_size=3, stride=1, padding=1)
        self.branch4_conv1 = ConvBNRelu(in_channels, out_channels4, kernel_size=1)

    def forward(self, x):
        branches = [
            self.branch1_conv(x),
            self.branch2_conv2(self.branch2_conv1(x)),
            self.branch3_conv2(self.branch3_conv1(x)),
            self.branch4_conv1(self.branch4_pool(x)),
        ]
        # Concatenate the branch outputs depth-wise (dim=1 is channels).
        return torch.cat(branches, dim=1)

class InceptionAux(nn.Module):
    """Auxiliary classifier head attached to intermediate Inception stages.

    The 5x5/stride-3 average pool turns a 14x14 feature map into 4x4,
    which is what the hard-coded 128*4*4 input of the first linear layer
    assumes — so this head only fits 14x14 inputs (the block4 stages here).
    """

    def __init__(self, in_channels, out_channels):
        super(InceptionAux, self).__init__()

        self.auxiliary_avgpool = nn.AvgPool2d(kernel_size=5, stride=3)
        self.auxiliary_conv1 = ConvBNRelu(in_channels, 128, kernel_size=1)
        self.auxiliary_linear1 = nn.Linear(in_features=128 * 4 * 4, out_features=1024)
        self.auxiliary_relu = nn.ReLU6(inplace=True)
        # Heavy dropout (p=0.7), as used for the aux heads in GoogLeNet.
        self.auxiliary_dropout = nn.Dropout(p=0.7)
        self.auxiliary_linear2 = nn.Linear(in_features=1024, out_features=out_channels)

    def forward(self, x):
        pooled = self.auxiliary_avgpool(x)
        features = self.auxiliary_conv1(pooled)
        # Flatten to (batch, 128*4*4) before the classifier.
        flat = features.view(features.size(0), -1)
        hidden = self.auxiliary_relu(self.auxiliary_linear1(flat))
        return self.auxiliary_linear2(self.auxiliary_dropout(hidden))

class InceptionV1(nn.Module):
    """GoogLeNet / Inception v1 for 224x224 RGB input.

    When ``stage == 'train'`` two auxiliary classifier heads are attached
    after block4_1 and block4_2 and ``forward`` returns ``(aux1, aux2, out)``;
    for any other stage only the main logits ``out`` are returned.
    """
    def __init__(self,num_classes=1000,stage='train'):
        super(InceptionV1,self).__init__()
        self.stage = stage

        # Stem: 7x7/2 conv -> 3x3/2 pool -> 1x1 conv; 224 -> 56 spatial.
        # NOTE(review): no ReLU after these convs — presumably a deliberate
        # simplification of the paper's stem; confirm before reuse.
        self.block1 = nn.Sequential(
            nn.Conv2d(in_channels=3,out_channels=64,kernel_size=7,stride=2,padding=3),
            nn.BatchNorm2d(64),
            nn.MaxPool2d(kernel_size=3,stride=2,padding=1),
            nn.Conv2d(in_channels=64,out_channels=64,kernel_size=1,stride=1),
            nn.BatchNorm2d(64)
        )
        # 3x3 conv to 192 channels, then pool; 56 -> 28 spatial.
        self.block2 = nn.Sequential(
            nn.Conv2d(in_channels=64,out_channels=192,kernel_size=3,padding=1,stride=1),
            nn.BatchNorm2d(192),
            nn.MaxPool2d(kernel_size=3,stride=2,padding=1)
        )
        # Inception 3a (192 -> 256) and 3b (256 -> 480), then pool; 28 -> 14.
        self.block3 = nn.Sequential(
            InceptionV1Module(in_channels=192,out_channels1=64,out_channels2reduce=96,out_channels2=128,
                              out_channels3reduce=16,out_channels3=32,out_channels4=32),
            InceptionV1Module(in_channels=256,out_channels1=128,out_channels2reduce=128,out_channels2=192,
                              out_channels3reduce=32,out_channels3=96,out_channels4=64),
            nn.MaxPool2d(kernel_size=3,stride=2,padding=1)
        )

        # Inception 4a (480 -> 512); its output also feeds aux head 1.
        self.block4_1 = InceptionV1Module(in_channels=480,out_channels1=192,out_channels2reduce=96,out_channels2=208,
                                          out_channels3reduce=16,out_channels3=48,out_channels4=64)

        if self.stage == 'train':
            self.aux_logits1 = InceptionAux(in_channels=512,out_channels=num_classes)

        # Inception 4b/4c/4d (512 -> 512 -> 512 -> 528).
        self.block4_2 = nn.Sequential(
            InceptionV1Module(in_channels=512,out_channels1=160,out_channels2reduce=112,out_channels2=224,
                              out_channels3reduce=24,out_channels3=64,out_channels4=64),
            InceptionV1Module(in_channels=512,out_channels1=128,out_channels2reduce=128,out_channels2=256,
                              out_channels3reduce=24,out_channels3=64,out_channels4=64),
            InceptionV1Module(in_channels=512,out_channels1=112,out_channels2reduce=144,out_channels2=288,
                              out_channels3reduce=32,out_channels3=64,out_channels4=64)
        )

        if self.stage == 'train':
            self.aux_logits2 = InceptionAux(in_channels=528,out_channels=num_classes)

        # Inception 4e (528 -> 832), then pool; 14 -> 7 spatial.
        self.block4_3 = nn.Sequential(
            InceptionV1Module(in_channels=528,out_channels1=256,out_channels2reduce=160,out_channels2=320,
                              out_channels3reduce=32,out_channels3=128,out_channels4=128),
            nn.MaxPool2d(kernel_size=3,stride=2,padding=1)
        )

        # Inception 5a/5b (832 -> 832 -> 1024) at 7x7.
        self.block5 = nn.Sequential(
            InceptionV1Module(in_channels=832,out_channels1=256,out_channels2reduce=160,out_channels2=320,
                              out_channels3reduce=32,out_channels3=128,out_channels4=128),
            InceptionV1Module(in_channels=832,out_channels1=384,out_channels2reduce=192,out_channels2=384,
                              out_channels3reduce=48,out_channels3=128,out_channels4=128)
        )

        # Global 7x7 average pool -> dropout -> linear classifier.
        self.avgpool = nn.AvgPool2d(kernel_size=7,stride=1)
        self.dropout = nn.Dropout(p=0.4)
        self.linear = nn.Linear(in_features=1024,out_features=num_classes)

    def forward(self,x):
        x = self.block1(x)
        x = self.block2(x)
        x = self.block3(x)
        # aux1/aux2 hold intermediate feature maps (512 and 528 channels at
        # 14x14); the aux classifier heads re-read them below in train stage.
        aux1 = self.block4_1(x)
        aux2 = self.block4_2(aux1)
        x = self.block4_3(aux2)
        out = self.block5(x)
        out = self.avgpool(out)
        out = self.dropout(out)
        out = out.view(out.size(0),-1)
        out = self.linear(out)
        if self.stage == 'train':
            aux1 = self.aux_logits1(aux1)
            aux2 = self.aux_logits2(aux2)
            return aux1,aux2,out
        else:
            return out

if __name__ == '__main__':
    # Smoke test: build the model, print its structure, and run one
    # forward pass in the default 'train' stage (three outputs).
    model = InceptionV1()
    print (model)

    # torchsummary is an optional third-party package; skip the per-layer
    # summary instead of crashing when it is not installed.
    try:
        from torchsummary import summary
        summary(model,(3,224,224))
    except ImportError:
        print ('torchsummary not installed; skipping layer summary')

    # Renamed from `input` to avoid shadowing the Python builtin.
    x = torch.randn(1,3,224,224)
    aux1,aux2,out = model(x)
    print (aux1.shape)
    print (aux2.shape)
    print (out.shape)
  • 输出展示:
InceptionV1(
  (block1): Sequential(
    (0): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))
    (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
    (3): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
    (4): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  )
  (block2): Sequential(
    (0): Conv2d(64, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  )
  (block3): Sequential(
    (0): InceptionV1Module(
      (branch1_conv): Sequential(
        (0): Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv1): Sequential(
        (0): Conv2d(192, 96, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv2): Sequential(
        (0): Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv1): Sequential(
        (0): Conv2d(192, 16, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv2): Sequential(
        (0): Conv2d(16, 32, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
        (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
      (branch4_conv1): Sequential(
        (0): Conv2d(192, 32, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
    )
    (1): InceptionV1Module(
      (branch1_conv): Sequential(
        (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv1): Sequential(
        (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv2): Sequential(
        (0): Conv2d(128, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv1): Sequential(
        (0): Conv2d(256, 32, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv2): Sequential(
        (0): Conv2d(32, 96, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
        (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
      (branch4_conv1): Sequential(
        (0): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
    )
    (2): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  )
  (block4_1): InceptionV1Module(
    (branch1_conv): Sequential(
      (0): Conv2d(480, 192, kernel_size=(1, 1), stride=(1, 1))
      (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU6(inplace=True)
    )
    (branch2_conv1): Sequential(
      (0): Conv2d(480, 96, kernel_size=(1, 1), stride=(1, 1))
      (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU6(inplace=True)
    )
    (branch2_conv2): Sequential(
      (0): Conv2d(96, 208, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
      (1): BatchNorm2d(208, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU6(inplace=True)
    )
    (branch3_conv1): Sequential(
      (0): Conv2d(480, 16, kernel_size=(1, 1), stride=(1, 1))
      (1): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU6(inplace=True)
    )
    (branch3_conv2): Sequential(
      (0): Conv2d(16, 48, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
      (1): BatchNorm2d(48, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU6(inplace=True)
    )
    (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
    (branch4_conv1): Sequential(
      (0): Conv2d(480, 64, kernel_size=(1, 1), stride=(1, 1))
      (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU6(inplace=True)
    )
  )
  (aux_logits1): InceptionAux(
    (auxiliary_avgpool): AvgPool2d(kernel_size=5, stride=3, padding=0)
    (auxiliary_conv1): Sequential(
      (0): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1))
      (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU6(inplace=True)
    )
    (auxiliary_linear1): Linear(in_features=2048, out_features=1024, bias=True)
    (auxiliary_relu): ReLU6(inplace=True)
    (auxiliary_dropout): Dropout(p=0.7, inplace=False)
    (auxiliary_linear2): Linear(in_features=1024, out_features=1000, bias=True)
  )
  (block4_2): Sequential(
    (0): InceptionV1Module(
      (branch1_conv): Sequential(
        (0): Conv2d(512, 160, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv1): Sequential(
        (0): Conv2d(512, 112, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(112, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv2): Sequential(
        (0): Conv2d(112, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        (1): BatchNorm2d(224, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv1): Sequential(
        (0): Conv2d(512, 24, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv2): Sequential(
        (0): Conv2d(24, 64, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
        (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
      (branch4_conv1): Sequential(
        (0): Conv2d(512, 64, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
    )
    (1): InceptionV1Module(
      (branch1_conv): Sequential(
        (0): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv1): Sequential(
        (0): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv2): Sequential(
        (0): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv1): Sequential(
        (0): Conv2d(512, 24, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv2): Sequential(
        (0): Conv2d(24, 64, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
        (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
      (branch4_conv1): Sequential(
        (0): Conv2d(512, 64, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
    )
    (2): InceptionV1Module(
      (branch1_conv): Sequential(
        (0): Conv2d(512, 112, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(112, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv1): Sequential(
        (0): Conv2d(512, 144, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv2): Sequential(
        (0): Conv2d(144, 288, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        (1): BatchNorm2d(288, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv1): Sequential(
        (0): Conv2d(512, 32, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv2): Sequential(
        (0): Conv2d(32, 64, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
        (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
      (branch4_conv1): Sequential(
        (0): Conv2d(512, 64, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
    )
  )
  (aux_logits2): InceptionAux(
    (auxiliary_avgpool): AvgPool2d(kernel_size=5, stride=3, padding=0)
    (auxiliary_conv1): Sequential(
      (0): Conv2d(528, 128, kernel_size=(1, 1), stride=(1, 1))
      (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (2): ReLU6(inplace=True)
    )
    (auxiliary_linear1): Linear(in_features=2048, out_features=1024, bias=True)
    (auxiliary_relu): ReLU6(inplace=True)
    (auxiliary_dropout): Dropout(p=0.7, inplace=False)
    (auxiliary_linear2): Linear(in_features=1024, out_features=1000, bias=True)
  )
  (block4_3): Sequential(
    (0): InceptionV1Module(
      (branch1_conv): Sequential(
        (0): Conv2d(528, 256, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv1): Sequential(
        (0): Conv2d(528, 160, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv2): Sequential(
        (0): Conv2d(160, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        (1): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv1): Sequential(
        (0): Conv2d(528, 32, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv2): Sequential(
        (0): Conv2d(32, 128, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
      (branch4_conv1): Sequential(
        (0): Conv2d(528, 128, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
    )
    (1): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  )
  (block5): Sequential(
    (0): InceptionV1Module(
      (branch1_conv): Sequential(
        (0): Conv2d(832, 256, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv1): Sequential(
        (0): Conv2d(832, 160, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv2): Sequential(
        (0): Conv2d(160, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        (1): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv1): Sequential(
        (0): Conv2d(832, 32, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv2): Sequential(
        (0): Conv2d(32, 128, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
      (branch4_conv1): Sequential(
        (0): Conv2d(832, 128, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
    )
    (1): InceptionV1Module(
      (branch1_conv): Sequential(
        (0): Conv2d(832, 384, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv1): Sequential(
        (0): Conv2d(832, 192, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch2_conv2): Sequential(
        (0): Conv2d(192, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
        (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv1): Sequential(
        (0): Conv2d(832, 48, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(48, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch3_conv2): Sequential(
        (0): Conv2d(48, 128, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
      (branch4_pool): MaxPool2d(kernel_size=3, stride=1, padding=1, dilation=1, ceil_mode=False)
      (branch4_conv1): Sequential(
        (0): Conv2d(832, 128, kernel_size=(1, 1), stride=(1, 1))
        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
        (2): ReLU6(inplace=True)
      )
    )
  )
  (avgpool): AvgPool2d(kernel_size=7, stride=1, padding=0)
  (dropout): Dropout(p=0.4, inplace=False)
  (linear): Linear(in_features=1024, out_features=1000, bias=True)
)
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1         [-1, 64, 112, 112]           9,472
       BatchNorm2d-2         [-1, 64, 112, 112]             128
         MaxPool2d-3           [-1, 64, 56, 56]               0
            Conv2d-4           [-1, 64, 56, 56]           4,160
       BatchNorm2d-5           [-1, 64, 56, 56]             128
            Conv2d-6          [-1, 192, 56, 56]         110,784
       BatchNorm2d-7          [-1, 192, 56, 56]             384
         MaxPool2d-8          [-1, 192, 28, 28]               0
            Conv2d-9           [-1, 64, 28, 28]          12,352
      BatchNorm2d-10           [-1, 64, 28, 28]             128
            ReLU6-11           [-1, 64, 28, 28]               0
           Conv2d-12           [-1, 96, 28, 28]          18,528
      BatchNorm2d-13           [-1, 96, 28, 28]             192
            ReLU6-14           [-1, 96, 28, 28]               0
           Conv2d-15          [-1, 128, 28, 28]         110,720
      BatchNorm2d-16          [-1, 128, 28, 28]             256
            ReLU6-17          [-1, 128, 28, 28]               0
           Conv2d-18           [-1, 16, 28, 28]           3,088
      BatchNorm2d-19           [-1, 16, 28, 28]              32
            ReLU6-20           [-1, 16, 28, 28]               0
           Conv2d-21           [-1, 32, 28, 28]          12,832
      BatchNorm2d-22           [-1, 32, 28, 28]              64
            ReLU6-23           [-1, 32, 28, 28]               0
        MaxPool2d-24          [-1, 192, 28, 28]               0
           Conv2d-25           [-1, 32, 28, 28]           6,176
      BatchNorm2d-26           [-1, 32, 28, 28]              64
            ReLU6-27           [-1, 32, 28, 28]               0
InceptionV1Module-28          [-1, 256, 28, 28]               0
           Conv2d-29          [-1, 128, 28, 28]          32,896
      BatchNorm2d-30          [-1, 128, 28, 28]             256
            ReLU6-31          [-1, 128, 28, 28]               0
           Conv2d-32          [-1, 128, 28, 28]          32,896
      BatchNorm2d-33          [-1, 128, 28, 28]             256
            ReLU6-34          [-1, 128, 28, 28]               0
           Conv2d-35          [-1, 192, 28, 28]         221,376
      BatchNorm2d-36          [-1, 192, 28, 28]             384
            ReLU6-37          [-1, 192, 28, 28]               0
           Conv2d-38           [-1, 32, 28, 28]           8,224
      BatchNorm2d-39           [-1, 32, 28, 28]              64
            ReLU6-40           [-1, 32, 28, 28]               0
           Conv2d-41           [-1, 96, 28, 28]          76,896
      BatchNorm2d-42           [-1, 96, 28, 28]             192
            ReLU6-43           [-1, 96, 28, 28]               0
        MaxPool2d-44          [-1, 256, 28, 28]               0
           Conv2d-45           [-1, 64, 28, 28]          16,448
      BatchNorm2d-46           [-1, 64, 28, 28]             128
            ReLU6-47           [-1, 64, 28, 28]               0
InceptionV1Module-48          [-1, 480, 28, 28]               0
        MaxPool2d-49          [-1, 480, 14, 14]               0
           Conv2d-50          [-1, 192, 14, 14]          92,352
      BatchNorm2d-51          [-1, 192, 14, 14]             384
            ReLU6-52          [-1, 192, 14, 14]               0
           Conv2d-53           [-1, 96, 14, 14]          46,176
      BatchNorm2d-54           [-1, 96, 14, 14]             192
            ReLU6-55           [-1, 96, 14, 14]               0
           Conv2d-56          [-1, 208, 14, 14]         179,920
      BatchNorm2d-57          [-1, 208, 14, 14]             416
            ReLU6-58          [-1, 208, 14, 14]               0
           Conv2d-59           [-1, 16, 14, 14]           7,696
      BatchNorm2d-60           [-1, 16, 14, 14]              32
            ReLU6-61           [-1, 16, 14, 14]               0
           Conv2d-62           [-1, 48, 14, 14]          19,248
      BatchNorm2d-63           [-1, 48, 14, 14]              96
            ReLU6-64           [-1, 48, 14, 14]               0
        MaxPool2d-65          [-1, 480, 14, 14]               0
           Conv2d-66           [-1, 64, 14, 14]          30,784
      BatchNorm2d-67           [-1, 64, 14, 14]             128
            ReLU6-68           [-1, 64, 14, 14]               0
InceptionV1Module-69          [-1, 512, 14, 14]               0
           Conv2d-70          [-1, 160, 14, 14]          82,080
      BatchNorm2d-71          [-1, 160, 14, 14]             320
            ReLU6-72          [-1, 160, 14, 14]               0
           Conv2d-73          [-1, 112, 14, 14]          57,456
      BatchNorm2d-74          [-1, 112, 14, 14]             224
            ReLU6-75          [-1, 112, 14, 14]               0
           Conv2d-76          [-1, 224, 14, 14]         226,016
      BatchNorm2d-77          [-1, 224, 14, 14]             448
            ReLU6-78          [-1, 224, 14, 14]               0
           Conv2d-79           [-1, 24, 14, 14]          12,312
      BatchNorm2d-80           [-1, 24, 14, 14]              48
            ReLU6-81           [-1, 24, 14, 14]               0
           Conv2d-82           [-1, 64, 14, 14]          38,464
      BatchNorm2d-83           [-1, 64, 14, 14]             128
            ReLU6-84           [-1, 64, 14, 14]               0
        MaxPool2d-85          [-1, 512, 14, 14]               0
           Conv2d-86           [-1, 64, 14, 14]          32,832
      BatchNorm2d-87           [-1, 64, 14, 14]             128
            ReLU6-88           [-1, 64, 14, 14]               0
InceptionV1Module-89          [-1, 512, 14, 14]               0
           Conv2d-90          [-1, 128, 14, 14]          65,664
      BatchNorm2d-91          [-1, 128, 14, 14]             256
            ReLU6-92          [-1, 128, 14, 14]               0
           Conv2d-93          [-1, 128, 14, 14]          65,664
      BatchNorm2d-94          [-1, 128, 14, 14]             256
            ReLU6-95          [-1, 128, 14, 14]               0
           Conv2d-96          [-1, 256, 14, 14]         295,168
      BatchNorm2d-97          [-1, 256, 14, 14]             512
            ReLU6-98          [-1, 256, 14, 14]               0
           Conv2d-99           [-1, 24, 14, 14]          12,312
     BatchNorm2d-100           [-1, 24, 14, 14]              48
           ReLU6-101           [-1, 24, 14, 14]               0
          Conv2d-102           [-1, 64, 14, 14]          38,464
     BatchNorm2d-103           [-1, 64, 14, 14]             128
           ReLU6-104           [-1, 64, 14, 14]               0
       MaxPool2d-105          [-1, 512, 14, 14]               0
          Conv2d-106           [-1, 64, 14, 14]          32,832
     BatchNorm2d-107           [-1, 64, 14, 14]             128
           ReLU6-108           [-1, 64, 14, 14]               0
InceptionV1Module-109          [-1, 512, 14, 14]               0
          Conv2d-110          [-1, 112, 14, 14]          57,456
     BatchNorm2d-111          [-1, 112, 14, 14]             224
           ReLU6-112          [-1, 112, 14, 14]               0
          Conv2d-113          [-1, 144, 14, 14]          73,872
     BatchNorm2d-114          [-1, 144, 14, 14]             288
           ReLU6-115          [-1, 144, 14, 14]               0
          Conv2d-116          [-1, 288, 14, 14]         373,536
     BatchNorm2d-117          [-1, 288, 14, 14]             576
           ReLU6-118          [-1, 288, 14, 14]               0
          Conv2d-119           [-1, 32, 14, 14]          16,416
     BatchNorm2d-120           [-1, 32, 14, 14]              64
           ReLU6-121           [-1, 32, 14, 14]               0
          Conv2d-122           [-1, 64, 14, 14]          51,264
     BatchNorm2d-123           [-1, 64, 14, 14]             128
           ReLU6-124           [-1, 64, 14, 14]               0
       MaxPool2d-125          [-1, 512, 14, 14]               0
          Conv2d-126           [-1, 64, 14, 14]          32,832
     BatchNorm2d-127           [-1, 64, 14, 14]             128
           ReLU6-128           [-1, 64, 14, 14]               0
InceptionV1Module-129          [-1, 528, 14, 14]               0
          Conv2d-130          [-1, 256, 14, 14]         135,424
     BatchNorm2d-131          [-1, 256, 14, 14]             512
           ReLU6-132          [-1, 256, 14, 14]               0
          Conv2d-133          [-1, 160, 14, 14]          84,640
     BatchNorm2d-134          [-1, 160, 14, 14]             320
           ReLU6-135          [-1, 160, 14, 14]               0
          Conv2d-136          [-1, 320, 14, 14]         461,120
     BatchNorm2d-137          [-1, 320, 14, 14]             640
           ReLU6-138          [-1, 320, 14, 14]               0
          Conv2d-139           [-1, 32, 14, 14]          16,928
     BatchNorm2d-140           [-1, 32, 14, 14]              64
           ReLU6-141           [-1, 32, 14, 14]               0
          Conv2d-142          [-1, 128, 14, 14]         102,528
     BatchNorm2d-143          [-1, 128, 14, 14]             256
           ReLU6-144          [-1, 128, 14, 14]               0
       MaxPool2d-145          [-1, 528, 14, 14]               0
          Conv2d-146          [-1, 128, 14, 14]          67,712
     BatchNorm2d-147          [-1, 128, 14, 14]             256
           ReLU6-148          [-1, 128, 14, 14]               0
InceptionV1Module-149          [-1, 832, 14, 14]               0
       MaxPool2d-150            [-1, 832, 7, 7]               0
          Conv2d-151            [-1, 256, 7, 7]         213,248
     BatchNorm2d-152            [-1, 256, 7, 7]             512
           ReLU6-153            [-1, 256, 7, 7]               0
          Conv2d-154            [-1, 160, 7, 7]         133,280
     BatchNorm2d-155            [-1, 160, 7, 7]             320
           ReLU6-156            [-1, 160, 7, 7]               0
          Conv2d-157            [-1, 320, 7, 7]         461,120
     BatchNorm2d-158            [-1, 320, 7, 7]             640
           ReLU6-159            [-1, 320, 7, 7]               0
          Conv2d-160             [-1, 32, 7, 7]          26,656
     BatchNorm2d-161             [-1, 32, 7, 7]              64
           ReLU6-162             [-1, 32, 7, 7]               0
          Conv2d-163            [-1, 128, 7, 7]         102,528
     BatchNorm2d-164            [-1, 128, 7, 7]             256
           ReLU6-165            [-1, 128, 7, 7]               0
       MaxPool2d-166            [-1, 832, 7, 7]               0
          Conv2d-167            [-1, 128, 7, 7]         106,624
     BatchNorm2d-168            [-1, 128, 7, 7]             256
           ReLU6-169            [-1, 128, 7, 7]               0
InceptionV1Module-170            [-1, 832, 7, 7]               0
          Conv2d-171            [-1, 384, 7, 7]         319,872
     BatchNorm2d-172            [-1, 384, 7, 7]             768
           ReLU6-173            [-1, 384, 7, 7]               0
          Conv2d-174            [-1, 192, 7, 7]         159,936
     BatchNorm2d-175            [-1, 192, 7, 7]             384
           ReLU6-176            [-1, 192, 7, 7]               0
          Conv2d-177            [-1, 384, 7, 7]         663,936
     BatchNorm2d-178            [-1, 384, 7, 7]             768
           ReLU6-179            [-1, 384, 7, 7]               0
          Conv2d-180             [-1, 48, 7, 7]          39,984
     BatchNorm2d-181             [-1, 48, 7, 7]              96
           ReLU6-182             [-1, 48, 7, 7]               0
          Conv2d-183            [-1, 128, 7, 7]         153,728
     BatchNorm2d-184            [-1, 128, 7, 7]             256
           ReLU6-185            [-1, 128, 7, 7]               0
       MaxPool2d-186            [-1, 832, 7, 7]               0
          Conv2d-187            [-1, 128, 7, 7]         106,624
     BatchNorm2d-188            [-1, 128, 7, 7]             256
           ReLU6-189            [-1, 128, 7, 7]               0
InceptionV1Module-190           [-1, 1024, 7, 7]               0
       AvgPool2d-191           [-1, 1024, 1, 1]               0
         Dropout-192           [-1, 1024, 1, 1]               0
          Linear-193                 [-1, 1000]       1,025,000
       AvgPool2d-194            [-1, 512, 4, 4]               0
          Conv2d-195            [-1, 128, 4, 4]          65,664
     BatchNorm2d-196            [-1, 128, 4, 4]             256
           ReLU6-197            [-1, 128, 4, 4]               0
          Linear-198                 [-1, 1024]       2,098,176
           ReLU6-199                 [-1, 1024]               0
         Dropout-200                 [-1, 1024]               0
          Linear-201                 [-1, 1000]       1,025,000
    InceptionAux-202                 [-1, 1000]               0
       AvgPool2d-203            [-1, 528, 4, 4]               0
          Conv2d-204            [-1, 128, 4, 4]          67,712
     BatchNorm2d-205            [-1, 128, 4, 4]             256
           ReLU6-206            [-1, 128, 4, 4]               0
          Linear-207                 [-1, 1024]       2,098,176
           ReLU6-208                 [-1, 1024]               0
         Dropout-209                 [-1, 1024]               0
          Linear-210                 [-1, 1000]       1,025,000
    InceptionAux-211                 [-1, 1000]               0
================================================================
Total params: 13,393,352
Trainable params: 13,393,352
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.57
Forward/backward pass size (MB): 82.15
Params size (MB): 51.09
Estimated Total Size (MB): 133.82
----------------------------------------------------------------
torch.Size([1, 1000])
torch.Size([1, 1000])
torch.Size([1, 1000])

三、参考链接

  • 0
    点赞
  • 4
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值