Inception-v4实现

最近在看Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning论文,便想动手实现一下Inception-v4。

下面的一些函数,分别对应论文中不同的模式图。详情请查看论文。

第一步,导入库

import numpy as np
import tensorflow as tf
from tensorflow import keras
import pandas as pd
import matplotlib.pyplot as plt

from keras import Model, Sequential
from keras.layers import Activation, Conv2D, Input, MaxPool2D, Dense, Flatten, Dropout, BatchNormalization, concatenate, add, AveragePooling2D, GlobalAveragePooling2D

第二步,编写输入部分

'''
Figure 3.
The schema for stem of the pure Inception_v4 and Inception-ResNet-v2 networks.
This is the input part of those networks. Cf. Figures 9 and 15.
'''
def Stem(input_shape=(299, 299, 3)):
    """Build the stem sub-network of Inception-v4 (Figure 3 of the paper).

    Maps a (299, 299, 3) image down to the (35, 35, 384) feature map that
    feeds the first Inception-A block, using three successive
    pool-vs-conv splits joined by channel concatenation.
    """
    inputs = Input(shape=input_shape)

    x = Conv2D(32, (3, 3), strides=2, padding='VALID', activation='relu')(inputs)
    x = Conv2D(32, (3, 3), strides=1, padding='VALID', activation='relu')(x)
    x = Conv2D(64, (3, 3), strides=1, padding='SAME', activation='relu')(x)

    # First split: 3x3 max-pool vs. strided 3x3 conv, concatenated.
    pool_branch = MaxPool2D((3, 3), strides=2, padding='VALID')(x)
    conv_branch = Conv2D(96, (3, 3), strides=2, padding='VALID', activation='relu')(x)
    x = concatenate([pool_branch, conv_branch])

    # Second split: a short 1x1->3x3 path vs. a factorized 7x1/1x7 path.
    short_path = Conv2D(64, (1, 1), strides=1, padding='SAME', activation='relu')(x)
    short_path = Conv2D(96, (3, 3), strides=1, padding='VALID', activation='relu')(short_path)

    long_path = Conv2D(64, (1, 1), strides=1, padding='SAME', activation='relu')(x)
    long_path = Conv2D(64, (7, 1), strides=1, padding='SAME', activation='relu')(long_path)
    long_path = Conv2D(64, (1, 7), strides=1, padding='SAME', activation='relu')(long_path)
    long_path = Conv2D(96, (3, 3), strides=1, padding='VALID', activation='relu')(long_path)
    x = concatenate([short_path, long_path])

    # Third split: strided 3x3 conv vs. 3x3 max-pool, final concatenation.
    conv_out = Conv2D(192, (3, 3), strides=2, padding='VALID', activation='relu')(x)
    pool_out = MaxPool2D((3, 3), strides=2, padding='VALID')(x)
    outputs = concatenate([conv_out, pool_out])

    return Model(inputs=inputs, outputs=outputs)
# Stem()

第三步,编写 Inception-A 块

'''
Figure 4.
The schema for 35x35 grid modules of the pure Inception-v4 network.
This is the Inception-A block of Figure 9.
'''
def InceptionA(concat_shape=(35, 35, 384)):
    """Inception-A block for the 35x35 grid (Figure 4 of the paper).

    Four parallel branches concatenated along channels:
    96 + 96 + 96 + 96 = 384 output channels, so the block preserves the
    (35, 35, 384) input shape.
    """
    input_ = Input(shape=concat_shape)
    
    # Pooling branch. The reference implementation uses a 3x3 average
    # pool; the previous pool_size=(1,1) made the pooling layer a no-op.
    avgpooling = AveragePooling2D(pool_size=(3,3), strides=1, padding='SAME')(input_)
    conv1 = Conv2D(96, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(avgpooling)
    
    # Plain 1x1 branch.
    conv2 = Conv2D(96, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    
    # 1x1 -> 3x3 branch.
    conv3 = Conv2D(64, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    conv4 = Conv2D(96, kernel_size=(3,3), strides=1, padding='SAME', activation='relu')(conv3)
    
    # 1x1 -> 3x3 -> 3x3 branch (factorized 5x5).
    conv5 = Conv2D(64, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    conv6 = Conv2D(96, kernel_size=(3,3), strides=1, padding='SAME', activation='relu')(conv5)
    conv7 = Conv2D(96, kernel_size=(3,3), strides=1, padding='SAME', activation='relu')(conv6)
    
    concat1 = concatenate([conv1, conv2, conv4, conv7])
    
    return Model(inputs=input_, outputs=concat1)
# InceptionA()

第四步,编写 Inception-B 块

'''
Figure 5.
The schema for 17 × 17 grid modules of the pure Inception-v4 network. 
This is the Inception-B block of Figure 9.
'''
def InceptionB(concat_shape=(17, 17, 1024)):
    """Inception-B block for the 17x17 grid (Figure 5 of the paper).

    Four parallel branches concatenated along channels:
    128 + 384 + 256 + 256 = 1024 output channels, preserving the
    (17, 17, 1024) input shape.
    """
    input_ = Input(shape=concat_shape)
    
    # Pooling branch: 3x3 average pool (pool_size=(1,1) was a no-op).
    avgpooling = AveragePooling2D(pool_size=(3,3), strides=1, padding='SAME')(input_)
    conv1 = Conv2D(128, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(avgpooling)
    
    # Plain 1x1 branch.
    conv2 = Conv2D(384, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    
    # 1x1 -> 1x7 -> 7x1 branch (factorized 7x7). The second factorized
    # conv is 7x1 per Figure 5; it was previously written as (1,7).
    conv3 = Conv2D(192, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    conv4 = Conv2D(224, kernel_size=(1,7), strides=1, padding='SAME', activation='relu')(conv3)
    conv5 = Conv2D(256, kernel_size=(7,1), strides=1, padding='SAME', activation='relu')(conv4)
    
    # 1x1 -> 1x7 -> 7x1 -> 1x7 -> 7x1 branch (double factorized 7x7).
    conv6 = Conv2D(192, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    conv7 = Conv2D(192, kernel_size=(1,7), strides=1, padding='SAME', activation='relu')(conv6)
    conv8 = Conv2D(224, kernel_size=(7,1), strides=1, padding='SAME', activation='relu')(conv7)
    conv9 = Conv2D(224, kernel_size=(1,7), strides=1, padding='SAME', activation='relu')(conv8)
    conv10 = Conv2D(256, kernel_size=(7,1), strides=1, padding='SAME', activation='relu')(conv9)
    
    concat1 = concatenate([conv1, conv2, conv5, conv10])
    
    return Model(inputs=input_, outputs=concat1)
# InceptionB()

第五步,编写 Inception-C 块

'''
Figure 6.
The schema for 8×8 grid modules of the pure Inception-v4 network. 
This is the Inception-C block of Figure 9.
'''
def InceptionC(concat_shape=(8, 8, 1536)):
    """Inception-C block for the 8x8 grid (Figure 6 of the paper).

    Six concatenated outputs of 256 channels each: 256 * 6 = 1536,
    preserving the (8, 8, 1536) input shape.
    """
    input_ = Input(shape=concat_shape)
    
    # Pooling branch: 3x3 average pool (pool_size=(1,1) was a no-op).
    avgpooling = AveragePooling2D(pool_size=(3,3), strides=1, padding='SAME')(input_)
    conv1 = Conv2D(256, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(avgpooling)
    
    # Plain 1x1 branch.
    conv2 = Conv2D(256, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    
    # 1x1 -> parallel 1x3 / 3x1 split.
    conv3 = Conv2D(384, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    conv4 = Conv2D(256, kernel_size=(1,3), strides=1, padding='SAME', activation='relu')(conv3)
    conv5 = Conv2D(256, kernel_size=(3,1), strides=1, padding='SAME', activation='relu')(conv3)
    
    # 1x1 -> 1x3 -> 3x1 -> parallel 3x1 / 1x3 split.
    conv6 = Conv2D(384, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    conv7 = Conv2D(448, kernel_size=(1,3), strides=1, padding='SAME', activation='relu')(conv6)
    conv8 = Conv2D(512, kernel_size=(3,1), strides=1, padding='SAME', activation='relu')(conv7)
    conv9 = Conv2D(256, kernel_size=(3,1), strides=1, padding='SAME', activation='relu')(conv8)
    conv10 = Conv2D(256, kernel_size=(1,3), strides=1, padding='SAME', activation='relu')(conv8)
    
    concat1 = concatenate([conv1, conv2, conv4, conv5, conv9, conv10])
    
    return Model(inputs=input_, outputs=concat1)
# InceptionC()

第六步,编写 Reduction-A 块

'''
Figure 7. 
The schema for 35 × 35 to 17 × 17 reduction module.
Different variants of this blocks (with various number of filters)
are used in Figure 9, and 15 in each of the new Inception(-v4, -ResNet-v1, -ResNet-v2)
variants presented in this paper. 
The k, l, m, n numbers represent filter bank sizes which can be looked up in Table 1.
'''
# filter_bank_size contains four values k, l, m and n.
def ReductionA(filter_bank_size=(192, 224, 256, 384), concat_shape=(35, 35, 384)):
    """Reduction-A block: 35x35 -> 17x17 grid reduction (Figure 7).

    Parameters
    ----------
    filter_bank_size : sequence of four ints (k, l, m, n), the filter-bank
        sizes from Table 1 of the paper; the defaults are the Inception-v4
        values. A tuple default replaces the earlier mutable-list default
        (mutable defaults are a Python pitfall).
    concat_shape : shape of the incoming 35x35 feature map.
    """
    input_ = Input(shape=concat_shape)
    
    # Branch 1: 3x3 max-pool, stride 2.
    maxpool = MaxPool2D(pool_size=(3,3), strides=2, padding='VALID')(input_)
    
    # Branch 2: strided 3x3 conv with n filters.
    conv1 = Conv2D(filter_bank_size[3], kernel_size=(3,3), strides=2, padding='VALID', activation='relu')(input_)
    
    # Branch 3: 1x1 (k) -> 3x3 (l) -> strided 3x3 (m).
    conv2 = Conv2D(filter_bank_size[0], kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(input_)
    conv3 = Conv2D(filter_bank_size[1], kernel_size=(3,3), strides=1, padding='SAME', activation='relu')(conv2)
    conv4 = Conv2D(filter_bank_size[2], kernel_size=(3,3), strides=2, padding='VALID', activation='relu')(conv3)
    
    concat1 = concatenate([maxpool, conv1, conv4])
    
    return Model(inputs=input_, outputs=concat1)
# ReductionA()

第七步,编写 Reduction-B 块

'''
Figure 8. 
The schema for 17 × 17 to 8 × 8 grid-reduction module.
This is the reduction module used by the pure Inception-v4 network in Figure 9.
'''
def ReductionB(concat_shape=(17, 17, 1024)):
    """Reduction-B block: 17x17 -> 8x8 grid reduction (Figure 8).

    Three parallel stride-2 branches (max-pool, short conv path,
    factorized 7x7 conv path) joined by channel concatenation.
    """
    feature_in = Input(shape=concat_shape)

    pool_branch = MaxPool2D(pool_size=(3,3), strides=2, padding='VALID')(feature_in)

    # Short path: 1x1 then strided 3x3.
    short_branch = Conv2D(192, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(feature_in)
    short_branch = Conv2D(192, kernel_size=(3,3), strides=2, padding='VALID', activation='relu')(short_branch)

    # Long path: 1x1, factorized 7x7 (1x7 then 7x1), then strided 3x3.
    long_branch = Conv2D(256, kernel_size=(1,1), strides=1, padding='SAME', activation='relu')(feature_in)
    long_branch = Conv2D(256, kernel_size=(1,7), strides=1, padding='SAME', activation='relu')(long_branch)
    long_branch = Conv2D(320, kernel_size=(7,1), strides=1, padding='SAME', activation='relu')(long_branch)
    long_branch = Conv2D(320, kernel_size=(3,3), strides=2, padding='VALID', activation='relu')(long_branch)

    merged = concatenate([pool_branch, short_branch, long_branch])

    return Model(inputs=feature_in, outputs=merged)
# ReductionB()

第八步,编写Inception-v4网络

'''
Figure 9. 
The overall schema of the Inception-v4 network. 
For the detailed modules, please refer to Figures 3, 4, 5, 6, 7 and 8 for the
detailed structure of the various components.
'''
def InceptionV4(input_shape=(299,299,3), nclass=1000):
    """Assemble the complete Inception-v4 network (Figure 9 of the paper).

    Stem, then 4x Inception-A, Reduction-A, 7x Inception-B, Reduction-B,
    3x Inception-C, global average pooling, dropout (rate 0.2, i.e. the
    paper's keep-probability 0.8) and a softmax classifier over `nclass`
    categories.
    """
    net = Sequential()
    net.add(Input(shape=input_shape))
    net.add(Stem())
    for _ in range(4):
        net.add(InceptionA())
    net.add(ReductionA())
    for _ in range(7):
        net.add(InceptionB())
    net.add(ReductionB())
    for _ in range(3):
        net.add(InceptionC())
    net.add(GlobalAveragePooling2D())
    net.add(Dropout(0.2))
    net.add(Dense(nclass, 'softmax'))
    return net
# Build the full Inception-v4 model with default settings and print its
# layer-by-layer summary (runs at import time).
model = InceptionV4()
model.summary()

  • 1
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 1
    评论
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值