上一篇Inception-v4中,已经实现了Inception-v4网络模型,参考此篇博客以及Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning论文。再来实现一下Inception-ResNet-v1网络模型,其实Inception-ResNet-v2网络模型与Inception-ResNet-v1相差很小,只是需要变动一下其中的参数,故不再实现v2。
第一步,实现 InceptionResNetA
'''
Figure 10.
The schema for 35 × 35 grid (Inception-ResNet-A)
module of Inception-ResNet-v1 network.
'''
def InceptionResNetA(activation_shape=(35, 35, 256)):
    """Build the 35x35 Inception-ResNet-A residual block (paper Fig. 10).

    Three parallel convolution branches are concatenated, projected back to
    the input depth with a linear 1x1 convolution, added to the input
    (residual connection), and passed through a final ReLU.
    """
    input_ = Input(shape=activation_shape)
    # Branch 1: a single 1x1 conv.
    branch_1 = Conv2D(32, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    # Branch 2: 1x1 -> 3x3.
    branch_2 = Conv2D(32, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    branch_2 = Conv2D(32, kernel_size=(3, 3), strides=1, padding='SAME', activation='relu')(branch_2)
    # Branch 3: 1x1 -> 3x3 -> 3x3 (effective 5x5 receptive field).
    branch_3 = Conv2D(32, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    branch_3 = Conv2D(32, kernel_size=(3, 3), strides=1, padding='SAME', activation='relu')(branch_3)
    branch_3 = Conv2D(32, kernel_size=(3, 3), strides=1, padding='SAME', activation='relu')(branch_3)
    mixed = concatenate([branch_1, branch_2, branch_3])
    # Linear (no activation) 1x1 conv restores the 256-channel depth so the
    # residual addition with the block input is shape-compatible.
    up = Conv2D(256, kernel_size=(1, 1), strides=1, padding='SAME')(mixed)
    output_ = Activation('relu')(add([input_, up]))
    return Model(inputs=input_, outputs=output_)
# InceptionResNetA()
第二步,实现 InceptionResNetB
'''
Figure 11.
The schema for 17 × 17 grid (Inception-ResNet-B)
module of Inception-ResNet-v1 network.
'''
def InceptionResNetB(activation_shape=(17, 17, 896)):
    """Build the 17x17 Inception-ResNet-B residual block (paper Fig. 11).

    A 1x1 branch and a factorized 1x7/7x1 branch are concatenated, projected
    back to 896 channels with a linear 1x1 conv, and added to the input.
    """
    input_ = Input(shape=activation_shape)
    # Branch 1: plain 1x1 conv.
    branch_1 = Conv2D(128, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    # Branch 2: 1x1 followed by an asymmetric 1x7 then 7x1 (factorized 7x7).
    branch_2 = Conv2D(128, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    branch_2 = Conv2D(128, kernel_size=(1, 7), strides=1, padding='SAME', activation='relu')(branch_2)
    branch_2 = Conv2D(128, kernel_size=(7, 1), strides=1, padding='SAME', activation='relu')(branch_2)
    mixed = concatenate([branch_1, branch_2])
    # Linear 1x1 conv: match the input depth for the residual addition.
    up = Conv2D(896, kernel_size=(1, 1), strides=1, padding='SAME')(mixed)
    output_ = Activation('relu')(add([input_, up]))
    return Model(inputs=input_, outputs=output_)
# InceptionResNetB()
第三步,重新定义 ReductionB
'''
Figure 12.
“Reduction-B” 17×17 to 8×8 grid-reduction module.
This module used by the smaller Inception-ResNet-v1 network in Figure 15.
'''
def ReductionB(concat_shape=(17, 17, 896)):
    """Build the Reduction-B module: 17x17 grid down to 8x8 (paper Fig. 12).

    Four parallel branches (max-pool plus three conv towers), each ending in
    a stride-2 VALID op, are concatenated along the channel axis.
    """
    input_ = Input(shape=concat_shape)
    # Branch 1: 3x3 max-pool, stride 2.
    pool_branch = MaxPool2D(pool_size=(3, 3), strides=2, padding='VALID')(input_)
    # Branch 2: 1x1 -> 3x3 stride 2.
    tower_a = Conv2D(256, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    tower_a = Conv2D(384, kernel_size=(3, 3), strides=2, padding='VALID', activation='relu')(tower_a)
    # Branch 3: 1x1 -> 3x3 stride 2.
    tower_b = Conv2D(256, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    tower_b = Conv2D(256, kernel_size=(3, 3), strides=2, padding='VALID', activation='relu')(tower_b)
    # Branch 4: 1x1 -> 3x3 -> 3x3 stride 2.
    tower_c = Conv2D(256, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    tower_c = Conv2D(256, kernel_size=(3, 3), strides=1, padding='SAME', activation='relu')(tower_c)
    tower_c = Conv2D(256, kernel_size=(3, 3), strides=2, padding='VALID', activation='relu')(tower_c)
    # Branch order matters: it fixes the channel layout of the output.
    merged = concatenate([pool_branch, tower_a, tower_b, tower_c])
    return Model(inputs=input_, outputs=merged)
# ReductionB()
第四步,实现 InceptionResNetC
'''
Figure 13.
The schema for 8×8 grid (Inception-ResNet-C) module
of Inception-ResNet-v1 network.
'''
def InceptionResNetC(activation_shape=(8, 8, 1792)):
    """Build the 8x8 Inception-ResNet-C residual block (paper Fig. 13).

    Mirrors block B but with a factorized 1x3/3x1 tower and a 1792-channel
    linear projection before the residual addition.
    """
    input_ = Input(shape=activation_shape)
    # Branch 1: plain 1x1 conv.
    branch_1 = Conv2D(192, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    # Branch 2: 1x1 followed by asymmetric 1x3 then 3x1 (factorized 3x3).
    branch_2 = Conv2D(192, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(input_)
    branch_2 = Conv2D(192, kernel_size=(1, 3), strides=1, padding='SAME', activation='relu')(branch_2)
    branch_2 = Conv2D(192, kernel_size=(3, 1), strides=1, padding='SAME', activation='relu')(branch_2)
    mixed = concatenate([branch_1, branch_2])
    # Linear 1x1 conv back to the input depth for the residual addition.
    up = Conv2D(1792, kernel_size=(1, 1), strides=1, padding='SAME')(mixed)
    output_ = Activation('relu')(add([input_, up]))
    return Model(inputs=input_, outputs=output_)
# InceptionResNetC()
第五步,实现 Stem
'''
Figure 14.
The stem of the Inception-ResNet-v1 network
'''
def Stem(input_shape=(299, 299, 3)):
    """Build the Inception-ResNet-v1 stem (paper Fig. 14).

    A straight chain of convolutions and one max-pool that takes a
    299x299x3 image down to a 35x35x256 activation for the A blocks.
    """
    input_ = Input(shape=input_shape)
    x = Conv2D(32, kernel_size=(3, 3), strides=2, padding='VALID', activation='relu')(input_)   # 149x149x32
    x = Conv2D(32, kernel_size=(3, 3), strides=1, padding='VALID', activation='relu')(x)        # 147x147x32
    x = Conv2D(64, kernel_size=(3, 3), strides=1, padding='SAME', activation='relu')(x)         # 147x147x64
    x = MaxPool2D(pool_size=(3, 3), strides=2, padding='VALID')(x)                              # 73x73x64
    x = Conv2D(80, kernel_size=(1, 1), strides=1, padding='SAME', activation='relu')(x)         # 73x73x80
    x = Conv2D(192, kernel_size=(3, 3), strides=1, padding='VALID', activation='relu')(x)       # 71x71x192
    x = Conv2D(256, kernel_size=(3, 3), strides=2, padding='VALID', activation='relu')(x)       # 35x35x256
    return Model(inputs=input_, outputs=x)
# Stem()
第六步,实现 InceptionResNetV1网络模型
'''
Figure 15.
Overall schema of the Inception-ResNet-v1 network.
'''
def InceptionResNetV1(input_shape=(299, 299, 3), nclass=1000):
    """Assemble the full Inception-ResNet-v1 classifier (paper Fig. 15).

    Pipeline: Stem -> 5 x Inception-ResNet-A -> Reduction-A ->
    10 x Inception-ResNet-B -> Reduction-B -> 5 x Inception-ResNet-C ->
    global average pooling -> dropout -> softmax head.

    Args:
        input_shape: shape of the input image tensor, default (299, 299, 3).
        nclass: number of output classes for the softmax layer.

    Returns:
        A Keras Sequential model (each sub-module is itself a Model used
        as a layer).
    """
    model = Sequential()
    model.add(Input(shape=input_shape))
    model.add(Stem())
    # 5 x A blocks operating on the 35x35x256 grid.
    for _ in range(5):
        model.add(InceptionResNetA())
    # Reduction-A filter bank (k, l, m, n) = (192, 192, 256, 384) for v1
    # per Table 1 of the paper; ReductionA comes from the previous post.
    model.add(ReductionA(filter_bank_size=[192, 192, 256, 384], concat_shape=(35, 35, 256)))
    # 10 x B blocks on the 17x17x896 grid.
    for _ in range(10):
        model.add(InceptionResNetB())
    model.add(ReductionB())
    # 5 x C blocks on the 8x8x1792 grid.
    for _ in range(5):
        model.add(InceptionResNetC())
    model.add(GlobalAveragePooling2D())
    # Paper uses keep-probability 0.8, i.e. a dropout rate of 0.2.
    model.add(Dropout(0.2))
    # Pass the activation by keyword instead of relying on it being
    # Dense's second positional parameter.
    model.add(Dense(nclass, activation='softmax'))
    return model
# Build and print the model only when executed as a script, so importing
# this module does not construct the full network as a side effect.
if __name__ == '__main__':
    model = InceptionResNetV1()
    model.summary()
如有错误,欢迎指正!!!