# 1. Network architecture
# 2. Building DenseNet
import tensorflow as tf
from tensorflow import keras
# Convolution layers: each one concatenates its input with its output,
# so that features from earlier layers are reused by later ones.
def conv_fn(x, growth_rate):
    """Bottleneck composite layer of a dense block.

    Applies BN -> ReLU -> 1x1 conv (4 * growth_rate filters), then
    BN -> ReLU -> 3x3 conv (growth_rate filters), and finally
    concatenates the result with the input along the channel axis
    so that earlier features are reused downstream.

    Args:
        x: input 4-D feature tensor (NHWC).
        growth_rate: number of channels each layer adds to the block.

    Returns:
        Tensor with growth_rate more channels than `x`.
    """
    out = keras.layers.BatchNormalization()(x)
    out = keras.layers.Activation('relu')(out)
    out = keras.layers.Conv2D(4 * growth_rate, 1, 1, padding="same")(out)
    out = keras.layers.BatchNormalization()(out)
    out = keras.layers.Activation("relu")(out)
    out = keras.layers.Conv2D(growth_rate, 3, 1, padding="same")(out)
    return keras.layers.Concatenate(axis=3)([x, out])
# A single dense block module
def dense_block(x, block, growth_rate=32):
    """Stack `block` bottleneck layers (conv_fn).

    Each layer appends `growth_rate` channels, so the output has
    block * growth_rate more channels than the input.
    """
    for _ in range(block):
        x = conv_fn(x, growth_rate)
    return x
# Transition layers: shrink the feature-map size and, to keep complexity
# down, reduce the number of feature maps.
k = keras.backend


def trans_block(x, theta):
    """Transition layer between dense blocks.

    A 1x1 conv compresses the channel count by factor `theta`
    (0 < theta <= 1), then 2x2 average pooling halves the spatial
    resolution.
    """
    out = keras.layers.BatchNormalization()(x)
    out = keras.layers.Activation("relu")(out)
    n_filters = int(k.int_shape(x)[3] * theta)  # compressed channel count
    out = keras.layers.Conv2D(n_filters, 1, 1)(out)
    return keras.layers.AveragePooling2D(
        pool_size=(2, 2), strides=2, padding="valid"
    )(out)
# Assembling the DenseNet
def densenet(input_shape, block, n_classes=12):
    """Build a DenseNet classifier.

    Args:
        input_shape: shape of the input images, e.g. [224, 224, 3].
        block: four ints — the number of bottleneck layers in each
            of the four dense blocks.
        n_classes: size of the softmax output layer.

    Returns:
        A compiled-ready keras Model.
    """
    x_input = keras.layers.Input(shape=input_shape)

    # Stem: 7x7/2 conv + 3x3/2 max-pool -> 56*56*64 for 224x224 input.
    x = keras.layers.Conv2D(64, kernel_size=(7, 7), strides=2, padding="same")(x_input)
    x = keras.layers.BatchNormalization()(x)
    x = keras.layers.Activation("relu")(x)
    x = keras.layers.MaxPooling2D(pool_size=3, strides=2, padding="same")(x)

    # First three dense blocks, each followed by a 0.5-compression
    # transition that halves the spatial size: 28*28 -> 14*14 -> 7*7.
    for n_layers in block[:3]:
        x = dense_block(x, n_layers)
        x = trans_block(x, 0.5)

    # Final dense block has no transition after it.
    x = dense_block(x, block[3])

    # Classification head.
    x = keras.layers.BatchNormalization()(x)
    x = keras.layers.Activation("relu")(x)
    x = keras.layers.GlobalAveragePooling2D()(x)
    outputs = keras.layers.Dense(n_classes, activation="softmax")(x)

    return keras.models.Model(inputs=[x_input], outputs=[outputs])
# The different DenseNet variants
# Canonical variants: block depths follow the DenseNet paper,
# all built for 224x224 RGB inputs.
model_121 = densenet([224, 224, 3], [6, 12, 24, 16])  # DenseNet-121
model_169 = densenet([224, 224, 3], [6, 12, 32, 32])  # DenseNet-169
model_201 = densenet([224, 224, 3], [6, 12, 48, 32])  # DenseNet-201
model_269 = densenet([224, 224, 3], [6, 12, 64, 48])  # DenseNet-269