下面的代码展示了如何使用 Keras 实现 DenseNet 网络。各版本的 dense block 配置如下:
DenseNet121 : dense_block == [6, 12, 24, 16]
DenseNet169 : dense_block == [6, 12, 32, 32]
DenseNet201 : dense_block == [6, 12, 48, 32]
下面给出一个 DenseNet201 的例子。
import os
from keras import backend as K
from keras.models import Model
from keras.layers import Activation
from keras.layers import AveragePooling2D
from keras.layers import BatchNormalization
from keras.layers import Concatenate
from keras.layers import Conv2D, Dense, Dropout
from keras.layers import Dense
from keras.layers import GlobalAveragePooling2D
from keras.layers import GlobalMaxPooling2D
from keras.layers import Input
from keras.layers import MaxPooling2D
from keras.layers import ZeroPadding2D
def dense_block(x, blocks, name):
    """Stack `blocks` densely-connected conv blocks on top of `x`.

    Args:
        x: input 4-D tensor.
        blocks: number of bottleneck conv blocks to chain.
        name: prefix used to build each block's layer names.

    Returns:
        Output tensor with `32 * blocks` additional feature channels
        (growth rate 32 per block, concatenated by conv_block).
    """
    for block_idx in range(1, blocks + 1):
        x = conv_block(x, 32, name=name + '_block' + str(block_idx))
    return x
def transition_block(x, reduction, name):
    """DenseNet transition layer: BN -> ReLU -> 1x1 conv -> 2x2 avg-pool.

    Compresses the channel count by `reduction` and halves the spatial
    resolution between dense blocks.

    Args:
        x: input 4-D tensor.
        reduction: channel compression factor in (0, 1].
        name: prefix used to build the layer names.

    Returns:
        Downsampled, channel-compressed output tensor.
    """
    channel_axis = 3 if K.image_data_format() == 'channels_last' else 1
    out = BatchNormalization(axis=channel_axis, epsilon=1.001e-5,
                             name=name + '_bn')(x)
    out = Activation('relu', name=name + '_relu')(out)
    # Compress channels: keep `reduction` of the current channel count.
    compressed = int(K.int_shape(out)[channel_axis] * reduction)
    out = Conv2D(compressed, 1, use_bias=False, name=name + '_conv')(out)
    out = AveragePooling2D(2, strides=2, name=name + '_pool')(out)
    return out
def conv_block(x, growth_rate, name):
    """DenseNet bottleneck block, concatenated onto its input.

    Structure: BN -> ReLU -> 1x1 conv (4 * growth_rate filters) ->
    BN -> ReLU -> 3x3 conv (growth_rate filters), then channel-wise
    concatenation with the block input (the "dense" connection).

    Args:
        x: input 4-D tensor.
        growth_rate: number of channels added by this block.
        name: prefix used to build the layer names.

    Returns:
        Tensor with `growth_rate` more channels than `x`.
    """
    channel_axis = 3 if K.image_data_format() == 'channels_last' else 1
    branch = BatchNormalization(axis=channel_axis, epsilon=1.001e-5,
                                name=name + '_0_bn')(x)
    branch = Activation('relu', name=name + '_0_relu')(branch)
    # 1x1 bottleneck conv reduces cost of the following 3x3 conv.
    branch = Conv2D(4 * growth_rate, 1, use_bias=False,
                    name=name + '_1_conv')(branch)
    branch = BatchNormalization(axis=channel_axis, epsilon=1.001e-5,
                                name=name + '_1_bn')(branch)
    branch = Activation('relu', name=name + '_1_relu')(branch)
    branch = Conv2D(growth_rate, 3, padding='same', use_bias=False,
                    name=name + '_2_conv')(branch)
    # Dense connectivity: append the new features to the block input.
    return Concatenate(axis=channel_axis, name=name + '_concat')([x, branch])
def cnn_model(img_rows, img_cols, color_type=1, num_classes=None):
    """Build a DenseNet201 classifier initialized with pre-trained weights.

    Constructs the DenseNet201 backbone (dense blocks of [6, 12, 48, 32]),
    loads ImageNet-converted no-top weights from disk, then attaches a
    global-average-pooling + softmax classification head.

    Args:
        img_rows: input image height in pixels.
        img_cols: input image width in pixels.
        color_type: number of input channels (1 = grayscale, 3 = RGB).
        num_classes: number of output classes for the softmax classifier.

    Returns:
        A keras `Model` mapping images to class probabilities.
    """
    # `global bn_axis` kept for backward compatibility: the original code
    # published bn_axis as a module-level side effect.
    global bn_axis
    if K.image_data_format() == 'channels_last':
        bn_axis = 3
        img_input = Input(shape=(img_rows, img_cols, color_type), name='data')
    else:
        bn_axis = 1
        img_input = Input(shape=(color_type, img_rows, img_cols), name='data')

    # Stem: 7x7/2 conv followed by 3x3/2 max-pool (standard DenseNet entry).
    x = ZeroPadding2D(padding=((3, 3), (3, 3)))(img_input)
    x = Conv2D(64, 7, strides=2, use_bias=False, name='conv1/conv')(x)
    x = BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                           name='conv1/bn')(x)
    x = Activation('relu', name='conv1/relu')(x)
    x = ZeroPadding2D(padding=((1, 1), (1, 1)))(x)
    x = MaxPooling2D(3, strides=2, name='pool1')(x)

    # DenseNet201: dense blocks of [6, 12, 48, 32] conv blocks, each
    # followed by a 0.5-compression transition (except the last).
    x = dense_block(x, 6, name='conv2')
    x = transition_block(x, 0.5, name='pool2')
    x = dense_block(x, 12, name='conv3')
    x = transition_block(x, 0.5, name='pool3')
    x = dense_block(x, 48, name='conv4')
    x = transition_block(x, 0.5, name='pool4')
    x = dense_block(x, 32, name='conv5')
    x = BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                           name='bn')(x)
    # Bug fix: the reference DenseNet applies a final ReLU after the last
    # BatchNormalization; the original omitted it, so the head pooled
    # un-activated features. Activation layers carry no weights, so the
    # pre-trained weight file still loads layer-by-layer unchanged.
    x = Activation('relu', name='relu')(x)

    backbone = Model(img_input, x)
    # Both backends load the same converted weight file, so the original
    # channels_first/channels_last branch around this path was redundant.
    weights_path = 'models/densenet201_weights_notop.h5'
    backbone.load_weights(weights_path)

    # Classification head: global AVERAGE pooling + softmax. (The original
    # comment said "GMP feature", but the layer is GlobalAveragePooling2D.)
    features = GlobalAveragePooling2D()(backbone.output)
    logits = Dense(num_classes, activation="softmax",
                   name="classifier")(features)
    return Model(img_input, logits)