# [Deep Learning] 用TensorFlow实现ResNeXt和DenseNet

## 原文来源：GitHub

ResNeXt-Tensorflow

Tensorflow 1.x

Python 3.x

tflearn（如果你想更方便地实现全局平均池化（global average pooling），建议安装 tflearn）

ResNet

ResNeXt

（b）是分割（split）+ 变换（transform，Bottleneck）+ 拼接（concatenate）+ 过渡（transition）+ 合并（merge）

def split_layer(self, input_x, stride, layer_name):
    """ResNeXt split stage: run `cardinality` parallel transform paths on the
    same input and merge their outputs.

    Args:
        input_x: input feature-map tensor.
        stride: stride passed through to each path's first convolution.
        layer_name: name-scope prefix; each path gets a distinct sub-scope.

    Returns:
        The merged (channel-concatenated) output of all paths.
    """
    with tf.name_scope(layer_name):
        # One transform path per cardinality group; all paths see the same
        # input and stride but use distinct scopes for their variables.
        layers_split = list()
        for i in range(cardinality):
            splits = self.transform_layer(
                input_x, stride=stride,
                scope=layer_name + '_splitN_' + str(i))
            layers_split.append(splits)
        # Concatenation is a helper defined elsewhere in the repo
        # (channel-axis concat).
        return Concatenation(layers_split)

def transform_layer(self, x, stride, scope):
    """Single ResNeXt path: 1x1 conv -> BN -> ReLU -> 3x3 conv -> BN.

    `depth` (the per-path channel width) is a constant defined elsewhere in
    the file.

    Args:
        x: input tensor.
        stride: stride of the first (1x1) convolution.
        scope: name-scope prefix for this path's layers.

    Returns:
        The transformed tensor.  NOTE(review): no ReLU after the second BN
        in this excerpt — confirm against the upstream repository.
    """
    with tf.name_scope(scope):
        x = conv_layer(x, filter=depth, kernel=[1, 1], stride=stride,
                       layer_name=scope + '_conv1')
        x = Batch_Normalization(x, training=self.training, scope=scope + '_batch1')
        x = Relu(x)
        x = conv_layer(x, filter=depth, kernel=[3, 3], stride=1,
                       layer_name=scope + '_conv2')
        x = Batch_Normalization(x, training=self.training, scope=scope + '_batch2')
        return x

def transition_layer(self, x, out_dim, scope):
    """Project the input to `out_dim` channels with a 1x1 convolution.

    NOTE(review): this excerpt appears truncated — there is no `return`
    visible; confirm the remainder against the original repository.
    """
    with tf.name_scope(scope):
        x = conv_layer(x, filter=out_dim, kernel=[1, 1], stride=1,
                       layer_name=scope + '_conv1')

DenseNet （https://github.com/taki0112/Densenet-Tensorflow）

Densenet-Tensorflow

tflearn（如果你想更方便地使用全局平均池化（global average pooling），就应该安装 tflearn）。

def Batch_Normalization(x, training, scope):
    """Batch normalization that switches train/inference behavior via tf.cond.

    Args:
        x: input tensor.
        training: boolean tf.Tensor used as the tf.cond predicate; also
            passed to batch_norm as `is_training`.
        scope: variable scope; the inference branch reuses (reuse=True) the
            variables created by the training branch.

    Returns:
        The normalized tensor.
    """
    with arg_scope([batch_norm],
                   scope=scope,
                   decay=0.9,           # moving-average momentum
                   center=True,
                   scale=True,
                   zero_debias_moving_mean=True):
        return tf.cond(training,
                       lambda: batch_norm(inputs=x, is_training=training, reuse=None),
                       lambda: batch_norm(inputs=x, is_training=training, reuse=True))

def Global_Average_Pooling(x, stride=1):
    """Global average pooling built from tf.layers.average_pooling2d.

    The pool window is set to the input's full (height, width), producing a
    single value per channel — so the stride value does not matter.

    Assumes NHWC layout (dims 1 and 2 are spatial) — TODO confirm.
    """
    width = np.shape(x)[1]
    height = np.shape(x)[2]
    pool_size = [width, height]
    return tf.layers.average_pooling2d(inputs=x, pool_size=pool_size, strides=stride)

def Global_Average_Pooling(x):
    """Global average pooling via tflearn (alternative to the tf.layers
    version shown earlier in the article)."""
    return tflearn.layers.conv.global_avg_pool(x, name='Global_avg_pooling')

def Dense_net(self, input_x):
    """DenseNet forward pass: stem conv -> dense blocks with transitions ->
    BN/ReLU -> global average pooling -> linear classifier.

    NOTE(review): this excerpt appears truncated — there is no `return`
    after `Linear(x)`; confirm against the original repository.
    """
    # Stem: 7x7 stride-2 convolution followed by 3x3 stride-2 max pooling.
    x = conv_layer(input_x, filter=2 * self.filters, kernel=[7, 7],
                   stride=2, layer_name='conv0')
    x = Max_Pooling(x, pool_size=[3, 3], stride=2)

    x = self.dense_block(input_x=x, nb_layers=6, layer_name='dense_1')
    x = self.transition_layer(x, scope='trans_1')

    x = self.dense_block(input_x=x, nb_layers=12, layer_name='dense_2')
    x = self.transition_layer(x, scope='trans_2')

    x = self.dense_block(input_x=x, nb_layers=48, layer_name='dense_3')
    x = self.transition_layer(x, scope='trans_3')

    # Final dense block has no transition layer; classifier head follows.
    x = self.dense_block(input_x=x, nb_layers=32, layer_name='dense_final')
    x = Batch_Normalization(x, training=self.training, scope='linear_batch')
    x = Relu(x)
    x = Global_Average_Pooling(x)
    x = Linear(x)

def dense_block(self, input_x, nb_layers, layer_name):
    """DenseNet block: each bottleneck layer consumes the concatenation of
    the block input and every previous layer's output.

    NOTE(review): this excerpt appears truncated — the loop body shows no
    `layers_concat.append(x)` and there is no final concat/`return`;
    confirm against the original repository.
    """
    with tf.name_scope(layer_name):
        layers_concat = list()
        layers_concat.append(input_x)

        # First bottleneck layer operates on the raw block input.
        x = self.bottleneck_layer(input_x, scope=layer_name + '_bottleN_' + str(0))
        layers_concat.append(x)

        for i in range(nb_layers - 1):
            # Dense connectivity: concatenate all accumulated feature maps
            # before the next bottleneck layer.
            x = Concatenation(layers_concat)
            x = self.bottleneck_layer(x, scope=layer_name + '_bottleN_' + str(i + 1))

def bottleneck_layer(self, x, scope):
    """DenseNet bottleneck: BN -> 1x1 conv (4*growth-rate channels) ->
    dropout -> BN -> 3x3 conv (growth-rate channels).

    NOTE(review): this excerpt appears truncated — no ReLU after the BNs
    and no `return` are visible; confirm against the original repository.
    """
    with tf.name_scope(scope):
        x = Batch_Normalization(x, training=self.training, scope=scope + '_batch1')
        # 1x1 "bottleneck" conv widens to 4x the growth rate before the 3x3.
        x = conv_layer(x, filter=4 * self.filters, kernel=[1, 1],
                       layer_name=scope + '_conv1')
        x = Drop_out(x, rate=dropout_rate, training=self.training)
        x = Batch_Normalization(x, training=self.training, scope=scope + '_batch2')
        x = conv_layer(x, filter=self.filters, kernel=[3, 3],
                       layer_name=scope + '_conv2')

def transition_layer(self, x, scope):
    """DenseNet transition: 1x1 conv then 2x2 stride-2 average pooling,
    halving the spatial resolution between dense blocks.

    NOTE(review): this excerpt appears truncated — no BN/ReLU/dropout and
    no `return` are visible; confirm against the original repository.
    """
    x = conv_layer(x, filter=self.filters, kernel=[1, 1],
                   layer_name=scope + '_conv1')
    x = Average_pooling(x, pool_size=[2, 2], stride=2)

在 MNIST 上的最高测试精度为 99.2%（该结果未使用 dropout）。

# Fragment from inside Dense_net: loop form of the block stack, using
# `nb_blocks` blocks of 4 layers each instead of the fixed 6/12/48 schedule.
for i in range(self.nb_blocks):
    # original : 6 -> 12 -> 48
    x = self.dense_block(input_x=x, nb_layers=4, layer_name='dense_' + str(i))
    x = self.transition_layer(x, scope='trans_' + str(i))

CIFAR-10

CIFAR-100

Image Net

©️2019 CSDN 皮肤主题: 大白 设计师: CSDN官方博客