Implementing a TensorFlow convolutional layer with BN

```python
import tensorflow as tf

def weight_variable(shape, name=None):
    # Helper assumed by the snippet (not shown in the original post):
    # a truncated-normal initializer, as commonly used in TF1 examples.
    return tf.Variable(tf.truncated_normal(shape, stddev=0.1), name=name)

def conv_layer(inpt, filter_shape, stride):
    out_channels = filter_shape[3]

    filter_ = weight_variable(filter_shape)
    conv = tf.nn.conv2d(inpt, filter=filter_,
                        strides=[1, stride, stride, 1], padding="SAME")
    # First moment (mean) and second central moment (variance),
    # computed over the batch, height, and width axes.
    mean, var = tf.nn.moments(conv, axes=[0, 1, 2])
    beta = tf.Variable(tf.zeros([out_channels]), name="beta")
    gamma = weight_variable([out_channels], name="gamma")

    batch_norm = tf.nn.batch_norm_with_global_normalization(
        conv, mean, var, beta, gamma, 0.001,
        scale_after_normalization=True)

    out = tf.nn.relu(batch_norm)

    return out
```
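Note that `tf.nn.moments` always computes the statistics from the current batch, so this layer behaves the same at training and test time. A common extension keeps exponential moving averages of the batch statistics and uses those at inference. Here is a minimal sketch of that pattern; `is_training` (a `tf.bool` tensor) and `decay` are assumed parameters not present in the original snippet:

```python
import tensorflow as tf

def conv_bn_layer(inpt, filter_shape, stride, is_training, decay=0.99):
    # Sketch only: conv_layer from above, extended with population statistics
    # so that inference does not depend on the current batch.
    out_channels = filter_shape[3]
    filter_ = tf.Variable(tf.truncated_normal(filter_shape, stddev=0.1))
    conv = tf.nn.conv2d(inpt, filter=filter_,
                        strides=[1, stride, stride, 1], padding="SAME")

    beta = tf.Variable(tf.zeros([out_channels]))
    gamma = tf.Variable(tf.ones([out_channels]))
    # Non-trainable buffers tracking the population mean/variance.
    pop_mean = tf.Variable(tf.zeros([out_channels]), trainable=False)
    pop_var = tf.Variable(tf.ones([out_channels]), trainable=False)

    def train_branch():
        mean, var = tf.nn.moments(conv, axes=[0, 1, 2])
        # Update the running statistics, then normalize with batch statistics.
        update_mean = tf.assign(pop_mean, pop_mean * decay + mean * (1 - decay))
        update_var = tf.assign(pop_var, pop_var * decay + var * (1 - decay))
        with tf.control_dependencies([update_mean, update_var]):
            return tf.nn.batch_normalization(conv, mean, var, beta, gamma, 0.001)

    def test_branch():
        # Normalize with the accumulated population statistics.
        return tf.nn.batch_normalization(conv, pop_mean, pop_var, beta, gamma, 0.001)

    return tf.nn.relu(tf.cond(is_training, train_branch, test_branch))
```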
Here is a TensorFlow code example of the refinet network, including batch normalization of the conv4-3 and conv5-3 convolutional layers.

```python
import tensorflow as tf

def conv_bn_relu(inputs, filters, kernel_size, strides, padding, training):
    # Convolution -> batch normalization -> ReLU, the building block used below.
    x = tf.layers.conv2d(inputs=inputs, filters=filters,
                         kernel_size=kernel_size, strides=strides,
                         padding=padding)
    x = tf.layers.batch_normalization(x, training=training)
    x = tf.nn.relu(x)
    return x

def refinet(inputs, training):
    # Conv1
    x = conv_bn_relu(inputs, filters=64, kernel_size=3, strides=1, padding='same', training=training)
    x = conv_bn_relu(x, filters=64, kernel_size=3, strides=1, padding='same', training=training)
    x = tf.layers.max_pooling2d(x, pool_size=2, strides=2, padding='same')

    # Conv2
    x = conv_bn_relu(x, filters=128, kernel_size=3, strides=1, padding='same', training=training)
    x = conv_bn_relu(x, filters=128, kernel_size=3, strides=1, padding='same', training=training)
    x = tf.layers.max_pooling2d(x, pool_size=2, strides=2, padding='same')

    # Conv3
    x = conv_bn_relu(x, filters=256, kernel_size=3, strides=1, padding='same', training=training)
    x = conv_bn_relu(x, filters=256, kernel_size=3, strides=1, padding='same', training=training)
    x = conv_bn_relu(x, filters=256, kernel_size=3, strides=1, padding='same', training=training)
    x = tf.layers.max_pooling2d(x, pool_size=2, strides=2, padding='same')

    # Conv4
    x = conv_bn_relu(x, filters=512, kernel_size=3, strides=1, padding='same', training=training)
    x = conv_bn_relu(x, filters=512, kernel_size=3, strides=1, padding='same', training=training)
    conv4_3 = conv_bn_relu(x, filters=512, kernel_size=3, strides=1, padding='same', training=training)

    # Conv5
    x = tf.layers.max_pooling2d(conv4_3, pool_size=2, strides=2, padding='same')
    x = conv_bn_relu(x, filters=512, kernel_size=3, strides=1, padding='same', training=training)
    x = conv_bn_relu(x, filters=512, kernel_size=3, strides=1, padding='same', training=training)
    conv5_3 = conv_bn_relu(x, filters=512, kernel_size=3, strides=1, padding='same', training=training)

    # Conv6
    x = tf.layers.max_pooling2d(conv5_3, pool_size=2, strides=2, padding='same')
    x = conv_bn_relu(x, filters=1024, kernel_size=3, strides=1, padding='same', training=training)
    x = conv_bn_relu(x, filters=1024, kernel_size=3, strides=1, padding='same', training=training)
    x = conv_bn_relu(x, filters=1024, kernel_size=3, strides=1, padding='same', training=training)

    # Extra layers: a 1x1 convolution followed by a 3x3 stride-2 convolution.
    conv6_1 = tf.layers.conv2d(inputs=x, filters=256, kernel_size=1, strides=1, padding='same')
    conv6_1_bn = tf.layers.batch_normalization(conv6_1, training=training)
    conv6_1_relu = tf.nn.relu(conv6_1_bn)

    # conv6_2 chains from conv6_1 (the usual 1x1 -> 3x3/2 pattern).
    conv6_2 = tf.layers.conv2d(inputs=conv6_1_relu, filters=512, kernel_size=3, strides=2, padding='same')
    conv6_2_bn = tf.layers.batch_normalization(conv6_2, training=training)
    conv6_2_relu = tf.nn.relu(conv6_2_bn)

    return conv4_3, conv5_3, conv6_1_relu, conv6_2_relu
```

In the code above, `conv_bn_relu` is a convenience function that combines a convolutional layer, a batch normalization layer, and a ReLU activation. The `refinet` function is the main implementation of the RefineNet, and it applies batch normalization to the conv4-3 and conv5-3 convolutional layers.
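One caveat when training this graph: `tf.layers.batch_normalization` registers its moving-mean/moving-variance updates in the `tf.GraphKeys.UPDATE_OPS` collection, and they are not run automatically. A minimal training sketch follows; the input shape, loss, and learning rate are hypothetical stand-ins:

```python
import tensorflow as tf

inputs = tf.placeholder(tf.float32, [None, 320, 320, 3])  # hypothetical input size
training = tf.placeholder(tf.bool)

conv4_3, conv5_3, conv6_1, conv6_2 = refinet(inputs, training)

# Hypothetical loss, only to make the example self-contained.
loss = tf.reduce_mean(tf.square(conv6_2))

# Make the train op depend on the BN moving-statistics updates.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    train_op = tf.train.AdamOptimizer(1e-4).minimize(loss)
```

Without the `control_dependencies` block the network would still train, but the moving statistics used when `training=False` would never move from their initial values.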