4-CNN-demo-0602-SeNet_block.py


import tensorflow as tf

"""
实现Senet block模块
"""

def Se_block(inputs_tensor, name, reduction=8):
    """
    Squeeze-and-Excitation block.

    :param inputs_tensor: 4-D tensor [N, H, W, C]
    :param name: variable scope name
    :param reduction: channel reduction ratio of the excitation bottleneck
    :return: 4-D tensor [N, H, W, C], the input rescaled channel-wise
    """
    with tf.variable_scope(name):
        # .as_list() yields plain Python ints, which every TF1 layer accepts.
        _, H, W, C = inputs_tensor.get_shape().as_list()

        # 1. Squeeze: global average pooling over the spatial dimensions.
        x_mean = tf.reduce_mean(inputs_tensor, axis=[1, 2], keepdims=True)
        # [N, 1, 1, C]

        # 2. Excitation: 1x1-conv bottleneck (ReLU), then back to C channels (sigmoid).
        x = tf.layers.conv2d(
            x_mean, C // reduction, kernel_size=1, strides=1, padding='valid', activation=tf.nn.relu,
            name='se1'
        )  # [N, 1, 1, C/r]
        x = tf.layers.conv2d(
            x, C, kernel_size=1, strides=1, padding='valid', activation=tf.nn.sigmoid, name='se2'
        )  # [N, 1, 1, C]

        # 3. Recalibration: rescale each channel of the input by its learned weight.
        y = tf.multiply(inputs_tensor, x)
    return y
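

# For reference (not in the original post): the SENet paper describes the
# excitation with two fully connected layers rather than 1x1 convolutions.
# On a [N, 1, 1, C] tensor the two are equivalent; a minimal sketch of the
# FC variant under the same TF1.x API might look like this:
def se_block_dense(inputs_tensor, name, reduction=8):
    with tf.variable_scope(name):
        C = inputs_tensor.get_shape().as_list()[-1]
        # Squeeze: global average pooling -> [N, C]
        x = tf.reduce_mean(inputs_tensor, axis=[1, 2])
        # Excitation: bottleneck FC -> ReLU -> FC -> sigmoid
        x = tf.layers.dense(x, C // reduction, activation=tf.nn.relu, name='fc1')
        x = tf.layers.dense(x, C, activation=tf.nn.sigmoid, name='fc2')
        # Reshape to [N, 1, 1, C] so the scales broadcast over H and W.
        x = tf.reshape(x, [-1, 1, 1, C])
        return inputs_tensor * x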


def test():
    data = tf.ones(shape=[2, 64, 64, 128], dtype=tf.float32)
    se_out = Se_block(data, name='se_block', reduction=8)
    print(se_out)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        print(sess.run(se_out))
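

# For context (a hypothetical sketch, not part of the original demo): in
# SE-ResNet the SE block sits on the residual branch, after its convolutions
# and before the skip addition. This assumes `x` already has `filters`
# channels so the shortcut and the branch shapes match.
def se_residual_block(x, filters, name, reduction=8):
    with tf.variable_scope(name):
        shortcut = x
        y = tf.layers.conv2d(x, filters, kernel_size=3, padding='same',
                             activation=tf.nn.relu, name='conv1')
        y = tf.layers.conv2d(y, filters, kernel_size=3, padding='same',
                             activation=None, name='conv2')
        y = Se_block(y, name='se', reduction=reduction)  # channel recalibration
        return tf.nn.relu(shortcut + y)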


if __name__ == '__main__':
    test()
Console output:

D:\Anaconda\python.exe D:/AI20/HJZ/04-深度学习/3-CNN/20191215__AI20_CNN/06_SeNet_block.py
Tensor("se_block/Mul:0", shape=(2, 64, 64, 128), dtype=float32)
2020-01-31 12:30:18.151070: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX AVX2
[[[[0.41282308 0.34352598 0.33103785 ... 0.43650055 0.45205465 0.61846423]
   [0.41282308 0.34352598 0.33103785 ... 0.43650055 0.45205465 0.61846423]
   ...
   [0.41282308 0.34352598 0.33103785 ... 0.43650055 0.45205465 0.61846423]]]]
(the full [2, 64, 64, 128] printout repeats this identical row everywhere
and is truncated here)

This repetition is expected: the input is tf.ones, so the squeeze step
produces the same constant (1.0) for every channel, the randomly initialized
excitation layers turn that into one sigmoid scale per channel, and
multiplying the all-ones input by those scales yields values that are
constant over the batch and spatial axes and vary only along the channel axis.

Process finished with exit code 0
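
As a quick sanity check (a hypothetical addition, not in the original post), one can assert that the output really is constant over the spatial axes:

def check_constant_over_space():
    import numpy as np
    tf.reset_default_graph()
    data = tf.ones(shape=[2, 64, 64, 128], dtype=tf.float32)
    se_out = Se_block(data, name='se_check', reduction=8)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        out = sess.run(se_out)
        # Every [h, w] position should equal the [0, 0] slice.
        assert np.allclose(out, out[:, :1, :1, :])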
