Network structure and per-layer parameters of MobileNet v1 (paper architecture)

The table below is the per-layer summary (as printed by Keras model.summary()) of a MobileNet v1 with width multiplier 1.0 and 224×224 input, where the classification head has been replaced by an 18-class fc layer.

Layer (type) | Output Shape | Param #
input_1 (InputLayer) | [(None, 224, 224, 3)] | 0
conv1_pad (ZeroPadding2D) | (None, 226, 226, 3) | 0
conv1 (Conv2D) | (None, 112, 112, 32) | 864
conv1_bn (BatchNormalization) | (None, 112, 112, 32) | 128
conv1_relu (Activation) | (None, 112, 112, 32) | 0
conv_pad_1 (ZeroPadding2D) | (None, 114, 114, 32) | 0
conv_dw_1 (DepthwiseConv2D) | (None, 112, 112, 32) | 288
conv_dw_1_bn (BatchNormalization) | (None, 112, 112, 32) | 128
conv_dw_1_relu (Activation) | (None, 112, 112, 32) | 0
conv_pw_1 (Conv2D) | (None, 112, 112, 64) | 2048
conv_pw_1_bn (BatchNormalization) | (None, 112, 112, 64) | 256
conv_pw_1_relu (Activation) | (None, 112, 112, 64) | 0
conv_pad_2 (ZeroPadding2D) | (None, 114, 114, 64) | 0
conv_dw_2 (DepthwiseConv2D) | (None, 56, 56, 64) | 576
conv_dw_2_bn (BatchNormalization) | (None, 56, 56, 64) | 256
conv_dw_2_relu (Activation) | (None, 56, 56, 64) | 0
conv_pw_2 (Conv2D) | (None, 56, 56, 128) | 8192
conv_pw_2_bn (BatchNormalization) | (None, 56, 56, 128) | 512
conv_pw_2_relu (Activation) | (None, 56, 56, 128) | 0
conv_pad_3 (ZeroPadding2D) | (None, 58, 58, 128) | 0
conv_dw_3 (DepthwiseConv2D) | (None, 56, 56, 128) | 1152
conv_dw_3_bn (BatchNormalization) | (None, 56, 56, 128) | 512
conv_dw_3_relu (Activation) | (None, 56, 56, 128) | 0
conv_pw_3 (Conv2D) | (None, 56, 56, 128) | 16384
conv_pw_3_bn (BatchNormalization) | (None, 56, 56, 128) | 512
conv_pw_3_relu (Activation) | (None, 56, 56, 128) | 0
conv_pad_4 (ZeroPadding2D) | (None, 58, 58, 128) | 0
conv_dw_4 (DepthwiseConv2D) | (None, 28, 28, 128) | 1152
conv_dw_4_bn (BatchNormalization) | (None, 28, 28, 128) | 512
conv_dw_4_relu (Activation) | (None, 28, 28, 128) | 0
conv_pw_4 (Conv2D) | (None, 28, 28, 256) | 32768
conv_pw_4_bn (BatchNormalization) | (None, 28, 28, 256) | 1024
conv_pw_4_relu (Activation) | (None, 28, 28, 256) | 0
conv_pad_5 (ZeroPadding2D) | (None, 30, 30, 256) | 0
conv_dw_5 (DepthwiseConv2D) | (None, 28, 28, 256) | 2304
conv_dw_5_bn (BatchNormalization) | (None, 28, 28, 256) | 1024
conv_dw_5_relu (Activation) | (None, 28, 28, 256) | 0
conv_pw_5 (Conv2D) | (None, 28, 28, 256) | 65536
conv_pw_5_bn (BatchNormalization) | (None, 28, 28, 256) | 1024
conv_pw_5_relu (Activation) | (None, 28, 28, 256) | 0
conv_pad_6 (ZeroPadding2D) | (None, 30, 30, 256) | 0
conv_dw_6 (DepthwiseConv2D) | (None, 14, 14, 256) | 2304
conv_dw_6_bn (BatchNormalization) | (None, 14, 14, 256) | 1024
conv_dw_6_relu (Activation) | (None, 14, 14, 256) | 0
conv_pw_6 (Conv2D) | (None, 14, 14, 512) | 131072
conv_pw_6_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_pw_6_relu (Activation) | (None, 14, 14, 512) | 0
conv_pad_7 (ZeroPadding2D) | (None, 16, 16, 512) | 0
conv_dw_7 (DepthwiseConv2D) | (None, 14, 14, 512) | 4608
conv_dw_7_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_dw_7_relu (Activation) | (None, 14, 14, 512) | 0
conv_pw_7 (Conv2D) | (None, 14, 14, 512) | 262144
conv_pw_7_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_pw_7_relu (Activation) | (None, 14, 14, 512) | 0
conv_pad_8 (ZeroPadding2D) | (None, 16, 16, 512) | 0
conv_dw_8 (DepthwiseConv2D) | (None, 14, 14, 512) | 4608
conv_dw_8_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_dw_8_relu (Activation) | (None, 14, 14, 512) | 0
conv_pw_8 (Conv2D) | (None, 14, 14, 512) | 262144
conv_pw_8_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_pw_8_relu (Activation) | (None, 14, 14, 512) | 0
conv_pad_9 (ZeroPadding2D) | (None, 16, 16, 512) | 0
conv_dw_9 (DepthwiseConv2D) | (None, 14, 14, 512) | 4608
conv_dw_9_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_dw_9_relu (Activation) | (None, 14, 14, 512) | 0
conv_pw_9 (Conv2D) | (None, 14, 14, 512) | 262144
conv_pw_9_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_pw_9_relu (Activation) | (None, 14, 14, 512) | 0
conv_pad_10 (ZeroPadding2D) | (None, 16, 16, 512) | 0
conv_dw_10 (DepthwiseConv2D) | (None, 14, 14, 512) | 4608
conv_dw_10_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_dw_10_relu (Activation) | (None, 14, 14, 512) | 0
conv_pw_10 (Conv2D) | (None, 14, 14, 512) | 262144
conv_pw_10_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_pw_10_relu (Activation) | (None, 14, 14, 512) | 0
conv_pad_11 (ZeroPadding2D) | (None, 16, 16, 512) | 0
conv_dw_11 (DepthwiseConv2D) | (None, 14, 14, 512) | 4608
conv_dw_11_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_dw_11_relu (Activation) | (None, 14, 14, 512) | 0
conv_pw_11 (Conv2D) | (None, 14, 14, 512) | 262144
conv_pw_11_bn (BatchNormalization) | (None, 14, 14, 512) | 2048
conv_pw_11_relu (Activation) | (None, 14, 14, 512) | 0
conv_pad_12 (ZeroPadding2D) | (None, 16, 16, 512) | 0
conv_dw_12 (DepthwiseConv2D) | (None, 7, 7, 512) | 4608
conv_dw_12_bn (BatchNormalization) | (None, 7, 7, 512) | 2048
conv_dw_12_relu (Activation) | (None, 7, 7, 512) | 0
conv_pw_12 (Conv2D) | (None, 7, 7, 1024) | 524288
conv_pw_12_bn (BatchNormalization) | (None, 7, 7, 1024) | 4096
conv_pw_12_relu (Activation) | (None, 7, 7, 1024) | 0
conv_pad_13 (ZeroPadding2D) | (None, 9, 9, 1024) | 0
conv_dw_13 (DepthwiseConv2D) | (None, 7, 7, 1024) | 9216
conv_dw_13_bn (BatchNormalization) | (None, 7, 7, 1024) | 4096
conv_dw_13_relu (Activation) | (None, 7, 7, 1024) | 0
conv_pw_13 (Conv2D) | (None, 7, 7, 1024) | 1048576
conv_pw_13_bn (BatchNormalization) | (None, 7, 7, 1024) | 4096
conv_pw_13_relu (Activation) | (None, 7, 7, 1024) | 0
global_average_pooling2d (GlobalAveragePooling2D) | (None, 1024) | 0
reshape_1 (Reshape) | (None, 1, 1, 1024) | 0
dropout (Dropout) | (None, 1, 1, 1024) | 0
fc (Dense) | (None, 1, 1, 18) | 18450
act_softmax (Activation) | (None, 1, 1, 18) | 0

Total params: 3,247,314
Trainable params: 3,225,426
Non-trainable params: 21,888
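
The parameter counts follow directly from the depthwise separable structure: a 3×3 depthwise convolution over C channels holds 3·3·C weights (conv_dw_2: 3·3·64 = 576), the 1×1 pointwise convolution holds C_in·C_out weights (conv_pw_2: 64·128 = 8192), and each BatchNormalization layer adds 4·C parameters (gamma, beta, moving mean, moving variance), of which the two moving statistics are non-trainable. The fc layer contributes 1024·18 + 18 = 18,450.

For reference, the sketch below rebuilds this stack with the Keras functional API. It is an illustrative reconstruction assuming TensorFlow 2.x / tf.keras, not the code that produced the summary: the layer names, the explicit ZeroPadding2D before every depthwise convolution, the ReLU6 activations, and the 18-class head are taken from the table above, while helper names such as depthwise_separable_block and build_mobilenet_v1 are invented for this example.

```python
import tensorflow as tf
from tensorflow.keras import layers, models


def depthwise_separable_block(x, pointwise_filters, block_id, strides=1):
    """One MobileNet v1 block: pad -> 3x3 depthwise -> BN -> ReLU6
    -> 1x1 pointwise -> BN -> ReLU6, named to match the summary."""
    x = layers.ZeroPadding2D(padding=1, name=f"conv_pad_{block_id}")(x)
    x = layers.DepthwiseConv2D(3, strides=strides, padding="valid",
                               use_bias=False, name=f"conv_dw_{block_id}")(x)
    x = layers.BatchNormalization(name=f"conv_dw_{block_id}_bn")(x)
    x = layers.Activation(tf.nn.relu6, name=f"conv_dw_{block_id}_relu")(x)
    x = layers.Conv2D(pointwise_filters, 1, use_bias=False,
                      name=f"conv_pw_{block_id}")(x)
    x = layers.BatchNormalization(name=f"conv_pw_{block_id}_bn")(x)
    x = layers.Activation(tf.nn.relu6, name=f"conv_pw_{block_id}_relu")(x)
    return x


def build_mobilenet_v1(num_classes=18, dropout_rate=0.001):
    inputs = layers.Input(shape=(224, 224, 3))
    # Stem: full 3x3 convolution, stride 2 -> 112x112x32 (3*3*3*32 = 864 weights).
    x = layers.ZeroPadding2D(padding=1, name="conv1_pad")(inputs)
    x = layers.Conv2D(32, 3, strides=2, padding="valid",
                      use_bias=False, name="conv1")(x)
    x = layers.BatchNormalization(name="conv1_bn")(x)
    x = layers.Activation(tf.nn.relu6, name="conv1_relu")(x)
    # 13 depthwise separable blocks: (pointwise filters, stride) per block;
    # stride 2 at blocks 2, 4, 6 and 12 halves the spatial resolution.
    config = [(64, 1), (128, 2), (128, 1), (256, 2), (256, 1), (512, 2),
              (512, 1), (512, 1), (512, 1), (512, 1), (512, 1),
              (1024, 2), (1024, 1)]
    for block_id, (filters, strides) in enumerate(config, start=1):
        x = depthwise_separable_block(x, filters, block_id, strides)
    # Head: global average pool -> 1x1x1024 -> dropout -> 18-way dense -> softmax.
    x = layers.GlobalAveragePooling2D()(x)
    x = layers.Reshape((1, 1, 1024), name="reshape_1")(x)
    x = layers.Dropout(dropout_rate, name="dropout")(x)
    x = layers.Dense(num_classes, name="fc")(x)
    x = layers.Activation("softmax", name="act_softmax")(x)
    return models.Model(inputs, x, name="mobilenet_v1")


model = build_mobilenet_v1(num_classes=18)
model.summary()  # should report 3,247,314 total parameters
```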


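As a quick sanity check, the totals of the reconstructed model should match the summary exactly; in particular, the 21,888 non-trainable parameters are just the moving mean and variance of the 27 BatchNormalization layers (two length-C vectors each). The snippet below assumes the `model` built in the previous sketch is in scope.

```python
import tensorflow as tf

# Compare the reconstructed model's totals with the summary above.
total = model.count_params()
trainable = sum(int(tf.size(w)) for w in model.trainable_weights)
non_trainable = sum(int(tf.size(w)) for w in model.non_trainable_weights)
print(total, trainable, non_trainable)  # expected: 3247314 3225426 21888

# The non-trainable weights are exactly the BN moving statistics:
# two vectors of length C for each BatchNormalization layer.
bn_channels = [int(layer.gamma.shape[-1]) for layer in model.layers
               if isinstance(layer, tf.keras.layers.BatchNormalization)]
print(len(bn_channels), 2 * sum(bn_channels))  # expected: 27 21888
```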