Tensorflow2.0 keras MobileNetV2 代码实现

网络介绍请参看:相关博文(原文为超链接,抓取时链接丢失)
keras搭建深度学习模型的若干方法:相关博文(原文为超链接,抓取时链接丢失)
直接上网络结构
(此处原有 MobileNetV2 网络结构图,抓取时图片丢失)

迁移学习

依旧看看标准答案

import  tensorflow as tf
from    tensorflow import keras

base_model = keras.applications.MobileNetV2(weights='imagenet')
base_model.summary()

(此处原有 base_model.summary() 的输出截图,抓取时图片丢失)

自编程序

Block

(此处原有 MobileNetV2 bottleneck block 的结构示意图两张,抓取时图片丢失)

import  tensorflow as tf
from    tensorflow import keras
import tensorflow.keras.backend as K
from    tensorflow.keras import layers, models, Sequential, backend
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Dense, Flatten, Dropout, BatchNormalization, Activation, GlobalAveragePooling2D
from tensorflow.keras.layers import Concatenate, Lambda, Input, ZeroPadding2D, AveragePooling2D, DepthwiseConv2D, Reshape


def relu6(x):
    """MobileNetV2's bounded activation: min(max(x, 0), 6).

    Capping the ReLU at 6 keeps activations well-behaved under
    low-precision inference, as in the original MobileNet papers.
    """
    return K.relu(x, max_value=6.0)

# 保证特征层数为8的倍数
def make_divisible(v, divisor, min_value=None):
    """Round *v* to the nearest multiple of *divisor*.

    The result is never below *min_value* (defaults to *divisor*) and
    never falls more than 10% below *v* — this keeps channel counts
    hardware-friendly (e.g. multiples of 8) without shrinking a layer
    too aggressively.

    Returns the adjusted integer channel count.
    """
    if min_value is None:
        min_value = divisor
    # Nearest multiple: add half the divisor, then floor-divide.
    rounded = max(min_value, int(v + divisor / 2) // divisor * divisor)
    # Rounding down must not remove more than 10% of the channels.
    if rounded < 0.9 * v:
        rounded += divisor
    return rounded

def conv_block (x, nb_filter, kernel=(1,1), stride=(1,1), name=None):
    """Conv -> BatchNorm -> ReLU6 unit (used for the stem and 1x1 expansions).

    Args:
        x: input tensor (channels-last, given BN axis=3).
        nb_filter: number of output filters.
        kernel: convolution kernel size.
        stride: convolution stride.
        name: prefix for the layer names; must be provided by the caller
            (layer names are built as name + suffix).

    Returns:
        The activated output tensor.
    """
    # Bias is omitted because the following BatchNormalization has its own beta.
    y = Conv2D(nb_filter, kernel, strides=stride, padding='same',
               use_bias=False, name=name+'_conv1')(x)
    y = BatchNormalization(axis=3, name=name+'_bn1')(y)
    y = Activation(relu6, name=name+'_relu')(y)
    return y


def depthwise_res_block(x, nb_filter, kernel, stride, t, alpha, resdiual=False, name=None):
    """MobileNetV2 inverted-residual bottleneck: expand -> depthwise -> project.

    Args:
        x: input tensor (channels-last).
        nb_filter: base number of output channels (before width multiplier).
        kernel: depthwise convolution kernel size.
        stride: depthwise convolution stride.
        t: expansion factor for the 1x1 expansion conv.
        alpha: width multiplier applied to nb_filter.
        resdiual: if True, add the input back (only valid when stride is 1
            and the channel counts match — the caller is responsible for that).
        name: prefix for layer names; must be provided by the caller.

    Returns:
        The block's output tensor.
    """
    shortcut = x
    expanded_channels = x.shape[-1] * t        # widen by the expansion factor t
    projected_channels = int(nb_filter * alpha)  # shrink by the width multiplier

    # 1x1 expansion with BN + ReLU6.
    y = conv_block(x, expanded_channels, (1,1), (1,1), name=name+'_expansion')

    # Depthwise 3x3 (or given kernel) with BN + ReLU6.
    y = DepthwiseConv2D(kernel, padding='same', strides=stride, depth_multiplier=1,
                        use_bias=False, name=name+'_dpconv')(y)
    y = BatchNormalization(axis=3, name=name+'_bn1')(y)
    y = Activation(relu6, name=name+'_relu1')(y)

    # Linear 1x1 projection — deliberately no activation after this BN.
    y = Conv2D(projected_channels, (1,1), padding='same', use_bias=False,
               strides=(1,1), name=name+'_conv_2')(y)
    y = BatchNormalization(axis=3, name=name+'_bn2')(y)

    if resdiual:
        y = layers.add([y, shortcut])
    return y

def inverted_residual_layers(x, nb_filter, stride, t, alpha, n, name=None):
    """Stack of n inverted-residual blocks forming one MobileNetV2 stage.

    The first block applies the stage stride (and therefore cannot use a
    residual connection); the remaining n-1 blocks use stride 1 with a
    residual shortcut.

    NOTE(review): the original source was truncated mid-line by the page
    scrape (`name=name+'_dep'+s` followed by page junk); the loop's name
    suffix `str(i+1)` and the final `return x` are reconstructed from the
    surrounding functional-API pattern — confirm against the original post.

    Args:
        x: input tensor.
        nb_filter: base output channels for every block in the stage.
        stride: stride of the first block.
        t: expansion factor passed to each block.
        alpha: width multiplier passed to each block.
        n: number of blocks in the stage (must be >= 1).
        name: prefix for layer names; must be provided by the caller.

    Returns:
        The stage's output tensor.
    """
    # First block: applies the stride, no residual (shapes may differ).
    x = depthwise_res_block(x, nb_filter, (3,3), stride, t, alpha, False, name=name+'_dep1')

    # Remaining blocks: stride 1, residual connection enabled.
    for i in range(1, n):
        x = depthwise_res_block(x, nb_filter, (3,3), (1,1), t, alpha, True,
                                name=name+'_dep'+str(i+1))

    return x
  • 3
    点赞
  • 26
    收藏
    觉得还不错? 一键收藏
  • 2
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值