ResNet50 Network Structure: Implementing ResNet50 in TensorFlow and Training on CIFAR-10



Understanding the ResNet50 network structure

ResNet50 introduces residual (shortcut) connections, which alleviate the vanishing-gradient problem that arises when networks become very deep, and this gives the network stronger feature-extraction ability.
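Conceptually, a residual block computes y = F(x) + x: the convolutions only have to learn the residual F(x), while the identity shortcut gives gradients an unobstructed path back to earlier layers. The sketch below is a minimal Keras illustration of that idea; the layer sizes are arbitrary, and the input is assumed to already have the same channel count as filters so the addition is shape-compatible.

import tensorflow as tf

def residual_block_sketch(x, filters=64):
    """Illustrative residual connection: output = relu(F(x) + x)."""
    shortcut = x                                             # identity shortcut
    y = tf.keras.layers.Conv2D(filters, 3, padding='same')(x)
    y = tf.keras.layers.BatchNormalization()(y)
    y = tf.keras.layers.Activation('relu')(y)
    y = tf.keras.layers.Conv2D(filters, 3, padding='same')(y)
    y = tf.keras.layers.BatchNormalization()(y)
    y = tf.keras.layers.add([y, shortcut])                   # add the input back in
    return tf.keras.layers.Activation('relu')(y)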


Implementing ResNet50 with TensorFlow

Reference network

Caffe visualization version

Model construction

import tensorflow as tf


def identity_block(inputs, kernel_size, filters):
    """Bottleneck block that keeps the input shape: 1x1 reduce, kxk conv, 1x1 expand,
    then add the unchanged input as the shortcut."""
    filters1, filters2, filters3 = filters

    x = tf.keras.layers.Conv2D(filters1, (1, 1), padding='same')(inputs)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Activation('relu')(x)

    x = tf.keras.layers.Conv2D(filters2, kernel_size, padding='same')(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Activation('relu')(x)

    x = tf.keras.layers.Conv2D(filters3, (1, 1), padding='same')(x)
    x = tf.keras.layers.BatchNormalization()(x)

    x = tf.keras.layers.add([x, inputs])
    x = tf.keras.layers.Activation('relu')(x)

    return x


def conv_block(inputs, kernel_size, filters, strides=(2, 2)):
    """Bottleneck block that changes the channel count (and usually the spatial size),
    so the shortcut needs its own 1x1 projection of the block input."""
    filters1, filters2, filters3 = filters

    x = tf.keras.layers.Conv2D(filters1, (1, 1), strides=strides, padding='same')(inputs)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Activation('relu')(x)

    x = tf.keras.layers.Conv2D(filters2, kernel_size, padding='same')(x)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Activation('relu')(x)

    x = tf.keras.layers.Conv2D(filters3, (1, 1), padding='same')(x)
    x = tf.keras.layers.BatchNormalization()(x)

    # Project the block input (not the main path) so shapes match before the add.
    shortcut = tf.keras.layers.Conv2D(filters3, (1, 1), strides=strides, padding='same')(inputs)
    shortcut = tf.keras.layers.BatchNormalization()(shortcut)

    x = tf.keras.layers.add([x, shortcut])
    x = tf.keras.layers.Activation('relu')(x)

    return x


def resnet50(inputs, classes):
    # Stem: 7x7 convolution with stride 2, then 3x3 max pooling with stride 2.
    x = tf.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), padding='same')(inputs)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Activation('relu')(x)
    x = tf.keras.layers.MaxPool2D((3, 3), strides=(2, 2))(x)

    # Stage 2 (3 blocks): stride 1 here because the max pooling already downsampled.
    x = conv_block(x, 3, [64, 64, 256], strides=(1, 1))
    x = identity_block(x, 3, [64, 64, 256])
    x = identity_block(x, 3, [64, 64, 256])

    # Stage 3 (4 blocks)
    x = conv_block(x, 3, [128, 128, 512])
    x = identity_block(x, 3, [128, 128, 512])
    x = identity_block(x, 3, [128, 128, 512])
    x = identity_block(x, 3, [128, 128, 512])

    # Stage 4 (6 blocks)
    x = conv_block(x, 3, [256, 256, 1024])
    x = identity_block(x, 3, [256, 256, 1024])
    x = identity_block(x, 3, [256, 256, 1024])
    x = identity_block(x, 3, [256, 256, 1024])
    x = identity_block(x, 3, [256, 256, 1024])
    x = identity_block(x, 3, [256, 256, 1024])

    # Stage 5 (3 blocks)
    x = conv_block(x, 3, [512, 512, 2048])
    x = identity_block(x, 3, [512, 512, 2048])
    x = identity_block(x, 3, [512, 512, 2048])

    # Classification head: global average pooling followed by a softmax layer.
    x = tf.keras.layers.GlobalAveragePooling2D()(x)
    x = tf.keras.layers.Dense(classes, activation='softmax')(x)

    return x


inputs = tf.keras.Input(shape=[32, 32, 3])
model = tf.keras.Model(inputs=inputs, outputs=resnet50(inputs, classes=10))
model.summary()
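As a sanity check on the block layout, the hand-built network can be compared with the reference implementation in tf.keras.applications. The snippet below is a sketch, assuming a TensorFlow 2 release that accepts a 32x32 input for ResNet50; it builds the reference model with random weights and the same 10-class head so the two summaries can be compared.

# Reference ResNet50, randomly initialised, same 32x32 CIFAR-10 input and 10 classes.
reference_model = tf.keras.applications.ResNet50(weights=None, input_shape=(32, 32, 3), classes=10)
reference_model.summary()  # compare stage output shapes and parameter counts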

Parameter information

Model: "model"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            [(None, 32, 32, 3)]  0                                            
__________________________________________________________________________________________________
conv2d (Conv2D)                 (None, 16, 16, 64)   9472        input_1[0][0]                    
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 16, 16, 64)   256         conv2d[0][0]                     
__________________________________________________________________________________________________
activation (Activation)         (None, 16, 16, 64)   0           batch_normalization[0][0]        
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D)    (None, 7, 7, 64)     0           activation[0][0]                 
__________________________________________________________________________________________________
conv2d_1 (Conv2D)               (None, 4, 4, 64)     4160        max_pooling2d[0][0]              
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 4, 4, 64)     256         conv2d_1[0][0]                   
__________________________________________________________________________________________________
activation_1 (Activation)       (None, 4, 4, 64)     0           batch_normalization_1[0][0]      
__________________________________________________________________________________________________
conv2d_2 (Conv2D)               (None, 2, 2, 64)     36928       activation_1[0][0]               
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 2, 2, 64)     256         conv2d_2[0][0]                   
__________________________________________________________________________________________________
activation_2 (Activation)       (None, 2, 2, 64)     0           batch_normalization_2[0][0]      
__________________________________________________________________________________________________
conv2d_3 (Conv2D)               (None, 1, 1, 256)    16640       activation_2[0][0]               
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 1, 1, 256)    1024        conv2d_3[0][0]                   
__________________________________________________________________________________________________
activation_3 (Activation)       (None, 1, 1, 256)    0           batch_normalization_3[0][0]      
__________________________________________________________________________________________________
conv2d_4 (Conv2D)               (None, 1, 1, 256)    65792       activation_3[0][0]               
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 1, 1, 256)    1024        conv2d_4[0][0]                   
__________________________________________________________________________________________________
add (Add)                       (None, 1, 1, 256)    0           activation_3[0][0]               
                                                                 batch_normalization_4[0][0]      
__________________________________________________________________________________________________
conv2d_5 (Conv2D)               (None, 1, 1, 64)     16448       add[0][0]                        
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 1, 1, 64)     256         conv2d_5[0][0]                   
__________________________________________________________________________________________________
activation_5 (Activation)       (None, 1, 1, 64)     0           batch_normalization_5[0][0]      
__________________________________________________________________________________________________
conv2d_6 (Conv2D)               (None, 1, 1, 64)     4160        activation_5[0][0]               
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 1, 1, 64)     256         conv2d_6[0][0]                   
__________________________________________________________________________________________________
activation_6 (Activation)       (None, 1, 1, 64)     0           batch_normalization_6[0][0]      
__________________________________________________________________________________________________
conv2d_7 (Conv2D)               (None, 1, 1, 256)    147712      activation_6[0][0]               
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 1, 1, 256)    1024        conv2d_7[0][0]                   
__________________________________________________________________________________________________
activation_7 (Activation)       (None, 1, 1, 256)    0           batch_normalization_7[0][0]      
__________________________________________________________________________________________________
add_1 (Add)                     (None, 1, 1, 256)    0           activation_7[0][0]               
                                                                 add[0][0]                        
__________________________________________________________________________________________________
activation_8 (Activation)       (None, 1, 1, 256)    0           add_1[0][0]                      
__________________________________________________________________________________________________
conv2d_8 (Conv2D)               (None, 1, 1, 64)     16448       activation_8[0][0]               
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 1, 1, 64)     256         conv2d_8[0][0]                   
__________________________________________________________________________________________________
activation_9 (Activation)       (None, 1, 1, 64)     0           batch_normalization_8[0][0]      
__________________________________________________________________________________________________
conv2d_9 (Conv2D)               (None, 1, 1, 64)     4160        activation_9[0][0]               
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 1, 1, 64)     256         conv2d_9[0][0]                   
__________________________________________________________________________________________________
activation_10 (Activation)      (None, 1, 1, 64)     0           batch_normalization_9[0][0]      
__________________________________________________________________________________________________
conv2d_10 (Conv2D)              (None, 1, 1, 256)    147712      activation_10[0][0]              
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, 1, 1, 256)    1024        conv2d_10[0][0]                  
__________________________________________________________________________________________________
activation_11 (Activation)      (None, 1, 1, 256)    0           batch_normalization_10[0][0]     
__________________________________________________________________________________________________
add_2 (Add)                     (None, 1, 1, 256)    0           activation_11[0][0]              
                                                                 activation_8[0][0]               
__________________________________________________________________________________________________
activation_12 (Activation)      (None, 1, 1, 256)    0           add_2[0][0]                      
__________________________________________________________________________________________________
conv2d_11 (Conv2D)              (None, 1, 1, 128)    32896       activation_12[0][0]              
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, 1, 1, 128)    512         conv2d_11[0][0]                  
__________________________________________________________________________________________________
activation_13 (Activation)      (None, 1, 1, 128)    0           batch_normalization_11[0][0]     
__________________________________________________________________________________________________
conv2d_12 (Conv2D)              (None, 1, 1, 128)    147584      activation_13[0][0]              
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 1, 1, 128)    512         conv2d_12[0][0]                  
__________________________________________________________________________________________________
activation_14 (Activation)      (None, 1, 1, 128)    0           batch_normalization_12[0][0]     
__________________________________________________________________________________________________
conv2d_13 (Conv2D)              (None, 1, 1, 512)    66048       activation_14[0][0]              
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 1, 1, 512)    2048        conv2d_13[0][0]                  
__________________________________________________________________________________________________
activation_15 (Activation)      (None, 1, 1, 512)    0           batch_normalization_13[0][0]     
__________________________________________________________________________________________________
conv2d_14 (Conv2D)              (None, 1, 1, 512)    262656      activation_15[0][0]              
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 1, 1, 512)    2048        conv2d_14[0][0]                  
__________________________________________________________________________________________________
add_3 (Add)                     (None, 1, 1, 512)    0           activation_15[0][0]              
                                                                 batch_normalization_14[0][0]     
__________________________________________________________________________________________________
conv2d_15 (Conv2D)              (None, 1, 1, 128)    65664       add_3[0][0]                      
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 1, 1, 128)    512         conv2d_15[0][0]                  
__________________________________________________________________________________________________
activation_17 (Activation)      (None, 1, 1, 128)    0           batch_normalization_15[0][0]     
__________________________________________________________________________________________________
conv2d_16 (Conv2D)              (None, 1, 1, 128)    16512       activation_17[0][0]              
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 1, 1, 128)    512         conv2d_16[0][0]                  
__________________________________________________________________________________________________
activation_18 (Activation)      (None, 1, 1, 128)    0           batch_normalization_16[0][0]     
__________________________________________________________________________________________________
conv2d_17 (Conv2D)              (None, 1, 1, 512)    590336      activation_18[0][0]              
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 1, 1, 512)    2048        conv2d_17[0][0]                  
__________________________________________________________________________________________________
activation_19 (Activation)      (None, 1, 1, 512)    0           batch_normalization_17[0][0]     
__________________________________________________________________________________________________
add_4 (Add)                     (None, 1, 1, 512)    0           activation_19[0][0]              
                                                                 add_3[0][0]                      
__________________________________________________________________________________________________
activation_20 (Activation)      (None, 1, 1, 512)    0           add_4[0][0]                      
__________________________________________________________________________________________________
conv2d_18 (Conv2D)              (None, 1, 1, 128)    65664       activation_20[0][0]              
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 1, 1, 128)    512         conv2d_18[0][0]                  
__________________________________________________________________________________________________
activation_21 (Activation)      (None, 1, 1, 128)    0           batch_normalization_18[0][0]     
__________________________________________________________________________________________________
conv2d_19 (Conv2D)              (None, 1, 1, 128)    16512       activation_21[0][0]              
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 1, 1, 128)    512         conv2d_19[0][0]                  
__________________________________________________________________________________________________
activation_22 (Activation)      (None, 1, 1, 128)    0           batch_normalization_19[0][0]     
__________________________________________________________________________________________________
conv2d_20 (Conv2D)              (None, 1, 1, 512)    590336      activation_22[0][0]              
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 1, 1, 512)    2048        conv2d_20[0][0]                  
__________________________________________________________________________________________________
activation_23 (Activation)      (None, 1, 1, 512)    0           batch_normalization_20[0][0]     
__________________________________________________________________________________________________
add_5 (Add)                     (None, 1, 1, 512)    0           activation_23[0][0]              
                                                                 activation_20[0][0]              
__________________________________________________________________________________________________
activation_24 (Activation)      (None, 1, 1, 512)    0           add_5[0][0]                      
__________________________________________________________________________________________________
conv2d_21 (Conv2D)              (None, 1, 1, 128)    65664       activation_24[0][0]              
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 1, 1, 128)    512         conv2d_21[0][0]                  
__________________________________________________________________________________________________
activation_25 (Activation)      (None, 1, 1, 128)    0           batch_normalization_21[0][0]     
__________________________________________________________________________________________________
conv2d_22 (Conv2D)              (None, 1, 1, 128)    16512       activation_25[0][0]              
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 1, 1, 128)    512         conv2d_22[0][0]                  
__________________________________________________________________________________________________
activation_26 (Activation)      (None, 1, 1, 128)    0           batch_normalization_22[0][0]     
__________________________________________________________________________________________________
conv2d_23 (Conv2D)              (None, 1, 1, 512)    590336      activation_26[0][0]              
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, 1, 1, 512)    2048        conv2d_23[0][0]                  
__________________________________________________________________________________________________
activation_27 (Activation)      (None, 1, 1, 512)    0           batch_normalization_23[0][0]     
__________________________________________________________________________________________________
add_6 (Add)                     (None, 1, 1, 512)    0           activation_27[0][0]              
                                                                 activation_24[0][0]              
__________________________________________________________________________________________________
activation_28 (Activation)      (None, 1, 1, 512)    0           add_6[0][0]                      
__________________________________________________________________________________________________
conv2d_24 (Conv2D)              (None, 1, 1, 256)    131328      activation_28[0][0]              
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, 1, 1, 256)    1024        conv2d_24[0][0]                  
__________________________________________________________________________________________________
activation_29 (Activation)      (None, 1, 1, 256)    0           batch_normalization_24[0][0]     
__________________________________________________________________________________________________
conv2d_25 (Conv2D)              (None, 1, 1, 256)    590080      activation_29[0][0]              
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, 1, 1, 256)    1024        conv2d_25[0][0]                  
__________________________________________________________________________________________________
activation_30 (Activation)      (None, 1, 1, 256)    0           batch_normalization_25[0][0]     
__________________________________________________________________________________________________
conv2d_26 (Conv2D)              (None, 1, 1, 1024)   263168      activation_30[0][0]              
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, 1, 1, 1024)   4096        conv2d_26[0][0]                  
__________________________________________________________________________________________________
activation_31 (Activation)      (None, 1, 1, 1024)   0           batch_normalization_26[0][0]     
__________________________________________________________________________________________________
conv2d_27 (Conv2D)              (None, 1, 1, 1024)   1049600     activation_31[0][0]              
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, 1, 1, 1024)   4096        conv2d_27[0][0]                  
__________________________________________________________________________________________________
add_7 (Add)                     (None, 1, 1, 1024)   0           activation_31[0][0]              
                                                                 batch_normalization_27[0][0]     
__________________________________________________________________________________________________
conv2d_28 (Conv2D)              (None, 1, 1, 256)    262400      add_7[0][0]                      
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, 1, 1, 256)    1024        conv2d_28[0][0]                  
__________________________________________________________________________________________________
activation_33 (Activation)      (None, 1, 1, 256)    0           batch_normalization_28[0][0]     
__________________________________________________________________________________________________
conv2d_29 (Conv2D)              (None, 1, 1, 256)    65792       activation_33[0][0]              
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, 1, 1, 256)    1024        conv2d_29[0][0]                  
__________________________________________________________________________________________________
activation_34 (Activation)      (None, 1, 1, 256)    0           batch_normalization_29[0][0]     
__________________________________________________________________________________________________
conv2d_30 (Conv2D)              (None, 1, 1, 1024)   2360320     activation_34[0][0]              
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, 1, 1, 1024)   4096        conv2d_30[0][0]                  
__________________________________________________________________________________________________
activation_35 (Activation)      (None, 1, 1, 1024)   0           batch_normalization_30[0][0]     
__________________________________________________________________________________________________
add_8 (Add)                     (None, 1, 1, 1024)   0           activation_35[0][0]              
                                                                 add_7[0][0]                      
__________________________________________________________________________________________________
activation_36 (Activation)      (None, 1, 1, 1024)   0           add_8[0][0]                      
__________________________________________________________________________________________________
conv2d_31 (Conv2D)              (None, 1, 1, 256)    262400      activation_36[0][0]              
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, 1, 1, 256)    1024        conv2d_31[0][0]                  
__________________________________________________________________________________________________
activation_37 (Activation)      (None, 1, 1, 256)    0           batch_normalization_31[0][0]     
__________________________________________________________________________________________________
conv2d_32 (Conv2D)              (None, 1, 1, 256)    65792       activation_37[0][0]              
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, 1, 1, 256)    1024        conv2d_32[0][0]                  
__________________________________________________________________________________________________
activation_38 (Activation)      (None, 1, 1, 256)    0           batch_normalization_32[0][0]     
__________________________________________________________________________________________________
conv2d_33 (Conv2D)              (None, 1, 1, 1024)   2360320     activation_38[0][0]              
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, 1, 1, 1024)   4096        conv2d_33[0][0]                  
__________________________________________________________________________________________________
activation_39 (Activation)      (None, 1, 1, 1024)   0           batch_normalization_33[0][0]     
__________________________________________________________________________________________________
add_9 (Add)                     (None, 1, 1, 1024)   0           activation_39[0][0]              
                                                                 activation_36[0][0]              
__________________________________________________________________________________________________
activation_40 (Activation)      (None, 1, 1, 1024)   0           add_9[0][0]                      
__________________________________________________________________________________________________
conv2d_34 (Conv2D)              (None, 1, 1, 256)    262400      activation_40[0][0]              
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, 1, 1, 256)    1024        conv2d_34[0][0]                  
__________________________________________________________________________________________________
activation_41 (Activation)      (None, 1, 1, 256)    0           batch_normalization_34[0][0]     
__________________________________________________________________________________________________
conv2d_35 (Conv2D)              (None, 1, 1, 256)    65792       activation_41[0][0]              
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, 1, 1, 256)    1024        conv2d_35[0][0]                  
__________________________________________________________________________________________________
activation_42 (Activation)      (None, 1, 1, 256)    0           batch_normalization_35[0][0]     
__________________________________________________________________________________________________
conv2d_36 (Conv2D)              (None, 1, 1, 1024)   2360320     activation_42[0][0]              
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, 1, 1, 1024)   4096        conv2d_36[0][0]                  
__________________________________________________________________________________________________
activation_43 (Activation)      (None, 1, 1, 1024)   0           batch_normalization_36[0][0]     
__________________________________________________________________________________________________
add_10 (Add)                    (None, 1, 1, 1024)   0           activation_43[0][0]              
                                                                 activation_40[0][0]              
__________________________________________________________________________________________________
activation_44 (Activation)      (None, 1, 1, 1024)   0           add_10[0][0]                     
__________________________________________________________________________________________________
conv2d_37 (Conv2D)              (None, 1, 1, 256)    262400      activation_44[0][0]              
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, 1, 1, 256)    1024        conv2d_37[0][0]                  
__________________________________________________________________________________________________
activation_45 (Activation)      (None, 1, 1, 256)    0           batch_normalization_37[0][0]     
__________________________________________________________________________________________________
conv2d_38 (Conv2D)              (None, 1, 1, 256)    65792       activation_45[0][0]              
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, 1, 1, 256)    1024        conv2d_38[0][0]                  
__________________________________________________________________________________________________
activation_46 (Activation)      (None, 1, 1, 256)    0           batch_normalization_38[0][0]     
__________________________________________________________________________________________________
conv2d_39 (Conv2D)              (None, 1, 1, 1024)   2360320     activation_46[0][0]              
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, 1, 1, 1024)   4096        conv2d_39[0][0]                  
__________________________________________________________________________________________________
activation_47 (Activation)      (None, 1, 1, 1024)   0           batch_normalization_39[0][0]     
__________________________________________________________________________________________________
add_11 (Add)                    (None, 1, 1, 1024)   0           activation_47[0][0]              
                                                                 activation_44[0][0]              
__________________________________________________________________________________________________
activation_48 (Activation)      (None, 1, 1, 1024)   0           add_11[0][0]                     
__________________________________________________________________________________________________
conv2d_40 (Conv2D)              (None, 1, 1, 256)    262400      activation_48[0][0]              
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, 1, 1, 256)    1024        conv2d_40[0][0]                  
__________________________________________________________________________________________________
activation_49 (Activation)      (None, 1, 1, 256)    0           batch_normalization_40[0][0]     
__________________________________________________________________________________________________
conv2d_41 (Conv2D)              (None, 1, 1, 256)    65792       activation_49[0][0]              
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, 1, 1, 256)    1024        conv2d_41[0][0]                  
__________________________________________________________________________________________________
activation_50 (Activation)      (None, 1, 1, 256)    0           batch_normalization_41[0][0]     
__________________________________________________________________________________________________
conv2d_42 (Conv2D)              (None, 1, 1, 1024)   2360320     activation_50[0][0]              
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, 1, 1, 1024)   4096        conv2d_42[0][0]                  
__________________________________________________________________________________________________
activation_51 (Activation)      (None, 1, 1, 1024)   0           batch_normalization_42[0][0]     
__________________________________________________________________________________________________
add_12 (Add)                    (None, 1, 1, 1024)   0           activation_51[0][0]              
                                                                 activation_48[0][0]              
__________________________________________________________________________________________________
activation_52 (Activation)      (None, 1, 1, 1024)   0           add_12[0][0]                     
__________________________________________________________________________________________________
conv2d_43 (Conv2D)              (None, 1, 1, 512)    524800      activation_52[0][0]              
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, 1, 1, 512)    2048        conv2d_43[0][0]                  
__________________________________________________________________________________________________
activation_53 (Activation)      (None, 1, 1, 512)    0           batch_normalization_43[0][0]     
__________________________________________________________________________________________________
conv2d_44 (Conv2D)              (None, 1, 1, 512)    2359808     activation_53[0][0]              
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, 1, 1, 512)    2048        conv2d_44[0][0]                  
__________________________________________________________________________________________________
activation_54 (Activation)      (None, 1, 1, 512)    0           batch_normalization_44[0][0]     
__________________________________________________________________________________________________
conv2d_45 (Conv2D)              (None, 1, 1, 2048)   1050624     activation_54[0][0]              
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 1, 1, 2048)   8192        conv2d_45[0][0]                  
__________________________________________________________________________________________________
activation_55 (Activation)      (None, 1, 1, 2048)   0           batch_normalization_45[0][0]     
__________________________________________________________________________________________________
conv2d_46 (Conv2D)              (None, 1, 1, 2048)   4196352     activation_55[0][0]              
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 1, 1, 2048)   8192        conv2d_46[0][0]                  
__________________________________________________________________________________________________
add_13 (Add)                    (None, 1, 1, 2048)   0           activation_55[0][0]              
                                                                 batch_normalization_46[0][0]     
__________________________________________________________________________________________________
conv2d_47 (Conv2D)              (None, 1, 1, 512)    1049088     add_13[0][0]                     
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 1, 1, 512)    2048        conv2d_47[0][0]                  
__________________________________________________________________________________________________
activation_57 (Activation)      (None, 1, 1, 512)    0           batch_normalization_47[0][0]     
__________________________________________________________________________________________________
conv2d_48 (Conv2D)              (None, 1, 1, 512)    262656      activation_57[0][0]              
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 1, 1, 512)    2048        conv2d_48[0][0]                  
__________________________________________________________________________________________________
activation_58 (Activation)      (None, 1, 1, 512)    0           batch_normalization_48[0][0]     
__________________________________________________________________________________________________
conv2d_49 (Conv2D)              (None, 1, 1, 2048)   9439232     activation_58[0][0]              
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 1, 1, 2048)   8192        conv2d_49[0][0]                  
__________________________________________________________________________________________________
activation_59 (Activation)      (None, 1, 1, 2048)   0           batch_normalization_49[0][0]     
__________________________________________________________________________________________________
add_14 (Add)                    (None, 1, 1, 2048)   0           activation_59[0][0]              
                                                                 add_13[0][0]                     
__________________________________________________________________________________________________
activation_60 (Activation)      (None, 1, 1, 2048)   0           add_14[0][0]                     
__________________________________________________________________________________________________
conv2d_50 (Conv2D)              (None, 1, 1, 512)    1049088     activation_60[0][0]              
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 1, 1, 512)    2048        conv2d_50[0][0]                  
__________________________________________________________________________________________________
activation_61 (Activation)      (None, 1, 1, 512)    0           batch_normalization_50[0][0]     
__________________________________________________________________________________________________
conv2d_51 (Conv2D)              (None, 1, 1, 512)    262656      activation_61[0][0]              
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 1, 1, 512)    2048        conv2d_51[0][0]                  
__________________________________________________________________________________________________
activation_62 (Activation)      (None, 1, 1, 512)    0           batch_normalization_51[0][0]     
__________________________________________________________________________________________________
conv2d_52 (Conv2D)              (None, 1, 1, 2048)   9439232     activation_62[0][0]              
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, 1, 1, 2048)   8192        conv2d_52[0][0]                  
__________________________________________________________________________________________________
activation_63 (Activation)      (None, 1, 1, 2048)   0           batch_normalization_52[0][0]     
__________________________________________________________________________________________________
add_15 (Add)                    (None, 1, 1, 2048)   0           activation_63[0][0]              
                                                                 activation_60[0][0]              
__________________________________________________________________________________________________
activation_64 (Activation)      (None, 1, 1, 2048)   0           add_15[0][0]                     
__________________________________________________________________________________________________
global_average_pooling2d (Globa (None, 2048)         0           activation_64[0][0]              
__________________________________________________________________________________________________
dense (Dense)                   (None, 10)           20490       global_average_pooling2d[0][0]   
==================================================================================================
Total params: 48,233,354
Trainable params: 48,180,234
Non-trainable params: 53,120
__________________________________________________________________________________________________


Data preparation

# Scale pixel values to [0, 1] and one-hot encode the labels for both splits.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
y_train = tf.keras.utils.to_categorical(y_train, 10)
y_test = tf.keras.utils.to_categorical(y_test, 10)
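If you prefer streaming the data rather than passing whole NumPy arrays to fit, a minimal tf.data pipeline can shuffle, batch, and prefetch it. This is a sketch: the batch size and shuffle buffer are arbitrary choices, and tf.data.AUTOTUNE assumes a reasonably recent TensorFlow 2 release.

train_ds = (tf.data.Dataset.from_tensor_slices((x_train, y_train))
            .shuffle(10000)       # shuffle buffer chosen arbitrarily
            .batch(64)            # batch size chosen arbitrarily
            .prefetch(tf.data.AUTOTUNE))
test_ds = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(64)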

Model training

# Batch size and epoch count are illustrative; tune them for your hardware.
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['categorical_accuracy', 'Recall', 'AUC'])
model.fit(x_train, y_train, batch_size=64, epochs=10, validation_data=(x_test, y_test))
model.save('resnet50_cifar10.h5')
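After training, evaluating on the held-out test set reports the metrics passed to compile, in order: loss, categorical accuracy, recall, and AUC. A minimal sketch (the batch size is arbitrary):

# evaluate returns [loss, categorical_accuracy, recall, auc] for the test split.
loss, acc, recall, auc = model.evaluate(x_test, y_test, batch_size=64)
print('test accuracy: %.4f, recall: %.4f, AUC: %.4f' % (acc, recall, auc))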

Model inference

import cv2
import numpy as np
# Preprocess like the training data: convert BGR to RGB, resize to 32x32, scale to [0, 1].
img = cv2.cvtColor(cv2.imread('cat.png', 1), cv2.COLOR_BGR2RGB)
img = cv2.resize(img, (32, 32)) / 255.0
img = np.expand_dims(img, 0)
model = tf.keras.models.load_model('resnet50_cifar10.h5')
pred = model.predict(img)
print(pred)
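The prediction is a vector of ten probabilities; taking the argmax and looking it up in the standard CIFAR-10 label order gives a readable class name.

# Standard CIFAR-10 class order.
class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck']
print(class_names[int(np.argmax(pred[0]))], float(np.max(pred[0])))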