# Import the required libraries
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# Record the runtime TensorFlow version for reproducibility.
print(f"TensorFlow version: {tf.__version__}")
# Output: TensorFlow version: 2.6.4
# Fetch the CIFAR-10 train/test splits (32x32 RGB images with integer labels).
cifar10 = keras.datasets.cifar10
(train_images, train_labels), (test_images, test_labels) = cifar10.load_data()
# Output: Downloading data from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz 170500096/170498071 [==============================] - 6s 0us/step 170508288/170498071 [==============================] - 6s 0us/step
# On-the-fly training augmentation: random horizontal flips plus small
# random rotations (factor 0.1 — up to ±10% of a full circle per the
# RandomRotation API).
augmentation_layers = [
    layers.RandomFlip("horizontal"),
    layers.RandomRotation(0.1),
]
data_augmentation = keras.Sequential(augmentation_layers)
# Build the model
# ---- Small Xception-style CNN with residual shortcuts, built with the
# ---- Keras functional API. Input: 32x32 RGB; output: 10-way softmax.
inputs = keras.Input(shape=(32, 32, 3))

# Augment first, then rescale pixel values from [0, 255] to [0, 1].
x = data_augmentation(inputs)
x = layers.Rescaling(1./255)(x)

# Entry block: two plain 3x3 convolutions at 64 filters, each followed
# by batch norm and ReLU.
for _ in range(2):
    x = layers.Conv2D(64, 3, padding="same")(x)
    x = layers.BatchNormalization()(x)
    x = layers.Activation("relu")(x)

previous_block_activation = x  # shortcut source for the first residual block

# Three downsampling blocks of increasing width, each built from two
# separable convolutions plus a strided 1x1 projection on the shortcut.
for width in [128, 256, 512]:
    x = layers.Activation("relu")(x)
    x = layers.SeparableConv2D(width, 3, padding="same")(x)
    x = layers.BatchNormalization()(x)

    x = layers.Activation("relu")(x)
    x = layers.SeparableConv2D(width, 3, padding="same")(x)
    x = layers.BatchNormalization()(x)

    # Halve the spatial resolution.
    x = layers.MaxPooling2D(3, strides=2, padding="same")(x)

    # Project the shortcut to the new width/resolution so shapes match.
    shortcut = layers.Conv2D(width, 1, strides=2, padding="same")(
        previous_block_activation
    )
    x = layers.add([x, shortcut])  # Add back residual
    previous_block_activation = x  # Set aside next residual

# Final feature extraction and classification head.
x = layers.Conv2D(1024, 3, padding="same")(x)
x = layers.BatchNormalization()(x)
x = layers.Activation("relu")(x)
x = layers.GlobalAveragePooling2D()(x)
x = layers.Dropout(0.5)(x)  # regularization before the classifier
outputs = layers.Dense(10, activation="softmax")(x)

model = keras.Model(inputs=inputs, outputs=outputs)

# Inspect the architecture and save a diagram of it.
model.summary()
keras.utils.plot_model(model, show_shapes=True, to_file='model.png')
# Output: Model: "model" __________________________________________________________________________________________________ Layer (type) Output Shape Param # Connected to ================================================================================================== input_1 (InputLayer) [(None, 32, 32, 3)] 0 __________________________________________________________________________________________________ sequential (Sequential) (None, 32, 32, 3) 0 input_1[0][0] __________________________________________________________________________________________________ rescaling (Rescaling) (None, 32, 32, 3) 0 sequential[0][0] __________________________________________________________________________________________________ conv2d (Conv2D) (None, 32, 32, 64) 1792 rescaling[0][0] __________________________________________________________________________________________________ batch_normalization (BatchNorma (None, 32, 32, 64) 256 conv2d[0][0] __________________________________________________________________________________________________ activation (Activation) (None, 32, 32, 64) 0 batch_normalization[0][0] __________________________________________________________________________________________________ conv2d_1 (Conv2D) (None, 32, 32, 64) 36928 activation[0][0] __________________________________________________________________________________________________ batch_normalization_1 (BatchNor (None, 32, 32, 64) 256 conv2d_1[0][0] __________________________________________________________________________________________________ activation_1 (Activation) (None, 32, 32, 64) 0 batch_normalization_1[0][0] __________________________________________________________________________________________________ activation_2 (Activation) (None, 32, 32, 64) 0 activation_1[0][0] __________________________________________________________________________________________________ separable_conv2d (SeparableConv (None, 32, 32, 128) 8896 activation_2[0][0]
# Output: __________________________________________________________________________________________________ batch_normalization_2 (BatchNor (None, 32, 32, 128) 512 separable_conv2d[0][0] __________________________________________________________________________________________________ activation_3 (Activation) (None, 32, 32, 128) 0 batch_normalization_2[0][0] __________________________________________________________________________________________________ separable_conv2d_1 (SeparableCo (None, 32, 32, 128) 17664 activation_3[0][0] __________________________________________________________________________________________________ batch_normalization_3 (BatchNor (None, 32, 32, 128) 512 separable_conv2d_1[0][0] __________________________________________________________________________________________________ max_po