函数式 API 简介:
不同于 Sequential 顺序堆叠的网络,函数式 API 可以自行设计任意的网络结构(多输入、多输出、分支等)。
核心思想在于:每一层都是可调用对象,像函数一样作用于上一层的输出张量,从而逐层连接出完整的网络。
#本代码基于tensorflow2.0
import tensorflow as tf
from tensorflow import keras
import matplotlib.pyplot as plt
%matplotlib inline
fashion_mnist = keras.datasets.fashion_mnist
# 1. Load Fashion-MNIST and scale the 0-255 pixel values into [0, 1].
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
train_images, test_images = train_images / 255, test_images / 255
# 2. Build the model with the functional API.
# 2.1 Input: one 28x28 grayscale image; the batch dimension is implicit.
#     (Renamed from `input`, which shadowed the Python builtin.)
inputs = keras.Input(shape=(28, 28))
# 2.2 Stack layers by calling each layer on the previous layer's output tensor.
x = keras.layers.Flatten()(inputs)  # (28, 28) -> (784,)
x = keras.layers.Dense(32, activation='relu')(x)
x = keras.layers.Dropout(0.5)(x)  # regularization: randomly drop half the units while training
x = keras.layers.Dense(64, activation='relu')(x)
# 10 output classes -> softmax probability distribution.
outputs = keras.layers.Dense(10, activation='softmax')(x)
model = keras.Model(inputs=inputs, outputs=outputs)
model.summary()
# 3. Compile: Adam optimizer; sparse categorical cross-entropy because the
#    labels are integer class ids (not one-hot); track classification accuracy.
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)
# 4. Train for 30 epochs, evaluating on the held-out test split after each one.
model.fit(
    train_images,
    train_labels,
    epochs=30,
    validation_data=(test_images, test_labels),
)
下面简单演示多输入模型的搭建(仅为建模部分的代码,编译与训练步骤同上,未重复给出):
# Multi-input model: two independent 28x28 image inputs.
input_a = keras.Input(shape=(28, 28))
input_b = keras.Input(shape=(28, 28))
# Flatten each branch separately, then merge them into one feature vector.
flat_a = keras.layers.Flatten()(input_a)
flat_b = keras.layers.Flatten()(input_b)
merged = keras.layers.concatenate([flat_a, flat_b])  # joins the two branches
hidden = keras.layers.Dense(32, activation='relu')(merged)
# Single sigmoid unit -> binary prediction.
output = keras.layers.Dense(1, activation='sigmoid')(hidden)
model = keras.Model(inputs=[input_a, input_b], outputs=output)
model.summary()