# Core idea of the functional API: every layer is a callable — you invoke
# whichever layer you need, feeding each one the previous layer's output.
import tensorflow as tf
from tensorflow import keras
import matplotlib.pyplot as plt
%matplotlib inline
# Load the Fashion-MNIST dataset bundled with Keras.
fashion_mnist = keras.datasets.fashion_mnist
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
# Normalize: scale pixel values from [0, 255] down to [0, 1].
train_images, test_images = train_images / 255.0, test_images / 255.0
# Inspect the input shape (notebook-style bare expression display).
train_images.shape
# Build a classifier with the Keras functional API: each layer object is a
# callable applied to the previous tensor, from the Input placeholder down
# to the output tensor.
# NOTE: renamed `input` -> `model_input` so we no longer shadow the builtin input().
model_input = keras.Input(shape=(28, 28))  # declare the input shape explicitly
# Flatten the 28x28 image into a 784-length feature vector.
x = keras.layers.Flatten()(model_input)
# keras.layers.Dense(32, activation='relu') is the "function" being called;
# its argument is the previous tensor, its output has 32 units.
x = keras.layers.Dense(32, activation='relu')(x)
# Dropout regularization: randomly zero 50% of activations during training.
x = keras.layers.Dropout(0.5)(x)
x = keras.layers.Dense(64, activation='relu')(x)
# 10-way softmax over the Fashion-MNIST classes.
output = keras.layers.Dense(10, activation='softmax')(x)
# Everything between the Input and the output tensor is the network,
# built by calling the layers one after another; wrap it into a Model.
model = keras.Model(inputs=model_input, outputs=output)
model.summary()
# Compile: Adam optimizer, sparse categorical cross-entropy (labels are
# integer class ids, not one-hot), and track accuracy during training.
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)
# Train for 30 epochs, evaluating on the test set after every epoch.
history = model.fit(
    train_images,
    train_labels,
    epochs=30,
    validation_data=(test_images, test_labels),
)
# A key benefit of the functional API: it can build multi-input / multi-output models.
# Two-input model: take two 28x28 images and predict whether they belong to
# the same class — a binary, logistic-regression-style head.
input1 = keras.Input(shape=(28, 28))
input2 = keras.Input(shape=(28, 28))
x1 = keras.layers.Flatten()(input1)
x2 = keras.layers.Flatten()(input2)
# Concatenate the two flattened branches into a single feature vector.
x = keras.layers.concatenate([x1, x2])
x = keras.layers.Dense(64, activation='relu')(x)
# Binary "same class?" output: a single sigmoid unit. (Was Dense(10), which
# contradicts the binary comparison described above — 10 sigmoid outputs do
# not form a yes/no answer.) Compile this model with loss='binary_crossentropy'.
output = keras.layers.Dense(1, activation='sigmoid')(x)
model = keras.Model(inputs=[input1, input2], outputs=output)
model.summary()  # the layer graph shows the two branches splitting, then merging