1. Custom Network Layers
The best way to implement your own layer is to subclass tf.keras.layers.Layer and implement:
- __init__(), which performs all input-independent initialization
- build(), which receives the shape of the input tensor and performs any remaining initialization (see the sketch after the template below)
- call(), which builds the network structure and runs the forward pass
This custom layer is equivalent to tf.keras.layers.Dense().
Template:
class MyDense(tf.keras.layers.Layer):  # subclass tf.keras.layers.Layer
    def __init__(self, input_dim, output_dim):
        super(MyDense, self).__init__()  # initialize the parent class first, otherwise its members are not set up
        # add_weight (and its deprecated alias add_variable) registers trainable variables with the layer
        self.kernel = self.add_weight('w', [input_dim, output_dim])
        self.bias = self.add_weight('b', [output_dim])

    def call(self, inputs, training=None):  # invoked automatically by model(x)
        out = inputs @ self.kernel + self.bias
        return out
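The template above creates its weights eagerly in __init__, so it must be told input_dim up front. As a minimal sketch of the build() route mentioned in the list above, the same layer can instead defer weight creation until the first call, when the input shape becomes known (the name LazyDense is ours, for illustration only):

class LazyDense(tf.keras.layers.Layer):
    def __init__(self, output_dim):
        super(LazyDense, self).__init__()
        self.output_dim = output_dim  # only input-independent state here

    def build(self, input_shape):  # runs once, on the first call, with the input's shape
        self.kernel = self.add_weight('w', [input_shape[-1], self.output_dim])
        self.bias = self.add_weight('b', [self.output_dim])

    def call(self, inputs, training=None):
        return inputs @ self.kernel + self.bias

This mirrors how tf.keras.layers.Dense itself only asks for the number of output units.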
2. Custom Network Architectures
Many machine learning models are built by stacking different layers together. To implement a custom network architecture, subclass tf.keras.Model and implement:
- __init__(), which performs all input-independent initialization
- build(), which receives the shape of the input tensor and performs any remaining initialization
- call(), which builds the network structure and runs the forward pass
This custom network is equivalent to tf.keras.Sequential().
Template:
class MyNetwork(tf.keras.Model):
    def __init__(self):
        super(MyNetwork, self).__init__()
        self.fc1 = MyDense(32*32*3, 256)  # invokes MyDense.__init__
        self.fc2 = MyDense(256, 128)
        self.fc3 = MyDense(128, 64)
        self.fc4 = MyDense(64, 32)
        self.fc5 = MyDense(32, 10)

    def call(self, inputs, training=None):
        x = tf.reshape(inputs, [-1, 32*32*3])
        x = self.fc1(x)  # calling a layer invokes MyDense.call
        x = tf.nn.relu(x)
        x = self.fc2(x)
        x = tf.nn.relu(x)
        x = self.fc3(x)
        x = tf.nn.relu(x)
        x = self.fc4(x)
        x = tf.nn.relu(x)
        x = self.fc5(x)
        return x
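As a quick sanity check, assuming a CIFAR-10-sized input of 32*32*3 (dummy random data, just to show that model(x) routes through call()):

model = MyNetwork()
x = tf.random.normal([4, 32, 32, 3])  # a dummy batch of 4 images
out = model(x)                        # triggers MyNetwork.call, which chains the MyDense layers
print(out.shape)                      # (4, 10)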
3. Example
import tensorflow as tf
(x, y), (x_val, y_val) = tf.keras.datasets.mnist.load_data()
print(x.shape, y.shape)
train_db = tf.data.Dataset.from_tensor_slices((x, y))
test_db = tf.data.Dataset.from_tensor_slices((x_val, y_val))
def processing(x, y):
    x = tf.cast(x, dtype=tf.float32) / 255.  # scale pixel values to [0, 1]
    x = tf.reshape(x, [28*28])               # flatten each 28x28 image into a 784-vector
    y = tf.cast(y, dtype=tf.int32)
    y = tf.one_hot(y, depth=10)              # one-hot labels for categorical crossentropy
    return x, y
batch_size = 128
train_db = train_db.map(processing).shuffle(10000).batch(batch_size)
test_db = test_db.map(processing).batch(batch_size)
sample = next(iter(train_db))
print(sample[0].shape, sample[1].shape)
# network = tf.keras.Sequential(
# [tf.keras.layers.Dense(256, activation='relu'),
# tf.keras.layers.Dense(128, activation='relu'),
# tf.keras.layers.Dense(64, activation='relu'),
# tf.keras.layers.Dense(32, activation='relu'),
# tf.keras.layers.Dense(10)]
# )
class MyDense(tf.keras.layers.Layer):
def __init__(self, input_dim, output_dim):
super(MyDense, self).__init__()
self.kernel = self.add_weight('w', [input_dim, output_dim])
self.bias = self.add_weight('b', [output_dim])
def call(self, inputs):
out = inputs @ self.kernel + self.bias
return out
class MyNetwork(tf.keras.Model):
def __init__(self):
super(MyNetwork, self).__init__()
self.fc1 = MyDense(28*28, 256)
self.fc2 = MyDense(256, 128)
self.fc3 = MyDense(128, 64)
self.fc4 = MyDense(64, 32)
self.fc5 = MyDense(32, 10)
def call(self, inputs):
x = self.fc1(inputs)
x = tf.nn.relu(x)
x = self.fc2(x)
x = tf.nn.relu(x)
x = self.fc3(x)
x = tf.nn.relu(x)
x = self.fc4(x)
x = tf.nn.relu(x)
x = self.fc5(x)
return x
network = MyNetwork()  # instantiate the custom model
network.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
                loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True),  # the model outputs raw logits
                metrics=['accuracy'])
network.fit(train_db, epochs=2)
Output:
(128, 784) (128, 10)
Epoch 1/2
469/469 [==============================] - 3s 6ms/step - loss: 0.3062 - accuracy: 0.9102
Epoch 2/2
469/469 [==============================] - 3s 6ms/step - loss: 0.1062 - accuracy: 0.9681
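The test split is prepared above but never consumed. As a follow-up sketch, evaluate() reuses the compiled loss and metrics on test_db, and predict() returns raw logits (since from_logits=True), which can be passed through tf.nn.softmax for probabilities:

network.evaluate(test_db)              # loss and accuracy on the test set
logits = network.predict(test_db)      # raw logits, shape (10000, 10)
probs = tf.nn.softmax(logits, axis=1)  # convert logits to class probabilities
print(tf.argmax(probs, axis=1)[:5])    # predicted classes for the first 5 images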