自编码器包括编码器和解码器,编码器提供按规则编码的功能,解码器将编码器的输出扩展为与编码器输入具有相同的维度的输出。在这个过程中,自编码器通过数据重组进行机器学习。
import tensorflow as tf
import tensorflow.keras as keras
import tensorflow.keras.layers as layers
from IPython.display import SVG

print(tf.__version__)

# Load MNIST and flatten each 28x28 image into a 784-dim vector scaled to [0, 1].
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
x_train = x_train.reshape((-1, 28 * 28)) / 255.0
x_test = x_test.reshape((-1, 28 * 28)) / 255.0

code_dim = 32  # size of the latent code (bottleneck)

# Encoder maps 784 -> 32; decoder maps 32 -> 784.
inputs = layers.Input(shape=(x_train.shape[1],), name='inputs')
code = layers.Dense(code_dim, activation='relu', name='code')(inputs)
# BUG FIX: use 'sigmoid', not 'softmax'. Each output pixel is an independent
# intensity in [0, 1]; softmax would force all 784 pixels to sum to 1, making
# reconstruction impossible.
outputs = layers.Dense(x_train.shape[1], activation='sigmoid', name='outputs')(code)

# Build the full autoencoder (encoder + decoder end to end).
auto_encoder = keras.Model(inputs, outputs)
keras.utils.plot_model(auto_encoder, show_shapes=True)

# Encoder sub-model: maps an image to its latent code.
encoder = keras.Model(inputs, code)
keras.utils.plot_model(encoder, show_shapes=True)

# Decoder sub-model: reuses the autoencoder's last (output) layer so it shares
# the trained weights.
decoder_input = keras.Input((code_dim,))
decoder_output = auto_encoder.layers[-1](decoder_input)
decoder = keras.Model(decoder_input, decoder_output)
keras.utils.plot_model(decoder, show_shapes=True)

# Pixel-wise binary cross-entropy pairs with the sigmoid outputs above.
auto_encoder.compile(optimizer='adam', loss='binary_crossentropy')

# Train to reconstruct the input: the target equals the input.
history = auto_encoder.fit(x_train, x_train, batch_size=64, epochs=10,
                           validation_split=0.1)

# Encode the test set, then decode the codes back into images.
encoded = encoder.predict(x_test)
decoded = decoder.predict(encoded)

import matplotlib.pyplot as plt
# BUG FIX: figsize takes a (width, height) tuple; (10.4) is just the float 10.4.
plt.figure(figsize=(10, 4))
(2)卷积自编码器
与前面的全连接自编码器不同,卷积自编码器使用卷积层代替全连接层,能够保留图像的二维空间结构,并通过权值共享大幅减少参数数量,因此训练更快、重建效果通常更好。
import tensorflow as tf
import tensorflow.keras as keras
import tensorflow.keras.layers as layers
from IPython.display import SVG

print(tf.__version__)

# Load MNIST. BUG FIX: a Conv2D network consumes (height, width, channels)
# tensors, so keep the 28x28 image shape and add a single channel axis
# instead of flattening to 784-dim vectors.
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
x_train = x_train.reshape((-1, 28, 28, 1)) / 255.0
x_test = x_test.reshape((-1, 28, 28, 1)) / 255.0

# Encoder: conv feature extraction + max-pool downsampling 28x28 -> 14x14.
# BUG FIX: the original read x_train[2] (a sample, not a dimension) and a
# non-existent shape[3]; take the per-sample shape directly.
inputs = layers.Input(shape=x_train.shape[1:], name='inputs')
code = layers.Conv2D(16, (3, 3), activation='relu', padding='same')(inputs)
code = layers.MaxPool2D((2, 2), padding='same')(code)

# Decoder: conv + upsampling restores 14x14 -> 28x28; the final sigmoid
# produces per-pixel intensities in [0, 1].
decoded = layers.Conv2D(16, (3, 3), activation='relu', padding='same')(code)
decoded = layers.UpSampling2D((2, 2))(decoded)
outputs = layers.Conv2D(1, (3, 3), activation='sigmoid', padding='same')(decoded)

# Build the convolutional autoencoder.
auto_encoder = keras.Model(inputs, outputs)
# BUG FIX: 'complie' -> compile, 'keras.optimizer' -> keras.optimizers, and
# the loss must be an instance (BinaryCrossentropy()), not the class object.
auto_encoder.compile(
    optimizer=keras.optimizers.Adam(),
    loss=keras.losses.BinaryCrossentropy())

keras.utils.plot_model(auto_encoder, show_shapes=True)

# Stop training early once the loss stops improving for 2 consecutive epochs.
early_stop = keras.callbacks.EarlyStopping(patience=2, monitor='loss')

# Train to reconstruct the input: the target equals the input.
history = auto_encoder.fit(x_train, x_train, batch_size=64, epochs=10,
                           validation_split=0.1, callbacks=[early_stop])

# BUG FIX: this script never builds separate encoder/decoder sub-models, so
# reconstruct the test images with the full autoencoder instead of the
# undefined `encoder`/`decoder` names.
decoded = auto_encoder.predict(x_test)

import matplotlib.pyplot as plt
# BUG FIX: figsize takes a (width, height) tuple; (10.4) is just the float 10.4.
plt.figure(figsize=(10, 4))