Dense层mnist手写字符分类
loss: categorical_crossentropy
optimizer: rmsprop
最后一层: 10, activation=softmax
代码如下:
# Dense-layer MNIST handwritten-digit classifier:
# loss=categorical_crossentropy, optimizer=rmsprop,
# final layer: 10 units with softmax activation.
from keras.datasets import mnist
from keras import models
from keras import layers
from keras.utils import to_categorical
import matplotlib.pyplot as plt

# Load the data (train/test split of 28x28 grayscale digit images).
(train_image, train_labels), (test_image, test_labels) = mnist.load_data()

# Build the model: one hidden Dense layer, then a softmax over the 10 digit classes.
network = models.Sequential()
network.add(layers.Dense(512, activation='relu', input_shape=(28*28, )))
network.add(layers.Dense(10, activation='softmax'))
# Compile with accuracy as the tracked metric.
network.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['acc'])

# Preprocess images: flatten 28x28 -> 784 and scale pixels to [0, 1].
# -1 instead of hard-coded 60000/10000 so the script works for any split size;
# float32 (Keras' compute dtype) instead of float64 halves the memory footprint.
train_image = train_image.reshape((-1, 28*28)).astype('float32') / 255
test_image = test_image.reshape((-1, 28*28)).astype('float32') / 255

# One-hot encode the integer labels, as required by categorical_crossentropy.
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)

# Train the model.
# NOTE(review): the test set is passed as validation_data, so the "validation"
# curve is really test performance — acceptable for a tutorial, but carve out a
# separate validation split before using these curves for model selection.
history = network.fit(train_image,
                      train_labels,
                      batch_size=128,
                      epochs=20,
                      validation_data=(test_image, test_labels))

# Extract per-epoch loss histories for plotting.
history_dic = history.history
loss_value = history_dic['loss']
val_loss_value = history_dic['val_loss']

# Visualize training vs. validation loss per epoch.
epoch = range(1, len(loss_value) + 1)
print(epoch)
plt.plot(epoch, loss_value, 'bo', label='Train_loss')
plt.plot(epoch, val_loss_value, 'b', label='Test_loss')  # fixed typo: was 'Tess_loss'
plt.xlabel('epochs')
plt.ylabel('loss')
plt.legend()
plt.show()
result: