一.实验环境
- jupyter lab
- tensorflow 2.2.0
- python3.6
二.步骤
1.导入包
#1.开始导入包
#1. Start importing the package
import sklearn
import matplotlib.pyplot as plt
## MNIST数据集在tensorflow中,所以不用自己下了
from tensorflow.keras.datasets import mnist
from tensorflow import keras
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
import numpy as np
import pandas as pd
2.导入数据集
# 2. Load the dataset.
# MNIST ships with tf.keras, so no manual download is needed
# (it is fetched automatically on first use).
train_split, test_split = mnist.load_data()
train_images, train_labels = train_split
test_images, test_labels = test_split
# Report the shapes of the four raw arrays.
print(train_images.shape,train_labels.shape,test_images.shape,test_labels.shape)
3.处理数据集
## 3. Preprocess the dataset.
## Flatten each 28x28 image to a 784-vector and scale pixels to [0, 1].
## reshape((-1, ...)) infers the sample count instead of hard-coding
## 60000/10000, so the same code works for any split size.
train_images = train_images.reshape((-1, 28 * 28))
train_images = train_images.astype('float32') / 255
test_images = test_images.reshape((-1, 28 * 28))
test_images = test_images.astype('float32') / 255
## One-hot encode the integer class labels (digit 0-9 -> length-10 vector).
## NOTE: this is one-hot encoding, not normalization; it matches the
## "categorical_crossentropy" loss used later.
from tensorflow.keras.utils import to_categorical
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)
## Print the shapes after preprocessing to confirm the transforms.
print(train_images.shape,train_labels.shape,test_images.shape,test_labels.shape)
4.定义网络结构
# 4. Define the network structure.
model = keras.models.Sequential()
## Input layer: projects the 784-dim flattened image down to 30 units.
## NOTE(review): no activation here, so this layer is a linear bottleneck —
## presumably intentional for the tutorial; confirm before changing.
model.add(keras.layers.Dense(30, input_shape=train_images.shape[1:]))
## Hidden layers: 15 fully-connected layers of 100 units with ReLU.
for _ in range(15):
    # BUG FIX: the loop body was not indented in the original, which is
    # an IndentationError; indenting it adds all 15 layers as intended.
    model.add(keras.layers.Dense(100, activation='relu'))
## Output layer: softmax over the 10 digit classes, matching the
## one-hot labels produced by to_categorical.
model.add(keras.layers.Dense(10, activation='softmax'))
## Compile: categorical cross-entropy loss + plain SGD, tracking accuracy.
model.compile(loss="categorical_crossentropy", optimizer="sgd", metrics=["accuracy"])
## Train for 100 epochs with mini-batches of 128 samples.
history = model.fit(train_images, train_labels, epochs=100, batch_size=128)
5.检查在测试集上的表现
# 5. Evaluate the trained model on the held-out test set
# (returns the test loss and accuracy reported below).
test_loss, test_acc = model.evaluate(test_images, test_labels)
最后准确率为97%