# mnist_mlp.py — trains a multilayer perceptron (MLP) on the MNIST dataset
'''Trains a simple deep NN on the MNIST dataset.
Gets to 98.40% test accuracy after 20 epochs
(there is *a lot* of margin for parameter tuning).
2 seconds per epoch on a K520 GPU.
'''
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
# Dense: fully connected layer.
# Dropout: randomly drops units with a given probability during training,
# which helps reduce overfitting.
from keras.layers import Dense, Dropout
from keras.optimizers import RMSprop
# epochs: number of complete passes over the training data (one forward +
# backward pass over every batch).
# one epoch = N iterations, where N = number of training samples / batch_size
# Training hyperparameters.
batch_size = 128   # samples per gradient update
num_classes = 10   # one output class per digit (0-9)
epochs = 20        # full passes over the training set
# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Flatten each 28x28 image into a 784-element vector:
# shape goes from (num_images, rows, cols) to (num_images, rows*cols).
# Use x_train.shape[0] instead of a hard-coded 60000 so the reshape stays
# correct even if the number of loaded samples changes.
x_train = x_train.reshape(x_train.shape[0], 784)
x_test = x_test.reshape