Training a neural network model on MNIST


from keras.datasets import mnist   # the MNIST dataset shipped with Keras
from keras import models
from keras import layers
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()   # load the training and test sets
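For reference, load_data() returns the images as 28x28 arrays of uint8 pixel values (0-255) and the labels as integer digits; a quick shape check of the variables above makes the later reshape step easier to follow:

print(train_images.shape, train_labels.shape)   # (60000, 28, 28) (60000,)
print(test_images.shape, test_labels.shape)     # (10000, 28, 28) (10000,)
print(train_images.dtype)                       # uint8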


# Split the original training set into a training set and a validation set
X_VAL = train_images[50000:]        # samples 50000 to 60000
Y_VAL = train_labels[50000:]
X_TRAIN = train_images[:50000]      # samples 0 to 50000
Y_TRAIN = train_labels[:50000]


# Preprocessing: flatten each 28x28 image to a 784-dim vector and scale pixels to [0, 1]
X_TRAIN = X_TRAIN.reshape(50000, 28 * 28).astype('float32') / 255.0
X_VAL = X_VAL.reshape(10000, 28 * 28).astype('float32') / 255.0
test_images = test_images.reshape(10000, 28 * 28).astype('float32') / 255.0
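If you'd rather not hardcode the sample counts, reshape can infer the first dimension with -1; the following is an equivalent alternative to the three lines above (use one version or the other, not both):

# X_TRAIN = X_TRAIN.reshape(-1, 28 * 28).astype('float32') / 255.0
# X_VAL = X_VAL.reshape(-1, 28 * 28).astype('float32') / 255.0
# test_images = test_images.reshape(-1, 28 * 28).astype('float32') / 255.0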


     
import numpy as np
# Optional: subsample the training and validation sets
# X_TRAIN_idx = np.random.choice(50000, 700)   # pick 700 random indices out of 50000
# X_VAL_idx = np.random.choice(10000, 300)     # pick 300 random indices out of 10000
# X_TRAIN = X_TRAIN[X_TRAIN_idx]
# Y_TRAIN = Y_TRAIN[X_TRAIN_idx]
# X_VAL = X_VAL[X_VAL_idx]
# Y_VAL = Y_VAL[X_VAL_idx]
# For a small dataset like MNIST there is no need to subsample; just feed the whole dataset to the model.

# One-hot encode the labels
from keras.utils import to_categorical
Y_TRAIN = to_categorical(Y_TRAIN)
Y_VAL = to_categorical(Y_VAL)
test_labels = to_categorical(test_labels)
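To see what the encoding looks like, a digit label such as 3 becomes a length-10 vector with a single 1 in position 3:

print(to_categorical([3], num_classes=10))
# [[0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]]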

 
from keras.models import Sequential
model = Sequential()
model.add(layers.Dense(512, activation='relu', input_shape=(28 * 28,)))
model.add(layers.Dense(10, activation='softmax'))
print(model.summary())
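As a check on the summary output, the parameter counts can be reproduced by hand: a Dense layer has inputs x units weights plus units biases.

# hidden layer: 784 * 512 weights + 512 biases = 401,920
# output layer: 512 * 10 weights + 10 biases   = 5,130
# total trainable parameters:                    407,050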

# Compile the model
model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
# sgd: stochastic gradient descent;  categorical_crossentropy: loss for multi-class classification with one-hot labels
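As an aside, if the labels are left as integers (skipping the to_categorical step above), the sparse variant of the same loss can be used instead; only the loss string changes:

# Alternative compile for integer labels (no one-hot encoding needed):
# model.compile(optimizer='sgd', loss='sparse_categorical_crossentropy', metrics=['accuracy'])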

# Train the model
his = model.fit(X_TRAIN, Y_TRAIN, epochs=100, batch_size=32, validation_data=(X_VAL, Y_VAL))
# batch_size=32 (the Keras default): the weights are updated once per batch of 32 samples
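To put batch_size in perspective, the number of weight updates follows directly from the dataset size:

# Updates per epoch with batch_size=32: ceil(50000 / 32) = 1563 batches,
# so 100 epochs correspond to roughly 156,300 gradient updates in total.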



import matplotlib.pyplot as plt
# Plot training & validation loss and accuracy
plt.plot(his.history['loss'], "b", label="train loss")
plt.plot(his.history['val_loss'], "r", label="val loss")
plt.plot(his.history['accuracy'], "y", label="train accuracy")
plt.plot(his.history['val_accuracy'], "g", label="val accuracy")
plt.title('Model loss')            # title
plt.ylabel('Loss / accuracy')      # y-axis label
plt.xlabel('Epoch')                # x-axis label
plt.legend(loc='upper left')       # legend, built from the label= strings above
plt.show()
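Once training has finished, individual predictions can also be inspected: predict() returns a 10-way probability vector per image, and argmax recovers the predicted digit. A small check on the first few test images, reusing the variables above:

probs = model.predict(test_images[:5])
print(np.argmax(probs, axis=1))             # predicted digits
print(np.argmax(test_labels[:5], axis=1))   # true digits (undoing the one-hot encoding)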



# Evaluate on the test set
test_loss, test_acc = model.evaluate(test_images, test_labels)
print('test_acc: ', test_acc)

# Save the trained weights, reload them, and verify that the test accuracy is unchanged
model.save_weights("D:\\20keras\\model\\my_model.h1")
model.load_weights("D:\\20keras\\model\\my_model.h1")

test_loss, test_acc = model.evaluate(test_images, test_labels)
print('test_acc: ', test_acc)
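Note that save_weights() stores only the parameter values; to keep the architecture and optimizer state as well, the whole model can be saved and restored with model.save() / load_model(). A minimal sketch (the file name here is just an example):

from keras.models import load_model
model.save("D:\\20keras\\model\\my_model_full.h5")         # example path
restored = load_model("D:\\20keras\\model\\my_model_full.h5")
print(restored.evaluate(test_images, test_labels))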

