from tensorflow import keras

# Load the built-in MNIST dataset: 60k training and 10k test images of
# 28x28 grayscale handwritten digits (uint8 pixel values), labels 0-9.
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
print(x_train.shape)
print(y_train.shape)

from sklearn.model_selection import train_test_split

# Carve a validation set out of the training data: 5/6 train (50k samples)
# and 1/6 validation (10k samples). A fixed random_state keeps the split
# reproducible across runs.
x_train, x_valid, y_train, y_valid = train_test_split(
    x_train, y_train, train_size=5 / 6, random_state=520
)

# Imports retained for interactive exploration of the images
# (e.g. pyplot.imshow / cv2.imshow on individual samples).
from matplotlib import pyplot
import numpy as np
import cv2
import tensorflow as tf
from tensorflow.keras import layers

# Classifier: flatten each 28x28 image into a 784-vector, pass it through
# two 32-unit ReLU hidden layers, and emit a softmax over the 10 digit classes.
model = tf.keras.Sequential(
    [
        tf.keras.layers.Flatten(input_shape=(28, 28)),
        tf.keras.layers.Dense(32, activation='relu'),
        tf.keras.layers.Dense(32, activation='relu'),
        tf.keras.layers.Dense(10, activation='softmax'),
    ]
)

# Show the architecture, then configure optimizer / loss / metric.
# Sparse* variants are used because the labels are integer class ids,
# not one-hot vectors.
model.summary()
model.compile(
    optimizer=tf.keras.optimizers.Adam(0.001),
    loss=tf.keras.losses.SparseCategoricalCrossentropy(),
    metrics=[tf.keras.metrics.SparseCategoricalAccuracy()],
)

# Train for 5 epochs with mini-batches of 64, tracking validation metrics,
# then run inference on the held-out test set.
model.fit(x_train, y_train, epochs=5, batch_size=64, validation_data=(x_valid, y_valid))
predict = model.predict(x_test)
# Note on setup: installing TensorFlow is left to the reader and is not
# covered here. The MNIST dataset used above is bundled with Keras, so no
# separate download is required.