import matplotlib as mpl
import matplotlib.pyplot as plt
# %matplotlib inline
import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
import tensorflow as tf
from tensorflow import keras
print(tf.__version__)
print(sys.version_info)
for module in mpl, np, pd, sklearn, tf, keras:
    print(module.__name__, module.__version__)
## Data collection
fashion_mnist = keras.datasets.fashion_mnist
(x_train_all, y_train_all), (x_test, y_test) = fashion_mnist.load_data()
# Split the full training set into a training set and a validation set:
# the first 5,000 images become the validation set, the remaining 55,000 the training set
x_valid, x_train = x_train_all[:5000], x_train_all[5000:]
y_valid, y_train = y_train_all[:5000], y_train_all[5000:]
print(x_valid.shape, y_valid.shape)  # validation set
print(x_train.shape, y_train.shape)  # training set
print(x_test.shape, y_test.shape)    # test set
print(np.max(x_train), np.min(x_train))  # print the extreme pixel values in the training set
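# Optional sketch (not in the original notebook): display one training image and its label
# to inspect the data, assuming an environment that can render matplotlib figures.
plt.imshow(x_train[0], cmap="binary")
plt.title("label: " + str(y_train[0]))
plt.show()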
## Normalization (standardization): x = (x - mean) / std
# rescales the data to mean 0 and standard deviation 1
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
# Training set
# First convert the pixels to float32,
# x_train: [None, 28, 28] -> reshape(-1, 1) (a single column, so one global mean/std is computed)
# -> scale -> reshape(-1, 28, 28) back to the original shape
# fit_transform: fit records the training set's mean and std so transform can reuse them
# for the validation and test sets
x_train_scaled = scaler.fit_transform(
    x_train.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)
# Validation set: use transform directly (reuses the training set's statistics)
x_valid_scaled = scaler.transform(
    x_valid.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)
# Test set: use transform directly as well
x_test_scaled = scaler.transform(
    x_test.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)
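# Quick sanity check (a sketch, not part of the original pipeline): after scaling,
# the training data should have mean ~0 and standard deviation ~1.
print(np.mean(x_train_scaled), np.std(x_train_scaled))
print(np.max(x_train_scaled), np.min(x_train_scaled))  # no longer 255 and 0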
# tf.keras.models.Sequential()
"""
model = keras.models.Sequential()#建立对象
model.add(keras.layers.Flatten(input_shape=[28,28]))#展平输入
#把二维矩阵展成28*28的一维向量
model.add(keras.layers.Dense(300,activation="relu"))
# 全链接层 activation 是激活函数
# :让下一层的所有单元一一的与上一层连接
model.add(keras.layers.Dense(100,activation="relu"))
model.add(keras.layers.Dense(10,activation="softmax"))
"""
model = keras.models.Sequential([
    keras.layers.Flatten(input_shape=[28, 28]),
    keras.layers.Dense(300, activation="relu"),
    keras.layers.Dense(100, activation="relu"),
    keras.layers.Dense(10, activation="softmax")
])
# relu: y = max(0, x) -- outputs x when x > 0, otherwise 0
# softmax: turns a vector into a probability distribution, x = [x1, x2, x3]
#   y = [e^x1/sum, e^x2/sum, e^x3/sum],
#   sum = e^x1 + e^x2 + e^x3
# reason for "sparse": y here is an integer class index that has to be one-hot encoded into a vector;
# if y were already a one-hot vector, use categorical_crossentropy directly
model.compile(loss="sparse_categorical_crossentropy",
              optimizer="sgd",  # the optimizer, i.e. how the model's parameters are updated
              metrics=["accuracy"])
# 235500 = 784 * 300 + 300
# [None, 784] * w + b -> [None, 300], where w has shape [784, 300] and b has shape [300]
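# The layer-by-layer parameter counts can be checked with the standard Keras summary:
model.summary()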
# TensorBoard needs a directory; ModelCheckpoint needs a file name; EarlyStopping needs neither
logdir = './callbacks'
if not os.path.exists(logdir):
    os.mkdir(logdir)  # create the directory
output_model_file = os.path.join(logdir,
                                 "fashion_mnist_model.h5")  # output file: directory + file name
callbacks = [
    keras.callbacks.TensorBoard(logdir),
    keras.callbacks.ModelCheckpoint(output_model_file,
                                    save_best_only=True),  # save only the best model (by default the most recent one is kept)
    keras.callbacks.EarlyStopping(patience=5, min_delta=1e-3),
    # min_delta: the minimum change between epochs that still counts as an improvement
    # patience: stop training after this many consecutive epochs with improvement below min_delta
]
# Start training
history = model.fit(x_train_scaled, y_train, epochs=100,  # up to 100 passes over the training set
                    validation_data=(x_valid_scaled, y_valid),  # validation set
                    callbacks=callbacks)  # EarlyStopping may stop training earlier
def plot_learning_curves(history):
    pd.DataFrame(history.history).plot(figsize=(8, 5))
    # turn the per-epoch metrics into a DataFrame and plot it with a figure size of (8, 5)
    plt.grid(True)  # show grid lines
    plt.gca().set_ylim(0, 1)  # set the y-axis range
    plt.show()
plot_learning_curves(history)
# model.evaluate(x_test_scaled, y_test)  # evaluate on the test set
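# Optional sketch (not in the original notebook): reload the best checkpoint saved by
# ModelCheckpoint and evaluate it on the test set; load_model and evaluate are standard Keras APIs.
best_model = keras.models.load_model(output_model_file)
best_model.evaluate(x_test_scaled, y_test)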
To view the TensorBoard logs, start TensorBoard from the directory that contains the callbacks folder (typically `tensorboard --logdir=callbacks`), then open localhost:6006 in the browser's address bar to inspect the training curves and other logged data.