import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Load the Fashion-MNIST dataset (downloads on first use) and scale pixel
# values from the 0-255 byte range into [0, 1] for stable training.
(train_image, train_label), (test_image, test_label) = tf.keras.datasets.fashion_mnist.load_data()
train_image = train_image / 255
test_image = test_image / 255

# --- Experiment 1 (reference, disabled): baseline dense network -------------
# model = tf.keras.Sequential()
# model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))  # flatten to a 28*28 vector
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(10, activation='softmax'))
# model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
#               loss='categorical_crossentropy',
#               metrics=['acc'])
# history = model.fit(train_image, train_label_onehot,
#                     epochs=10,
#                     validation_data=(test_image, test_label_onehot))
# Observed: loss: 0.2371 - acc: 0.9107 on the training set, but validation
# loss diverged.  history.history.keys() ->
#   dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])
# Overfitting:  good on the training set, poor on the test set.
# Underfitting: poor on the training set AND poor on the test set.

# --- Experiment 2 (reference, disabled): add Dropout against overfitting ----
# model = tf.keras.Sequential()
# model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))  # randomly drop 50% of units
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))
# model.add(tf.keras.layers.Dense(10, activation='softmax'))
# (compile / fit / plotting identical to Experiment 3 below, plus loss curves:
#  plt.plot(history.epoch, history.history.get('loss'), label='loss')
#  plt.plot(history.epoch, history.history.get('val_loss'), label='val_loss'))

# One-hot encode the integer class labels (0-9) because the models are
# trained with the 'categorical_crossentropy' loss, which expects one-hot
# targets rather than sparse integer labels.
train_label_onehot = tf.keras.utils.to_categorical(train_label)
test_label_onehot = tf.keras.utils.to_categorical(test_label)

# --- Experiment 3 (active): reduce model capacity to curb overfitting -------
model = tf.keras.Sequential()
model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))  # 28x28 image -> 784-vector
model.add(tf.keras.layers.Dense(32, activation='relu'))   # much smaller hidden layer
model.add(tf.keras.layers.Dense(10, activation='softmax'))
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
              loss='categorical_crossentropy',
              metrics=['acc'])
history = model.fit(train_image, train_label_onehot,
                    epochs=10,
                    validation_data=(test_image, test_label_onehot))

# Plot training vs. validation accuracy per epoch; a widening gap between the
# two curves indicates overfitting.
plt.plot(history.epoch, history.history.get('acc'), label='acc')          # training accuracy
plt.plot(history.epoch, history.history.get('val_acc'), label='val_acc')  # validation accuracy
plt.legend()
plt.show()
# Hyperparameter tuning / model-size optimization notes
# (Source-blog metadata: latest recommended article published 2022-09-21 14:19:53)