# Hyperparameter tuning (调参优化)

import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Fashion-MNIST: 60k train / 10k test grayscale 28x28 clothing images, 10 classes.
# NOTE(review): "train_lable" looks like a typo for "train_label"; the name is
# kept because later statements in this file reference it verbatim.
fashion = tf.keras.datasets.fashion_mnist
(train_image, train_lable), (test_image, test_label) = fashion.load_data()
# Scale raw uint8 pixel values (0-255) into the [0, 1] range for training.
train_image = train_image / 255
test_image = test_image / 255

# model = tf.keras.Sequential()
# model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))  # 变成 28*28的向量
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(10, activation='softmax'))
# # print(model.summary())
# model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001)
#               , loss='categorical_crossentropy'
#               , metrics=['acc'])
# # model.fit(train_image,train_lable,epochs=10)   #   loss: 0.2371 - acc: 0.9107
#
# train_lable_onehot = tf.keras.utils.to_categorical(train_lable)
# test_label_onehot = tf.keras.utils.to_categorical(test_label)
#
# history = model.fit(train_image, train_lable_onehot
#                     , epochs=10
#                     , validation_data=(test_image, test_label_onehot))
# # print(history.history.keys()) # dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])
# plt.plot(history.epoch, history.history.get('loss'), label='loss')  # train 上面的loss
# plt.plot(history.epoch, history.history.get('val_loss'), label='val_loss')  # test上面的loss
# plt.legend()
# plt.show()

# 过拟合,训练数据集上表现良好,在测试集上面表现糟糕
# 欠拟合,训练数据集上表现糟糕,在测试集上面表现糟糕
# todo   添加dropout层
# model = tf.keras.Sequential()
# model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))  # 变成 28*28的向量
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))  # 随机丢弃50%
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))  # 随机丢弃50%
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))  # 随机丢弃50%
# model.add(tf.keras.layers.Dense(10, activation='softmax'))
# # print(model.summary())
# model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001)
#               , loss='categorical_crossentropy'
#               , metrics=['acc'])
# # model.fit(train_image,train_lable,epochs=10)   #   loss: 0.2371 - acc: 0.9107
#
# One-hot encode the integer class labels (0-9) so they match the
# 'categorical_crossentropy' loss used below.
to_onehot = tf.keras.utils.to_categorical
train_lable_onehot = to_onehot(train_lable)
test_label_onehot = to_onehot(test_label)
#
# history = model.fit(train_image, train_lable_onehot
#                     , epochs=10
#                     , validation_data=(test_image, test_label_onehot))
# plt.plot(history.epoch, history.history.get('loss'), label='loss')  # train 上面的loss
# plt.plot(history.epoch, history.history.get('val_loss'), label='val_loss')  # test上面的loss
# plt.plot(history.epoch, history.history.get('acc'), label='acc')  # train 上面的acc
# plt.plot(history.epoch, history.history.get('val_acc'), label='val_acc')  # test上面的acc
# plt.legend()
# plt.show()

# TODO: shrink the network (32 units instead of 128) to reduce overfitting.
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),    # 28x28 image -> 784-vector
    tf.keras.layers.Dense(32, activation='relu'),     # single small hidden layer
    tf.keras.layers.Dense(10, activation='softmax'),  # per-class probabilities
])
model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
    loss='categorical_crossentropy',
    metrics=['acc'],
)
history = model.fit(
    train_image, train_lable_onehot,
    epochs=10,
    validation_data=(test_image, test_label_onehot),
)
# Compare training vs. validation accuracy to judge over/under-fitting.
for metric in ('acc', 'val_acc'):
    plt.plot(history.epoch, history.history.get(metric), label=metric)
plt.legend()
plt.show()
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值