1、conda运行环境加载fashion-mnist数据集,数据缓存在如下路径中:C:\Users\Administrator\.keras\datasets
数据网盘下载链接:
链接:https://pan.baidu.com/s/1sCCOdscPRVwgbJSKShJl0A
提取码:4wpq
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import datasets,layers,Sequential,metrics,optimizers
def data_press(x, y):
    """Preprocess one (image, label) pair for the tf.data pipeline.

    Scales pixel values from [0, 255] to [0, 1] as float32 and casts
    the label to int32 (required by tf.one_hot / tf.equal downstream).
    """
    images = tf.cast(x, tf.float32) / 255
    labels = tf.cast(y, tf.int32)
    return images, labels
# Load Fashion-MNIST (downloads to ~/.keras/datasets on first use):
# 60k training and 10k test 28x28 grayscale images, labels 0-9.
(x, y), (x_test, y_test) = datasets.fashion_mnist.load_data()

# Training pipeline: normalize, shuffle (buffer covers the full 60k set),
# then batch. Test pipeline is not shuffled.
traindata = tf.data.Dataset.from_tensor_slices((x, y))
traindata = traindata.map(data_press).shuffle(100000).batch(100)
testdata = tf.data.Dataset.from_tensor_slices((x_test, y_test))
testdata = testdata.map(data_press).batch(100)

# 5-layer MLP classifier over flattened 28*28 images. The last layer
# emits raw logits (no softmax), matching the from_logits=True
# cross-entropy used during training.
model = Sequential([
    layers.Dense(256, activation='relu'),
    layers.Dense(126, activation='relu'),  # NOTE(review): 126 looks like a typo for 128 — confirm
    layers.Dense(64, activation='relu'),
    layers.Dense(32, activation='relu'),
    layers.Dense(10),
])
model.build(input_shape=[None, 28 * 28])
model.summary()

# BUGFIX: `lr` was deprecated and later removed from Keras optimizers;
# `learning_rate` is the supported keyword.
optim = optimizers.Adam(learning_rate=1e-3)

import datetime

# One TensorBoard run directory per launch, keyed by timestamp.
current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
log_dir = 'logs/' + current_time
summary_writer = tf.summary.create_file_writer(log_dir)

# Mutable global-step counters shared across epochs for TensorBoard
# scalars ('cnt' = train steps logged, 'test' = eval points logged).
count = {'cnt': 0, 'test': 0}
def train(epoch, count):
    """Run one training epoch over `traindata`, then evaluate on `testdata`.

    Args:
        epoch: current epoch index (used only in console logging).
        count: mutable dict holding global step counters for TensorBoard
            scalars ('cnt' for train loss, 'test' for test accuracy);
            incremented in place.
    """
    for step, (x, y) in enumerate(traindata):
        with tf.GradientTape() as tape:
            x = tf.reshape(x, [-1, 28 * 28])          # flatten images for the MLP
            y_onehot = tf.one_hot(y, depth=10)
            logits = model(x)
            # Diagnostic losses (printed only, not optimized).
            # BUGFIX: tf.square is unary — the original tf.square(y, y_pred)
            # passed y_pred as the `name` argument; compute the element-wise
            # error first.
            mse_loss = tf.reduce_mean(tf.square(y_onehot - logits))
            mse_loss2 = tf.reduce_mean(tf.losses.MSE(y_onehot, logits))
            # Cross-entropy on raw logits is the loss actually optimized.
            ce_loss = tf.reduce_mean(
                keras.losses.categorical_crossentropy(y_onehot, logits,
                                                      from_logits=True))
        grads = tape.gradient(ce_loss, model.trainable_variables)
        optim.apply_gradients(zip(grads, model.trainable_variables))
        if step % 100 == 0:
            print(epoch, step, '', float(mse_loss), float(mse_loss2), float(ce_loss))
            with summary_writer.as_default():
                tf.summary.scalar('train-loss', float(ce_loss), step=count['cnt'])
            count['cnt'] += 1

    # Evaluation: accuracy over the entire test set, logged once per epoch.
    total_correct = 0
    total_num = 0
    for step, (x, y) in enumerate(testdata):
        x = tf.reshape(x, [-1, 28 * 28])
        logits = model(x)
        pred = tf.cast(tf.argmax(logits, axis=1), dtype=tf.int32)
        correct = tf.equal(pred, y)
        total_correct += int(tf.reduce_sum(tf.cast(correct, tf.int32)))
        total_num += logits.shape[0]                  # batch size (last batch may be smaller)
    acc = total_correct / total_num
    with summary_writer.as_default():
        tf.summary.scalar('test-acc', float(acc), step=count['test'])
    count['test'] += 1
    print(acc)
if __name__ == '__main__':
    # Train for 30 epochs; `count` carries the TensorBoard step counters
    # across epochs so scalar steps keep increasing between calls.
    for epoch in range(30):
        train(epoch, count)
tensorboard 展示:
1、开启tensorboard --logdir logs
2、定义summary writer并写入标量,示例如下:
current_time=datetime.datetime.now().strftime(("%Y%m%d-%H%M%S"))
log_dir='logs/'+current_time
summary_writer=tf.summary.create_file_writer(log_dir)
with summary_writer.as_default():
tf.summary.scalar('train-loss',float(mse_ce),step=count['cnt'])
3、http://localhost:6006/ 展示