This post is code only.
import tensorflow as tf
import datetime
batchSize = 128   # samples per training batch
EPOCH = 30        # full passes over the training set
(x_train,y_train),(x_test,y_test) = tf.keras.datasets.fashion_mnist.load_data()
def preprocessing(x, y):
    x = tf.cast(x, dtype=tf.float32) / 255.   # scale pixel values to [0, 1]
    y = tf.cast(y, dtype=tf.int32)
    return x, y
dataBase = tf.data.Dataset.from_tensor_slices((x_train, y_train)).map(preprocessing)
dataBase = dataBase.shuffle(10000).batch(batchSize)
dataBase_test = tf.data.Dataset.from_tensor_slices((x_test, y_test)).map(preprocessing)
dataBase_test = dataBase_test.batch(batchSize)   # no need to shuffle the test set
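# Sanity check (illustrative, safe to delete): each training batch should be
# (128, 28, 28) float32 in [0, 1] with (128,) int32 labels.
# for xb, yb in dataBase.take(1):
#     print(xb.shape, xb.dtype, yb.shape, yb.dtype)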
model = tf.keras.Sequential([
    tf.keras.layers.Dense(256, activation=tf.nn.leaky_relu),
    tf.keras.layers.Dense(128, activation=tf.nn.relu),
    tf.keras.layers.Dense(64, activation=tf.nn.relu),
    tf.keras.layers.Dense(32, activation=tf.nn.relu),
    tf.keras.layers.Dense(10)   # raw logits for the 10 Fashion-MNIST classes
])
model.build(input_shape=[None, 28 * 28])
model.summary()
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-3)   # "lr" is a deprecated alias
loss_metric = tf.keras.metrics.Mean()        # running average of the batch losses
acc_metric = tf.keras.metrics.Accuracy()     # compares integer predictions with integer labels
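# Illustrative use of the Accuracy metric (not part of the training script):
# it compares predictions against labels element-wise.
# acc_metric.update_state([0, 1, 2], [0, 1, 1])
# print(acc_metric.result().numpy())   # 0.6666667 (two of three match)
# acc_metric.reset_state()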
def main():
    # Create the TensorBoard writer once, with a timestamped run directory
    # (this is what the otherwise-unused datetime import suggests was intended).
    log_dir = "logs/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    summary_writer = tf.summary.create_file_writer(log_dir)
    for epoch in range(EPOCH):
        print("=" * 30, "Epoch", epoch + 1, "=" * 30)
        # --- training ---
        for step, (x, y) in enumerate(dataBase):
            x = tf.reshape(x, [-1, 28 * 28])   # flatten 28x28 images into 784-dim vectors
            with tf.GradientTape() as tape:
                y_onehot = tf.one_hot(y, depth=10)
                logits = model(x)
                loss = tf.reduce_mean(
                    tf.keras.losses.categorical_crossentropy(y_onehot, logits, from_logits=True))
            grad = tape.gradient(loss, model.trainable_variables)
            optimizer.apply_gradients(zip(grad, model.trainable_variables))
            loss_metric.update_state(loss)
            # ~469 steps per epoch at batch size 128, so the original "step % 6000"
            # only ever fired at step 0; report a running average every 100 steps instead.
            if step % 100 == 0:
                print("Loss CrossEntropy Value:", loss_metric.result().numpy())
                loss_metric.reset_state()
        # --- evaluation on the test set ---
        # Reset the counters every epoch; initializing them once outside the loop
        # (as the original did) accumulates accuracy across epochs.
        totalCorrect = 0
        totalNum = 0
        for x_, y_ in dataBase_test:
            x_ = tf.reshape(x_, [-1, 28 * 28])
            logits = model(x_)
            prob = tf.nn.softmax(logits, axis=1)
            pred = tf.argmax(prob, axis=1)
            pred = tf.cast(pred, dtype=tf.int32)
            correct = tf.equal(pred, y_)
            correct = tf.reduce_sum(tf.cast(correct, dtype=tf.int32))
            totalCorrect += int(correct)
            totalNum += x_.shape[0]   # count the actual batch size; the last batch may be smaller than batchSize
            acc_metric.update_state(y_, pred)
        acc = float(totalCorrect) / totalNum * 100   # manual accuracy, should match the metric below
        print("Acc:", acc_metric.result().numpy() * 100, "%")
        acc_metric.reset_state()
        # --- TensorBoard logging (one point per epoch) ---
        with summary_writer.as_default():
            tf.summary.scalar("Loss", float(loss), step=epoch)
            tf.summary.scalar("Acc", acc, step=epoch)
            image_all = tf.reshape(x_, [-1, 28, 28, 1])   # last test batch, restored to image shape
            tf.summary.image("Test Image", image_all, step=epoch, max_outputs=50)
if __name__ == '__main__':
    main()
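# To inspect the logged curves and images during or after training:
#   tensorboard --logdir logs/
#
# For reference, the same network could also be trained with the high-level Keras
# API. A minimal sketch, assuming a Flatten layer is prepended so no manual
# reshape is needed:
#   model = tf.keras.Sequential([tf.keras.layers.Flatten(input_shape=(28, 28)),
#                                tf.keras.layers.Dense(256, activation=tf.nn.leaky_relu),
#                                ...,
#                                tf.keras.layers.Dense(10)])
#   model.compile(optimizer="adam",
#                 loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
#                 metrics=["accuracy"])
#   model.fit(dataBase, epochs=EPOCH, validation_data=dataBase_test)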