import tensorflow as tf
import datetime
import os
# Press the green button in the gutter to run the script.
# Build the MNIST input pipelines when executed as a script.
if __name__ == '__main__':
    (train_image, train_labels), (test_image, test_labels) = tf.keras.datasets.mnist.load_data()

    # Append a channel axis: (28, 28) -> (28, 28, 1).
    train_image = tf.expand_dims(train_image, -1)
    test_image = tf.expand_dims(test_image, -1)

    # Scale pixels to [0, 1]; int64 labels for sparse categorical losses.
    train_image = tf.cast(train_image / 255, tf.float32)
    test_image = tf.cast(test_image / 255, tf.float32)
    train_labels = tf.cast(train_labels, tf.int64)
    test_labels = tf.cast(test_labels, tf.int64)

    dataset = tf.data.Dataset.from_tensor_slices((train_image, train_labels))
    test_dataset = tf.data.Dataset.from_tensor_slices((test_image, test_labels))

    # Training stream repeats indefinitely; shuffle buffer spans the full
    # 60 000-image training set. Test stream is a single pass.
    dataset = dataset.repeat().shuffle(60000).batch(128)
    test_dataset = test_dataset.repeat(1).batch(128)
# Small CNN: two conv layers, a global max pool, and a 10-way softmax head.
# input_shape=(None, None, 1) accepts any spatial size with one channel.
model = tf.keras.Sequential([
    tf.keras.layers.Conv2D(16, (3, 3), activation='relu', input_shape=(None, None, 1)),
    tf.keras.layers.Conv2D(32, (3, 3), activation='relu'),
    tf.keras.layers.GlobalMaxPooling2D(),
    tf.keras.layers.Dense(10, activation='softmax'),
])
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)
# Timestamped run directory, e.g. logs/20211020-162817.
# BUG FIX: the hour specifier was written "H%M%S" (a literal "H"); it must
# be "%H%M%S" to actually format the hour.
log_dir = os.path.join("logs", datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))

# log_dir: directory where the TensorBoard log files are written.
# histogram_freq: frequency (in epochs) at which activation and weight
# histograms are computed; 0 disables them. Histograms require validation
# data (or a validation split) to be supplied to fit().
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)

model.fit(dataset,
          epochs=5,
          # `dataset` repeats forever, so the epoch length must be explicit.
          steps_per_epoch=60000 // 128,
          validation_data=test_dataset,
          validation_steps=10000 // 128,
          callbacks=[tensorboard_callback])
# Launching TensorBoard from the console (控制台启动 TensorBoard):
#   PS F:\pandas\pythonProject> tensorboard --logdir logs
#   2021-10-20 16:28:17.925351: I tensorflow/stream_executor/platform/default/dso_loader.cc:49] Successfully opened dynamic library cudart64_110.dll
#   Serving TensorBoard on localhost; to expose to the network, use a proxy or pass --bind_all
#   TensorBoard 2.7.0 at http://localhost:6006/ (Press CTRL+C to quit)
# --- Logging custom scalars to TensorBoard (自定义变量 TensorBoard) ---
# File writer for the custom scalars below; set as the process-wide default
# so bare tf.summary.scalar() calls land here.
# BUG FIX: log_dir + 'metrics' concatenated with no separator, producing a
# sibling directory such as "logs/20211020-162817metrics"; join properly.
file_writer = tf.summary.create_file_writer(os.path.join(log_dir, 'metrics'))
file_writer.set_as_default()


def lr_sch(epoch):
    """Step-decay learning-rate schedule.

    Returns 0.2, dropping to 0.02 after epoch 5, 0.01 after epoch 10 and
    0.005 after epoch 20. Also records the chosen rate as a TensorBoard
    scalar via the default summary writer.
    """
    if epoch > 20:
        learning_rate = 0.005
    elif epoch > 10:
        learning_rate = 0.01
    elif epoch > 5:
        learning_rate = 0.02
    else:
        learning_rate = 0.2
    tf.summary.scalar('learning rate', data=learning_rate, step=epoch)
    return learning_rate


lr_callback = tf.keras.callbacks.LearningRateScheduler(lr_sch)
# Train 25 epochs with both TensorBoard logging and the LR schedule active.
model.fit(
    dataset,
    epochs=25,
    steps_per_epoch=60000 // 128,
    validation_data=test_dataset,
    validation_steps=10000 // 128,
    callbacks=[tensorboard_callback, lr_callback],
)
# --- TensorBoard for a custom training loop (自定义训练的 TensorBoard) ---
# Components for the hand-written training loop.
optimizer = tf.keras.optimizers.Adam()
loss_func = tf.keras.losses.SparseCategoricalCrossentropy()


def loss(model, x, y):
    """Run `model` on `x` and return the loss against the labels `y`."""
    predictions = model(x)
    return loss_func(y, predictions)


# Running metrics, accumulated per epoch and reset at epoch end.
train_loss = tf.keras.metrics.Mean('train_loss')
train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy('train_accuracy')
test_loss = tf.keras.metrics.Mean('test_loss')
test_accuracy = tf.keras.metrics.SparseCategoricalAccuracy('test_accuracy')
def train_step(model, images, labels):
    """One optimization step: forward pass, loss, backprop, metric update."""
    with tf.GradientTape() as tape:
        predictions = model(images)
        batch_loss = loss_func(labels, predictions)
    gradients = tape.gradient(batch_loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))
    # Fold this batch into the running epoch metrics.
    train_loss(batch_loss)
    train_accuracy(labels, predictions)
def test_step(model, images, labels):
    """Evaluate one batch, updating the running test loss and accuracy."""
    predictions = model(images)
    test_loss(loss_func(labels, predictions))
    test_accuracy(labels, predictions)
# Timestamp naming this run's log directories (YYYYMMDD-HHMMSS).
# BUG FIX: the format was "%Ym%d-%H%M%S" — the month specifier was a
# literal "m" instead of "%m", yielding names like "2021m20-162817".
current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
train_log_dir = 'logs/gradient_tape/' + current_time + '/train'
test_log_dir = 'logs/gradient_tape/' + current_time + '/test'
# Separate writers so train and test curves show up as distinct runs.
train_summary_writer = tf.summary.create_file_writer(train_log_dir)
test_summary_writer = tf.summary.create_file_writer(test_log_dir)
def train():
    """Custom training loop: 10 epochs with per-epoch TensorBoard summaries."""
    steps_per_epoch = 60000 // 128
    for epoch in range(10):
        # BUG FIX: `dataset` was built with .repeat() and is infinite, so
        # iterating it directly never finishes an epoch; bound it with take().
        for batch, (images, labels) in enumerate(dataset.take(steps_per_epoch)):
            train_step(model, images, labels)
            print('.', end=' ')
        with train_summary_writer.as_default():
            tf.summary.scalar('loss', train_loss.result(), step=epoch)
            tf.summary.scalar('accuracy', train_accuracy.result(), step=epoch)

        # test_dataset is a single pass, so plain iteration terminates.
        for batch, (images, labels) in enumerate(test_dataset):
            test_step(model, images, labels)
            print('*', end=' ')
        with test_summary_writer.as_default():
            tf.summary.scalar('loss', test_loss.result(), step=epoch)
            tf.summary.scalar('accuracy', test_accuracy.result(), step=epoch)

        template = 'Epoch {}, Loss: {}, Accuracy: {}, Test Loss: {}, Test Accuracy: {}'
        # BUG FIX: the original passed a single tuple to str.format (IndexError
        # past the first placeholder) and printed the *train* metrics in the
        # "Test Loss" / "Test Accuracy" slots.
        print(template.format(
            epoch + 1,
            train_loss.result(),
            train_accuracy.result() * 100,
            test_loss.result(),
            test_accuracy.result() * 100,
        ))

        # Reset running metrics so each epoch's numbers stand alone.
        train_loss.reset_states()
        train_accuracy.reset_states()
        test_loss.reset_states()
        test_accuracy.reset_states()


train()