1. 单输出感知机
# ***************** Single-output perceptron
# Demo: gradient of an MSE loss w.r.t. the weights and bias of a
# one-unit sigmoid perceptron, using tf.GradientTape.
x = tf.random.normal([1, 3])    # one sample with 3 features
y = tf.convert_to_tensor([1])   # target label
w = tf.ones([3, 1])             # weight tensor (plain tensor, not a Variable)
b = tf.ones(1)                  # bias tensor
with tf.GradientTape() as tape:
    # w and b are not tf.Variable, so the tape must watch them explicitly
    # or tape.gradient would return None for them.
    tape.watch([w, b])
    logits = tf.nn.sigmoid(x @ w + b)               # forward pass, sigmoid output
    loss = tf.reduce_mean(tf.losses.MSE(y, logits)) # scalar MSE loss
grads = tape.gradient(loss, [w, b])  # [dL/dw, dL/db]
2. 多输出感知机
# *************** Multi-output perceptron
# Demo: gradient of an MSE loss for a 4-unit softmax layer over a
# batch of 10 samples, using tf.GradientTape.
x = tf.random.uniform([10, 5])                   # batch of 10 samples, 5 features
y = tf.constant([0, 2, 1, 3, 2, 3, 1, 2, 1, 2])  # integer class labels in [0, 4)
w = tf.ones([5, 4])                              # weights (plain tensor)
b = tf.ones(4)                                   # bias (plain tensor)
with tf.GradientTape() as tape:
    # Plain tensors must be watched explicitly to be differentiated.
    tape.watch([w, b])
    logits = x @ w + b
    prob = tf.nn.softmax(logits)                 # per-sample class probabilities
    # Functional tf.losses.MSE replaces the awkward
    # tf.losses.MeanSquaredError().__call__(...); after reduce_mean the
    # resulting scalar is identical.
    loss = tf.reduce_mean(tf.losses.MSE(tf.one_hot(y, depth=4), prob))
grads = tape.gradient(loss, [w, b])  # [dL/dw, dL/db]
本文为参考龙龙老师的“深度学习与TensorFlow 2入门实战”课程书写的学习笔记
by CyrusMay 2022 04 17