# Demo: gradient of sigmoid over a wide input range.
# The derivative sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)) peaks near
# x = 0 and vanishes toward both tails (the "saturation" problem).
a = tf.linspace(-10., 10., 10)
with tf.GradientTape() as tape:
    # `a` is a constant tensor, not a tf.Variable, so the tape must be
    # told explicitly to track it.
    tape.watch(a)
    y = tf.sigmoid(a)
# Returns a list (one gradient per watched tensor in the list argument).
grads = tape.gradient(y, [a])
grads  # REPL echo of the gradient list
[<tf.Tensor: shape=(10,), dtype=float32, numpy=
array([4.5395809e-05, 4.1859134e-04, 3.8362027e-03, 3.3258736e-02,
1.8632649e-01, 1.8632641e-01, 3.3258699e-02, 3.8362255e-03,
4.1854731e-04, 4.5416677e-05], dtype=float32)>]
# Demo: gradient of tanh over the same inputs.
# tanh'(x) = 1 - tanh(x)^2 — steeper than sigmoid near 0 but it also
# saturates to 0 at the tails (gradients underflow to exactly 0.0 here).
with tf.GradientTape() as tape:
    # `a` (defined above) is a plain tensor, so it must be watched explicitly.
    tape.watch(a)
    y = tf.tanh(a)
grads = tape.gradient(y, [a])
grads  # REPL echo of the gradient list
[<tf.Tensor: shape=(10,), dtype=float32, numpy=
array([0.0000000e+00, 5.9604645e-07, 5.9604645e-05, 5.0777197e-03,
3.5285264e-01, 3.5285199e-01, 5.0774813e-03, 5.9843063e-05,
4.7683716e-07, 0.0000000e+00], dtype=float32)>]
ReLU function
# Demo: ReLU forward pass. relu(x) = max(x, 0) — negative inputs are
# clamped to 0, positive inputs pass through unchanged.
a = tf.linspace(-1., 1., 10)
tf.nn.relu(a)  # REPL echo of the activated tensor
<tf.Tensor: shape=(10,), dtype=float32, numpy=
array([0. , 0. , 0. , 0. , 0. ,
0.11111116, 0.33333337, 0.5555556 , 0.7777778 , 1. ],
dtype=float32)>