TensorFlow 2.0: Gradient Descent and Activation Functions
"""
梯度下降与激活函数
"""
import tensorflow as tf
print("==" * 30, "sigmoid")
# sigmoid
# input data
x = tf.linspace(-10.0, 10.0, 10)
# gradient computation
with tf.GradientTape() as tape:
    tape.watch(x)      # x is a plain tensor, not a Variable, so it must be watched explicitly
    y = tf.sigmoid(x)  # apply the activation
grads = tape.gradient(y, x)  # dy/dx
print("input x:\n", x.numpy())
print("after sigmoid, y:\n", y.numpy())
print("gradient dy/dx, grads:\n", grads.numpy())
print("==" * 30, "Tanh")
# tanh
x = tf.linspace(-5.0, 5.0, 10)
with tf.GradientTape() as tape:
    tape.watch(x)
    y = tf.tanh(x)
grads = tape.gradient(y, x)
print("input x:\n", x.numpy())
print("after tanh, y:\n", y.numpy())
print("gradient dy/dx, grads:\n", grads.numpy())
# relu
print("==" * 30, "Relu")
x = tf.linspace(-1.0, 1.0, 10)
# persistent=True keeps the tape alive so gradient() can be called twice
with tf.GradientTape(persistent=True) as tape:
    tape.watch(x)
    y = tf.nn.relu(x)
    y_leaky = tf.nn.leaky_relu(x)
grads_y = tape.gradient(y, x)
grads_y_leaky = tape.gradient(y_leaky, x)
del tape  # a persistent tape holds resources until it is deleted
print("input x:\n", x.numpy())
print("after relu, y:\n", y.numpy())
print("after leaky_relu, y:\n", y_leaky.numpy())
print("relu gradient, grads:\n", grads_y.numpy())
print("leaky_relu gradient, grads:\n", grads_y_leaky.numpy())
[Figures: sigmoid, tanh, and relu/leaky_relu activation curves]
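The curves above can be regenerated with matplotlib. A minimal sketch (the grid, figure layout, and styling are illustrative choices, not from the original post):

import matplotlib.pyplot as plt

x = tf.linspace(-5.0, 5.0, 200)
fig, axes = plt.subplots(1, 3, figsize=(12, 3))
axes[0].plot(x.numpy(), tf.sigmoid(x).numpy())
axes[0].set_title("sigmoid")
axes[1].plot(x.numpy(), tf.tanh(x).numpy())
axes[1].set_title("tanh")
axes[2].plot(x.numpy(), tf.nn.relu(x).numpy(), label="relu")
axes[2].plot(x.numpy(), tf.nn.leaky_relu(x).numpy(), label="leaky_relu")
axes[2].set_title("relu / leaky_relu")
axes[2].legend()
plt.show()

The title also promises gradient descent. A minimal sketch of a descent loop built on the same GradientTape API (the toy loss w**2, the 0.1 learning rate, and the 20 steps are illustrative assumptions):

w = tf.Variable(3.0)                 # Variables are watched automatically
for _ in range(20):
    with tf.GradientTape() as tape:
        loss = w ** 2                # toy convex loss with its minimum at w = 0
    grad = tape.gradient(loss, w)
    w.assign_sub(0.1 * grad)         # w <- w - lr * grad
print("w after 20 gradient-descent steps:", w.numpy())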