# Requires TensorFlow 2.2 or above (TF版本2.2及以上)
def creat_FGM(epsilon=1.0):
@tf.function
def train_step(self, data):
'''
计算在embedding上的gradient
计算扰动 在embedding上加上扰动
重新计算loss和gradient
删除embedding上的扰动,并更新参数
'''
data = data_adapter.expand_1d(data)
x, y, sample_weight = data_adapter.unpack_x_y_sample_weight(data)
with tf.GradientTape() as tape:
y_pred = model(x,training=True)
loss = loss_func(y,y_pred)
embedding = model.trainable_variables[0]
embedding_gradients = tape.gradient(loss,[model.trainable_variables[0]])[0]
embedding_gradients = tf.zeros_like(embedding) + embedding_gradients
delta = 0.2 * embedding_gradients / (tf.math.sqrt(tf.reduce_sum(embedding_gradients**2)) + 1e-8)
model.trainable_variables[0].assign_add(delta)
with tf.GradientTape() as tape2:
y_pred = model(x,training=True)
new_loss = loss_func(y,y_pred)
gradients = tape2