# Build a dedicated graph for solving a linear equation by gradient descent.
graph1 = tf.Graph()
with graph1.as_default():
    # Training input: the coefficients of the linear equation, shape (1, 2).
    x = tf.constant([[2.0, 1.0]], dtype=tf.float32)
    # Training target, shape (1,).
    # NOTE(review): the accompanying text says the target is y = [3],
    # but the code uses 0.0 — confirm which value is intended.
    y = tf.constant([0.0], dtype=tf.float32)
# Solving a linear equation with gradient descent means repeatedly feeding the
# coefficients x = [2, 1] and the result y = [3] as training data to learn the
# parameters w1, w2, b. So we first define the training data:
with graph1.as_default():
    # Trainable weights of the linear model, shape (2, 1): one weight per
    # input coefficient, initialized to [1, 2].
    w = tf.Variable(tf.constant([[1.0], [2.0]], dtype=tf.float32),
                    name='w', trainable=True)
    # Trainable bias, shape (1,).
    # BUG FIX: the original initialized b with shape-(2,) zeros ([0, 0]);
    # since tf.matmul(x, w) has shape (1, 1), adding a (2,) bias broadcasts
    # the prediction to shape (1, 2), silently doubling the loss terms.
    # A scalar-output model needs a single bias value.
    b = tf.Variable(tf.constant([0.0], dtype=tf.float32),
                    name='b', trainable=True)
    # Model prediction: (1, 2) @ (2, 1) + (1,) -> shape (1, 1).
    y_train = tf.matmul(x, w) + b
with graph1.as_default():
    # Mean squared error between the target y and the prediction y_train.
    loss = tf.reduce_mean((y - y_train) ** 2)

# Initial learning rate for the decay schedule defined below.
lr = 0.05
with graph1.as_default():
# Step counter incremented by the optimizer; drives the decay schedule.
# Marked non-trainable so gradient descent never updates it.
global_step=tf.Variable(0,trainable=False)
# Exponentially decaying learning rate starting at lr, multiplied by
# decay_rate (0.9) per decay period.
# NOTE(review): this call is incomplete in this view — decay_steps and the
# closing parenthesis lie beyond the visible lines; confirm against the
# full file.
learning_rate=tf.compat.v1.train.exponential_decay(learning_rate=lr,
global_step=global_step,
decay_rate=0.9,