def myregression():
    """Fit a linear-regression model with 7-dimensional input via TF1 graph mode.

    Synthetic data: x is a [100, 7] matrix drawn from N(mean=1.75, stddev=0.5);
    the ground truth is y_true = x @ [0.1, 0.2, ..., 0.7]^T + 1.
    Plain gradient descent trains weights w ([7, 1]) and bias b to recover
    those coefficients, printing w and b after every step.

    Requires TensorFlow 1.x (tf.Session / tf.random_normal are removed in 2.x).
    """
    # Local import so the function works even when the module-level import
    # (inside the __main__ guard) has not run, e.g. when imported elsewhere.
    import tensorflow as tf

    # 1. Generate inputs and labels: x is [100, 7]; multiplying by the true
    #    [7, 1] coefficient column gives the [100, 1] answer matrix.
    x = tf.random_normal([100, 7], mean=1.75, stddev=0.5, name='input_data_x')
    y_true = tf.matmul(x, [[0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7]]) + 1

    # 2. Trainable parameters and the model's prediction.
    w = tf.Variable(tf.random_normal([7, 1], mean=2, stddev=0.2))
    b = tf.Variable(0.0)
    y_predict = tf.matmul(x, w) + b

    # 3. Mean-squared-error loss.
    loss = tf.reduce_mean(tf.square(y_predict - y_true))

    # 4. One gradient-descent step per sess.run; learning rate 0.04.
    train_op = tf.train.GradientDescentOptimizer(0.04).minimize(loss)

    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init_op)
        for step in range(100000):
            sess.run(train_op)
            # Print the step index, the 7 current weights, then the bias.
            print(step, end=' ')
            for weight_row in w.eval():
                print(weight_row, end=' ')
            print(b.eval())
if __name__ == "__main__":
    # Deferred import: tensorflow is only loaded when the script is executed
    # directly. The name lands in module globals, which myregression() reads.
    import tensorflow as tf
    myregression()
# Beginner code — corrections and suggestions are welcome.
# Observed result: after 10,000 training steps the parameters converge
# nicely toward the true values.