'''
TensorFlow (1.x) non-linear regression examples.
Example 1: y = a*x**2 + b*x + c
Example 2: y = a*exp(-(x-u)**2 * s) + b
Note: there is no neural network here — we only define a parametric model
and let back-propagation / gradient descent find the optimal parameters.
'''
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
'''案例一:y = ax^2+bx+c'''
def demo1():
    """Fit y = a*x**2 + b*x + c to synthetic quadratic data.

    Builds the dataset y = 3x^2 + 2x + 1 over [-10, 10), declares a, b, c
    as scalar trainable variables, minimizes the RMSE with plain gradient
    descent for 7000 steps, then plots the fitted curve on top of the data.
    Uses the TF1 graph/session API (tf.placeholder / tf.Session).
    Returns nothing; output is print statements and a matplotlib window.
    """
    # Synthetic data.
    x = np.arange(-10, 10, 0.1)
    y = 3 * x**2 + 2 * x + 1
    plt.plot(x, y, '*')
    # plt.show()

    # Model: predict = a*x^2 + b*x + c, all parameters initialized to 0.
    xx = tf.placeholder(dtype=tf.float32, shape=[x.shape[0]], name='xx')
    a = tf.Variable(initial_value=0.0, name='a')
    b = tf.Variable(initial_value=0.0, name='b')
    c = tf.Variable(initial_value=0.0, name='c')
    yy = tf.placeholder(dtype=tf.float32, shape=[y.shape[0]], name='yy')
    predict = a * xx**2 + b * xx + c
    # RMSE loss: sqrt keeps the reported value in the same units as y.
    loss = tf.sqrt(tf.reduce_mean((yy - predict)**2))
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.0005).minimize(loss)
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        for i in range(7000):
            optimizer.run(feed_dict={xx: x, yy: y})
            print("loss:%f" % (loss.eval(feed_dict={xx: x, yy: y})))
        print('a:%f,b:%f,c:%f' % (a.eval(), b.eval(), c.eval()))
        plt.plot(x, a.eval() * x**2 + b.eval() * x + c.eval())
        plt.show()
# Sample output — after 7000 iterations the loss settles near 0.5:
#   loss:0.500241
#   loss:0.500210
#   loss:0.500238
#   a:3.012125,b:2.000031,c:0.941524
# (result figure shown; second example below)
'''案例二:y = a*exp(-(x-u)**2/(x*sigma**2))+b'''
def demo2():
    """Fit y = a*exp(-(x-u)**2 * s) + b (Gaussian bump plus offset) with Adam.

    Generates noise-free Gaussian data with known parameters
    (a=2, u=1, s=1, b=2), then fits amplitude aa, center uu, width factor
    ss and offset bb by minimizing RMSE for 5000 Adam steps, and plots
    the fitted curve on top of the data. Uses the TF1 graph/session API.
    Returns nothing; output is print statements and a matplotlib window.
    """
    # Synthetic data.
    x = np.arange(-10, 10, 0.1)
    a = 2
    u = 1.0
    s = 1.0
    b = 2
    y = a * np.exp(-(x - u)**2 * s) + b
    plt.plot(x, y, '*')
    # plt.show()

    # Model: same functional form, parameters started away from the true
    # values so the optimizer actually has to converge.
    xx = tf.placeholder(dtype=tf.float32, shape=[x.shape[0]], name='xx')
    aa = tf.Variable(initial_value=3.5, name='aa')
    bb = tf.Variable(initial_value=2.2, name='bb')
    uu = tf.Variable(initial_value=1.5, name='uu')
    ss = tf.Variable(initial_value=2.0, name='sigma')
    # FIX: this placeholder was created with name='xx', colliding with the
    # input placeholder's graph name; give it its own name.
    yy = tf.placeholder(dtype=tf.float32, shape=[y.shape[0]], name='yy')
    predict = aa * tf.exp(-(xx - uu)**2 * ss) + bb
    # RMSE loss, same convention as demo1.
    loss = tf.sqrt(tf.reduce_mean((yy - predict)**2))
    optimizer = tf.train.AdamOptimizer(learning_rate=0.0005).minimize(loss)
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        for i in range(5000):
            optimizer.run(feed_dict={xx: x, yy: y})
            print("loss:%f" % (loss.eval(feed_dict={xx: x, yy: y})))
        print('aa:%f,uu:%f,ss:%f,bb:%f' % (aa.eval(), uu.eval(), ss.eval(), bb.eval()))
        plt.plot(x, aa.eval() * np.exp(-(x - uu.eval())**2 * ss.eval()) + bb.eval())
        plt.show()
# demo2()
# Sample output — the fit recovers the true parameters almost exactly:
#   loss:0.000044
#   loss:0.000041
#   loss:0.000031
#   aa:2.000023,uu:1.000000,ss:0.999975,bb:2.000025
# (result figure shown)