https://www.bilibili.com/video/av16001891?p=15
Covers video parts 15, 16, and 17.
1. Add a layer (添加层)
# Build one fully connected layer: output = activation(inputs @ W + b).
def add_layer(inputs, in_size, out_size, activation_function=None):
    """Add a fully connected layer and return its output tensor.

    inputs: 2-D tensor of shape (batch, in_size) — assumed, TODO confirm.
    in_size: number of input features.
    out_size: number of output features.
    activation_function: optional activation callable; identity when None.
    """
    weights = tf.Variable(tf.random_normal([in_size, out_size]))
    # Biases start at a small positive value rather than zero.
    bias = tf.Variable(tf.zeros([1, out_size]) + 0.1)
    linear_output = tf.matmul(inputs, weights) + bias
    if activation_function is None:
        return linear_output
    return activation_function(linear_output)
2. Training (训练)
# Fit a 1 -> 10 -> 1 network with plain gradient descent.
# NOTE(review): x and y (the training data arrays fed below) are not
# defined in this chunk — they must come from earlier in the file; confirm.
xs = tf.placeholder(tf.float32, [None, 1])  # inputs, shape (batch, 1)
ys = tf.placeholder(tf.float32, [None, 1])  # targets, shape (batch, 1)
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)    # hidden layer, 10 units
prediction = add_layer(l1, 10, 1, activation_function=None)  # output layer, 1 unit
# Mean squared error: sum over the feature axis, then average over the batch.
loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(loss)
# init = tf.initialize_all_variables()  # deprecated in favor of the call below
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
# Original paste lost the loop-body indentation; restored here (syntax fix).
for i in range(1001):
    sess.run(train_step, feed_dict={xs: x, ys: y})
    if i % 50 == 0:  # report training loss every 50 steps
        print(sess.run(loss, feed_dict={xs: x, ys: y}))
3. Plot (live visualization)
# Continue training while animating the fitted curve over the data.
# NOTE(review): assumes plt (matplotlib.pyplot), sess, train_step, prediction,
# loss, xs, ys, x, y are all in scope from earlier in the file — confirm.
plt.ion()  # interactive mode: drawing does not block after show
# Original paste lost the loop-body indentation; restored here (syntax fix).
for step in range(1001):
    sess.run(train_step, feed_dict={xs: x, ys: y})
    if step % 50 == 0:
        plt.cla()  # clear the axes before redrawing this frame
        prediction_value = sess.run(prediction, feed_dict={xs: x})
        plt.scatter(x, y)
        plt.plot(x, prediction_value, 'r-', lw=3)
        plt.pause(0.1)  # brief pause so the figure actually refreshes
        print(sess.run(loss, feed_dict={xs: x, ys: y}))
plt.ioff()  # leave interactive mode so the final show() blocks
plt.show()