TensorFlow basic operations

import tensorflow as tf  # import the package

tf.name_scope: define a name scope for a group of operations

with tf.name_scope('Weights'):  # scope name for this group of operations
    Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')
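
A minimal standalone sketch (TensorFlow 1.x, as used throughout this post; the sizes 2 and 3 are arbitrary, picked only for illustration) showing that the scope name becomes a prefix on the variable's name in the graph:

import tensorflow as tf

with tf.name_scope('Weights'):
    Weights = tf.Variable(tf.random_normal([2, 3]), name='W')
print(Weights.name)  # prints "Weights/W:0" -- the scope name prefixes the variable name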

Record how the Weights change during training:

tf.summary.histogram(layer_name+'/Weights',Weights)

Define placeholders:

tf.placeholder(tf.float32,[None,1],name='x_input')
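
A placeholder holds no data itself; values are fed in through feed_dict when the graph is run. A minimal sketch (the doubling op and the 3x1 input array are only illustrative):

x_input = tf.placeholder(tf.float32, [None, 1], name='x_input')
doubled = x_input * 2
with tf.Session() as sess:
    # feed a 3x1 array into the placeholder at run time
    print(sess.run(doubled, feed_dict={x_input: [[1.0], [2.0], [3.0]]}))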

Save the network structure:

tf.summary.FileWriter('logs/', sess.graph)  # write the network graph to the logs/ directory
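
Once the graph has been written to logs/, it can be viewed by running tensorboard --logdir=logs and opening http://localhost:6006 in a browser.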

Merge all summaries:

tf.summary.merge_all()
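
merge_all bundles every summary op defined so far into a single op; running that op returns a serialized summary string that writer.add_summary can record, as in the next snippet.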

Record summary points:

rs=sess.run(merged, feed_dict={xs: x_data, ys: y_data})
writer.add_summary(rs, i)  # record one point every 50 steps

Complete code

import tensorflow as tf
import numpy as np
import matplotlib as mpl
mpl.use('TkAgg')
import matplotlib.pyplot as plt
def add_layer(inputs, in_size, out_size, n_layer, activation_function=None):
    layer_name = 'layer%s' % n_layer
    with tf.name_scope(layer_name):
        with tf.name_scope('Weights'):  # scope name for this layer's weights
            Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')  # random_normal => random initial values
            tf.summary.histogram(layer_name + '/Weights', Weights)  # record how the Weights change
        with tf.name_scope('biases'):
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
            tf.summary.histogram(layer_name + '/biases', biases)
        with tf.name_scope('Wx_plus_b'):
            Wx_plus_b = tf.matmul(inputs, Weights) + biases

        if activation_function is None:
            output= Wx_plus_b
        else:
            output=activation_function(Wx_plus_b)
    tf.summary.histogram(layer_name + '/output', output)
    return output

x_data =np.linspace(-1,1,300)[:,np.newaxis]
noise= np.random.normal(0,0.05,x_data.shape)
y_data =np.square(x_data)+noise
with tf.name_scope('inputs'):
    xs=tf.placeholder(tf.float32,[None,1],name='x_input')
    ys=tf.placeholder(tf.float32,[None,1],name='y_input')

l1=add_layer(xs,1,10,1,activation_function=tf.nn.relu)
prediction=add_layer(l1,10,1,n_layer=2,activation_function=None)

#prediction =add_layer(l1,10,1,activation_function=None)
with tf.name_scope('loss'):
    loss =tf.reduce_mean(tf.reduce_sum(tf.square(ys-prediction),
                        reduction_indices=[1]),name='mse')
    tf.summary.scalar('loss', loss)
with tf.name_scope('train'):
    train_step =tf.train.GradientDescentOptimizer(learning_rate=0.01)\
        .minimize(loss)

fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.scatter(x_data, y_data)
plt.ion()
plt.show()
init = tf.global_variables_initializer()  # initialize_all_variables() is deprecated in TF 1.x
with tf.Session() as sess:

    writer = tf.summary.FileWriter('logs/', sess.graph)  # write the network graph to logs/
    merged = tf.summary.merge_all()  # merge all the summaries
    sess.run(init)
    """for i in range(1000):
        sess.run(train_step,feed_dict={xs:x_data,ys:y_data})
        if i%50==0:
            print(sess.run(loss,feed_dict={xs:x_data,ys:y_data}))
        prediction_value =sess.run(prediction,feed_dict={xs:x_data})
        plt.plot(x_data,prediction_value,'r-',lw=5)
        plt.show()
        plt.pause(0.1)"""
    for i in range(1000):
        # training
        sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
        if i % 50 == 0:
            # to visualize the result and improvement
            rs=sess.run(merged, feed_dict={xs: x_data, ys: y_data})
            writer.add_summary(rs, i)  # record one point every 50 steps
            try:
                ax.lines.remove(lines[0])  # remove the previous prediction line (not defined on the first pass)
            except Exception:
                pass
            prediction_value = sess.run(prediction, feed_dict={xs: x_data})
            # plot the prediction
            lines = ax.plot(x_data, prediction_value, 'r-', lw=5)
            plt.pause(0.5)

Reference: https://morvanzhou.github.io/tutorials/machine-learning/tensorflow
