莫烦 (Morvan) TensorFlow_07: TensorBoard Visualization

import tensorflow as tf  
import numpy as np  
import matplotlib.pyplot as plt  
  
def add_layer(inputs, in_size, out_size, activation_function=None):
  # One fully connected layer; each part gets its own name scope so that
  # TensorBoard groups the ops under a single collapsible "layer" node.
  with tf.name_scope('layer'):

    with tf.name_scope('Weights'):
      Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')  # shape: [in_size, out_size]

    with tf.name_scope('biases'):
      biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')

    with tf.name_scope('Wx_plus_b'):
      Wx_plus_b = tf.matmul(inputs, Weights) + biases

    if activation_function is None:
      outputs = Wx_plus_b
    else:
      outputs = activation_function(Wx_plus_b)
    return outputs
   
#define placeholder
with tf.name_scope('inputs'):
  xs = tf.placeholder(tf.float32, [None, 1], name='x_input')
  ys = tf.placeholder(tf.float32, [None, 1], name='y_input')
  
#add hidden layer
l1 = add_layer(xs, 1, 10, activation_function = tf.nn.relu)  
#add output layer
prediction = add_layer(l1, 10, 1, activation_function = None)  

#the error between prediction and real data  
with tf.name_scope('loss'):
  loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction),
                                      reduction_indices=[1]))
with tf.name_scope('train'):
  train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)  
  
sess = tf.Session()  
writer = tf.summary.FileWriter("logs/", sess.graph)

# important step: initialize all variables
sess.run(tf.global_variables_initializer())

  

Note: some browsers may not render TensorBoard well; the latest version of Chrome is recommended.

Run on the command line:

tensorboard --logdir=logs/
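
TensorBoard then prints the address it is serving on (http://localhost:6006 by default); open it in the browser and switch to the GRAPHS tab to see the inputs, layer, loss, and train name scopes defined above as collapsible nodes.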

 

Reposted from: https://www.cnblogs.com/alexYuin/p/8684231.html
