# tensorflow笔记（三）之 tensorboard的使用

tensorflow笔记（三）之 tensorboard的使用

## 1. 实践1--矩阵相乘

 1 import tensorflow as tf
2
3 with tf.name_scope('graph') as scope:
4      matrix1 = tf.constant([[3., 3.]],name ='matrix1')  #1 row by 2 column
5      matrix2 = tf.constant([[2.],[2.]],name ='matrix2') # 2 row by 1 column
6      product = tf.matmul(matrix1, matrix2,name='product')
7
8 sess = tf.Session()
9
10 writer = tf.summary.FileWriter("logs/", sess.graph)
11
12 init = tf.global_variables_initializer()
13
14 sess.run(init)

## 2. 实践2---线性拟合（一）

 1 import tensorflow as tf
2 import numpy as np
3
4 ## prepare the original data
5 with tf.name_scope('data'):
6      x_data = np.random.rand(100).astype(np.float32)
7      y_data = 0.3*x_data+0.1
8 ##creat parameters
9 with tf.name_scope('parameters'):
10      weight = tf.Variable(tf.random_uniform([1],-1.0,1.0))
11      bias = tf.Variable(tf.zeros([1]))
12 ##get y_prediction
13 with tf.name_scope('y_prediction'):
14      y_prediction = weight*x_data+bias
15 ##compute the loss
16 with tf.name_scope('loss'):
17      loss = tf.reduce_mean(tf.square(y_data-y_prediction))
18 ##creat optimizer
20 #creat train ,minimize the loss
21 with tf.name_scope('train'):
22      train = optimizer.minimize(loss)
23 #creat init
24 with tf.name_scope('init'):
25      init = tf.global_variables_initializer()
26 ##creat a Session
27 sess = tf.Session()
28 ##initialize
29 writer = tf.summary.FileWriter("logs/", sess.graph)
30 sess.run(init)
31 ## Loop
32 for step  in  range(101):
33     sess.run(train)
34     if step %10==0 :
35         print step ,'weight:',sess.run(weight),'bias:',sess.run(bias)

Parameters部分

y_prediction部分和init部分

loss部分

## 3. 实践2---线性拟合（二）

 1 import tensorflow as tf
2 import numpy as np
3
4 ## prepare the original data
5 with tf.name_scope('data'):
6      x_data = np.random.rand(100).astype(np.float32)
7      y_data = 0.3*x_data+0.1
8 ##creat parameters
9 with tf.name_scope('parameters'):
10      with tf.name_scope('weights'):
11             weight = tf.Variable(tf.random_uniform([1],-1.0,1.0))
12            tf.summary.histogram('weight',weight)
13      with tf.name_scope('biases'):
14            bias = tf.Variable(tf.zeros([1]))
15            tf.summary.histogram('bias',bias)
16 ##get y_prediction
17 with tf.name_scope('y_prediction'):
18      y_prediction = weight*x_data+bias
19 ##compute the loss
20 with tf.name_scope('loss'):
21      loss = tf.reduce_mean(tf.square(y_data-y_prediction))
22      tf.summary.scalar('loss',loss)
23 ##creat optimizer
25 #creat train ,minimize the loss
26 with tf.name_scope('train'):
27      train = optimizer.minimize(loss)
28 #creat init
29 with tf.name_scope('init'):
30      init = tf.global_variables_initializer()
31 ##creat a Session
32 sess = tf.Session()
33 #merged
34 merged = tf.summary.merge_all()
35 ##initialize
36 writer = tf.summary.FileWriter("logs/", sess.graph)
37 sess.run(init)
38 ## Loop
39 for step  in  range(101):
40     sess.run(train)
41     rs=sess.run(merged)
42     writer.add_summary(rs, step)

运行过程与上面两个一样

scalar中的loss训练图

distribution中的weight和bias的训练图

histogram中的weight和bias的训练图

tensorboard的博客结束了，我写的只是基础部分，更多东西还请看官方的文档和教程，希望这篇博客能对你学习tensorboard有帮助！

notebook链接: https://pan.baidu.com/s/1o8lzN1g 密码: mbv8

posted @ 2017-08-25 18:49  FANG_YANG  阅读(149712)  评论(3)  编辑  收藏  举报