下面代碼為一個簡單的線性回歸模型,擬合y = 2 * x, 創建的tensorboard文件夾位於當前文件夾下
1 import os 2 import io 3 import time 4 import numpy as np 5 import matplotlib.pyplot as plt 6 import tensorflow as tf 7 8 sess = tf.Session() 9 # 創建summary_writer,將tensorboard summary寫入tensorboard文件夾 10 summary_writer = tf.summary.FileWriter('tensorboard', tf.get_default_graph()) 11 # 確保summary_writer寫入的Tensorflo文件夾存在 12 if not os.path.exists('tensorboard'): 13 os.makedirs('tensorboard') 14 15 # 設置模型參數 16 batch_size = 50 17 generations = 100 18 x_data = np.arange(1000) / 10 19 true_slope = 2. 20 y_data = x_data * true_slope + np.random.normal(loc=0.0, scale=25, size=1000) 21 22 # 分割數據為訓練集和測試集 23 train_ix = np.random.choice(len(x_data), size=int(len(x_data) * 0.9), replace=False) 24 # print('train_ix', train_ix.shape) 25 test_ix = np.setdiff1d(np.arange(1000), train_ix) 26 x_data_train, y_data_train = x_data[train_ix], y_data[train_ix] 27 # print('x_data_train', x_data_train.shape) 28 # print('y_data_train', y_data_train.shape) 29 x_data_test, y_data_test = x_data[test_ix], y_data[test_ix] 30 # print('x_data_test', x_data_test.shape) 31 # print('y_data_test', y_data_test.shape) 32 # print(y_data_test) 33 # 創建占位符,變量,模型操作,損失和優化函數 34 x_graph_input = tf.placeholder(tf.float32, [None]) 35 y_graph_input = tf.placeholder(tf.float32, [None]) 36 m = tf.Variable(tf.random_normal([1], dtype=tf.float32), name='Slope') 37 output = tf.multiply(m, x_graph_input, name='Batch_Multiplication') 38 residuals = output - y_graph_input 39 l2_loss = tf.reduce_mean(tf.abs(residuals), name='L2_Loss') 40 my_optim = tf.train.GradientDescentOptimizer(0.001) 41 train_step = my_optim.minimize(l2_loss) 42 43 # 創建tensorboard 操作匯總一個標量值 44 with tf.name_scope('Slope_Estimate'): 45 tf.summary.scalar('Slope_Estimate', tf.squeeze(m)) 46 47 # 添加tensorboard的另一個匯總數據是直方圖匯總 48 with tf.name_scope('Loss_and_Residuals'): 49 tf.summary.histogram('Histogram_Error', l2_loss) 50 tf.summary.histogram('Histogram_Residuals', residuals) 51 52 # 匯總數據 53 summary_op = tf.summary.merge_all() 54 init = tf.global_variables_initializer() 55 sess.run(init) 56 57 # 訓練線性回歸模型 58 for i in range(generations): 59 batch_indices = np.random.choice(len(x_data_train), size=batch_size) 60 x_batch = x_data_train[batch_indices] 61 y_batch = y_data_train[batch_indices] 62 # print(y_batch.shape) 63 _, train_loss, summary = sess.run([train_step, l2_loss, summary_op], 64 feed_dict={x_graph_input: x_batch, y_graph_input: y_batch}) 65 66 67 test_loss, test_resids = sess.run([l2_loss, residuals], feed_dict = {x_graph_input: x_data_test, y_graph_input: y_data_test}) 68 if (i + 1) % 10 == 0: 69 print('generation {} of {}. Train Loss: {:.3}, Test Loss: {:.3}.'.format(i + 1, generations, train_loss, test_loss)) 70 log_writer = tf.summary.FileWriter('tensorboard') 71 log_writer.add_summary(summary, i) 72 73 # 創建函數輸出protobuff格式的圖像 74 def get_linear_plot(slope): 75 linear_prediction = x_data * slope 76 plt.plot(x_data, y_data, 'b.', label='data') 77 plt.plot(x_data, linear_prediction, 'r--', linewidth=3, label='predicted line') 78 plt.legend(loc='upper left') 79 buf = io.BytesIO() 80 plt.savefig(buf, format='png') 81 buf.seek(0) 82 return buf 83 84 85 slope = sess.run(m) 86 plot_buf = get_linear_plot(slope[0]) 87 image = tf.image.decode_png(plot_buf.getvalue(), channels=4) 88 image = tf.expand_dims(image, 0) 89 image_summary_op = tf.summary.image('Linear_Plot', image) 90 image_summary = sess.run(image_summary_op) 91 log_writer.add_summary(image_summary, i) 92 log_writer.close()
在當前文件夾下進入cmd模式,輸入tensorboard --logdir=tensorboard --host=127.0.0.1
訪問127.0.0.1:6006即可進入tensorboard
注意:若寫為--logdir='tensorboard'會顯示不出來
也可直接在pycharm控制台中輸入tensorboard --logdir=tensorboard --host=127.0.0.1