```python
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt


def add_layer(inputs, in_size, out_size, activation_function=None):
    # One fully connected layer; name_scope groups its ops in the TensorBoard graph.
    with tf.name_scope('layer'):
        with tf.name_scope('weights'):
            Weights = tf.Variable(tf.random_normal([in_size, out_size]))
        with tf.name_scope('biases'):
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
        with tf.name_scope('Wx_plus_b'):
            Wx_plus_b = tf.matmul(inputs, Weights) + biases
        if activation_function is None:
            outputs = Wx_plus_b
        else:
            outputs = activation_function(Wx_plus_b)
        return outputs


# Toy data: y = x^2 - 0.5 plus Gaussian noise.
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise

with tf.name_scope('input'):
    xs = tf.placeholder(tf.float32, [None, 1], name='x_input')  # 1 means each sample has one feature
    ys = tf.placeholder(tf.float32, [None, 1], name='y_input')

# Network: 1 -> 10 (ReLU) -> 1
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
prediction = add_layer(l1, 10, 1, activation_function=None)

with tf.name_scope('loss'):
    # Sum the squared error over the feature axis, then average over samples.
    loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction), reduction_indices=[1]))
with tf.name_scope('train'):
    train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)

init = tf.global_variables_initializer()  # tf.initialize_all_variables() is deprecated
sess = tf.Session()
writer = tf.summary.FileWriter('logs/', sess.graph)  #### the key line: writes the graph for TensorBoard ####
sess.run(init)

# Live plot of the fitted curve.
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.scatter(x_data, y_data)
plt.ion()  # interactive mode so the figure keeps updating
plt.show()

for i in range(1000):
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 50 == 0:
        # print(sess.run(loss, feed_dict={xs: x_data, ys: y_data}))
        try:
            ax.lines.remove(lines[0])  # remove the previous prediction line
        except Exception:
            pass
        prediction_value = sess.run(prediction, feed_dict={xs: x_data})
        lines = ax.plot(x_data, prediction_value, 'r-', lw=5)
        plt.pause(0.1)
```
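The script above targets the TensorFlow 1.x API (`tf.placeholder`, `tf.Session`, `tf.train.GradientDescentOptimizer`). On TensorFlow 2.x those symbols are gone from the top-level namespace; as a rough, minimal sketch (assuming TF 2.x is installed, not part of the original script), the same graph-and-FileWriter workflow can be reproduced through the `tf.compat.v1` shim:

```python
# Minimal sketch (assumes TensorFlow 2.x): same graph/session/FileWriter
# workflow as above, via the tf.compat.v1 compatibility shim.
import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # bring back graph-mode semantics

with tf.name_scope('input'):
    xs = tf.compat.v1.placeholder(tf.float32, [None, 1], name='x_input')

with tf.name_scope('layer'):
    W = tf.Variable(tf.random.normal([1, 10]), name='weights')   # tf.random_normal -> tf.random.normal
    b = tf.Variable(tf.zeros([1, 10]) + 0.1, name='biases')
    out = tf.nn.relu(tf.matmul(xs, W) + b)

sess = tf.compat.v1.Session()
# FileWriter now lives under tf.compat.v1.summary
writer = tf.compat.v1.summary.FileWriter('logs/', sess.graph)
writer.close()
sess.close()
```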
Common issues:
1. `writer = tf.summary.FileWriter('logs/', sess.graph)` is the correct call; writing `writer = tf.train.SummaryWriter('logs/', sess.graph)` raises an error, because `tf.train.SummaryWriter` was renamed to `tf.summary.FileWriter` in TensorFlow 1.x.
2. After the `logs/` directory has been generated, open a command prompt, `cd` to the directory that contains `logs/`, and run `tensorboard --logdir=logs` (a way to launch it from Python instead is sketched after this list).
3. Open http://localhost:6006 in the Chrome browser.
4. In TensorBoard, click the GRAPHS tab to see the visualization of the network graph.
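The usual way to start TensorBoard is the `tensorboard --logdir=logs` command above. If you prefer to start it from Python, a minimal sketch using the `tensorboard.program` module (which ships with TensorFlow; the exact behaviour may vary across versions) looks like this:

```python
# Sketch: start TensorBoard from a Python process instead of the command line.
# Assumes the tensorboard package installed alongside TensorFlow.
from tensorboard import program

tb = program.TensorBoard()
tb.configure(argv=[None, '--logdir', 'logs'])  # argv[0] is a dummy program name
url = tb.launch()                              # starts a background server
print('TensorBoard is running at', url)        # typically http://localhost:6006
```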