The following example implements a linear regression model using TensorFlow's low-level API.

The low-level API mainly consists of tensor operations, computation graphs, and automatic differentiation.
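To make these three ingredients concrete before the full example, here is a minimal, self-contained sketch (separate from the regression code below) showing a tensor operation, a `tf.GradientTape`-based derivative, and a function traced into a static graph with `@tf.function`:

```python
import tensorflow as tf

# Tensor operation: plain arithmetic on a tensor
x = tf.constant(3.0)

# Automatic differentiation: record operations on a GradientTape,
# then query d(y)/d(x)
with tf.GradientTape() as tape:
    tape.watch(x)              # constants must be watched explicitly
    y = x**2 + 2.0*x + 1.0
dy_dx = tape.gradient(y, x)
tf.print(dy_dx)                # prints 8 (= 2*x + 2 at x = 3)

# Computation graph: @tf.function traces the Python function
# into a static graph on its first call
@tf.function
def f(a, b):
    return tf.sqrt(a*a + b*b)

tf.print(f(tf.constant(3.0), tf.constant(4.0)))   # prints 5
```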
```python
import tensorflow as tf

# Print a timestamped divider line
@tf.function
def printbar():
    ts = tf.timestamp()
    today_ts = ts % (24*60*60)

    hour = tf.cast(today_ts//3600+8, tf.int32) % tf.constant(24)  # UTC+8
    minute = tf.cast((today_ts % 3600)//60, tf.int32)
    second = tf.cast(tf.floor(today_ts % 60), tf.int32)

    def timeformat(m):
        if tf.strings.length(tf.strings.format("{}", m)) == 1:
            return tf.strings.format("0{}", m)
        else:
            return tf.strings.format("{}", m)

    timestring = tf.strings.join([timeformat(hour), timeformat(minute),
                                  timeformat(second)], separator=":")
    tf.print("=========="*8, end="")
    tf.print(timestring)

# Number of samples
n = 400

# Generate a synthetic dataset
X = tf.random.uniform([n, 2], minval=-10, maxval=10)
w0 = tf.constant([[2.0], [-1.0]])
b0 = tf.constant(3.0)
Y = X@w0 + b0 + tf.random.normal([n, 1], mean=0.0, stddev=2.0)  # @ is matrix multiplication; add Gaussian noise

# Debug in eager (dynamic graph) mode
w = tf.Variable(tf.random.normal(w0.shape))
b = tf.Variable(0.0)

def train(epochs):
    for epoch in tf.range(1, epochs+1):
        with tf.GradientTape() as tape:
            # Forward pass: compute the loss
            Y_hat = X@w + b
            loss = tf.squeeze(tf.transpose(Y-Y_hat)@(Y-Y_hat))/(2.0*n)

        # Backward pass: compute the gradients
        dloss_dw, dloss_db = tape.gradient(loss, [w, b])
        # Gradient descent parameter update
        w.assign(w - 0.001*dloss_dw)
        b.assign(b - 0.001*dloss_db)

        if epoch % 1000 == 0:
            printbar()
            tf.print("epoch =", epoch, " loss =", loss)
            tf.print("w =", w)
            tf.print("b =", b)
            tf.print("")

train(5000)
```
Output:
```
================================================================================15:18:17
epoch = 1000  loss = 2.66289544
w = [[2.0176034]
 [-1.02091444]]
b = 1.92718041

================================================================================15:18:19
epoch = 2000  loss = 2.12707591
w = [[2.01378]
 [-1.01979101]]
b = 2.63039422

================================================================================15:18:21
epoch = 3000  loss = 2.05447602
w = [[2.01237178]
 [-1.01937926]]
b = 2.88924217

================================================================================15:18:23
epoch = 4000  loss = 2.04463911
w = [[2.01185489]
 [-1.01922464]]
b = 2.98452425

================================================================================15:18:24
epoch = 5000  loss = 2.04330635
w = [[2.01166272]
 [-1.01917028]]
b = 3.01959634
```
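As a variation on the manual `w.assign(...)` updates, the same gradient descent step can be delegated to a built-in optimizer. The sketch below assumes `X`, `Y`, `n`, and `w0` are defined as in the example above; `train_with_optimizer` is an illustrative name, not part of the original:

```python
import tensorflow as tf

# Same model, but the manual assign() updates are replaced with
# a built-in SGD optimizer. Assumes X, Y, n, w0 from the example above.
w = tf.Variable(tf.random.normal(w0.shape))
b = tf.Variable(0.0)
optimizer = tf.keras.optimizers.SGD(learning_rate=0.001)

def train_with_optimizer(epochs):
    for epoch in tf.range(1, epochs+1):
        with tf.GradientTape() as tape:
            Y_hat = X@w + b
            loss = tf.squeeze(tf.transpose(Y-Y_hat)@(Y-Y_hat))/(2.0*n)
        grads = tape.gradient(loss, [w, b])
        # For plain SGD, apply_gradients performs var -= lr * grad
        optimizer.apply_gradients(zip(grads, [w, b]))
        if epoch % 1000 == 0:
            tf.print("epoch =", epoch, " loss =", loss)

train_with_optimizer(5000)
```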
Converting to a static graph for acceleration: adding the `@tf.function` decorator lets the AutoGraph mechanism trace the training loop into a static graph, which runs faster than the eager version.
```python
## Use the AutoGraph mechanism to convert to a static graph for speed
w = tf.Variable(tf.random.normal(w0.shape))
b = tf.Variable(0.0)

@tf.function
def train(epochs):
    for epoch in tf.range(1, epochs+1):
        with tf.GradientTape() as tape:
            # Forward pass: compute the loss
            Y_hat = X@w + b
            loss = tf.squeeze(tf.transpose(Y-Y_hat)@(Y-Y_hat))/(2.0*n)

        # Backward pass: compute the gradients
        dloss_dw, dloss_db = tape.gradient(loss, [w, b])
        # Gradient descent parameter update
        w.assign(w - 0.001*dloss_dw)
        b.assign(b - 0.001*dloss_db)

        if epoch % 1000 == 0:
            printbar()
            tf.print("epoch =", epoch, " loss =", loss)
            tf.print("w =", w)
            tf.print("b =", b)
            tf.print("")

train(5000)
```
Output:
```
================================================================================15:19:50
epoch = 1000  loss = 2.6668539
w = [[2.01762223]
 [-1.02092016]]
b = 1.92363214

================================================================================15:19:51
epoch = 2000  loss = 2.12761354
w = [[2.01378703]
 [-1.01979291]]
b = 2.6290853

================================================================================15:19:52
epoch = 3000  loss = 2.0545485
w = [[2.0123744]
 [-1.01938]]
b = 2.888762

================================================================================15:19:53
epoch = 4000  loss = 2.04464912
w = [[2.01185584]
 [-1.019225]]
b = 2.98434567

================================================================================15:19:54
epoch = 5000  loss = 2.04330778
w = [[2.0116632]
 [-1.0191704]]
b = 3.01952934
```
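Comparing the timestamps printed by `printbar`, the eager loop took roughly 2 seconds per 1000 epochs on this run, while the `@tf.function` version took about 1 second, roughly a 2x speedup. A rough way to measure this directly is to wrap each call with `time.time()`; the sketch below assumes the two training loops above were kept under the illustrative names `train_eager` and `train_graph`:

```python
import time

# Rough timing comparison (numbers vary by machine). Assumes the eager
# loop above was kept as train_eager and the @tf.function version as
# train_graph; both names are illustrative.
start = time.time()
train_eager(5000)
print("eager:       %.2f s" % (time.time() - start))

start = time.time()
train_graph(5000)    # the first call also pays a one-time tracing cost
print("tf.function: %.2f s" % (time.time() - start))
```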
References:

Open-source e-book: https://lyhue1991.github.io/eat_tensorflow2_in_30_days/

GitHub repository: https://github.com/lyhue1991/eat_tensorflow2_in_30_days