[Deep Learning] Implementing a simple linear regression example in Python
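
The script below uses the low-level TensorFlow 1.x API to fit a linear model to randomly generated data whose true relationship is y = 0.08x + 0.7. It builds the graph step by step (data, model, loss, optimizer), logs summaries for TensorBoard, and saves a checkpoint after the last training step.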


    #!/usr/bin/env python
    # -*- coding: utf-8 -*-
    # @File  : 自实现一个线性回归.py
    # @Author: 赵路仓
    # @Date  : 2020/4/12
    # @Desc  :
    # @Contact : 398333404@qq.com
    import os
    
    import tensorflow as tf
    
    
    def linear_regression():
        """
        Implement a simple linear regression by hand with the low-level TensorFlow 1.x API.
        :return: None
        """
        # Name scope
        with tf.variable_scope("prepared_data"):
            # 1. Prepare the data: 100 random samples whose targets follow y = 0.08x + 0.7
            x = tf.random_normal(shape=[100, 1], name="Feature")
            y_true = tf.matmul(x, [[0.08]]) + 0.7
            # x = tf.constant([[1.0], [2.0], [3.0]])
            # y_true = tf.constant([[0.78], [0.86], [0.94]])
    
        with tf.variable_scope("create_model"):
            # 2. Build the model
            # Define the trainable model parameters (weight and bias)
            weights = tf.Variable(initial_value=tf.random_normal(shape=[1, 1]), name="Weights")
            bias = tf.Variable(initial_value=tf.random_normal(shape=[1, 1]), name="Bias")
            y_predict = tf.matmul(x, weights) + bias
    
        with tf.variable_scope("loss_function"):
            # 3. Build the loss function: mean squared error
            error = tf.reduce_mean(tf.square(y_predict - y_true))
    
        with tf.variable_scope("optimizer"):
            # 4. Minimize the loss with gradient descent
            optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(error)
    
        # Collect variables for TensorBoard summaries
        tf.summary.scalar("error", error)
        tf.summary.histogram("weights", weights)
        tf.summary.histogram("bias", bias)
    
        # Merge all summaries into a single op
        merged = tf.summary.merge_all()
    
        # Create a Saver object for checkpointing the model
        saver = tf.train.Saver()
    
        # Op that explicitly initializes all variables
        init = tf.global_variables_initializer()
    
        # Start a session
        with tf.Session() as sess:
            # Run the variable initializer
            sess.run(init)
    
            # Create an event file for TensorBoard
            file_writer = tf.summary.FileWriter("E:/tmp/linear", graph=sess.graph)
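            # View the graph and the logged scalars/histograms with: tensorboard --logdir=E:/tmp/linear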
    
            # print(x.eval())
            # print(y_true.eval())
            # Inspect the model parameters right after initialization
            print("Before training: weight %f, bias %f" % (weights.eval(), bias.eval()))
    
            # Start training
            for i in range(1000):
                sess.run(optimizer)
                print("第%d次参数为:权重%f,偏置%f,损失%f" % (i + 1, weights.eval(), bias.eval(), error.eval()))
    
                # Run the merged summary op
                summary = sess.run(merged)
                # Write this step's summaries to the event file
                file_writer.add_summary(summary, i)
    
                # Save the model after the last training step
                if i == 999:
                    saver.save(sess, "./tmp/model/my_linear.ckpt")
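                    # This writes my_linear.ckpt.index / .data-* / .meta files and a "checkpoint" marker under ./tmp/model/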
    
            # # Load a previously saved model (a standalone restore sketch follows the script)
            # if os.path.exists("./tmp/model/checkpoint"):
            #     saver.restore(sess, "./tmp/model/my_linear.ckpt")
    
            print("参数为:权重%f,偏置%f,损失%f" % (weights.eval(), bias.eval(), error.eval()))
            pre = [[0.5]]
            prediction = tf.matmul(pre, weights) + bias
            sess.run(prediction)
            print(prediction.eval())
    
        return None
    
    
    if __name__ == "__main__":
        linear_regression()
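
A minimal sketch of how the saved checkpoint could be loaded back later, mirroring the commented-out restore branch above. The helper name `load_and_predict` is hypothetical; it assumes the training run has already written ./tmp/model/my_linear.ckpt, and it rebuilds the two variables under the same scope and names ("create_model/Weights", "create_model/Bias") so the Saver can match them against the checkpoint.

    def load_and_predict():
        # Start from a clean graph so the rebuilt variable names match the checkpoint
        tf.reset_default_graph()
        with tf.variable_scope("create_model"):
            weights = tf.Variable(initial_value=tf.random_normal(shape=[1, 1]), name="Weights")
            bias = tf.Variable(initial_value=tf.random_normal(shape=[1, 1]), name="Bias")

        saver = tf.train.Saver()
        with tf.Session() as sess:
            if os.path.exists("./tmp/model/checkpoint"):
                saver.restore(sess, "./tmp/model/my_linear.ckpt")
                # Reuse the trained parameters for a new input x = 0.5
                prediction = tf.matmul(tf.constant([[0.5]]), weights) + bias
                print("Restored prediction for x=0.5:", sess.run(prediction))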
Original post: https://www.cnblogs.com/zlc364624/p/12686695.html