• pytorch (2): Linear Regression


    1. Basic Formulas

    Linear Regression

    The ideal function:

    $y = w \cdot x + b$

    With noise added:

    $y = w \cdot x + b + \epsilon$

    $\epsilon \sim N(0.01,\ 1)$
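
    The code in section 2 below reads its samples from data.csv, which is not included in the post. Here is a minimal sketch that generates such a file from the noisy model above; the true_w, true_b values and the sample range are arbitrary assumptions:

    import numpy as np

    # Hypothetical ground-truth parameters; any values work for a demo.
    true_w, true_b = 1.5, 0.5
    rng = np.random.default_rng(0)

    x = rng.uniform(0, 100, size=100)
    eps = rng.normal(0.01, 1, size=100)    # noise drawn as eps ~ N(0.01, 1)
    y = true_w * x + true_b + eps

    np.savetxt("data.csv", np.column_stack([x, y]), delimiter=",")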

    The loss to minimize becomes:

    $loss = (wx + b - y)^2$

    The parameters w and b are updated by gradient descent (lr, the learning rate, is the hyperparameter):

    $b_{new} = b - lr \times \frac{\partial\, loss}{\partial b} = b - lr \times 2(wx + b - y)$

    $w_{new} = w - lr \times \frac{\partial\, loss}{\partial w} = w - lr \times 2(wx + b - y)\, x$
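
    To make the update rules concrete, here is one manual gradient step on a single sample; all numbers are arbitrary illustrations:

    w, b, lr = 0.0, 0.0, 0.01
    x, y = 2.0, 5.0               # one (x, y) sample

    pred = w * x + b              # prediction: 0.0
    loss = (pred - y) ** 2        # squared error: 25.0

    grad_b = 2 * (pred - y)       # d loss / d b = -10.0
    grad_w = 2 * (pred - y) * x   # d loss / d w = -20.0

    b = b - lr * grad_b           # 0.0 -> 0.1
    w = w - lr * grad_w           # 0.0 -> 0.2

    Averaging these per-sample gradients over the whole dataset is exactly what step_gradient does in the code below.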

    2. Code

    import numpy as np
    import matplotlib.pyplot as plt
    class LineRegression():
        def __init__(self):
            pass
        def compute_error_for_line_given_points(self, b, w, points):
            """Compute the mean squared error for the given parameters w, b."""
            totalError = 0
            for i in range(0, len(points)):
                x = points[i, 0]
                y = points[i, 1]
                totalError = totalError + (y-(w*x + b))**2
            return totalError/float(len(points))
        def step_gradient(self, b, w, points, lr):
            """梯度下降法更新w,b的值"""
            b_gradient = 0
            w_gradient = 0
            N = float(len(points))
            for i in range(len(points)):
                x = points[i, 0]
                y = points[i, 1]
                # d(loss)/db averaged over N samples: -(2/N) * (y - (w*x + b))
                b_gradient = b_gradient - 2*(y-(w*x+b)) / N
                # d(loss)/dw averaged over N samples: -(2/N) * x * (y - (w*x + b))
                w_gradient = w_gradient - 2*x*(y-(w*x+b)) / N
            b_new = b - (lr * b_gradient)
            w_new = w - (lr * w_gradient)
            return [b_new, w_new]
        def gradient_descent_runner(self, points, b, w, lr, iterations):
            """梯度下降"""
            for i in range(iterations):
                b, w = self.step_gradient(b, w, np.array(points), lr)
            return [b,w]
        def run(self):
            points = np.genfromtxt("data.csv", delimiter=",")
            lr = 0.0001
            initial_b = 0
            initial_w = 0
            iterations = 1000
            print(
                f"Starting gradient descent at b = {initial_b}, w = {initial_w}, "
                f"error = {self.compute_error_for_line_given_points(initial_b, initial_w, points)}")
            print('\nRunning...')
            [b, w] = self.gradient_descent_runner(points, initial_b, initial_w, lr, iterations)
            print(
                f"\nAfter gradient descent at b = {b}, w = {w}, "
                f"error = {self.compute_error_for_line_given_points(b, w, points)}")
            print('\nb:{}, w:{}'.format(b, w))
            x = points[:, 0]
            y = w * x + b
            plt.scatter(points[:, 0], points[:, 1], facecolors='none', edgecolors='b', s=15, label='original')
            plt.plot(x, y, c='black', label='predict', linestyle=':')
            plt.legend()
            plt.show()
    if __name__ == '__main__':
        LineRegression().run()
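
    Although the title says "pytorch", the code above is pure NumPy. For comparison, here is a minimal PyTorch sketch of the same fit; it assumes the same data.csv and mirrors the hyperparameters in run():

    import numpy as np
    import torch

    points = np.genfromtxt("data.csv", delimiter=",").astype(np.float32)
    x = torch.from_numpy(points[:, 0:1])   # shape (N, 1)
    y = torch.from_numpy(points[:, 1:2])

    model = torch.nn.Linear(1, 1)          # computes w * x + b
    optimizer = torch.optim.SGD(model.parameters(), lr=0.0001)
    loss_fn = torch.nn.MSELoss()

    for _ in range(1000):
        optimizer.zero_grad()
        loss = loss_fn(model(x), y)        # mean squared error
        loss.backward()                    # autograd computes d loss / d w, d loss / d b
        optimizer.step()                   # w -= lr * grad_w, b -= lr * grad_b

    print(f"b = {model.bias.item()}, w = {model.weight.item()}")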

    3. Plotting

    The scatter plot of the original points and the dotted fitted line are produced by the plt.scatter and plt.plot calls in run() above.

  • Original post: https://www.cnblogs.com/zhangxianrong/p/13977913.html