• Machine Learning: Gradient Descent Worked Examples


    import numpy as np
    """
    Simulate a dataset: y = w*x + b plus Gaussian noise
    """
    data = []
    w = 1.477
    b = 0.089
    for i in range(100):
        x = np.random.uniform(-10., 10.)
        eps = np.random.normal(0., 0.1)  # observation noise
        y = w * x + b + eps              # reuse w and b instead of repeating the constants
        data.append([x, y])
    """
    求均方差
    """
    def mse(w,b,data):
        loss = 0
        for i in range(0,len(data)):
            x = data[i][0]
            y = data[i][1]
            loss += (y-w*x-b)**2
        return loss/float(len(data))
    """
    计算梯度
    lr:学习率
    """
    def step_gradient(w,b,data,lr):
        b_gradient = 0
        w_gradient = 0
        m = float(len(data))
        for i in range(0,len(data)):
            x = data[i][0]
            y = data[i][1]
            b_gradient += (2/m) * (w*x+b-y)
            w_gradient += (2/m) * x * (w*x+b-y)
        new_w = w - lr*w_gradient
        new_b = b - lr*b_gradient
        return new_w,new_b
    """
    梯度更新
    lr:学习率
    num:迭代次数
    """
    def gradient_descent(data,start_b,start_w,lr,num):
        b = start_b
        w = start_w
        for step in range(num):
            w,b = step_gradient(w,b,np.array(data),lr)
            loss = mse(w,b,data)
            if step %50 == 0:
                print(step,w,b)
        return b,w
    """
    梯度下降执行函数
    """
    def main():
        lr = 0.001
        init_b = 0
        init_w = 0
        num = 1000
        b,w = gradient_descent(data,init_b,init_w,lr,num)
        lose = mse(w,b,data)
        print("
    ")
        print(lose,w,b)  # 0.008130636177163393 1.4743398873266684 0.08602558343646888
    main()
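
    # The per-sample Python loops above can be vectorized with NumPy array
    # operations. This is a minimal sketch, not code from the original post
    # (the names vectorized_step, xs, ys are illustrative); it applies the
    # same gradients used in step_gradient:
    # dL/dw = (2/m) * sum(x * (w*x + b - y)), dL/db = (2/m) * sum(w*x + b - y)
    def vectorized_step(w, b, xs, ys, lr):
        err = w * xs + b - ys              # residuals, shape [m]
        w_grad = 2.0 * np.mean(xs * err)   # dL/dw
        b_grad = 2.0 * np.mean(err)        # dL/db
        return w - lr * w_grad, b - lr * b_grad

    xs = np.array([d[0] for d in data])    # unpack the [x, y] pairs
    ys = np.array([d[1] for d in data])
    w_v, b_v = 0.0, 0.0
    for step in range(1000):
        w_v, b_v = vectorized_step(w_v, b_v, xs, ys, lr=0.001)
    print(w_v, b_v, np.mean((ys - w_v * xs - b_v) ** 2))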

     PyTorch

    import torch
    """
    1. Prepare the data
    2. Compute predictions with the model
    3. Compute the loss
    4. Backpropagate and update the parameters
    """
    # Ground truth: y = 3*x + 1.2
    x = torch.rand([500, 1])
    y = x * 3 + 1.2
    w = torch.rand([1, 1], requires_grad=True)
    b = torch.tensor(0, requires_grad=True, dtype=torch.float32)
    
    
    for i in range(500):
        y_predict = torch.matmul(x, w) + b    # forward pass
        loss = (y_predict - y).pow(2).mean()  # MSE loss
        # Zero the gradients first, otherwise backward() accumulates them
        if w.grad is not None:
            w.grad.data.zero_()
        if b.grad is not None:
            b.grad.data.zero_()
        loss.backward()                       # backpropagation
        # Manual SGD update (lr = 0.1); writing to .data keeps the update
        # out of the autograd graph
        w.data = w.data - w.grad * 0.1
        b.data = b.data - b.grad * 0.1
        if i % 50 == 0:
            print(w.item(), b.item(), loss.item())
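
    # The same model written with PyTorch's built-in modules; a minimal
    # sketch of the idiomatic version (nn.Linear, nn.MSELoss and optim.SGD
    # are standard PyTorch APIs; the variable names are illustrative):
    import torch.nn as nn
    import torch.optim as optim

    model = nn.Linear(1, 1)                  # holds w and b internally
    optimizer = optim.SGD(model.parameters(), lr=0.1)
    criterion = nn.MSELoss()

    for i in range(500):
        y_predict = model(x)                 # forward pass
        loss = criterion(y_predict, y)
        optimizer.zero_grad()                # clear accumulated gradients
        loss.backward()                      # backpropagation
        optimizer.step()                     # parameter update

    print(model.weight.item(), model.bias.item(), loss.item())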
  • Original post: https://www.cnblogs.com/2016-zck/p/14474169.html