• A Simple Neural Network in Python

The script below builds and trains a tiny network from scratch with NumPy: two inputs, a hidden layer of two sigmoid neurons, and one sigmoid output neuron, trained by stochastic gradient descent on a mean-squared-error loss with hand-derived backpropagation.


    import numpy as np
    
    
    def sigmoid(x):
        # Sigmoid activation function: f(x) = 1 / (1 + e^(-x))
        return 1 / (1 + np.exp(-x))
    
    
    def deri_sigmoid(x):
        # Derivative of the sigmoid: f'(x) = f(x) * (1 - f(x))
        k = sigmoid(x)
        return k * (1 - k)
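

    # Quick numeric check (illustrative, not in the original post):
    # sigmoid(0) = 0.5, so f'(0) = 0.5 * (1 - 0.5) = 0.25, and a central
    # finite difference around 0 agrees with deri_sigmoid.
    _eps = 1e-6
    assert np.isclose(deri_sigmoid(0.0), 0.25)
    assert np.isclose((sigmoid(_eps) - sigmoid(-_eps)) / (2 * _eps), 0.25)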
    
    
    def mse_loss(y_true, y_pred):
        # Mean squared error between arrays of targets and predictions
        return ((y_true - y_pred) ** 2).mean()
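

    # Worked example (illustrative numbers, not from the original post):
    # for targets [1, 0] and predictions [0.9, 0.2], the squared errors are
    # 0.01 and 0.04, so the MSE is (0.01 + 0.04) / 2 = 0.025.
    assert np.isclose(mse_loss(np.array([1, 0]), np.array([0.9, 0.2])), 0.025)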
    
    
    # A network with 2 inputs, one hidden layer of 2 neurons (h1, h2),
    # and 1 output neuron (o1); all neurons use the sigmoid activation.
    class OurNeuralNetwork:
        def __init__(self):
            # Weights, drawn from a standard normal distribution
            self.w1 = np.random.normal()
            self.w2 = np.random.normal()
            self.w3 = np.random.normal()
            self.w4 = np.random.normal()
            self.w5 = np.random.normal()
            self.w6 = np.random.normal()

            # Biases
            self.b1 = np.random.normal()
            self.b2 = np.random.normal()
            self.b3 = np.random.normal()
    
        def feedforward(self, x):
            # x is a numpy array with two elements (the input features)
            h1 = sigmoid(x[0] * self.w1 + x[1] * self.w2 + self.b1)
            h2 = sigmoid(x[0] * self.w3 + x[1] * self.w4 + self.b2)
            o1 = sigmoid(h1 * self.w5 + h2 * self.w6 + self.b3)
            return o1
    
        def train(self, data, all_y_trues):
            learn_rate = 0.1
            epochs = 1000  # full passes over the dataset
            for epoch in range(epochs):
                for x, y_true in zip(data, all_y_trues):
                    sum_h1 = x[0] * self.w1 + x[1] * self.w2 + self.b1
                    h1 = sigmoid(sum_h1)
                    sum_h2 = x[0] * self.w3 + x[1] * self.w4 + self.b2
                    h2 = sigmoid(sum_h2)
                    sum_o1 = h1 * self.w5 + h2 * self.w6 + self.b3
                    o1 = sigmoid(sum_o1)
                    y_pred = o1
    
                    dL_dypred = -2 * (y_true - y_pred)  # dL/dypred for L = (y_true - y_pred)^2
                    dypred_dw5 = deri_sigmoid(sum_o1) * h1
                    dypred_dw6 = deri_sigmoid(sum_o1) * h2
                    dypred_db3 = deri_sigmoid(sum_o1)
    
                    dypred_dh1 = deri_sigmoid(sum_o1) * self.w5
                    dypred_dh2 = deri_sigmoid(sum_o1) * self.w6
    
                    dh1_dw1 = deri_sigmoid(sum_h1) * x[0]
                    dh1_dw2 = deri_sigmoid(sum_h1) * x[1]
                    dh1_db1 = deri_sigmoid(sum_h1)
    
                    dh2_dw3 = deri_sigmoid(sum_h2) * x[0]
                    dh2_dw4 = deri_sigmoid(sum_h2) * x[1]
                    dh2_db2 = deri_sigmoid(sum_h2)
    
                    # Update weights and biases via the chain rule, e.g.
                    # w1 -= learn_rate * dL_dw1, where dL_dw1 = dL/dypred * dypred/dh1 * dh1/dw1
                    self.w5 -= learn_rate * dL_dypred * dypred_dw5
                    self.w6 -= learn_rate * dL_dypred * dypred_dw6
                    self.b3 -= learn_rate * dL_dypred * dypred_db3
    
                    self.w3 -= learn_rate * dL_dypred * dypred_dh2 * dh2_dw3
                    self.w4 -= learn_rate * dL_dypred * dypred_dh2 * dh2_dw4
                    self.b2 -= learn_rate * dL_dypred * dypred_dh2 * dh2_db2
    
                    self.w1 -= learn_rate * dL_dypred * dypred_dh1 * dh1_dw1
                    self.w2 -= learn_rate * dL_dypred * dypred_dh1 * dh1_dw2
                    self.b1 -= learn_rate * dL_dypred * dypred_dh1 * dh1_db1
    
                if epoch % 10 == 0:
                    y_preds = np.apply_along_axis(self.feedforward, 1, data)
                    loss = mse_loss(all_y_trues, y_preds)
                    print("Epoch %d loss: %0.3f" % (epoch, loss))
    
    
    # Define dataset
    data = np.array([
        [-2, -1], # Alice
        [25, 6],  # Bob
        [17, 4],  # Charlie
        [-15, -6] # Diana
    ])
    all_y_trues = np.array([
        1, # Alice
        0, # Bob
        0, # Charlie
        1 # Diana
    ])
    
    # Train our neural network!
    network = OurNeuralNetwork()
    network.train(data, all_y_trues)
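
Once trained, the network can score new points through feedforward. A minimal usage sketch, assuming the same two-feature input format as the training data (the sample points emily and frank below are made up for illustration):

    # Hypothetical new samples in the same two-feature format
    emily = np.array([-7, -3])
    frank = np.array([20, 2])
    print("emily: %0.3f" % network.feedforward(emily))  # label-1-like region (cf. Alice, Diana)
    print("frank: %0.3f" % network.feedforward(frank))  # label-0-like region (cf. Bob, Charlie)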