• BP Neural Network


    Header file

    #pragma once

    #include <iostream>
    #include <cmath>
    #include <vector>
    #include <cstdlib>
    #include <ctime>

    using namespace std;

    #define innode 2         // number of input nodes
    #define hidenode 4       // number of hidden nodes (per hidden layer)
    #define hidelayer 1      // number of hidden layers
    #define outnode 1        // number of output nodes
    #define learningRate 0.9 // learning rate (alpha)

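    // With these settings the topology is 2-4-1: two inputs, one hidden
    // layer of four nodes, and one output -- enough for the XOR problem
    // trained in main().
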
    // --- random number generator for the range -1 ~ 1 ---
    inline double get_11Random()
    {
        return ((2.0 * (double)rand() / RAND_MAX) - 1);
    }

    // --- sigmoid function ---
    inline double sigmoid(double x)
    {
        return 1.0 / (1.0 + exp(-x));
    }
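    // Note: the backpropagation code relies on the identity
    // sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)).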

    // --- Input-layer node. Members: ---
    // 1. value:     the fixed input value;
    // 2. weight:    one weight for each node of the first hidden layer;
    // 3. wDeltaSum: accumulated weight deltas, one per first-hidden-layer node
    typedef struct inputNode
    {
        double value;
        vector<double> weight, wDeltaSum;
    } inputNode;

    // --- Output-layer node. Members: ---
    // 1. value:     current node value;
    // 2. delta:     delta against the correct output value;
    // 3. rightout:  the correct (target) output value;
    // 4. bias:      bias term;
    // 5. bDeltaSum: accumulated bias deltas, one per node
    typedef struct outputNode   // output-layer node
    {
        double value, delta, rightout, bias, bDeltaSum;
    } outputNode;

    // --- Hidden-layer node. Members: ---
    // 1. value:     current node value;
    // 2. delta:     delta derived by backpropagation;
    // 3. bias:      bias term;
    // 4. bDeltaSum: accumulated bias deltas, one per node;
    // 5. weight:    one weight for each node of the next layer (hidden/output);
    // 6. wDeltaSum: accumulated weight deltas, one per next-layer node
    typedef struct hiddenNode   // hidden-layer node
    {
        double value, delta, bias, bDeltaSum;
        vector<double> weight, wDeltaSum;
    } hiddenNode;

    // --- a single sample ---
    typedef struct sample
    {
        vector<double> in, out;
    } sample;

    // --- BP neural network ---
    class BpNet
    {
    public:
        BpNet();                        // constructor
        void forwardPropagationEpoc();  // forward pass for a single sample
        void backPropagationEpoc();     // backward pass for a single sample

        // train until error < threshold, updating weight and bias
        void training (const vector<sample>& sampleGroup, double threshold);
        // run the network on test samples, filling in their outputs
        void predict  (vector<sample>& testGroup);
        // load a sample's inputs into the input layer
        void setInput (const vector<double>& sampleIn);
        // load a sample's target outputs into the output layer
        void setOutput(const vector<double>& sampleOut);

    public:
        double error;
        inputNode*  inputLayer[innode];                 // input layer (a single layer)
        outputNode* outputLayer[outnode];               // output layer (a single layer)
        hiddenNode* hiddenLayer[hidelayer][hidenode];   // hidden layers (possibly several)
    };
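
    For reference, every hidden and output node computes the sigmoid of the
    weighted sum of the previous layer's values plus its own bias; in symbols
    (notation mine, matching forwardPropagationEpoc below):

        v_j = \sigma\left( \sum_k w_{kj}\, v_k + b_j \right),
        \qquad \sigma(x) = \frac{1}{1 + e^{-x}}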

    Main program: main

    #include "BP.h"

    int main()
    {
        BpNet testNet;

        // training samples (the XOR function)
        vector<double> samplein[4];
        vector<double> sampleout[4];
        samplein[0].push_back(0); samplein[0].push_back(0); sampleout[0].push_back(0);
        samplein[1].push_back(0); samplein[1].push_back(1); sampleout[1].push_back(1);
        samplein[2].push_back(1); samplein[2].push_back(0); sampleout[2].push_back(1);
        samplein[3].push_back(1); samplein[3].push_back(1); sampleout[3].push_back(0);
        sample sampleInOut[4];
        for (int i = 0; i < 4; i++)
        {
            sampleInOut[i].in  = samplein[i];
            sampleInOut[i].out = sampleout[i];
        }
        vector<sample> sampleGroup(sampleInOut, sampleInOut + 4);
        testNet.training(sampleGroup, 0.0001);

        // test data
        vector<double> testin[4];
        vector<double> testout[4];
        testin[0].push_back(0.1);   testin[0].push_back(0.2);
        testin[1].push_back(0.15);  testin[1].push_back(0.9);
        testin[2].push_back(1.1);   testin[2].push_back(0.01);
        testin[3].push_back(0.88);  testin[3].push_back(1.03);
        sample testInOut[4];
        for (int i = 0; i < 4; i++) testInOut[i].in = testin[i];
        vector<sample> testGroup(testInOut, testInOut + 4);

        // run the prediction and print the results
        testNet.predict(testGroup);
        for (size_t i = 0; i < testGroup.size(); i++)
        {
            for (size_t j = 0; j < testGroup[i].in.size(); j++)
                cout << testGroup[i].in[j] << "\t";
            cout << "-- prediction :";
            for (size_t j = 0; j < testGroup[i].out.size(); j++)
                cout << testGroup[i].out[j] << "\t";
            cout << endl;
        }

        system("pause");
        return 0;
    }
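
    Since the network is trained on XOR, the predictions for the four test
    rows should land near 0, 1, 1 and 0 respectively (each test input is a
    noisy version of a binary XOR input). As a side note, with a C++11
    compiler the sample set can be built more compactly through aggregate
    initialization; a minimal sketch, equivalent to the array-based
    construction above:

        vector<sample> sampleGroup = {
            { {0, 0}, {0} },
            { {0, 1}, {1} },
            { {1, 0}, {1} },
            { {1, 1}, {0} },
        };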

    Source file: bp.cpp

    #include "BP.h"

    using namespace std;

    BpNet::BpNet()
    {
        srand((unsigned)time(NULL));    // seed the random number generator
        error = 100.0;                  // initial error; any large value will do

        // initialize the input layer
        for (int i = 0; i < innode; i++)
        {
            inputLayer[i] = new inputNode();
            for (int j = 0; j < hidenode; j++)
            {
                inputLayer[i]->weight.push_back(get_11Random());
                inputLayer[i]->wDeltaSum.push_back(0.0);
            }
        }

        // initialize the hidden layers
        for (int i = 0; i < hidelayer; i++)
        {
            if (i == hidelayer - 1)
            {
                for (int j = 0; j < hidenode; j++)
                {
                    hiddenLayer[i][j] = new hiddenNode();
                    hiddenLayer[i][j]->bias = get_11Random();
                    for (int k = 0; k < outnode; k++)
                    {
                        hiddenLayer[i][j]->weight.push_back(get_11Random());
                        hiddenLayer[i][j]->wDeltaSum.push_back(0.0);
                    }
                }
            }
            else
            {
                for (int j = 0; j < hidenode; j++)
                {
                    hiddenLayer[i][j] = new hiddenNode();
                    hiddenLayer[i][j]->bias = get_11Random();
                    for (int k = 0; k < hidenode; k++)
                    {
                        hiddenLayer[i][j]->weight.push_back(get_11Random());
                        hiddenLayer[i][j]->wDeltaSum.push_back(0.0);
                    }
                }
            }
        }

        // initialize the output layer
        for (int i = 0; i < outnode; i++)
        {
            outputLayer[i] = new outputNode();
            outputLayer[i]->bias = get_11Random();
        }
    }

    void BpNet::forwardPropagationEpoc()
    {
        // forward propagation on hidden layer
        for (int i = 0; i < hidelayer; i++)
        {
            if (i == 0)
            {
                for (int j = 0; j < hidenode; j++)
                {
                    double sum = 0.0;
                    for (int k = 0; k < innode; k++)
                    {
                        sum += inputLayer[k]->value * inputLayer[k]->weight[j];
                    }
                    sum += hiddenLayer[i][j]->bias;
                    hiddenLayer[i][j]->value = sigmoid(sum);
                }
            }
            else
            {
                for (int j = 0; j < hidenode; j++)
                {
                    double sum = 0.0;
                    for (int k = 0; k < hidenode; k++)
                    {
                        sum += hiddenLayer[i-1][k]->value * hiddenLayer[i-1][k]->weight[j];
                    }
                    sum += hiddenLayer[i][j]->bias;
                    hiddenLayer[i][j]->value = sigmoid(sum);
                }
            }
        }

        // forward propagation on output layer
        for (int i = 0; i < outnode; i++)
        {
            double sum = 0.0;
            for (int j = 0; j < hidenode; j++)
            {
                sum += hiddenLayer[hidelayer-1][j]->value * hiddenLayer[hidelayer-1][j]->weight[i];
            }
            sum += outputLayer[i]->bias;
            outputLayer[i]->value = sigmoid(sum);
        }
    }

    void BpNet::backPropagationEpoc()
    {
        // backward propagation on output layer
        // -- compute delta
        for (int i = 0; i < outnode; i++)
        {
            double tmpe = fabs(outputLayer[i]->value - outputLayer[i]->rightout);
            error += tmpe * tmpe / 2;

            outputLayer[i]->delta
                = (outputLayer[i]->value - outputLayer[i]->rightout) * (1 - outputLayer[i]->value) * outputLayer[i]->value;
        }

        // backward propagation on hidden layer
        // -- compute delta
        for (int i = hidelayer - 1; i >= 0; i--)    // walk the hidden layers from back to front
        {
            if (i == hidelayer - 1)
            {
                for (int j = 0; j < hidenode; j++)
                {
                    double sum = 0.0;
                    for (int k = 0; k < outnode; k++)
                    {
                        sum += outputLayer[k]->delta * hiddenLayer[i][j]->weight[k];
                    }
                    hiddenLayer[i][j]->delta
                        = sum * (1 - hiddenLayer[i][j]->value) * hiddenLayer[i][j]->value;
                }
            }
            else
            {
                for (int j = 0; j < hidenode; j++)
                {
                    double sum = 0.0;
                    for (int k = 0; k < hidenode; k++)
                    {
                        sum += hiddenLayer[i+1][k]->delta * hiddenLayer[i][j]->weight[k];
                    }
                    hiddenLayer[i][j]->delta
                        = sum * (1 - hiddenLayer[i][j]->value) * hiddenLayer[i][j]->value;
                }
            }
        }

        // backward propagation on input layer
        // -- update weight delta sum
        for (int i = 0; i < innode; i++)
        {
            for (int j = 0; j < hidenode; j++)
            {
                inputLayer[i]->wDeltaSum[j] += inputLayer[i]->value * hiddenLayer[0][j]->delta;
            }
        }

        // backward propagation on hidden layer
        // -- update weight delta sum & bias delta sum
        for (int i = 0; i < hidelayer; i++)
        {
            if (i == hidelayer - 1)
            {
                for (int j = 0; j < hidenode; j++)
                {
                    hiddenLayer[i][j]->bDeltaSum += hiddenLayer[i][j]->delta;
                    for (int k = 0; k < outnode; k++)
                    {
                        hiddenLayer[i][j]->wDeltaSum[k] += hiddenLayer[i][j]->value * outputLayer[k]->delta;
                    }
                }
            }
            else
            {
                for (int j = 0; j < hidenode; j++)
                {
                    hiddenLayer[i][j]->bDeltaSum += hiddenLayer[i][j]->delta;
                    for (int k = 0; k < hidenode; k++)
                    {
                        hiddenLayer[i][j]->wDeltaSum[k] += hiddenLayer[i][j]->value * hiddenLayer[i+1][k]->delta;
                    }
                }
            }
        }

        // backward propagation on output layer
        // -- update bias delta sum
        for (int i = 0; i < outnode; i++) outputLayer[i]->bDeltaSum += outputLayer[i]->delta;
    }

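    // Note: backPropagationEpoc only accumulates per-sample gradients in
    // wDeltaSum / bDeltaSum; training() below averages them over the whole
    // sample set and applies one update per pass (batch gradient descent).
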
    void BpNet::training(const vector<sample>& sampleGroup, double threshold)
    {
        int sampleNum = sampleGroup.size();

        while (error > threshold)
        //for (int curTrainingTime = 0; curTrainingTime < trainingTime; curTrainingTime++)
        {
            cout << "training error: " << error << endl;
            error = 0.0;
            // initialize delta sum
            for (int i = 0; i < innode; i++) inputLayer[i]->wDeltaSum.assign(inputLayer[i]->wDeltaSum.size(), 0.0);
            for (int i = 0; i < hidelayer; i++)
            {
                for (int j = 0; j < hidenode; j++)
                {
                    hiddenLayer[i][j]->wDeltaSum.assign(hiddenLayer[i][j]->wDeltaSum.size(), 0.0);
                    hiddenLayer[i][j]->bDeltaSum = 0.0;
                }
            }
            for (int i = 0; i < outnode; i++) outputLayer[i]->bDeltaSum = 0.0;

            for (int iter = 0; iter < sampleNum; iter++)
            {
                setInput(sampleGroup[iter].in);
                setOutput(sampleGroup[iter].out);

                forwardPropagationEpoc();
                backPropagationEpoc();
            }

            // backward propagation on input layer
            // -- update weight
            for (int i = 0; i < innode; i++)
            {
                for (int j = 0; j < hidenode; j++)
                {
                    inputLayer[i]->weight[j] -= learningRate * inputLayer[i]->wDeltaSum[j] / sampleNum;
                }
            }

            // backward propagation on hidden layer
            // -- update weight & bias
            for (int i = 0; i < hidelayer; i++)
            {
                if (i == hidelayer - 1)
                {
                    for (int j = 0; j < hidenode; j++)
                    {
                        // bias
                        hiddenLayer[i][j]->bias -= learningRate * hiddenLayer[i][j]->bDeltaSum / sampleNum;

                        // weight
                        for (int k = 0; k < outnode; k++)
                        {
                            hiddenLayer[i][j]->weight[k] -= learningRate * hiddenLayer[i][j]->wDeltaSum[k] / sampleNum;
                        }
                    }
                }
                else
                {
                    for (int j = 0; j < hidenode; j++)
                    {
                        // bias
                        hiddenLayer[i][j]->bias -= learningRate * hiddenLayer[i][j]->bDeltaSum / sampleNum;

                        // weight
                        for (int k = 0; k < hidenode; k++)
                        {
                            hiddenLayer[i][j]->weight[k] -= learningRate * hiddenLayer[i][j]->wDeltaSum[k] / sampleNum;
                        }
                    }
                }
            }

            // backward propagation on output layer
            // -- update bias
            for (int i = 0; i < outnode; i++)
            {
                outputLayer[i]->bias -= learningRate * outputLayer[i]->bDeltaSum / sampleNum;
            }
        }
    }

    void BpNet::predict(vector<sample>& testGroup)
    {
        int testNum = testGroup.size();

        for (int iter = 0; iter < testNum; iter++)
        {
            testGroup[iter].out.clear();
            setInput(testGroup[iter].in);

            // forward propagation on hidden layer
            for (int i = 0; i < hidelayer; i++)
            {
                if (i == 0)
                {
                    for (int j = 0; j < hidenode; j++)
                    {
                        double sum = 0.0;
                        for (int k = 0; k < innode; k++)
                        {
                            sum += inputLayer[k]->value * inputLayer[k]->weight[j];
                        }
                        sum += hiddenLayer[i][j]->bias;
                        hiddenLayer[i][j]->value = sigmoid(sum);
                    }
                }
                else
                {
                    for (int j = 0; j < hidenode; j++)
                    {
                        double sum = 0.0;
                        for (int k = 0; k < hidenode; k++)
                        {
                            sum += hiddenLayer[i-1][k]->value * hiddenLayer[i-1][k]->weight[j];
                        }
                        sum += hiddenLayer[i][j]->bias;
                        hiddenLayer[i][j]->value = sigmoid(sum);
                    }
                }
            }

            // forward propagation on output layer
            for (int i = 0; i < outnode; i++)
            {
                double sum = 0.0;
                for (int j = 0; j < hidenode; j++)
                {
                    sum += hiddenLayer[hidelayer-1][j]->value * hiddenLayer[hidelayer-1][j]->weight[i];
                }
                sum += outputLayer[i]->bias;
                outputLayer[i]->value = sigmoid(sum);
                testGroup[iter].out.push_back(outputLayer[i]->value);
            }
        }
    }

    void BpNet::setInput(const vector<double>& sampleIn)
    {
        for (int i = 0; i < innode; i++)
            inputLayer[i]->value = sampleIn[i];
    }

    void BpNet::setOutput(const vector<double>& sampleOut)
    {
        for (int i = 0; i < outnode; i++)
            outputLayer[i]->rightout = sampleOut[i];
    }
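
    For reference, the delta formulas in backPropagationEpoc follow from
    minimizing the squared error of a sigmoid network by gradient descent;
    a sketch of the derivation (notation mine): with y an output node's
    value, t its target (rightout), h_j a hidden node's value, and v_j the
    value feeding weight w_{jk},

        E = \tfrac{1}{2}\,(y - t)^2, \qquad y = \sigma(\mathrm{net}), \qquad
        \sigma'(\mathrm{net}) = y\,(1 - y)

        \delta^{\mathrm{out}} = \frac{\partial E}{\partial \mathrm{net}}
                              = (y - t)\, y\, (1 - y)

        \delta_j^{\mathrm{hidden}} = \Big( \sum_k \delta_k\, w_{jk} \Big)\, h_j\, (1 - h_j)

        w_{jk} \leftarrow w_{jk} - \alpha \cdot \frac{1}{N} \sum_{\mathrm{samples}} \delta_k\, v_j

    Here \alpha is learningRate and N is sampleNum, which matches the
    averaged batch update applied once per pass in training().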
  • Original article: https://www.cnblogs.com/hsy1941/p/9217398.html