• Code for each machine learning method (OpenCV 2)
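
The single program below generates random 2D points, labels them with a chosen decision function, and then trains and evaluates five classifiers from the OpenCV 2 C++ ml module: an SVM (CvSVM), a backpropagation MLP (CvANN_MLP), k-nearest neighbours (CvKNearest), a normal Bayes classifier (CvNormalBayesClassifier), and a decision tree (CvDTree). Each method prints its accuracy on the test set and plots its predictions in a highgui window.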


#include <iostream>
#include <math.h>
#include <string>
#include "cv.h"
#include "ml.h"
#include "highgui.h"

using namespace cv;
using namespace std;

bool plotSupportVectors=true;   // also show the SVM support vectors in a separate window
int numTrainingPoints=200;      // number of training samples
int numTestPoints=2000;         // number of test samples
int size=200;                   // side length (pixels) of the plot windows; data in [0,1) is scaled by this
int eq=0;                       // which decision function f() uses to label the data

// accuracy: fraction of predictions whose sign matches the actual label (labels are -1/+1)
float evaluate(cv::Mat& predicted, cv::Mat& actual) {
    assert(predicted.rows == actual.rows);
    int t = 0;
    int f = 0;
    for(int i = 0; i < actual.rows; i++) {
        float p = predicted.at<float>(i,0);
        float a = actual.at<float>(i,0);
        if((p >= 0.0 && a >= 0.0) || (p <= 0.0 && a <= 0.0)) {
            t++;
        } else {
            f++;
        }
    }
    return (t * 1.0) / (t + f);
}

// plot data and class
void plot_binary(cv::Mat& data, cv::Mat& classes, string name) {
    cv::Mat plot(size, size, CV_8UC3);
    plot.setTo(cv::Scalar(255.0,255.0,255.0));
    for(int i = 0; i < data.rows; i++) {
        float x = data.at<float>(i,0) * size;
        float y = data.at<float>(i,1) * size;

        if(classes.at<float>(i, 0) > 0) {
            cv::circle(plot, Point(x,y), 2, CV_RGB(255,0,0), 1);
        } else {
            cv::circle(plot, Point(x,y), 2, CV_RGB(0,255,0), 1);
        }
    }
    cv::imshow(name, plot);
}

// function to learn
int f(float x, float y, int equation) {
    switch(equation) {
    case 0:
        return y > sin(x*10) ? -1 : 1;
    case 1:
        return y > cos(x*10) ? -1 : 1;
    case 2:
        return y > 2*x ? -1 : 1;
    case 3:
        return y > tan(x*10) ? -1 : 1;
    default:
        return y > cos(x*10) ? -1 : 1;
    }
}

// label data with equation
cv::Mat labelData(cv::Mat points, int equation) {
    cv::Mat labels(points.rows, 1, CV_32FC1);
    for(int i = 0; i < points.rows; i++) {
        float x = points.at<float>(i,0);
        float y = points.at<float>(i,1);
        labels.at<float>(i, 0) = f(x, y, equation);
    }
    return labels;
}

void svm(cv::Mat& trainingData, cv::Mat& trainingClasses, cv::Mat& testData, cv::Mat& testClasses) {
    CvSVMParams param = CvSVMParams();

    param.svm_type = CvSVM::C_SVC;
    param.kernel_type = CvSVM::RBF; // CvSVM::RBF, CvSVM::LINEAR ...
    param.degree = 0; // for poly
    param.gamma = 20; // for poly/rbf/sigmoid
    param.coef0 = 0; // for poly/sigmoid

    param.C = 7; // for CV_SVM_C_SVC, CV_SVM_EPS_SVR and CV_SVM_NU_SVR
    param.nu = 0.0; // for CV_SVM_NU_SVC, CV_SVM_ONE_CLASS, and CV_SVM_NU_SVR
    param.p = 0.0; // for CV_SVM_EPS_SVR

    param.class_weights = NULL; // for CV_SVM_C_SVC
    param.term_crit.type = CV_TERMCRIT_ITER | CV_TERMCRIT_EPS;
    param.term_crit.max_iter = 1000;
    param.term_crit.epsilon = 1e-6;

    // SVM training (use train_auto for OpenCV >= 2.0)
    CvSVM svm(trainingData, trainingClasses, cv::Mat(), cv::Mat(), param);

    cv::Mat predicted(testClasses.rows, 1, CV_32F);

    for(int i = 0; i < testData.rows; i++) {
        cv::Mat sample = testData.row(i);
        predicted.at<float>(i, 0) = svm.predict(sample);
    }

    cout << "Accuracy_{SVM} = " << evaluate(predicted, testClasses) << endl;
    plot_binary(testData, predicted, "Predictions SVM");

    // plot support vectors
    if(plotSupportVectors) {
        cv::Mat plot_sv(size, size, CV_8UC3);
        plot_sv.setTo(cv::Scalar(255.0,255.0,255.0));

        int svec_count = svm.get_support_vector_count();
        for(int vecNum = 0; vecNum < svec_count; vecNum++) {
            const float* vec = svm.get_support_vector(vecNum);
            cv::circle(plot_sv, Point(vec[0]*size, vec[1]*size), 3, CV_RGB(0, 0, 0));
        }
        cv::imshow("Support Vectors", plot_sv);
    }
}

void mlp(cv::Mat& trainingData, cv::Mat& trainingClasses, cv::Mat& testData, cv::Mat& testClasses) {

    // network topology: 2 inputs, two hidden layers (10 and 15 neurons), 1 output
    cv::Mat layers = cv::Mat(4, 1, CV_32SC1);
    layers.row(0) = cv::Scalar(2);
    layers.row(1) = cv::Scalar(10);
    layers.row(2) = cv::Scalar(15);
    layers.row(3) = cv::Scalar(1);

    CvANN_MLP mlp;
    CvANN_MLP_TrainParams params;
    CvTermCriteria criteria;
    criteria.max_iter = 100;
    criteria.epsilon = 0.00001f;
    criteria.type = CV_TERMCRIT_ITER | CV_TERMCRIT_EPS;
    params.train_method = CvANN_MLP_TrainParams::BACKPROP;
    params.bp_dw_scale = 0.05f;
    params.bp_moment_scale = 0.05f;
    params.term_crit = criteria;

    mlp.create(layers);

    // train
    mlp.train(trainingData, trainingClasses, cv::Mat(), cv::Mat(), params);

    cv::Mat predicted(testClasses.rows, 1, CV_32F);
    for(int i = 0; i < testData.rows; i++) {
        cv::Mat response(1, 1, CV_32FC1);
        cv::Mat sample = testData.row(i);

        mlp.predict(sample, response);
        predicted.at<float>(i,0) = response.at<float>(0,0);
    }

    cout << "Accuracy_{MLP} = " << evaluate(predicted, testClasses) << endl;
    plot_binary(testData, predicted, "Predictions Backpropagation");
}

void knn(cv::Mat& trainingData, cv::Mat& trainingClasses, cv::Mat& testData, cv::Mat& testClasses, int K) {

    CvKNearest knn(trainingData, trainingClasses, cv::Mat(), false, K);
    cv::Mat predicted(testClasses.rows, 1, CV_32F);
    for(int i = 0; i < testData.rows; i++) {
        const cv::Mat sample = testData.row(i);
        predicted.at<float>(i,0) = knn.find_nearest(sample, K);
    }

    cout << "Accuracy_{KNN} = " << evaluate(predicted, testClasses) << endl;
    plot_binary(testData, predicted, "Predictions KNN");
}

void bayes(cv::Mat& trainingData, cv::Mat& trainingClasses, cv::Mat& testData, cv::Mat& testClasses) {

    CvNormalBayesClassifier bayes(trainingData, trainingClasses);
    cv::Mat predicted(testClasses.rows, 1, CV_32F);
    for (int i = 0; i < testData.rows; i++) {
        const cv::Mat sample = testData.row(i);
        predicted.at<float>(i, 0) = bayes.predict(sample);
    }

    cout << "Accuracy_{BAYES} = " << evaluate(predicted, testClasses) << endl;
    plot_binary(testData, predicted, "Predictions Bayes");
}

void decisiontree(cv::Mat& trainingData, cv::Mat& trainingClasses, cv::Mat& testData, cv::Mat& testClasses) {

    CvDTree dtree;
    cv::Mat var_type(3, 1, CV_8U);

    // define the two attributes as numerical
    var_type.at<uchar>(0,0) = CV_VAR_NUMERICAL;
    var_type.at<uchar>(1,0) = CV_VAR_NUMERICAL;
    // define the output (response) as numerical
    var_type.at<uchar>(2,0) = CV_VAR_NUMERICAL;

    dtree.train(trainingData, CV_ROW_SAMPLE, trainingClasses, cv::Mat(), cv::Mat(), var_type, cv::Mat(), CvDTreeParams());
    cv::Mat predicted(testClasses.rows, 1, CV_32F);
    for (int i = 0; i < testData.rows; i++) {
        const cv::Mat sample = testData.row(i);
        CvDTreeNode* prediction = dtree.predict(sample);
        predicted.at<float>(i, 0) = prediction->value;
    }

    cout << "Accuracy_{TREE} = " << evaluate(predicted, testClasses) << endl;
    plot_binary(testData, predicted, "Predictions tree");
}

int main() {

    cv::Mat trainingData(numTrainingPoints, 2, CV_32FC1);
    cv::Mat testData(numTestPoints, 2, CV_32FC1);

    // random points in the unit square [0,1) x [0,1)
    cv::randu(trainingData, 0, 1);
    cv::randu(testData, 0, 1);

    cv::Mat trainingClasses = labelData(trainingData, eq);
    cv::Mat testClasses = labelData(testData, eq);

    plot_binary(trainingData, trainingClasses, "Training Data");
    plot_binary(testData, testClasses, "Test Data");

    svm(trainingData, trainingClasses, testData, testClasses);
    mlp(trainingData, trainingClasses, testData, testClasses);
    knn(trainingData, trainingClasses, testData, testClasses, 3);
    bayes(trainingData, trainingClasses, testData, testClasses);
    decisiontree(trainingData, trainingClasses, testData, testClasses);

    cv::waitKey();

    return 0;
}
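
As the comment in svm() notes, OpenCV 2.x also provides CvSVM::train_auto, which selects parameters such as C and gamma by k-fold cross-validation over built-in grids instead of hard-coding them. A minimal sketch of how the constructor call in svm() could be replaced (same param struct; 10 folds is an arbitrary choice here):

    // Keep svm_type/kernel_type from `param`, but let train_auto choose C,
    // gamma, etc. by 10-fold cross-validation over the default parameter grids.
    CvSVM svm;
    svm.train_auto(trainingData, trainingClasses, cv::Mat(), cv::Mat(), param, 10);

With OpenCV 2.x installed, the listing itself should build with something like g++ main.cpp `pkg-config --cflags --libs opencv` (assuming the pkg-config package is named opencv on your system).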

Image classification results: the original post shows the highgui windows produced by the program ("Training Data", "Test Data", "Support Vectors", and one "Predictions ..." window per method), with red points for the positive class and green points for the negative class; the screenshots are not reproduced here.

• Original source: https://www.cnblogs.com/ggYYa/p/7059071.html