    MATLAB Neural Networks (1): Practice in R

    Reworking what I learned in the MATLAB neural network series in R and writing it out again serves two purposes: it deepens understanding and memory, and it gives me R practice while highlighting the differences between R and MATLAB.
    To reuse the earlier data in R, first use MATLAB's writetable function to write the data (originally read from a .mat file) out to CSV files, so that R can read them in.

    writetable(T,filename) writes to a file with the name and extension specified by filename.

    writetable determines the file format based on the specified extension. The extension must be one of the following:

    1. .txt, .dat, or .csv for delimited text files
    2. .xls, .xlsm, or .xlsx for Excel® spreadsheet files
    3. .xlsb for Excel spreadsheet files supported on systems with Excel for Windows®

    See doc writetable.

    writetable(table(c1),"data1.csv");
    writetable(table(c2),"data2.csv");
    writetable(table(c3),"data3.csv");
    writetable(table(c4),"data4.csv");
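
    On the R side, the exported files can then be loaded with read.csv. A minimal sketch, assuming the four CSV files written above are in R's current working directory:

    # read the CSV files exported from MATLAB (file names as written above)
    data1 <- read.csv("data1.csv")
    data2 <- read.csv("data2.csv")
    data3 <- read.csv("data3.csv")
    data4 <- read.csv("data4.csv")
    str(data1)   # check that the columns arrived as expected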

     Here we use the classic iris dataset that ships with R (in the built-in datasets package); dplyr is loaded below only as a general data-manipulation toolkit.

    library(dplyr)

    ## Warning: package 'dplyr' was built under R version 3.5.3

    ##
    ## Attaching package: 'dplyr'

    ## The following objects are masked from 'package:stats':
    ##
    ## filter, lag

    ## The following objects are masked from 'package:base':
    ##
    ## intersect, setdiff, setequal, union

    dim(iris)

    ## [1] 150 5

    str(iris)

    ## 'data.frame': 150 obs. of  5 variables:
    ##  $ Sepal.Length: num  5.1 4.9 4.7 4.6 5 5.4 4.6 5 4.4 4.9 ...
    ##  $ Sepal.Width : num  3.5 3 3.2 3.1 3.6 3.9 3.4 3.4 2.9 3.1 ...
    ##  $ Petal.Length: num  1.4 1.4 1.3 1.5 1.4 1.7 1.4 1.5 1.4 1.5 ...
    ##  $ Petal.Width : num  0.2 0.2 0.2 0.2 0.2 0.4 0.3 0.2 0.2 0.1 ...
    ##  $ Species     : Factor w/ 3 levels "setosa","versicolor",..: 1 1 1 1 1 1 1 1 1 1 ...

    The dataset has 150 observations, 4 predictor variables, and 1 response variable (a factor); the irises fall into 3 species.

    sort: sort a vector or factor (partially) into ascending or descending order.

    order: returns a permutation which rearranges its first argument into ascending or descending order, breaking ties by further arguments.
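
    A quick illustration of the difference, and of why order() appears below: order(rnorm(n)) yields a random permutation of 1:n, which is used here to shuffle the rows (sample(n) would be the more direct idiom). This small example is not part of the original script:

    v <- c(3.2, 1.5, 2.7)
    sort(v)           # 1.5 2.7 3.2 -- the sorted values themselves
    order(v)          # 2 3 1       -- indices that sort v, so v[order(v)] equals sort(v)
    order(rnorm(5))   # a random permutation of 1:5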

    # a random permutation of 1:150, used to shuffle the data
    k<-rnorm(150)
    n<-order(k)
    input<-iris[,1:4]
    output1<-as.integer(iris[,5])   # class labels as integers 1/2/3
    # expand the 1-column labels into a 3-column one-hot encoding
    # (a vectorized alternative is sketched after the normalization step below)
    output<-as.data.frame(matrix(0,150,3))
    for(i in 1:150)
    {
      if(output1[i]==1)
        output[i,1]=1
      else if(output1[i]==2)
        output[i,2]=1
      else
        output[i,3]=1
    }
    # first 120 shuffled rows for training, remaining 30 for testing
    input_train=input[n[1:120],]
    output_train=output[n[1:120],]
    input_test=input[n[121:150],]
    output_test=output[n[121:150],]
    # column means and variances of the training inputs
    me<-apply(input_train,2,mean)
    va<-apply(input_train,2,var)
    me

    ## Sepal.Length Sepal.Width Petal.Length Petal.Width
    ## 5.875000 3.030000 3.849167 1.239167

    va

    ## Sepal.Length Sepal.Width Petal.Length Petal.Width
    ## 0.6707983 0.1876639 3.0223522 0.5622346

    # standardize the training inputs (zero mean, unit variance)
    inputn<-scale(input_train)
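
    As an aside, the label-encoding loop above can be replaced by a vectorized one-liner, and scale() already stores the statistics it used as attributes. A small sketch, not part of the original script (the name output_vec is mine):

    # one-hot encode the species factor without an explicit loop
    output_vec <- outer(as.integer(iris$Species), 1:3, "==") * 1   # 150 x 3 matrix of 0/1
    all(output_vec == as.matrix(output))                           # should be TRUE

    # scale() keeps the centering and scaling values it applied
    attr(inputn, "scaled:center")   # same as me
    attr(inputn, "scaled:scale")    # same as sqrt(va)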

    Next, initialize the neural network.

    The network structure is 4-5-3: 4 input nodes, 5 hidden nodes, and 3 output nodes.

    innum<-4     # number of input nodes
    midnum<-5    # number of hidden nodes
    outnum<-3    # number of output nodes
    # initialize the weights and biases randomly
    w1<-matrix(rnorm(innum*midnum),midnum,innum)     # input -> hidden weights (5 x 4)
    b1<-rnorm(midnum)                                # hidden-layer biases
    w2<-matrix(rnorm(outnum*midnum),midnum,outnum)   # hidden -> output weights (5 x 3)
    b2<-rnorm(outnum)                                # output-layer biases
    # learning rate and number of training epochs
    xite<-0.1
    loopNumber<-50
    # working vectors for the hidden layer and the hidden-layer gradients
    I<-rep(0,midnum)
    Iout<-rep(0,midnum)
    FI<-rep(0,midnum)
    dw1<-matrix(0,innum,midnum)
    db1<-rep(0,midnum)
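
    With these shapes in place, the forward pass that the training loop below implements can be written compactly as a small helper. This is only a sketch for reference (the name forward is mine, not part of the original script):

    # hidden layer: sigmoid(w1 %*% x + b1); output layer: linear, t(w2) %*% hidden + b2
    forward <- function(x, w1, b1, w2, b2) {
      hidden <- 1/(1 + exp(-(w1 %*% x + b1)))   # 5 x 1 hidden activations
      as.vector(t(w2) %*% hidden + b2)          # length-3 output
    }
    forward(inputn[1,], w1, b1, w2, b2)   # output of the (untrained) network for one sample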

    Neural network training

    E<-rep(0,loopNumber)   # accumulated absolute error per epoch
    for(ii in 1:loopNumber)
    {
      for(i in 1:120)
      {
        # forward pass: sigmoid hidden layer, then linear output layer
        x=inputn[i,]
        for(j in 1:midnum)
        {
          I[j]<-sum(inputn[i,]*w1[j,])+b1[j]
          Iout[j]<-1/(1+exp(-I[j]))
        }
        yn<-t(w2)%*%Iout+b2

        # prediction error for this sample
        e<-output_train[i,]-yn
        E[ii]<-E[ii]+sum(abs(e))

        # gradients for the output layer
        dw2<-t(e)%*%Iout
        db2<-e

        # derivative of the sigmoid: f'(x) = f(x)*(1-f(x))
        for(j in 1:midnum)
        {
          S<-1/(1+exp(-I[j]))
          FI[j]<-S*(1-S)
        }

        # gradients for the hidden layer (back-propagated error)
        for(k in 1:innum)
        {
          for(j in 1:midnum)
          {
            dw1[k,j]<-FI[j]*x[k]*sum(e*w2[j,])
            db1[j]<-FI[j]*sum(e*w2[j,])
          }
        }

        # update weights and biases with learning rate xite
        w1<-w1+xite*t(dw1)
        b1<-b1+xite*t(db1)
        w2<-w2+xite*t(dw2)
        b2<-b2+xite*t(db2)
      }
    }
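
    E now holds the total absolute error accumulated over the 120 training samples in each epoch, so a quick convergence check is simply (a sketch, not in the original post):

    # the error should decrease and level off as training converges
    plot(E, type = "l", xlab = "epoch", ylab = "sum of absolute errors")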

    Prediction on the test set

    # standardize the test inputs with the training means and variances
    inputn_test<-input_test
    for(i in 1:30)
    {
      inputn_test[i,]<-(input_test[i,]-me)/va^0.5
    }
    # forward pass over the 30 test samples
    fore<-matrix(0,3,30)
    for(i in 1:30)
    {
      for(j in 1:midnum)
      {
        I[j]<-sum(inputn_test[i,]*w1[j,])+b1[j]
        I<-unlist(I)
        Iout[j]<-1/(1+exp(-I[j]))
      }
      fore[,i]<-t(w2)%*%Iout+b2
    }
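
    The per-row standardization loop above could equivalently use scale() with the stored training statistics. An alternative sketch, not the original code:

    # standardize the test inputs with the training mean and standard deviation
    inputn_test <- scale(input_test, center = me, scale = sqrt(va))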

    Analysis of the results

    # decode each prediction as the class with the largest output value
    output_fore<-rep(0,30)
    for(i in 1:30)
    {
      output_fore[i]<-which.max(fore[,i])
    }
    # nonzero entries mark misclassified samples
    error<-output_fore-output1[n[121:150]]
    # confusion matrix (named conf_mat so that base::t is not masked)
    conf_mat<-table(output_fore,output1[n[121:150]])
    conf_mat

    ##
    ## output_fore 1 2 3
    ## 1 13 0 0
    ## 2 0 9 0
    ## 3 0 0 8

    # accuracy on the test set
    options(digits=3)
    right_ratio<-(conf_mat[1,1]+conf_mat[2,2]+conf_mat[3,3])/30
    result<-paste("Accuracy:",round(right_ratio*100,digits=3),"%")
    result

    ## [1] "正确率是 100 %"

    Original post: https://www.cnblogs.com/dingdangsunny/p/12323493.html