Getting Started with TensorFlow.NET Machine Learning [6]: Processing Fashion-MNIST with a Neural Network


    "如果一个算法在MNIST上不work,那么它就根本没法用;而如果它在MNIST上work,它在其他数据上也可能不work"。

                                                                                                                                                —— 马克吐温

    In the previous article we built an MNIST handwritten-digit recognizer: with a simple two-layer neural network it easily reached 98% accuracy. That number does not prove the network is effective, because MNIST is simply too easy; we need a harder dataset to put the network to the test. This is exactly what Fashion-MNIST is for: it replaces the digits 0~9 with images of 10 categories of clothing, while keeping the same image size and sample count as MNIST.
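    For reference, the subfolder names 0~9 in the dataset follow the standard Fashion-MNIST label order. A small lookup table like the one below (not part of the original project, added here only to make predictions easier to read) shows the mapping:

        // Standard Fashion-MNIST label order (labels 0..9).
        // Used only to turn a numeric prediction into a readable clothing name.
        static readonly string[] FashionMnistClassNames =
        {
            "T-shirt/top", "Trouser", "Pullover", "Dress", "Coat",
            "Sandal", "Shirt", "Sneaker", "Bag", "Ankle boot"
        };

        // Example: ClassName(9) returns "Ankle boot".
        static string ClassName(int label) => FashionMnistClassNames[label];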

    The code from the previous article needs almost no changes; just point it at the folder that holds the new raw image files.

    The program reaches a recognition accuracy of roughly 82%.

    We can tweak the network to see whether the accuracy improves. The main options are listed below (a sketch of such tweaks follows the list):

    1. Add more layers

    2. Increase the number of neurons per layer

    3. Switch to a different activation function
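    For example, a deeper and wider variant of the BuildModel method shown in the full listing below might look like this. This is only a rough sketch: the layer sizes and the Tanh activation are arbitrary illustrative choices, not recommendations from the original project.

            // A hypothetical variant of BuildModel: one extra hidden layer, more
            // neurons per layer, and a different activation on the last hidden layer.
            private Model BuildWiderModel()
            {
                var model = keras.Sequential(new List<ILayer>
                {
                    keras.layers.InputLayer((img_rows, img_cols)),
                    keras.layers.Flatten(),
                    keras.layers.Rescaling(1.0f / 255),
                    keras.layers.Dense(256, activation: keras.activations.Relu),
                    keras.layers.Dense(256, activation: keras.activations.Relu),
                    keras.layers.Dense(128, activation: keras.activations.Tanh),
                    keras.layers.Dense(num_classes, activation: keras.activations.Softmax)
                });

                return model;
            }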

    Experiments show that no matter how the network is adjusted along these lines, the accuracy barely moves. The fully connected approach has clearly hit a ceiling; a substantial improvement will require a different kind of model.

    In the next article we will introduce convolutional neural networks (CNNs). Using a CNN to process image data is a better and more principled solution.

    Since the code is almost identical to the previous article's, it is not explained in detail here. The full code is listed below:

        /// <summary>
        /// Process the Fashion-MNIST dataset with a neural network
        /// </summary>
        public class NN_MultipleClassification_Fashion_MNIST
        {
            private readonly string TrainImagePath = @"D:\Study\Blogs\TF_Net\Asset\fashion_mnist_png\train";
            private readonly string TestImagePath = @"D:\Study\Blogs\TF_Net\Asset\fashion_mnist_png\test";
            private readonly string train_date_path = @"D:\Study\Blogs\TF_Net\Asset\fashion_mnist_png\train_data.bin";
            private readonly string train_label_path = @"D:\Study\Blogs\TF_Net\Asset\fashion_mnist_png\train_label.bin";
    
            private readonly int img_rows = 28;
            private readonly int img_cols = 28;
            private readonly int num_classes = 10;  // total classes
    
            public void Run()
            {
                var model = BuildModel();
                model.summary();
    
                model.compile(optimizer: keras.optimizers.Adam(0.001f),
                    loss: keras.losses.SparseCategoricalCrossentropy(),
                    metrics: new[] { "accuracy" });
    
                (NDArray train_x, NDArray train_y) = LoadTrainingData();
                model.fit(train_x, train_y, batch_size: 1024, epochs: 20);
    
                test(model);
            }
    
            /// <summary>
            /// Build the network model
            /// </summary>
            private Model BuildModel()
            {
                // Network parameters
                int n_hidden_1 = 128;    // 1st layer number of neurons.     
                int n_hidden_2 = 128;    // 2nd layer number of neurons.                                
                float scale = 1.0f / 255;
    
                var model = keras.Sequential(new List<ILayer>
                {
                    keras.layers.InputLayer((img_rows,img_cols)),
                    keras.layers.Flatten(),
                    keras.layers.Rescaling(scale),
                    keras.layers.Dense(n_hidden_1, activation:keras.activations.Relu),
                    keras.layers.Dense(n_hidden_2, activation:keras.activations.Relu),
                    keras.layers.Dense(num_classes, activation:keras.activations.Softmax)
                });
    
                return model;
            }
    
            /// <summary>
            /// Load the training data; falls back to LoadRawData when the cached binary files cannot be read
            /// </summary>
            private (NDArray, NDArray) LoadTrainingData()
            {
                try
                {
                    Console.WriteLine("Load data");
                    IFormatter serializer = new BinaryFormatter();
                    float[,,] arrx;
                    int[] arry;

                    // Dispose each stream as soon as its cached array has been deserialized.
                    using (var loadFile = new FileStream(train_date_path, FileMode.Open, FileAccess.Read))
                        arrx = serializer.Deserialize(loadFile) as float[,,];

                    using (var loadFile = new FileStream(train_label_path, FileMode.Open, FileAccess.Read))
                        arry = serializer.Deserialize(loadFile) as int[];
                    Console.WriteLine("Load data success");
                    return (np.array(arrx), np.array(arry));
                }
                catch (Exception ex)
                {
                    Console.WriteLine($"Load data Exception:{ex.Message}");
                    return LoadRawData();
                }
            }
    
            private (NDArray, NDArray) LoadRawData()
            {
                Console.WriteLine("LoadRawData");
    
                int total_size = 60000;
                float[,,] arrx = new float[total_size, img_rows, img_cols];
                int[] arry = new int[total_size];
    
                int count = 0;
    
                DirectoryInfo RootDir = new DirectoryInfo(TrainImagePath);
                foreach (var Dir in RootDir.GetDirectories())
                {
                    foreach (var file in Dir.GetFiles("*.png"))
                    {
                        Bitmap bmp = (Bitmap)Image.FromFile(file.FullName);
                        if (bmp.Width != img_cols || bmp.Height != img_rows)
                        {
                            continue;
                        }
    
                        // The subfolder name (0-9) is the class label for every image it contains.
                        arry[count] = int.Parse(Dir.Name);

                        for (int row = 0; row < img_rows; row++)
                            for (int col = 0; col < img_cols; col++)
                            {
                                // Average the RGB channels into a single grayscale value.
                                var pixel = bmp.GetPixel(col, row);
                                int val = (pixel.R + pixel.G + pixel.B) / 3;
                                arrx[count, row, col] = val;
                            }

                        count++;
                    }
    
                    Console.WriteLine($"Load image data count={count}");
                }
    
                Console.WriteLine("LoadRawData finished");
                //Save Data
                Console.WriteLine("Save data");
                IFormatter serializer = new BinaryFormatter();
    
                // Serialize the arrays so later runs can skip the slow image parsing
                FileStream saveFile = new FileStream(train_date_path, FileMode.Create, FileAccess.Write);
                serializer.Serialize(saveFile, arrx);
                saveFile.Close();
    
                saveFile = new FileStream(train_label_path, FileMode.Create, FileAccess.Write);
                serializer.Serialize(saveFile, arry);
                saveFile.Close();
                Console.WriteLine("Save data finished");
    
                return (np.array(arrx), np.array(arry));
            }
    
            /// <summary>
            /// Test the trained model on random images from the test set
            /// </summary>
            private void test(Model model)
            {
                Random rand = new Random(1);
    
                DirectoryInfo TestDir = new DirectoryInfo(TestImagePath);
                foreach (var ChildDir in TestDir.GetDirectories())
                {
                    Console.WriteLine($"Folder:【{ChildDir.Name}】");
                    var Files = ChildDir.GetFiles("*.png");
                    for (int i = 0; i < 10; i++)
                    {
                        int index = rand.Next(1000);
                        var image = Files[index];
    
                        var x = LoadImage(image.FullName);
                        var pred_y = model.Apply(x);
                        var result = argmax(pred_y[0].numpy());
    
                        Console.WriteLine($"FileName:{image.Name}\tPred:{result}");
                    }
                }
            }
    
            private NDArray LoadImage(string filename)
            {
                float[,,] arrx = new float[1, img_rows, img_cols];
                Bitmap bmp = (Bitmap)Image.FromFile(filename);
    
                for (int row = 0; row < img_rows; row++)
                    for (int col = 0; col < img_cols; col++)
                    {
                        var pixel = bmp.GetPixel(col, row);
                        int val = (pixel.R + pixel.G + pixel.B) / 3;
                        arrx[0, row, col] = val;
                    }
    
                return np.array(arrx);
            }
    
            private int argmax(NDArray array)
            {
                // Return the index of the largest value, i.e. the predicted class.
                var arr = array.reshape(-1);

                int maxIndex = 0;
                float max = arr[0];
                for (int i = 1; i < num_classes; i++)
                {
                    if (arr[i] > max)
                    {
                        max = arr[i];
                        maxIndex = i;
                    }
                }

                return maxIndex;
            }
        }

    [Related Resources]

    Source code (Git): https://gitee.com/seabluescn/tf_not.git

    Project name: NN_MultipleClassification_Fashion_MNIST

    Index: see the table of contents of the Getting Started with TensorFlow.NET Machine Learning series

