• MNIST Exercise


    MNIST handwritten digit recognition exercise

    Import packages and load the data

    import keras
    import numpy as np
    import matplotlib.pyplot as plt
    from keras import Model, Input, optimizers
    from keras.layers import Dense, Conv2D, MaxPool2D
    from keras.datasets import mnist

    # Load MNIST: 60,000 training and 10,000 test grayscale images, 28x28 each
    (X_train, Y_train), (X_test, Y_test) = mnist.load_data()
    

    Visualize the training data

    def mnist_show(X_train, Y_train):
        """Show an m x n grid of randomly chosen training digits with their labels."""
        m = 3  # grid rows
        n = 3  # grid columns
        plt.figure()
        for i in range(m):
            for j in range(n):
                idx = np.random.choice(X_train.shape[0])
                plt.subplot(m, n, i * n + j + 1)  # row-major position in the grid
                plt.imshow(X_train[idx], cmap='gray')
                plt.title(Y_train[idx])
        plt.show()
    mnist_show(X_train, Y_train)
    

    (figure: 3 x 3 grid of random training digits, each titled with its label)

    Data preprocessing

    # Data preprocessing: keep a small subset to speed up the exercise
    X_train = X_train[0:500]
    Y_train = Y_train[0:500]
    X_test = X_test[0:100]
    Y_test = Y_test[0:100]
    print(X_train.shape, Y_train.shape)

    # Scale pixels to [0, 1] and add the trailing channel axis expected by Conv2D
    X_tr = (X_train / 255.0).reshape(X_train.shape[0], 28, 28, 1)
    X_te = (X_test / 255.0).reshape(X_test.shape[0], 28, 28, 1)
    print(X_tr.shape)
    print(X_te.shape)

    # One-hot encode the integer labels for categorical cross-entropy
    Y_tr = keras.utils.to_categorical(Y_train, 10)
    Y_te = keras.utils.to_categorical(Y_test, 10)
    print(Y_tr.shape, Y_te.shape)
    
    (500, 28, 28) (500,)
    (500, 28, 28, 1)
    (100, 28, 28, 1)
    (500, 10) (100, 10)
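
    For reference, to_categorical turns an integer label into a one-hot row; a minimal sketch:

    # One-hot encoding example: class 3 out of 10 classes
    print(keras.utils.to_categorical([3], 10))
    # [[0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]]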
    

    Define the model

    def build_model():
        """A small CNN: one conv block followed by a dense classifier head."""
        inputs = Input(shape=(28, 28, 1))
        x = Conv2D(filters=32, kernel_size=(2, 2), strides=1, padding='same')(inputs)
        x = keras.layers.BatchNormalization(axis=3)(x)
        x = MaxPool2D(pool_size=(2, 2))(x)
        x = keras.layers.Flatten()(x)
        x = Dense(200, activation='relu')(x)
        x = keras.layers.Dropout(0.3)(x)
        x = Dense(50, activation='relu')(x)
        x = keras.layers.Dropout(0.3)(x)
        x = Dense(10, activation='softmax')(x)
        model = Model(inputs=inputs, outputs=x)
        model.compile(optimizer=optimizers.Adam(1e-3),
                      loss=keras.losses.categorical_crossentropy)
        model.summary()
        return model
    mnist_classification = build_model()
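
    Optionally, passing metrics=['accuracy'] to compile makes evaluate() report accuracy next to the loss; the evaluation recorded below returns only the loss because the run compiled no metrics. A sketch of the variant:

    # Variant compile (an assumption, not part of the original run): also track accuracy
    mnist_classification.compile(optimizer=optimizers.Adam(1e-3),
                                 loss=keras.losses.categorical_crossentropy,
                                 metrics=['accuracy'])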
    

    Visualize the model structure

    Model: "model_13"
    _________________________________________________________________
    Layer (type)                 Output Shape              Param #   
    =================================================================
    input_18 (InputLayer)        (None, 28, 28, 1)         0         
    _________________________________________________________________
    conv2d_21 (Conv2D)           (None, 28, 28, 32)        160       
    _________________________________________________________________
    batch_normalization_4 (Batch (None, 28, 28, 32)        128       
    _________________________________________________________________
    max_pooling2d_21 (MaxPooling (None, 14, 14, 32)        0         
    _________________________________________________________________
    flatten_16 (Flatten)         (None, 6272)              0         
    _________________________________________________________________
    dense_32 (Dense)             (None, 200)               1254600   
    _________________________________________________________________
    dropout_10 (Dropout)         (None, 200)               0         
    _________________________________________________________________
    dense_33 (Dense)             (None, 50)                10050     
    _________________________________________________________________
    dropout_11 (Dropout)         (None, 50)                0         
    _________________________________________________________________
    dense_34 (Dense)             (None, 10)                510       
    =================================================================
    Total params: 1,265,448
    Trainable params: 1,265,384
    Non-trainable params: 64
    _________________________________________________________________
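
    The run above printed the text summary. For an actual block diagram, keras.utils.plot_model can render the graph to an image (it needs the pydot and graphviz packages installed); a minimal sketch, where the output filename is an arbitrary choice:

    # Render the model graph to a PNG (requires pydot and graphviz);
    # 'mnist_cla.png' is an arbitrary filename chosen for this sketch
    keras.utils.plot_model(mnist_classification, to_file='mnist_cla.png',
                           show_shapes=True, show_layer_names=True)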
    

    Train the model

    mnist_classification.fit(X_tr, Y_tr, batch_size=64, epochs=200)
    
    Epoch 1/200
    500/500 [==============================] - 1s 3ms/step - loss: 2.1675
    Epoch 2/200
    500/500 [==============================] - 1s 1ms/step - loss: 1.1908
    Epoch 3/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.7801
    Epoch 4/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.6275
    Epoch 5/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.5564
    Epoch 6/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.4784
    Epoch 7/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.3887
    Epoch 8/200
    500/500 [==============================] - 0s 997us/step - loss: 0.3055
    Epoch 9/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.3098
    Epoch 10/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.2750
    Epoch 11/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.2044
    Epoch 12/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.2084
    Epoch 13/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.2392
    Epoch 14/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.1859
    Epoch 15/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.1709
    Epoch 16/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.1494
    Epoch 17/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.1361
    Epoch 18/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.1330
    Epoch 19/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0902
    Epoch 20/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0964
    Epoch 21/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.1014
    Epoch 22/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0904
    Epoch 23/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.1006
    Epoch 24/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0909
    Epoch 25/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0794
    Epoch 26/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0757
    Epoch 27/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0488
    Epoch 28/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0530
    Epoch 29/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0341
    Epoch 30/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0488
    Epoch 31/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0559
    Epoch 32/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0529
    Epoch 33/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0795
    Epoch 34/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0413
    Epoch 35/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0509
    Epoch 36/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0478
    Epoch 37/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0289
    Epoch 38/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0421
    Epoch 39/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0433
    Epoch 40/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0484
    Epoch 41/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0248
    Epoch 42/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0426
    Epoch 43/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0438
    Epoch 44/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0270
    Epoch 45/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0249
    Epoch 46/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0308
    Epoch 47/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0291
    Epoch 48/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0462
    Epoch 49/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0518
    Epoch 50/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0296
    Epoch 51/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0421
    Epoch 52/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0524
    Epoch 53/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0414
    Epoch 54/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0392
    Epoch 55/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0211
    Epoch 56/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0360
    Epoch 57/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0218
    Epoch 58/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0285
    Epoch 59/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0161
    Epoch 60/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0190
    Epoch 61/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0242
    Epoch 62/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0300
    Epoch 63/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0190
    Epoch 64/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0200
    Epoch 65/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0199
    Epoch 66/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0225
    Epoch 67/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0245
    Epoch 68/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0200
    Epoch 69/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0182
    Epoch 70/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0173
    Epoch 71/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0355
    Epoch 72/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0267
    Epoch 73/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0310
    Epoch 74/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0158
    Epoch 75/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0365
    Epoch 76/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0214
    Epoch 77/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0211
    Epoch 78/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0126
    Epoch 79/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0136
    Epoch 80/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0275
    Epoch 81/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0276
    Epoch 82/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0353
    Epoch 83/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0194
    Epoch 84/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0123
    Epoch 85/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0281
    Epoch 86/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0229
    Epoch 87/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0092
    Epoch 88/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0309
    Epoch 89/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0294
    Epoch 90/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0284
    Epoch 91/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0363
    Epoch 92/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0176
    Epoch 93/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0234
    Epoch 94/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0162
    Epoch 95/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0238
    Epoch 96/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0110
    Epoch 97/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0247
    Epoch 98/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0153
    Epoch 99/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0309
    Epoch 100/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0095
    Epoch 101/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0363
    Epoch 102/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0250
    Epoch 103/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0252
    Epoch 104/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0093
    Epoch 105/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0280
    Epoch 106/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0137
    Epoch 107/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0080
    Epoch 108/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0096
    Epoch 109/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0157
    Epoch 110/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0286
    Epoch 111/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0152
    Epoch 112/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0108
    Epoch 113/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0124
    Epoch 114/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0155
    Epoch 115/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0163
    Epoch 116/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0053
    Epoch 117/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0041
    Epoch 118/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0121
    Epoch 119/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0057
    Epoch 120/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0053
    Epoch 121/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0169
    Epoch 122/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0028
    Epoch 123/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0136
    Epoch 124/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0323
    Epoch 125/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0065
    Epoch 126/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0084
    Epoch 127/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0281
    Epoch 128/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0066
    Epoch 129/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0087
    Epoch 130/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0082
    Epoch 131/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0074
    Epoch 132/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0184
    Epoch 133/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0198
    Epoch 134/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0211
    Epoch 135/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0158
    Epoch 136/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0345
    Epoch 137/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0117
    Epoch 138/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0081
    Epoch 139/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0088
    Epoch 140/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0141
    Epoch 141/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0077
    Epoch 142/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0111
    Epoch 143/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0343
    Epoch 144/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0046
    Epoch 145/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0196
    Epoch 146/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0103
    Epoch 147/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0120
    Epoch 148/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0119
    Epoch 149/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0088
    Epoch 150/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0132
    Epoch 151/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0094
    Epoch 152/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0147
    Epoch 153/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0036
    Epoch 154/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0241
    Epoch 155/200
    500/500 [==============================] - 2s 5ms/step - loss: 0.0234
    Epoch 156/200
    500/500 [==============================] - 1s 3ms/step - loss: 0.0162
    Epoch 157/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0126
    Epoch 158/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0228
    Epoch 159/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0186
    Epoch 160/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0162
    Epoch 161/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0144
    Epoch 162/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0260
    Epoch 163/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0248
    Epoch 164/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0116
    Epoch 165/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0214
    Epoch 166/200
    500/500 [==============================] - 3s 6ms/step - loss: 0.0120
    Epoch 167/200
    500/500 [==============================] - 2s 4ms/step - loss: 0.0263
    Epoch 168/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0110
    Epoch 169/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0351
    Epoch 170/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0164
    Epoch 171/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0131
    Epoch 172/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0247
    Epoch 173/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0234
    Epoch 174/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0322
    Epoch 175/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0259
    Epoch 176/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0089
    Epoch 177/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0106
    Epoch 178/200
    500/500 [==============================] - 1s 2ms/step - loss: 8.5937e-04
    Epoch 179/200
    500/500 [==============================] - 2s 3ms/step - loss: 0.0069
    Epoch 180/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0118
    Epoch 181/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0030
    Epoch 182/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0084
    Epoch 183/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0171
    Epoch 184/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0344
    Epoch 185/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0123
    Epoch 186/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0103
    Epoch 187/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0073
    Epoch 188/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0337
    Epoch 189/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0174
    Epoch 190/200
    500/500 [==============================] - 1s 2ms/step - loss: 0.0112
    Epoch 191/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0131
    Epoch 192/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0016
    Epoch 193/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0112
    Epoch 194/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0179
    Epoch 195/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0119
    Epoch 196/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0150
    Epoch 197/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0101
    Epoch 198/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0018
    Epoch 199/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0030
    Epoch 200/200
    500/500 [==============================] - 1s 1ms/step - loss: 0.0161
    
    <keras.callbacks.callbacks.History at 0x1356fe890>
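
    The training loss falls from 2.17 to around 0.01. fit also returns a History object whose .history dict records the per-epoch losses; a sketch for plotting the curve, assuming the fit call above is changed to capture its return value (validation_split is an addition, not part of the original run):

    # Capture training history and hold out 10% of the data for validation
    # (validation_split is an assumption; the original call trained on everything)
    history = mnist_classification.fit(X_tr, Y_tr, batch_size=64, epochs=200,
                                       validation_split=0.1, verbose=0)
    plt.plot(history.history['loss'], label='train loss')
    plt.plot(history.history['val_loss'], label='val loss')
    plt.xlabel('epoch')
    plt.ylabel('loss')
    plt.legend()
    plt.show()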
    

    Evaluate the model

    # With no metrics passed to compile(), evaluate() returns only the loss
    score = mnist_classification.evaluate(X_te, Y_te, verbose=1)
    print(score)
    
    100/100 [==============================] - 0s 2ms/step
    0.7463939571380616
    
    # Predict class probabilities for the test set
    Y_pre = mnist_classification.predict(X_te)
    print(type(Y_pre))
    
    <class 'numpy.ndarray'>
    
    # Convert softmax probabilities to predicted class indices
    Y_p_val = np.argmax(Y_pre, axis=-1)

    # Nonzero entries mark misclassified test samples
    print(Y_test[0:100] - Y_p_val)
    
    [ 0  0  0  0  0  0  0  0  1  0  0  0  0  0  0  2  0  0  1  0  0  0  0  0
      0  0 -2  0  0  0  0  0  0 -2  0  0  0  0  0  0  0  0  0  0  0  0 -4  0
      0  0  0  0  0  0  0  0  0  0  0  0  0  6  0  0  4 -5  4  0  0  0  0  0
      0  1  0  0  0  0  0  0 -2  0  0 -2  0  0  0  0  0  0  0  0  0  0  0  0
     -7  0  0  0]
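
    The test loss (0.746) is far above the final training losses (around 0.01-0.03), so the 500-sample model overfits heavily. The nonzero entries above mark misclassified samples; a minimal sketch that turns them into an accuracy figure, using the arrays already defined:

    # Fraction of the 100 test samples whose predicted class matches the label
    acc = np.mean(Y_p_val == Y_test[0:100])
    print('test accuracy:', acc)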
    
    import keras_lr_finder

    # Learning-rate range test: sweep the learning rate exponentially from
    # start_lr to end_lr during training and record the loss at each step
    lr_finder = keras_lr_finder.LRFinder(mnist_classification)
    lr_finder.find(X_tr, Y_tr, start_lr=1e-5, end_lr=1e2, batch_size=200, epochs=10)
    
    Epoch 1/10
    500/500 [==============================] - 1s 2ms/step - loss: 0.0054
    Epoch 2/10
    400/500 [=======================>......] - ETA: 0s - loss: 0.0047
    
    # Plot loss against learning rate; a good choice lies where the loss
    # is still decreasing steeply
    lr_finder.plot_loss(n_skip_beginning=1, n_skip_end=1)
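
    Once the plot suggests a learning rate, the model can be recompiled with it; a sketch where 1e-3 is a placeholder, not a value read from an actual plot:

    # Recompile with the learning rate picked from the LR-finder plot
    # (1e-3 is a placeholder value for this sketch)
    mnist_classification.compile(optimizer=optimizers.Adam(1e-3),
                                 loss=keras.losses.categorical_crossentropy)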
    

    Save and load the model

    # Save architecture, weights, and optimizer state to one HDF5 file;
    # include_optimizer expects a boolean
    mnist_classification.save(filepath='mnist_cla.h5', include_optimizer=True, overwrite=True)
    
    # Reload the model from disk and confirm the architecture round-trips
    aa = keras.models.load_model('mnist_cla.h5')

    aa.summary()
    
    Model: "model_13"
    _________________________________________________________________
    Layer (type)                 Output Shape              Param #   
    =================================================================
    input_18 (InputLayer)        (None, 28, 28, 1)         0         
    _________________________________________________________________
    conv2d_21 (Conv2D)           (None, 28, 28, 32)        160       
    _________________________________________________________________
    batch_normalization_4 (Batch (None, 28, 28, 32)        128       
    _________________________________________________________________
    max_pooling2d_21 (MaxPooling (None, 14, 14, 32)        0         
    _________________________________________________________________
    flatten_16 (Flatten)         (None, 6272)              0         
    _________________________________________________________________
    dense_32 (Dense)             (None, 200)               1254600   
    _________________________________________________________________
    dropout_10 (Dropout)         (None, 200)               0         
    _________________________________________________________________
    dense_33 (Dense)             (None, 50)                10050     
    _________________________________________________________________
    dropout_11 (Dropout)         (None, 50)                0         
    _________________________________________________________________
    dense_34 (Dense)             (None, 10)                510       
    =================================================================
    Total params: 1,265,448
    Trainable params: 1,265,384
    Non-trainable params: 64
    _________________________________________________________________
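
    As a final sanity check (a sketch, not part of the original post), the reloaded model should reproduce the in-memory model's predictions, since dropout is inactive at inference time:

    # The reloaded model should give (numerically) the same outputs
    assert np.allclose(aa.predict(X_te), mnist_classification.predict(X_te))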
    
    
    
  • Source: https://www.cnblogs.com/tolshao/p/MNIST.html