• [Deep Learning] Andrew Ng NetEase open course exercises (class 1, week 2)


    Summary of key points

    Assignment: classify cat images with logistic regression

    NumPy notes (a short demo follows this list):

    1. Check an array's dimensions: x.shape
    2. Initialize a zero matrix: np.zeros((dim1, dim2))
    3. Remove dimensions of size 1: x = np.squeeze(x)
    4. Reshape an (a, b, c, d) array into (b*c*d, a): X_flatten = X.reshape(X.shape[0], -1).T
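
    A minimal sketch of the four operations above (the array shapes are made up for illustration):

    import numpy as np

    X = np.zeros((10, 64, 64, 3))             # 2. zero array: e.g. 10 RGB images of 64x64
    print(X.shape)                            # 1. check dimensions -> (10, 64, 64, 3)

    y = np.squeeze(np.zeros((1, 10, 1)))      # 3. drop size-1 dimensions -> shape (10,)

    X_flatten = X.reshape(X.shape[0], -1).T   # 4. (a, b, c, d) -> (b*c*d, a) = (12288, 10)
    print(X_flatten.shape)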

    Algorithm outline:

    1. Import packages
    2. Preprocess the input data: load the images, reshape, normalize
    3. Initialize parameters
    4. Forward propagation
    5. Backward propagation and parameter updates
    6. Make predictions
    7. Plot the convergence (cost) curve

    Logistic regression code:

    # Complete code
    
    import numpy as np
    import matplotlib.pyplot as plt
    import h5py
    import scipy
    from PIL import Image
    from scipy import ndimage
    from lr_utils import load_dataset
    
    %matplotlib inline
    
    # Loading the data (cat/non-cat)
    train_set_x_orig, train_set_y, test_set_x_orig, test_set_y, classes = load_dataset()
    
    m_train = train_set_x_orig.shape[0]
    m_test = test_set_x_orig.shape[0]
    num_px = train_set_x_orig.shape[1]
    
    # Reshape the training and test examples
    train_set_x_flatten = train_set_x_orig.reshape(train_set_x_orig.shape[0], -1).T
    test_set_x_flatten = test_set_x_orig.reshape(test_set_x_orig.shape[0], -1).T
    
    train_set_x = train_set_x_flatten/255.
    test_set_x = test_set_x_flatten/255.
    
    
    def sigmoid(z):
        s = 1 / (1 + np.exp(-z))
        return s
    
    
    def initialize_with_zeros(dim):
        w = np.zeros((dim, 1))
        b = 0
        assert(w.shape == (dim, 1))
        assert(isinstance(b, float) or isinstance(b, int))
        return w, b
    
    
    def propagate(w, b, X, Y):
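        # Vectorized formulas implemented below (m = number of examples):
        #   cost J = -(1/m) * sum( Y*log(A) + (1-Y)*log(1-A) )
        #   dw     = (1/m) * X . (A - Y).T
        #   db     = (1/m) * sum(A - Y)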
        m = X.shape[1]
        
        # FORWARD PROPAGATION (FROM X TO COST)
        A = sigmoid(np.dot(w.T, X) + b)            # compute activation
        cost = - 1 / m * np.sum(Y * np.log(A) + (1 - Y) * np.log(1 - A))         # compute cost
        
        # BACKWARD PROPAGATION (TO FIND GRAD)
        dw = 1 / m * np.dot(X, (A - Y).T)
        db = 1 / m * np.sum(A - Y)
    
        assert(dw.shape == w.shape)
        assert(db.dtype == float)
        cost = np.squeeze(cost)
        assert(cost.shape == ())
        
        grads = {"dw": dw,
                 "db": db}
        
        return grads, cost
    
    
    def optimize(w, b, X, Y, num_iterations, learning_rate, print_cost = False):
        costs = []
        
        for i in range(num_iterations):      
            # Cost and gradient calculation 
            grads, cost = propagate(w, b, X, Y)
            
            # Retrieve derivatives from grads
            dw = grads["dw"]
            db = grads["db"]
            
            # update rule
            w = w - learning_rate * dw
            b = b - learning_rate * db
            
            # Record the costs
            if i % 100 == 0:
                costs.append(cost)
            
            # Print the cost every 100 iterations
            if print_cost and i % 100 == 0:
                print ("Cost after iteration %i: %f" %(i, cost))
        
        params = {"w": w,
                  "b": b}
        
        grads = {"dw": dw,
                 "db": db}
        
        return params, grads, costs
    
    
    def predict(w, b, X):
        m = X.shape[1]
        Y_prediction = np.zeros((1,m))
        w = w.reshape(X.shape[0], 1)
        
        # Compute vector "A" predicting the probabilities of a cat being present in the picture
        A = sigmoid(np.dot(w.T, X) + b)
    
        for i in range(A.shape[1]):  
            # Convert probabilities A[0,i] to actual predictions p[0,i]
            Y_prediction[0, i] = 1 if A[0, i] > 0.5 else 0
        
        assert(Y_prediction.shape == (1, m))
        
        return Y_prediction
    
    
    def model(X_train, Y_train, X_test, Y_test, num_iterations = 2000, learning_rate = 0.5, print_cost = False):
        # initialize parameters with zeros
        w, b = initialize_with_zeros(X_train.shape[0])
    
        # Gradient descent 
        parameters, grads, costs = optimize(w, b, X_train, Y_train, num_iterations, learning_rate, print_cost)
        
        # Retrieve parameters w and b from dictionary "parameters"
        w = parameters["w"]
        b = parameters["b"]
        
        # Predict test/train set examples
        Y_prediction_test = predict(w, b, X_test)
        Y_prediction_train = predict(w, b, X_train)
    
        # Print train/test Errors
        print("train accuracy: {} %".format(100 - np.mean(np.abs(Y_prediction_train - Y_train)) * 100))
        print("test accuracy: {} %".format(100 - np.mean(np.abs(Y_prediction_test - Y_test)) * 100))
    
        
        d = {"costs": costs,
             "Y_prediction_test": Y_prediction_test, 
             "Y_prediction_train" : Y_prediction_train, 
             "w" : w, 
             "b" : b,
             "learning_rate" : learning_rate,
             "num_iterations": num_iterations}
        
        return d
    
    
    d = model(train_set_x, train_set_y, test_set_x, test_set_y, num_iterations = 2000, learning_rate = 0.005, print_cost = True)
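
    # Step 7 of the outline (convergence curve) is not in the listing above; a minimal
    # sketch that plots the costs recorded every 100 iterations by optimize():
    costs = np.squeeze(d['costs'])
    plt.plot(costs)
    plt.ylabel('cost')
    plt.xlabel('iterations (per hundreds)')
    plt.title("Learning rate = " + str(d["learning_rate"]))
    plt.show()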
    
    # lr_utils.py
    import numpy as np
    import h5py
        
        
    def load_dataset():
        train_dataset = h5py.File('datasets/train_catvnoncat.h5', "r")
        train_set_x_orig = np.array(train_dataset["train_set_x"][:]) # your train set features
        train_set_y_orig = np.array(train_dataset["train_set_y"][:]) # your train set labels
    
        test_dataset = h5py.File('datasets/test_catvnoncat.h5', "r")
        test_set_x_orig = np.array(test_dataset["test_set_x"][:]) # your test set features
        test_set_y_orig = np.array(test_dataset["test_set_y"][:]) # your test set labels
    
        classes = np.array(test_dataset["list_classes"][:]) # the list of classes
        
        train_set_y_orig = train_set_y_orig.reshape((1, train_set_y_orig.shape[0]))
        test_set_y_orig = test_set_y_orig.reshape((1, test_set_y_orig.shape[0]))
        
        return train_set_x_orig, train_set_y_orig, test_set_x_orig, test_set_y_orig, classes
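
    The imports PIL.Image, scipy, and scipy.ndimage are never used in the listing above; they come from the assignment's optional step of testing the trained model on your own picture. A minimal sketch using PIL directly, where the file name my_image.jpg is made up and the image is assumed to be RGB:

    num_px = train_set_x_orig.shape[1]                          # side length of the training images
    img = Image.open("my_image.jpg").resize((num_px, num_px))   # hypothetical file path
    my_image = np.array(img).reshape((1, num_px * num_px * 3)).T / 255.
    my_prediction = predict(d["w"], d["b"], my_image)
    print("y = " + str(int(np.squeeze(my_prediction))) + ", the model predicts a \""
          + classes[int(np.squeeze(my_prediction))].decode("utf-8") + "\" picture.")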
    