• Week 8 Programming Assignment


    function idx = findClosestCentroids(X, centroids)
    %FINDCLOSESTCENTROIDS computes the centroid memberships for every example
    %   idx = FINDCLOSESTCENTROIDS (X, centroids) returns the closest centroids
    %   in idx for a dataset X where each row is a single example. idx = m x 1
    %   vector of centroid assignments (i.e. each entry in range [1..K])
    %

    % Set K
    K = size(centroids, 1);

    % You need to return the following variables correctly.
    idx = zeros(size(X,1), 1);

    % ====================== YOUR CODE HERE ======================
    % Instructions: Go over every example, find its closest centroid, and store
    %               the index inside idx at the appropriate location.
    %               Concretely, idx(i) should contain the index of the centroid
    %               closest to example i. Hence, it should be a value in the
    %               range 1..K
    %
    % Note: You can use a for-loop over the examples to compute this.
    %

    dis = zeros(1, K);                  % squared distance to each centroid
    for i = 1:size(X, 1)
        for j = 1:K
            dis(j) = sum((centroids(j,:) - X(i,:)).^2, 2);  % squared Euclidean distance (sum over the row)
        end
        [t, idx(i)] = min(dis);         % t: minimum value; idx(i): index of the closest centroid
    end

    % =============================================================

    end
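
A minimal sanity check, using hypothetical toy data (not part of the assignment): two well-separated clusters with one centroid placed in each.

    % Hypothetical toy data: two obvious clusters, one centroid each.
    X = [1 1; 1.1 0.9; 8 8; 7.9 8.2];
    centroids = [1 1; 8 8];
    idx = findClosestCentroids(X, centroids)
    % expected: idx = [1; 1; 2; 2]
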
    function centroids = computeCentroids(X, idx, K)
    %COMPUTECENTROIDS returns the new centroids by computing the means of the
    %data points assigned to each centroid.
    %   centroids = COMPUTECENTROIDS(X, idx, K) returns the new centroids by
    %   computing the means of the data points assigned to each centroid. It is
    %   given a dataset X where each row is a single data point, a vector
    %   idx of centroid assignments (i.e. each entry in range [1..K]) for each
    %   example, and K, the number of centroids. You should return a matrix
    %   centroids, where each row of centroids is the mean of the data points
    %   assigned to it.
    %

    % Useful variables
    [m, n] = size(X);

    % You need to return the following variables correctly.
    centroids = zeros(K, n);

    % ====================== YOUR CODE HERE ======================
    % Instructions: Go over every centroid and compute mean of all points that
    %               belong to it. Concretely, the row vector centroids(i, :)
    %               should contain the mean of the data points assigned to
    %               centroid i.
    %
    % Note: You can use a for-loop over the centroids to compute this.
    %

    for i = 1:K
        s = sum(idx == i);                    % number of points assigned to centroid i
        if s ~= 0                             % at least one point assigned
            centroids(i,:) = mean(X(idx == i, :), 1);  % mean along rows, even for a single point
        else
            centroids(i,:) = zeros(1, n);     % empty cluster: leave the centroid at zero
        end
    end

    % =============================================================

    end
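
Continuing the same hypothetical toy data, the recomputed centroids are just the per-cluster means:

    X = [1 1; 1.1 0.9; 8 8; 7.9 8.2];
    idx = [1; 1; 2; 2];
    centroids = computeCentroids(X, idx, 2)
    % expected: [1.05 0.95; 7.95 8.10]
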
    function centroids = kMeansInitCentroids(X, K)
    %KMEANSINITCENTROIDS This function initializes K centroids that are to be 
    %used in K-Means on the dataset X
    %   centroids = KMEANSINITCENTROIDS(X, K) returns K initial centroids to be
    %   used with the K-Means on the dataset X
    %
    
% You should return these values correctly
    centroids = zeros(K, size(X, 2));
    
    % ====================== YOUR CODE HERE ======================
    % Instructions: You should set centroids to randomly chosen examples from
    %               the dataset X
    %
    
    
    
randidx = randperm(size(X,1));       % random permutation of example indices

centroids = X(randidx(1:K), :);      % take the first K (distinct) examples
    
    
    
    
    
    % =============================================================
    
    end
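
Because randperm draws without replacement, the K initial centroids are guaranteed to be K distinct examples (assuming K <= m). These three functions together make up the full K-means loop; the exercise ships a runkMeans driver that also plots progress, but a simplified stand-in looks like this (K and max_iters are illustrative values):

    % Simplified K-means loop tying the three functions together.
    K = 3;
    max_iters = 10;
    centroids = kMeansInitCentroids(X, K);
    for iter = 1:max_iters
        idx = findClosestCentroids(X, centroids);   % assignment step
        centroids = computeCentroids(X, idx, K);    % update step
    end
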
    function [U, S] = pca(X)
    %PCA Run principal component analysis on the dataset X
%   [U, S] = pca(X) computes eigenvectors of the covariance matrix of X
    %   Returns the eigenvectors U, the eigenvalues (on diagonal) in S
    %
    
    % Useful values
    [m, n] = size(X);
    
    % You need to return the following variables correctly.
    U = zeros(n);
    S = zeros(n);
    
    % ====================== YOUR CODE HERE ======================
    % Instructions: You should first compute the covariance matrix. Then, you
    %               should use the "svd" function to compute the eigenvectors
    %               and eigenvalues of the covariance matrix. 
    %
    % Note: When computing the covariance matrix, remember to divide by m (the
    %       number of examples).
    %
    
    
    
    
sigma = (1/m) * (X' * X);    % covariance matrix (X is assumed mean-normalized)

[U, S, V] = svd(sigma);      % columns of U: principal components; diag(S): eigenvalues
    
    
    % =========================================================================
    
    end
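
Note that pca assumes its input has already been normalized, otherwise X'*X/m is not the covariance matrix (the exercise's featureNormalize handles this). A hypothetical sketch on toy data:

    % Hypothetical sketch: normalize, then run PCA.
    X = [2 3; 4 7; 6 11; 8 15];          % toy data; column 2 = 2*column 1 - 1
    X_norm = (X - mean(X)) ./ std(X);    % mean-normalize each feature (broadcasting)
    [U, S] = pca(X_norm);
    % diag(S): variance captured by each component; the second entry is ~0
    % here because the two features are perfectly correlated.
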
    function Z = projectData(X, U, K)
    %PROJECTDATA Computes the reduced data representation when projecting only 
    %on to the top k eigenvectors
    %   Z = projectData(X, U, K) computes the projection of 
    %   the normalized inputs X into the reduced dimensional space spanned by
    %   the first K columns of U. It returns the projected examples in Z.
    %
    
    % You need to return the following variables correctly.
    Z = zeros(size(X, 1), K);
    
    % ====================== YOUR CODE HERE ======================
    % Instructions: Compute the projection of the data using only the top K 
    %               eigenvectors in U (first K columns). 
    %               For the i-th example X(i,:), the projection on to the k-th 
    %               eigenvector is given as follows:
    %                    x = X(i, :)';
    %                    projection_k = x' * U(:, k);
    %
    
    
U_reduce = U(:, 1:K);        % top K eigenvectors

Z = X * U_reduce;            % project each example onto the K-dimensional subspace
    % =============================================================
    
    end
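
Continuing the sketch above: the vectorized Z = X * U_reduce computes every projection_k from the instructions in one matrix product.

    Z = projectData(X_norm, U, 1);    % m x 1: one coordinate per example
    % entry-wise, Z(i,1) equals X_norm(i,:) * U(:,1)
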
    function X_rec = recoverData(Z, U, K)
    %RECOVERDATA Recovers an approximation of the original data when using the 
    %projected data
    %   X_rec = RECOVERDATA(Z, U, K) recovers an approximation the 
    %   original data that has been reduced to K dimensions. It returns the
    %   approximate reconstruction in X_rec.
    %
    
    % You need to return the following variables correctly.
    X_rec = zeros(size(Z, 1), size(U, 1));
    
    % ====================== YOUR CODE HERE ======================
    % Instructions: Compute the approximation of the data by projecting back
    %               onto the original space using the top K eigenvectors in U.
    %
    %               For the i-th example Z(i,:), the (approximate)
    %               recovered data for dimension j is given as follows:
    %                    v = Z(i, :)';
    %                    recovered_j = v' * U(j, 1:K)';
    %
    %               Notice that U(j, 1:K) is a row vector.
    %               
    
    
U_reduce = U(:, 1:K);        % top K eigenvectors

X_rec = Z * U_reduce';       % map the projections back to the original n-dimensional space
    
    
    % =============================================================
    
    end
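
Round trip on the same hypothetical sketch: since the normalized toy data lies on a line, K = 1 reconstructs it almost exactly.

    X_rec = recoverData(Z, U, 1);
    reconstruction_error = norm(X_norm - X_rec, 'fro')   % ~0 for this toy data
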
• Original post: https://www.cnblogs.com/tingtin/p/12230877.html