• Plotting the decision boundary -- source code of plot_2d_separator.py (from *Introduction to Machine Learning with Python*)


import numpy as np
import matplotlib.pyplot as plt
from .plot_helpers import cm2, cm3, discrete_scatter


def _call_classifier_chunked(classifier_pred_or_decide, X):
    # The chunk_size is used to chunk the large arrays to work with x86
    # memory models that are restricted to < 2 GB in memory allocation. The
    # chunk_size value used here is based on a measurement with the
    # MLPClassifier using the following parameters:
    # MLPClassifier(solver='lbfgs', random_state=0,
    #               hidden_layer_sizes=[1000, 1000, 1000])
    # By reducing the value it is possible to trade time for memory.
    # It is possible to chunk the array as the calculations are independent
    # of each other.
    # Note: an intermediate version made a distinction between
    # 32- and 64-bit architectures, avoiding the chunking. Testing revealed
    # that even on 64-bit architectures the chunking increases the
    # performance by a factor of 3-5, largely due to the avoidance of memory
    # swapping.
    chunk_size = 10000

    # We use a list to collect all result chunks
    Y_result_chunks = []

    # Call the classifier in chunks.
    for x_chunk in np.array_split(X, np.arange(chunk_size, X.shape[0],
                                               chunk_size, dtype=np.int32),
                                  axis=0):
        Y_result_chunks.append(classifier_pred_or_decide(x_chunk))

    return np.concatenate(Y_result_chunks)


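# Fill the plane with the class predicted by `classifier` at each point of a
# fine grid, so the (possibly multiclass) decision regions show up as colored
# areas behind the data.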
def plot_2d_classification(classifier, X, fill=False, ax=None, eps=None,
                           alpha=1, cm=cm3):
    # multiclass
    if eps is None:
        eps = X.std() / 2.

    if ax is None:
        ax = plt.gca()

    x_min, x_max = X[:, 0].min() - eps, X[:, 0].max() + eps
    y_min, y_max = X[:, 1].min() - eps, X[:, 1].max() + eps
    xx = np.linspace(x_min, x_max, 1000)
    yy = np.linspace(y_min, y_max, 1000)

    X1, X2 = np.meshgrid(xx, yy)
    X_grid = np.c_[X1.ravel(), X2.ravel()]
    decision_values = classifier.predict(X_grid)
    ax.imshow(decision_values.reshape(X1.shape),
              extent=(x_min, x_max, y_min, y_max),
              aspect='auto', origin='lower', alpha=alpha, cmap=cm)
    ax.set_xlim(x_min, x_max)
    ax.set_ylim(y_min, y_max)
    ax.set_xticks(())
    ax.set_yticks(())


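# Render the classifier's continuous score (decision_function, or the
# positive-class column of predict_proba) as a heat map over the plane and
# return the image so a colorbar can be attached.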
def plot_2d_scores(classifier, X, ax=None, eps=None, alpha=1, cm="viridis",
                   function=None):
    # binary with fill
    if eps is None:
        eps = X.std() / 2.

    if ax is None:
        ax = plt.gca()

    x_min, x_max = X[:, 0].min() - eps, X[:, 0].max() + eps
    y_min, y_max = X[:, 1].min() - eps, X[:, 1].max() + eps
    xx = np.linspace(x_min, x_max, 100)
    yy = np.linspace(y_min, y_max, 100)

    X1, X2 = np.meshgrid(xx, yy)
    X_grid = np.c_[X1.ravel(), X2.ravel()]
    if function is None:
        function = getattr(classifier, "decision_function",
                           getattr(classifier, "predict_proba"))
    else:
        function = getattr(classifier, function)
    decision_values = function(X_grid)
    if decision_values.ndim > 1 and decision_values.shape[1] > 1:
        # predict_proba
        decision_values = decision_values[:, 1]
    grr = ax.imshow(decision_values.reshape(X1.shape),
                    extent=(x_min, x_max, y_min, y_max), aspect='auto',
                    origin='lower', alpha=alpha, cmap=cm)

    ax.set_xlim(x_min, x_max)
    ax.set_ylim(y_min, y_max)
    ax.set_xticks(())
    ax.set_yticks(())
    return grr


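# Draw a binary classifier's decision boundary: a single black contour at the
# decision threshold, or, with fill=True, the two shaded decision regions.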
def plot_2d_separator(classifier, X, fill=False, ax=None, eps=None, alpha=1,
                      cm=cm2, linewidth=None, threshold=None,
                      linestyle="solid"):
    # binary?
    if eps is None:
        eps = X.std() / 2.

    if ax is None:
        ax = plt.gca()

    x_min, x_max = X[:, 0].min() - eps, X[:, 0].max() + eps
    y_min, y_max = X[:, 1].min() - eps, X[:, 1].max() + eps
    xx = np.linspace(x_min, x_max, 1000)
    yy = np.linspace(y_min, y_max, 1000)

    X1, X2 = np.meshgrid(xx, yy)
    X_grid = np.c_[X1.ravel(), X2.ravel()]
    if hasattr(classifier, "decision_function"):
        decision_values = _call_classifier_chunked(
            classifier.decision_function, X_grid)
        levels = [0] if threshold is None else [threshold]
        fill_levels = [decision_values.min()] + levels + [
            decision_values.max()]
    else:
        # no decision_function
        decision_values = _call_classifier_chunked(
            classifier.predict_proba, X_grid)[:, 1]
        levels = [.5] if threshold is None else [threshold]
        fill_levels = [0] + levels + [1]
    if fill:
        ax.contourf(X1, X2, decision_values.reshape(X1.shape),
                    levels=fill_levels, alpha=alpha, cmap=cm)
    else:
        ax.contour(X1, X2, decision_values.reshape(X1.shape), levels=levels,
                   colors="black", alpha=alpha, linewidths=linewidth,
                   linestyles=linestyle, zorder=5)

    ax.set_xlim(x_min, x_max)
    ax.set_ylim(y_min, y_max)
    ax.set_xticks(())
    ax.set_yticks(())


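# Small self-test: fit a logistic regression on two blobs and plot its
# decision boundary together with the data points.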
if __name__ == '__main__':
    from sklearn.datasets import make_blobs
    from sklearn.linear_model import LogisticRegression
    X, y = make_blobs(centers=2, random_state=42)
    clf = LogisticRegression(solver='lbfgs').fit(X, y)
    plot_2d_separator(clf, X, fill=True)
    discrete_scatter(X[:, 0], X[:, 1], y)
    plt.show()
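
In the book this module is not imported on its own; it ships inside the accompanying mglearn helper package (note the relative import of plot_helpers) and is reached through mglearn.plots. A minimal usage sketch follows, assuming the mglearn package is installed alongside scikit-learn and matplotlib; the make_moons data and the SVC classifier are illustrative choices, not part of the original post.

import matplotlib.pyplot as plt
import mglearn
from sklearn.datasets import make_moons
from sklearn.svm import SVC

# Illustrative data and model (not from the original post).
X, y = make_moons(n_samples=100, noise=0.25, random_state=3)
clf = SVC(gamma=2, C=1).fit(X, y)

# Shaded decision regions plus the boundary where decision_function == 0.
mglearn.plots.plot_2d_separator(clf, X, fill=True, alpha=.4)
mglearn.discrete_scatter(X[:, 0], X[:, 1], y)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
plt.show()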
• Original article: https://www.cnblogs.com/aaronhoo/p/10146934.html