@@ -23,10 +23,22 @@
 from sklearn.naive_bayes import GaussianNB
 from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

-h = .02  # step size in the mesh
-
-names = ["Nearest Neighbors", "Linear SVM", "RBF SVM", "Gaussian Process", "Neural Net", "Naive Bayes", "QDA",
-         "Decision Tree", "Random Forest", "AdaBoost", "SCM-Conjunction", "SCM-Disjunction"]
+h = 0.02  # step size in the mesh
+
+names = [
+    "Nearest Neighbors",
+    "Linear SVM",
+    "RBF SVM",
+    "Gaussian Process",
+    "Neural Net",
+    "Naive Bayes",
+    "QDA",
+    "Decision Tree",
+    "Random Forest",
+    "AdaBoost",
+    "SCM-Conjunction",
+    "SCM-Disjunction",
+]

 classifiers = [
     KNeighborsClassifier(3),
@@ -40,17 +52,21 @@
     RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
     AdaBoostClassifier(),
     SetCoveringMachineClassifier(max_rules=4, model_type="conjunction", p=2.0),
-    SetCoveringMachineClassifier(max_rules=4, model_type="disjunction", p=1.0)]
+    SetCoveringMachineClassifier(max_rules=4, model_type="disjunction", p=1.0),
+]

-X, y = make_classification(n_features=2, n_redundant=0, n_informative=2,
-                           random_state=1, n_clusters_per_class=1)
+X, y = make_classification(
+    n_features=2, n_redundant=0, n_informative=2, random_state=1, n_clusters_per_class=1
+)
 rng = np.random.RandomState(2)
 X += 2 * rng.uniform(size=X.shape)
 linearly_separable = (X, y)

-datasets = [make_moons(noise=0.3, random_state=0),
-            make_circles(noise=0.2, factor=0.5, random_state=1),
-            linearly_separable]
+datasets = [
+    make_moons(noise=0.3, random_state=0),
+    make_circles(noise=0.2, factor=0.5, random_state=1),
+    linearly_separable,
+]

 figure = plt.figure(figsize=(27, 11))
 i = 1
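As a standalone illustration of the `SetCoveringMachineClassifier` entries configured above, here is a minimal sketch that trains one of them on the `make_moons` dataset used in this script. The `from pyscm import SetCoveringMachineClassifier` import path and this evaluation are assumptions for illustration; the script's own imports and evaluation loop are outside the hunks shown here.

```python
# Minimal sketch (assumption: pyscm exposes SetCoveringMachineClassifier at the top
# level and follows the scikit-learn fit/predict API, as its use above suggests).
from pyscm import SetCoveringMachineClassifier  # assumed import path
from sklearn.datasets import make_moons
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split

X, y = make_moons(noise=0.3, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)

# Same hyperparameters as the "SCM-Conjunction" entry in the classifiers list.
clf = SetCoveringMachineClassifier(max_rules=4, model_type="conjunction", p=2.0)
clf.fit(X_train, y_train)
print("Test accuracy:", accuracy_score(y_test, clf.predict(X_test)))
```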
@@ -59,21 +75,21 @@
     # preprocess dataset, split into training and test part
     X, y = ds
     X = StandardScaler().fit_transform(X)
-    X_train, X_test, y_train, y_test = \
-        train_test_split(X, y, test_size=.4, random_state=42)
+    X_train, X_test, y_train, y_test = train_test_split(
+        X, y, test_size=0.4, random_state=42
+    )

-    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
-    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
-    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
-                         np.arange(y_min, y_max, h))
+    x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
+    y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
+    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

     # just plot the dataset first
     cm = plt.cm.RdBu
-    cm_bright = ListedColormap(['#FF0000', '#0000FF'])
-    #cm = plt.cm.PiYG
-    #cm_bright = ListedColormap(['#FF0000', '#00FF00'])
-    #cm = plt.cm.bwr
-    #cm_bright = ListedColormap(['#0000FF', '#FF0000'])
+    cm_bright = ListedColormap(["#FF0000", "#0000FF"])
+    # cm = plt.cm.PiYG
+    # cm_bright = ListedColormap(['#FF0000', '#00FF00'])
+    # cm = plt.cm.bwr
+    # cm_bright = ListedColormap(['#0000FF', '#FF0000'])
     ax = plt.subplot(len(datasets), len(classifiers) + 1, i)
     if ds_cnt == 0:
         ax.set_title("Input data")
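The mesh built from `xx` and `yy` above is what gets colored in the collapsed part of the per-classifier loop. As a hedged, self-contained sketch of the usual meshgrid-to-contour recipe (the script's own evaluation code is not shown in this diff and may differ, e.g. by using `decision_function` instead of `predict`):

```python
# Generic sketch: predict on every grid point, reshape back to the mesh shape,
# and draw the result with contourf, mirroring the hunks above and below.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_moons
from sklearn.neighbors import KNeighborsClassifier

X, y = make_moons(noise=0.3, random_state=0)
clf = KNeighborsClassifier(3).fit(X, y)

h = 0.02
x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
plt.contourf(xx, yy, Z, cmap=plt.cm.RdBu, alpha=0.8)
plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors="k")
plt.show()
```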
@@ -120,25 +136,36 @@

         # Put the result into a color plot
         Z = Z.reshape(xx.shape)
-        ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)
+        ax.contourf(xx, yy, Z, cmap=cm, alpha=0.8)

         # Plot also the training points
         ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)
         # and testing points
-        ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright,
-                   alpha=0.6)
+        ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)

         ax.set_xlim(xx.min(), xx.max())
         ax.set_ylim(yy.min(), yy.max())
         ax.set_xticks(())
         ax.set_yticks(())
         if ds_cnt == 0:
             ax.set_title(name.title())
-        ax.text(xx.min() + 0.2, yy.min() + 0.2, 'Acc.: {0:.2f}'.format(score).lstrip('0'), size=15,
-                horizontalalignment='left', bbox=dict(facecolor='white', edgecolor='black', alpha=0.8))
-        ax.text(xx.min() + 0.2, yy.min() + 0.8, "Rules: {0!s}".format(n_rules) if n_rules is not None else "",
-                size=15, horizontalalignment='left', bbox=dict(facecolor='white', edgecolor='black', alpha=0.8))
+        ax.text(
+            xx.min() + 0.2,
+            yy.min() + 0.2,
+            "Acc.: {0:.2f}".format(score).lstrip("0"),
+            size=15,
+            horizontalalignment="left",
+            bbox=dict(facecolor="white", edgecolor="black", alpha=0.8),
+        )
+        ax.text(
+            xx.min() + 0.2,
+            yy.min() + 0.8,
+            "Rules: {0!s}".format(n_rules) if n_rules is not None else "",
+            size=15,
+            horizontalalignment="left",
+            bbox=dict(facecolor="white", edgecolor="black", alpha=0.8),
+        )
         i += 1

 plt.tight_layout()
-plt.savefig("decision_boundary.pdf", bbox_inches="tight")
+plt.savefig("decision_boundary.pdf", bbox_inches="tight")