@@ -48,7 +48,10 @@
 def test_easy_ensemble_classifier(n_estimators, base_estimator):
     # Check classification for various parameter settings.
     X, y = make_imbalance(
-        iris.data, iris.target, sampling_strategy={0: 20, 1: 25, 2: 50}, random_state=0,
+        iris.data,
+        iris.target,
+        sampling_strategy={0: 20, 1: 25, 2: 50},
+        random_state=0,
     )
     X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
@@ -72,7 +75,10 @@ def test_easy_ensemble_classifier(n_estimators, base_estimator):
 def test_base_estimator():
     # Check base_estimator and its default values.
     X, y = make_imbalance(
-        iris.data, iris.target, sampling_strategy={0: 20, 1: 25, 2: 50}, random_state=0,
+        iris.data,
+        iris.target,
+        sampling_strategy={0: 20, 1: 25, 2: 50},
+        random_state=0,
     )
     X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
@@ -91,7 +97,10 @@ def test_base_estimator():

 def test_bagging_with_pipeline():
     X, y = make_imbalance(
-        iris.data, iris.target, sampling_strategy={0: 20, 1: 25, 2: 50}, random_state=0,
+        iris.data,
+        iris.target,
+        sampling_strategy={0: 20, 1: 25, 2: 50},
+        random_state=0,
     )
     estimator = EasyEnsembleClassifier(
         n_estimators=2,
@@ -109,7 +118,9 @@ def test_warm_start(random_state=42):
     for n_estimators in [5, 10]:
         if clf_ws is None:
             clf_ws = EasyEnsembleClassifier(
-                n_estimators=n_estimators, random_state=random_state, warm_start=True,
+                n_estimators=n_estimators,
+                random_state=random_state,
+                warm_start=True,
             )
         else:
             clf_ws.set_params(n_estimators=n_estimators)
@@ -182,7 +193,10 @@ def test_warm_start_equivalence():
 )
 def test_easy_ensemble_classifier_error(n_estimators, msg_error):
     X, y = make_imbalance(
-        iris.data, iris.target, sampling_strategy={0: 20, 1: 25, 2: 50}, random_state=0,
+        iris.data,
+        iris.target,
+        sampling_strategy={0: 20, 1: 25, 2: 50},
+        random_state=0,
     )
     with pytest.raises(ValueError, match=msg_error):
         eec = EasyEnsembleClassifier(n_estimators=n_estimators)
@@ -191,7 +205,10 @@ def test_easy_ensemble_classifier_error(n_estimators, msg_error):

 def test_easy_ensemble_classifier_single_estimator():
     X, y = make_imbalance(
-        iris.data, iris.target, sampling_strategy={0: 20, 1: 25, 2: 50}, random_state=0,
+        iris.data,
+        iris.target,
+        sampling_strategy={0: 20, 1: 25, 2: 50},
+        random_state=0,
     )
     X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
@@ -205,14 +222,19 @@ def test_easy_ensemble_classifier_single_estimator():

 def test_easy_ensemble_classifier_grid_search():
     X, y = make_imbalance(
-        iris.data, iris.target, sampling_strategy={0: 20, 1: 25, 2: 50}, random_state=0,
+        iris.data,
+        iris.target,
+        sampling_strategy={0: 20, 1: 25, 2: 50},
+        random_state=0,
     )

     parameters = {
         "n_estimators": [1, 2],
         "base_estimator__n_estimators": [3, 4],
     }
     grid_search = GridSearchCV(
-        EasyEnsembleClassifier(base_estimator=AdaBoostClassifier()), parameters, cv=5,
+        EasyEnsembleClassifier(base_estimator=AdaBoostClassifier()),
+        parameters,
+        cv=5,
     )
     grid_search.fit(X, y)
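Note: every hunk above only re-wraps long argument lists onto separate lines (black-style formatting); no behaviour changes. For context, a minimal self-contained sketch of the call pattern these tests exercise, assuming the imbalanced-learn API spelled out in the diff (make_imbalance with sampling_strategy, and EasyEnsembleClassifier's base_estimator parameter, which newer releases rename to estimator):

# Sketch only: mirrors the calls reformatted in the diff, under the API
# assumptions noted above (base_estimator vs. the newer estimator name).
from sklearn.datasets import load_iris
from sklearn.ensemble import AdaBoostClassifier
from sklearn.model_selection import train_test_split

from imblearn.datasets import make_imbalance
from imblearn.ensemble import EasyEnsembleClassifier

iris = load_iris()

# Build an artificially imbalanced iris subset: 20/25/50 samples per class.
X, y = make_imbalance(
    iris.data,
    iris.target,
    sampling_strategy={0: 20, 1: 25, 2: 50},
    random_state=0,
)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Fit an EasyEnsembleClassifier with a small AdaBoost base learner.
eec = EasyEnsembleClassifier(
    n_estimators=2,
    base_estimator=AdaBoostClassifier(n_estimators=3),
    random_state=0,
)
eec.fit(X_train, y_train)
print(eec.score(X_test, y_test))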