@@ -111,9 +111,8 @@ def plot_confusion_matrix(cm, classes, ax,
# will use a bagging classifier and its counterpart which internally uses a
# random under-sampling to balance each bootstrap sample.

-bagging = BaggingClassifier(n_estimators=50, random_state=0, n_jobs=-1)
-balanced_bagging = BalancedBaggingClassifier(n_estimators=50, random_state=0,
-                                             n_jobs=-1)
+bagging = BaggingClassifier(n_estimators=50, random_state=0)
+balanced_bagging = BalancedBaggingClassifier(n_estimators=50, random_state=0)

bagging.fit(X_train, y_train)
balanced_bagging.fit(X_train, y_train)
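For reference, a minimal self-contained sketch of the comparison this hunk touches; the make_classification data, the train/test split, and the balanced_accuracy_score check are assumptions standing in for the example's real setup, not part of the diff:

from sklearn.datasets import make_classification
from sklearn.ensemble import BaggingClassifier
from sklearn.metrics import balanced_accuracy_score
from sklearn.model_selection import train_test_split
from imblearn.ensemble import BalancedBaggingClassifier

# Assumed stand-in for the example's imbalanced dataset (99:1 class ratio).
X, y = make_classification(n_samples=10_000, weights=[0.99, 0.01],
                           random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Plain bagging vs. bagging that random-under-samples each bootstrap sample.
bagging = BaggingClassifier(n_estimators=50, random_state=0)
balanced_bagging = BalancedBaggingClassifier(n_estimators=50, random_state=0)
bagging.fit(X_train, y_train)
balanced_bagging.fit(X_train, y_train)

print(balanced_accuracy_score(y_test, bagging.predict(X_test)))
print(balanced_accuracy_score(y_test, balanced_bagging.predict(X_test)))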
@@ -149,9 +148,8 @@ def plot_confusion_matrix(cm, classes, ax,
# outperforming bagging. Here, we used a vanilla random forest and its balanced
# counterpart in which each bootstrap sample is balanced.

-rf = RandomForestClassifier(n_estimators=50, random_state=0, n_jobs=-1)
-brf = BalancedRandomForestClassifier(n_estimators=50, random_state=0,
-                                     n_jobs=-1)
+rf = RandomForestClassifier(n_estimators=50, random_state=0)
+brf = BalancedRandomForestClassifier(n_estimators=50, random_state=0)

rf.fit(X_train, y_train)
brf.fit(X_train, y_train)
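The same kind of sketch for this hunk, under the same assumed synthetic data; only the two forest estimators change:

from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import balanced_accuracy_score
from sklearn.model_selection import train_test_split
from imblearn.ensemble import BalancedRandomForestClassifier

X, y = make_classification(n_samples=10_000, weights=[0.99, 0.01],
                           random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Vanilla forest vs. the variant that balances each bootstrap sample.
rf = RandomForestClassifier(n_estimators=50, random_state=0)
brf = BalancedRandomForestClassifier(n_estimators=50, random_state=0)
rf.fit(X_train, y_train)
brf.fit(X_train, y_train)

print(balanced_accuracy_score(y_test, rf.predict(X_test)))
print(balanced_accuracy_score(y_test, brf.predict(X_test)))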
@@ -189,8 +187,7 @@ def plot_confusion_matrix(cm, classes, ax,

base_estimator = AdaBoostClassifier(n_estimators=10)
eec = EasyEnsembleClassifier(n_estimators=10,
-                             base_estimator=base_estimator,
-                             n_jobs=-1)
+                             base_estimator=base_estimator)
eec.fit(X_train, y_train)
y_pred_eec = eec.predict(X_test)
print('Easy ensemble classifier performance:')
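And a sketch for the easy-ensemble hunk, again under the assumed synthetic data; note that base_estimator matches the imbalanced-learn API at the time of this diff (later releases rename the parameter to estimator):

from sklearn.datasets import make_classification
from sklearn.ensemble import AdaBoostClassifier
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from imblearn.ensemble import EasyEnsembleClassifier

X, y = make_classification(n_samples=10_000, weights=[0.99, 0.01],
                           random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Ensemble of AdaBoost learners, each fit on a balanced resample of the data.
base_estimator = AdaBoostClassifier(n_estimators=10)
eec = EasyEnsembleClassifier(n_estimators=10, base_estimator=base_estimator)
eec.fit(X_train, y_train)
y_pred_eec = eec.predict(X_test)
print('Easy ensemble classifier performance:')
print(classification_report(y_test, y_pred_eec))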