@@ -111,9 +111,8 @@ def plot_confusion_matrix(cm, classes, ax,
 # will use a bagging classifier and its counterpart, which internally uses
 # random under-sampling to balance each bootstrap sample.
 
-bagging = BaggingClassifier(n_estimators=50, random_state=0, n_jobs=-1)
-balanced_bagging = BalancedBaggingClassifier(n_estimators=50, random_state=0,
-                                             n_jobs=-1)
+bagging = BaggingClassifier(n_estimators=50, random_state=0)
+balanced_bagging = BalancedBaggingClassifier(n_estimators=50, random_state=0)
 
 bagging.fit(X_train, y_train)
 balanced_bagging.fit(X_train, y_train)
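For reference, a minimal, self-contained sketch of the comparison this hunk sets up. The toy dataset below is an assumption for illustration, not the data used by the example itself; only the two estimator definitions match the patched lines.

from sklearn.datasets import make_classification
from sklearn.ensemble import BaggingClassifier
from sklearn.metrics import balanced_accuracy_score
from sklearn.model_selection import train_test_split
from imblearn.ensemble import BalancedBaggingClassifier

# Hypothetical imbalanced toy data, standing in for the example's dataset.
X, y = make_classification(n_samples=5000, weights=[0.95, 0.05],
                           random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y,
                                                    random_state=0)

# Same estimators as in the patched lines above.
bagging = BaggingClassifier(n_estimators=50, random_state=0)
balanced_bagging = BalancedBaggingClassifier(n_estimators=50, random_state=0)
for clf in (bagging, balanced_bagging):
    clf.fit(X_train, y_train)
    print(type(clf).__name__,
          balanced_accuracy_score(y_test, clf.predict(X_test)))

The balanced variant typically trades a little majority-class accuracy for much better minority-class recall, which is the contrast the example is built to show.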
@@ -149,9 +148,8 @@ def plot_confusion_matrix(cm, classes, ax,
 # outperforming bagging. Here, we used a vanilla random forest and its balanced
 # counterpart in which each bootstrap sample is balanced.
 
-rf = RandomForestClassifier(n_estimators=50, random_state=0, n_jobs=-1)
-brf = BalancedRandomForestClassifier(n_estimators=50, random_state=0,
-                                     n_jobs=-1)
+rf = RandomForestClassifier(n_estimators=50, random_state=0)
+brf = BalancedRandomForestClassifier(n_estimators=50, random_state=0)
 
 rf.fit(X_train, y_train)
 brf.fit(X_train, y_train)
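One hedged note on this hunk: `n_jobs` only controls fitting parallelism, so removing it should leave the learned models unchanged for a fixed `random_state`. A sketch of the forest comparison, reusing the hypothetical split from the previous snippet:

from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import balanced_accuracy_score
from imblearn.ensemble import BalancedRandomForestClassifier

rf = RandomForestClassifier(n_estimators=50, random_state=0)
# Balances each bootstrap sample by random under-sampling before
# growing the corresponding tree.
brf = BalancedRandomForestClassifier(n_estimators=50, random_state=0)
for clf in (rf, brf):
    clf.fit(X_train, y_train)
    print(type(clf).__name__,
          balanced_accuracy_score(y_test, clf.predict(X_test)))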
@@ -189,8 +187,7 @@ def plot_confusion_matrix(cm, classes, ax,
 
 base_estimator = AdaBoostClassifier(n_estimators=10)
 eec = EasyEnsembleClassifier(n_estimators=10,
-                             base_estimator=base_estimator,
-                             n_jobs=-1)
+                             base_estimator=base_estimator)
 eec.fit(X_train, y_train)
 y_pred_eec = eec.predict(X_test)
 print('Easy ensemble classifier performance:')
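To round out the hunk, a runnable sketch of the easy-ensemble step under the same assumptions as the earlier snippets (hypothetical toy split). Note that recent imbalanced-learn releases rename the `base_estimator` parameter to `estimator`; the name below matches the version this diff targets.

from sklearn.ensemble import AdaBoostClassifier
from sklearn.metrics import balanced_accuracy_score
from imblearn.ensemble import EasyEnsembleClassifier

# Each of the 10 ensemble members is an AdaBoost classifier trained on a
# balanced bootstrap obtained by random under-sampling the training set.
base_estimator = AdaBoostClassifier(n_estimators=10)
eec = EasyEnsembleClassifier(n_estimators=10, base_estimator=base_estimator)
eec.fit(X_train, y_train)
y_pred_eec = eec.predict(X_test)
print('Easy ensemble classifier performance:')
print(balanced_accuracy_score(y_test, y_pred_eec))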