@@ -32,11 +32,11 @@ under-sampling the original set::
3232 >>> print(sorted(Counter(y).items()))
3333 [(0, 64), (1, 262), (2, 4674)]
3434 >>> from imblearn.ensemble import EasyEnsemble
35- >>> ee = EasyEnsemble(random_state=0, n_subsets=10)
36- >>> X_resampled, y_resampled = ee.fit_resample(X, y)
37- >>> print(X_resampled.shape)
35+ >>> ee = EasyEnsemble(random_state=0, n_subsets=10) # doctest: +SKIP
36+ >>> X_resampled, y_resampled = ee.fit_resample(X, y) # doctest: +SKIP
37+ >>> print(X_resampled.shape) # doctest: +SKIP
3838 (10, 192, 2)
39- >>> print(sorted(Counter(y_resampled[0]).items()))
39+ >>> print(sorted(Counter(y_resampled[0]).items())) # doctest: +SKIP
4040 [(0, 64), (1, 64), (2, 64)]
4141
4242 :class:`EasyEnsemble` has two important parameters: (i) ``n_subsets`` will be
@@ -53,7 +53,9 @@ parameter ``n_max_subset`` and an additional bootstrapping can be activated with
5353 >>> from imblearn.ensemble import BalanceCascade
5454 >>> from sklearn.linear_model import LogisticRegression
5555 >>> bc = BalanceCascade(random_state=0,
56- ... estimator=LogisticRegression(random_state=0),
56+ ... estimator=LogisticRegression(solver='lbfgs',
57+ ... multi_class='auto',
58+ ... random_state=0),
5759 ... n_max_subset=4)
5860 >>> X_resampled, y_resampled = bc.fit_resample(X, y)
5961 >>> print(X_resampled.shape)
0 commit comments