3 | 3 | # Authors: Christos Aridas
4 | 4 | #
5 | 5 | # License: MIT
  | 6 | +from __future__ import print_function
6 | 7 |
7 | 8 | import numpy as np
8 | 9 | from sklearn.base import ClassifierMixin, clone
9 | 10 | from sklearn.ensemble import VotingClassifier
10 | 11 | from sklearn.ensemble.base import BaseEnsemble, _set_random_states
11 | 12 | from sklearn.tree import DecisionTreeClassifier
12 | 13 | from sklearn.utils import check_random_state
  | 14 | +from sklearn.utils.multiclass import check_classification_targets
13 | 15 | from sklearn.utils.validation import check_is_fitted
14 | 16 |
15 | 17 | from ..pipeline import Pipeline

@@ -63,13 +65,13 @@ class EasyEnsembleGeneralization(BaseEnsemble, ClassifierMixin):
63 | 65 |
64 | 66 | Examples
65 | 67 | --------
66 |    | - >>>import numpy as np
67 |    | - >>>from imblearn.ensemble import EasyEnsembleGeneralization as EEG
68 |    | - >>>X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
69 |    | - >>>y = np.array([1, 1, 1, 2, 2, 2])
70 |    | - >>>eeg = EEG(voting='hard', random_state=0)
71 |    | - >>>eeg.fit(X,y)
72 |    | - >>>eeg.predict(X)
   | 68 | + >>> import numpy as np
   | 69 | + >>> from imblearn.ensemble import EasyEnsembleGeneralization as EEG
   | 70 | + >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
   | 71 | + >>> y = np.array([1, 1, 1, 2, 2, 2])
   | 72 | + >>> eeg = EEG(voting='soft', random_state=0)
   | 73 | + >>> eeg = eeg.fit(X,y)
   | 74 | + >>> print(eeg.predict(X))
73 | 75 | [1 1 1 2 2 2]
74 | 76 | >>>
75 | 77 | """
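
The corrected doctest also switches the example from voting='hard' to voting='soft'. As a rough illustration of the difference (the numbers below are made up for this note, not taken from the patch): hard voting takes the majority of the per-estimator predicted labels, while soft voting averages the estimators' predict_proba outputs and takes the argmax, so the two can disagree:

    import numpy as np

    # Per-estimator class probabilities for a single sample (assumed values).
    proba = np.array([[0.40, 0.60],   # estimator 1 predicts class index 1
                      [0.45, 0.55],   # estimator 2 predicts class index 1
                      [0.90, 0.10]])  # estimator 3 predicts class index 0

    hard_vote = np.bincount(proba.argmax(axis=1)).argmax()  # majority of labels -> 1
    soft_vote = proba.mean(axis=0).argmax()                 # argmax of mean proba -> 0
    print(hard_vote, soft_vote)  # prints: 1 0
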
@@ -129,6 +131,9 @@ def fit(self, X, y, sample_weight=None):
129 | 131 | Returns self.
130 | 132 | """
131 | 133 |
    | 134 | +
    | 135 | + check_classification_targets(y)
    | 136 | +
132 | 137 | random_state = check_random_state(self.random_state)
133 | 138 |
134 | 139 | self._validate_estimator()
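
The new check_classification_targets(y) call at the top of fit validates that y is a classification target before any estimators are built. A minimal standalone sketch of the behaviour it guards against (not part of the patch):

    import numpy as np
    from sklearn.utils.multiclass import check_classification_targets

    check_classification_targets(np.array([1, 1, 1, 2, 2, 2]))  # binary labels: passes silently

    try:
        check_classification_targets(np.array([0.1, 0.7, 1.3]))  # continuous targets
    except ValueError as exc:
        print(exc)  # raises, e.g. "Unknown label type: 'continuous'"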