@@ -154,11 +154,20 @@ def _more_validate_params(self, for_partial_fit=False):
154
154
"learning_rate is 'optimal'. alpha is used "
155
155
"to compute the optimal learning rate."
156
156
)
157
+ if self .penalty == "elasticnet" and self .l1_ratio is None :
158
+ raise ValueError ("l1_ratio must be set when penalty is 'elasticnet'" )
157
159
158
160
# raises ValueError if not registered
159
161
self ._get_penalty_type (self .penalty )
160
162
self ._get_learning_rate_type (self .learning_rate )
161
163
164
+ def _get_l1_ratio (self ):
165
+ if self .l1_ratio is None :
166
+ # plain_sgd expects a float. Any value is fine since at this point
167
+ # penalty can't be "elasticnet" so l1_ratio is not used.
168
+ return 0.0
169
+ return self .l1_ratio
170
+
162
171
def _get_loss_function (self , loss ):
163
172
"""Get concrete ``LossFunction`` object for str ``loss``."""
164
173
loss_ = self .loss_functions [loss ]
@@ -462,7 +471,7 @@ def fit_binary(
462
471
penalty_type ,
463
472
alpha ,
464
473
C ,
465
- est .l1_ratio ,
474
+ est ._get_l1_ratio () ,
466
475
dataset ,
467
476
validation_mask ,
468
477
est .early_stopping ,
@@ -993,7 +1002,11 @@ class SGDClassifier(BaseSGDClassifier):
993
1002
The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.
994
1003
l1_ratio=0 corresponds to L2 penalty, l1_ratio=1 to L1.
995
1004
Only used if `penalty` is 'elasticnet'.
996
- Values must be in the range `[0.0, 1.0]`.
1005
+ Values must be in the range `[0.0, 1.0]` or can be `None` if
1006
+ `penalty` is not `elasticnet`.
1007
+
1008
+ .. versionchanged:: 1.7
1009
+ `l1_ratio` can be `None` when `penalty` is not `elasticnet`.
997
1010
998
1011
fit_intercept : bool, default=True
999
1012
Whether the intercept should be estimated or not. If False, the
@@ -1194,7 +1207,7 @@ class SGDClassifier(BaseSGDClassifier):
1194
1207
** BaseSGDClassifier ._parameter_constraints ,
1195
1208
"penalty" : [StrOptions ({"l2" , "l1" , "elasticnet" }), None ],
1196
1209
"alpha" : [Interval (Real , 0 , None , closed = "left" )],
1197
- "l1_ratio" : [Interval (Real , 0 , 1 , closed = "both" )],
1210
+ "l1_ratio" : [Interval (Real , 0 , 1 , closed = "both" ), None ],
1198
1211
"power_t" : [Interval (Real , None , None , closed = "neither" )],
1199
1212
"epsilon" : [Interval (Real , 0 , None , closed = "left" )],
1200
1213
"learning_rate" : [
@@ -1695,7 +1708,7 @@ def _fit_regressor(
1695
1708
penalty_type ,
1696
1709
alpha ,
1697
1710
C ,
1698
- self .l1_ratio ,
1711
+ self ._get_l1_ratio () ,
1699
1712
dataset ,
1700
1713
validation_mask ,
1701
1714
self .early_stopping ,
@@ -1796,7 +1809,11 @@ class SGDRegressor(BaseSGDRegressor):
1796
1809
The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.
1797
1810
l1_ratio=0 corresponds to L2 penalty, l1_ratio=1 to L1.
1798
1811
Only used if `penalty` is 'elasticnet'.
1799
- Values must be in the range `[0.0, 1.0]`.
1812
+ Values must be in the range `[0.0, 1.0]` or can be `None` if
1813
+ `penalty` is not `elasticnet`.
1814
+
1815
+ .. versionchanged:: 1.7
1816
+ `l1_ratio` can be `None` when `penalty` is not `elasticnet`.
1800
1817
1801
1818
fit_intercept : bool, default=True
1802
1819
Whether the intercept should be estimated or not. If False, the
@@ -1976,7 +1993,7 @@ class SGDRegressor(BaseSGDRegressor):
1976
1993
** BaseSGDRegressor ._parameter_constraints ,
1977
1994
"penalty" : [StrOptions ({"l2" , "l1" , "elasticnet" }), None ],
1978
1995
"alpha" : [Interval (Real , 0 , None , closed = "left" )],
1979
- "l1_ratio" : [Interval (Real , 0 , 1 , closed = "both" )],
1996
+ "l1_ratio" : [Interval (Real , 0 , 1 , closed = "both" ), None ],
1980
1997
"power_t" : [Interval (Real , None , None , closed = "neither" )],
1981
1998
"learning_rate" : [
1982
1999
StrOptions ({"constant" , "optimal" , "invscaling" , "adaptive" }),
0 commit comments