Skip to content

Commit c2130d1

Browse files
committed
Add support for sklearn 1.7
Signed-off-by: Avi Shinnar <shinnar@us.ibm.com>
1 parent 6886400 commit c2130d1

File tree

10 files changed

+236
-8
lines changed

10 files changed

+236
-8
lines changed

lale/lib/autogen/elastic_net_cv.py

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -281,4 +281,31 @@ def predict(self, X):
281281
set_as_available=True,
282282
)
283283

284+
if sklearn_version >= version.Version("1.7"):
285+
286+
ElasticNetCV = ElasticNetCV.customize_schema(
287+
n_alphas={
288+
"anyOf": [
289+
{
290+
"type": "integer",
291+
"minimumForOptimizer": 100,
292+
"maximumForOptimizer": 101,
293+
"distribution": "uniform",
294+
"default": 100,
295+
"description": "Number of alphas along the regularization path, used for each l1_ratio.",
296+
},
297+
{"enum": ["deprecated"]},
298+
],
299+
"default": "deprecated",
300+
},
301+
set_as_available=True,
302+
)
303+
304+
if sklearn_version >= version.Version("1.9"):
305+
306+
ElasticNetCV = ElasticNetCV.customize_schema(
307+
n_alphas=None,
308+
set_as_available=True,
309+
)
310+
284311
set_docstrings(ElasticNetCV)

lale/lib/autogen/k_bins_discretizer.py

Lines changed: 23 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,11 @@ def __init__(self, **hyperparams):
1212
self._hyperparams = hyperparams
1313
self._wrapped_model = Op(**self._hyperparams)
1414

15-
def fit(self, X, y=None):
15+
def fit(self, X, y=None, **kwargs):
1616
if y is not None:
17-
self._wrapped_model.fit(X, y)
17+
self._wrapped_model.fit(X, y, **kwargs)
1818
else:
19-
self._wrapped_model.fit(X)
19+
self._wrapped_model.fit(X, **kwargs)
2020
return self
2121

2222
def transform(self, X):
@@ -159,4 +159,24 @@ def transform(self, X):
159159
set_as_available=True,
160160
)
161161

162+
if sklearn_version >= version.Version("1.7"):
163+
KBinsDiscretizer = KBinsDiscretizer.customize_schema(
164+
quantile_method={
165+
"enum": [
166+
"inverted_cdf",
167+
"averaged_inverted_cdf",
168+
"closest_observation",
169+
"interpolated_inverted_cdf",
170+
"hazen",
171+
"weibull",
172+
"linear",
173+
"median_unbiased",
174+
"normal_unbiased",
175+
],
176+
"default": "linear",
177+
"description": "Method to pass on to np.percentile calculation when using strategy='quantile'.",
178+
},
179+
set_as_available=True,
180+
)
181+
162182
set_docstrings(KBinsDiscretizer)

lale/lib/autogen/lasso_cv.py

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -261,4 +261,31 @@ def predict(self, X):
261261
set_as_available=True,
262262
)
263263

264+
if sklearn_version >= version.Version("1.7"):
265+
266+
LassoCV = LassoCV.customize_schema(
267+
n_alphas={
268+
"anyOf": [
269+
{
270+
"type": "integer",
271+
"minimumForOptimizer": 100,
272+
"maximumForOptimizer": 101,
273+
"distribution": "uniform",
274+
"default": 100,
275+
"description": "Number of alphas along the regularization path, used for each l1_ratio.",
276+
},
277+
{"enum": ["deprecated"]},
278+
],
279+
"default": "deprecated",
280+
},
281+
set_as_available=True,
282+
)
283+
284+
if sklearn_version >= version.Version("1.9"):
285+
286+
LassoCV = LassoCV.customize_schema(
287+
n_alphas=None,
288+
set_as_available=True,
289+
)
290+
264291
set_docstrings(LassoCV)

lale/lib/autogen/mlp_regressor.py

Lines changed: 15 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,20 @@
1-
from numpy import inf, nan
1+
from packaging import version
22
from sklearn.neural_network import MLPRegressor as Op
33

44
from lale.docstrings import set_docstrings
5-
from lale.operators import make_operator
5+
from lale.operators import make_operator, sklearn_version
66

77

88
class _MLPRegressorImpl:
99
def __init__(self, **hyperparams):
1010
self._hyperparams = hyperparams
1111
self._wrapped_model = Op(**self._hyperparams)
1212

13-
def fit(self, X, y=None):
13+
def fit(self, X, y=None, **kwargs):
1414
if y is not None:
15-
self._wrapped_model.fit(X, y)
15+
self._wrapped_model.fit(X, y, **kwargs)
1616
else:
17-
self._wrapped_model.fit(X)
17+
self._wrapped_model.fit(X, **kwargs)
1818
return self
1919

2020
def predict(self, X):
@@ -360,4 +360,14 @@ def predict(self, X):
360360
}
361361
MLPRegressor = make_operator(_MLPRegressorImpl, _combined_schemas)
362362

363+
if sklearn_version >= version.Version("1.7"):
364+
365+
MLPRegressor = MLPRegressor.customize_schema(
366+
loss={
367+
"description": "The loss function to use when training the weights.",
368+
"enum": ["squared_error", "poisson"],
369+
"default": "squared_error",
370+
},
371+
set_as_available=True,
372+
)
363373
set_docstrings(MLPRegressor)

lale/lib/autogen/multi_task_elastic_net_cv.py

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -245,4 +245,31 @@ def predict(self, X):
245245
# new: https://scikit-learn.org/1.2/modules/generated/sklearn.linear_model.MultiTaskElasticNetCV.html
246246
MultiTaskElasticNetCV = MultiTaskElasticNetCV.customize_schema(normalize=None)
247247

248+
if lale.operators.sklearn_version >= version.Version("1.7"):
249+
250+
MultiTaskElasticNetCV = MultiTaskElasticNetCV.customize_schema(
251+
n_alphas={
252+
"anyOf": [
253+
{
254+
"type": "integer",
255+
"minimumForOptimizer": 100,
256+
"maximumForOptimizer": 101,
257+
"distribution": "uniform",
258+
"default": 100,
259+
"description": "Number of alphas along the regularization path, used for each l1_ratio.",
260+
},
261+
{"enum": ["deprecated"]},
262+
],
263+
"default": "deprecated",
264+
},
265+
set_as_available=True,
266+
)
267+
268+
if lale.operators.sklearn_version >= version.Version("1.9"):
269+
270+
MultiTaskElasticNetCV = MultiTaskElasticNetCV.customize_schema(
271+
n_alphas=None,
272+
set_as_available=True,
273+
)
274+
248275
set_docstrings(MultiTaskElasticNetCV)

lale/lib/autogen/multi_task_lasso_cv.py

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -238,4 +238,31 @@ def predict(self, X):
238238
# new: https://scikit-learn.org/1.2/modules/generated/sklearn.linear_model.MultiTaskLassoCV.html
239239
MultiTaskLassoCV = MultiTaskLassoCV.customize_schema(normalize=None)
240240

241+
if lale.operators.sklearn_version >= version.Version("1.7"):
242+
243+
MultiTaskLassoCV = MultiTaskLassoCV.customize_schema(
244+
n_alphas={
245+
"anyOf": [
246+
{
247+
"type": "integer",
248+
"minimumForOptimizer": 100,
249+
"maximumForOptimizer": 101,
250+
"distribution": "uniform",
251+
"default": 100,
252+
"description": "Number of alphas along the regularization path, used for each l1_ratio.",
253+
},
254+
{"enum": ["deprecated"]},
255+
],
256+
"default": "deprecated",
257+
},
258+
set_as_available=True,
259+
)
260+
261+
if lale.operators.sklearn_version >= version.Version("1.9"):
262+
263+
MultiTaskLassoCV = MultiTaskLassoCV.customize_schema(
264+
n_alphas=None,
265+
set_as_available=True,
266+
)
267+
241268
set_docstrings(MultiTaskLassoCV)

lale/lib/sklearn/column_transformer.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717

1818
import lale.docstrings
1919
import lale.operators
20+
from lale.schemas import AnyOf, Bool, Enum
2021

2122
_hyperparams_schema = {
2223
"allOf": [
@@ -232,6 +233,11 @@
232233
set_as_available=True,
233234
)
234235

236+
if lale.operators.sklearn_version >= version.Version("1.5"):
237+
ColumnTransformer = ColumnTransformer.customize_schema(
238+
force_int_remainder_cols=Bool(default=True), set_as_available=True
239+
)
240+
235241
if lale.operators.sklearn_version >= version.Version("1.6"):
236242
ColumnTransformer = ColumnTransformer.customize_schema(
237243
verbose_feature_names_out={
@@ -256,5 +262,25 @@
256262
set_as_available=True,
257263
)
258264

265+
if lale.operators.sklearn_version >= version.Version("1.7"):
266+
ColumnTransformer = ColumnTransformer.customize_schema(
267+
force_int_remainder_cols=AnyOf(
268+
types=[
269+
Bool(default=False),
270+
Enum(
271+
["deprecated"],
272+
desc="This parameter is deprecated and will be removed in v1.9.",
273+
),
274+
],
275+
default="deprecated",
276+
),
277+
set_as_available=True,
278+
)
279+
280+
if lale.operators.sklearn_version >= version.Version("1.9"):
281+
ColumnTransformer = ColumnTransformer.customize_schema(
282+
force_int_remainder_cols=None,
283+
set_as_available=True,
284+
)
259285

260286
lale.docstrings.set_docstrings(ColumnTransformer)

lale/lib/sklearn/linear_regression.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,4 +179,17 @@
179179
set_as_available=True,
180180
)
181181

182+
183+
if lale.operators.sklearn_version >= version.Version("1.7"):
184+
LinearRegression = LinearRegression.customize_schema(
185+
tol={
186+
"type": "number",
187+
"minimumForOptimizer": 1e-08,
188+
"maximumForOptimizer": 0.01,
189+
"default": 1e-06,
190+
"description": "The precision of the solution (coef_) is determined by tol which specifies a different convergence criterion for the lsqr solver. tol is set as atol and btol of scipy.sparse.linalg.lsqr when fitting on sparse training data. This parameter has no effect when fitting on dense data.",
191+
},
192+
set_as_available=True,
193+
)
194+
182195
lale.docstrings.set_docstrings(LinearRegression)

lale/lib/sklearn/sgd_classifier.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -454,4 +454,29 @@
454454
set_as_available=True,
455455
)
456456

457+
if lale.operators.sklearn_version >= version.Version("1.7"):
458+
SGDClassifier = SGDClassifier.customize_schema(
459+
l1_ratio={
460+
"anyOf": [
461+
{
462+
"type": "number",
463+
"minimumForOptimizer": 1e-9,
464+
"maximumForOptimizer": 1.0,
465+
"distribution": "loguniform",
466+
"default": 0.15,
467+
"description": "The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.",
468+
},
469+
{"enum": [None]},
470+
],
471+
"default": 0.15,
472+
},
473+
constraint={
474+
"description": "l1_ratio can only be None if the penalty is not 'elasticnet'",
475+
"anyOf": [
476+
{"penalty": {"not": {"enum": ["elasticnet"]}}},
477+
{"l1_ratio": {"not": {"enum": [None]}}},
478+
],
479+
},
480+
set_as_available=True,
481+
)
457482
lale.docstrings.set_docstrings(SGDClassifier)

lale/lib/sklearn/sgd_regressor.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -380,4 +380,30 @@
380380
set_as_available=True,
381381
)
382382

383+
if lale.operators.sklearn_version >= version.Version("1.7"):
384+
SGDRegressor = SGDRegressor.customize_schema(
385+
l1_ratio={
386+
"anyOf": [
387+
{
388+
"type": "number",
389+
"minimumForOptimizer": 1e-9,
390+
"maximumForOptimizer": 1.0,
391+
"distribution": "loguniform",
392+
"default": 0.15,
393+
"description": "The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.",
394+
},
395+
{"enum": [None]},
396+
],
397+
"default": 0.15,
398+
},
399+
constraint={
400+
"description": "l1_ratio can only be None if the penalty is not 'elasticnet'",
401+
"anyOf": [
402+
{"penalty": {"not": {"enum": ["elasticnet"]}}},
403+
{"l1_ratio": {"not": {"enum": [None]}}},
404+
],
405+
},
406+
set_as_available=True,
407+
)
408+
383409
lale.docstrings.set_docstrings(SGDRegressor)

0 commit comments

Comments (0)