11"""Xgboost pyspark integration submodule for estimator API."""
22
3- # pylint: disable=fixme, protected-access, no-member, invalid-name
3+ # pylint: disable=protected-access, no-member, invalid-name
44# pylint: disable=unused-argument, too-many-locals
55
6- import warnings
76from typing import Any , List , Optional , Type , Union
87
98import numpy as np
@@ -77,12 +76,6 @@ def set_param_attrs(attr_name: str, param: Param) -> None:
         set_param_attrs(name, param_obj)
 
 
-def _deprecated_use_gpu() -> None:
-    warnings.warn(
-        "`use_gpu` is deprecated since 2.0.0, use `device` instead", FutureWarning
-    )
-
-
 class SparkXGBRegressor(_SparkXGBEstimator):
     """SparkXGBRegressor is a PySpark ML estimator. It implements the XGBoost regression
     algorithm based on XGBoost python library, and it can be used in PySpark Pipeline
@@ -140,11 +133,6 @@ class SparkXGBRegressor(_SparkXGBEstimator):
     num_workers:
         How many XGBoost workers to be used to train.
         Each XGBoost worker corresponds to one spark task.
-    use_gpu:
-        .. deprecated:: 2.0.0
-
-            Use `device` instead.
-
     device:
 
         .. versionadded:: 2.0.0
@@ -214,7 +202,6 @@ def __init__( # pylint:disable=too-many-arguments
         weight_col: Optional[str] = None,
         base_margin_col: Optional[str] = None,
         num_workers: int = 1,
-        use_gpu: Optional[bool] = None,
         device: Optional[str] = None,
         force_repartition: bool = False,
         repartition_random_shuffle: bool = False,
@@ -225,8 +212,6 @@ def __init__( # pylint:disable=too-many-arguments
     ) -> None:
         super().__init__()
         input_kwargs = self._input_kwargs
-        if use_gpu:
-            _deprecated_use_gpu()
         self.setParams(**input_kwargs)
 
     @classmethod
@@ -327,11 +312,6 @@ class SparkXGBClassifier(_SparkXGBEstimator, HasProbabilityCol, HasRawPrediction
     num_workers:
         How many XGBoost workers to be used to train.
         Each XGBoost worker corresponds to one spark task.
-    use_gpu:
-        .. deprecated:: 2.0.0
-
-            Use `device` instead.
-
     device:
 
         .. versionadded:: 2.0.0
@@ -401,7 +381,6 @@ def __init__( # pylint:disable=too-many-arguments
         weight_col: Optional[str] = None,
         base_margin_col: Optional[str] = None,
         num_workers: int = 1,
-        use_gpu: Optional[bool] = None,
         device: Optional[str] = None,
         force_repartition: bool = False,
         repartition_random_shuffle: bool = False,
@@ -416,8 +395,6 @@ def __init__( # pylint:disable=too-many-arguments
         # binary or multinomial input dataset, and we need to remove the fixed default
         # param value as well to avoid causing ambiguity.
         input_kwargs = self._input_kwargs
-        if use_gpu:
-            _deprecated_use_gpu()
         self.setParams(**input_kwargs)
         self._setDefault(objective=None)
 
@@ -517,11 +494,6 @@ class SparkXGBRanker(_SparkXGBEstimator):
     num_workers:
         How many XGBoost workers to be used to train.
         Each XGBoost worker corresponds to one spark task.
-    use_gpu:
-        .. deprecated:: 2.0.0
-
-            Use `device` instead.
-
     device:
 
         .. versionadded:: 2.0.0
@@ -597,7 +569,6 @@ def __init__( # pylint:disable=too-many-arguments
         base_margin_col: Optional[str] = None,
         qid_col: Optional[str] = None,
         num_workers: int = 1,
-        use_gpu: Optional[bool] = None,
         device: Optional[str] = None,
         force_repartition: bool = False,
         repartition_random_shuffle: bool = False,
@@ -608,8 +579,6 @@ def __init__( # pylint:disable=too-many-arguments
     ) -> None:
         super().__init__()
         input_kwargs = self._input_kwargs
-        if use_gpu:
-            _deprecated_use_gpu()
         self.setParams(**input_kwargs)
 
     @classmethod
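
After this change, callers request GPU training through the `device` parameter rather than the removed `use_gpu` flag. A minimal usage sketch, assuming xgboost >= 2.0, an active SparkSession, and a training DataFrame `train_df` with `features` and `label` columns (these column names and the worker count are illustrative, not part of the diff):

from xgboost.spark import SparkXGBRegressor

# `device` replaces the removed `use_gpu` flag: "cuda" requests GPU workers,
# "cpu" keeps training on CPU.
regressor = SparkXGBRegressor(
    features_col="features",  # assumed feature column name
    label_col="label",        # assumed label column name
    num_workers=2,            # one Spark task per XGBoost worker
    device="cuda",            # previously: use_gpu=True
)
model = regressor.fit(train_df)  # train_df is an assumed Spark DataFrame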