
Commit 82c536e

[pyspark] Remove the deprecated use_gpu parameter. (#11554)
1 parent e0d72ac commit 82c536e

File tree: 5 files changed, +53 -101 lines

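With `use_gpu` removed, GPU training in the PySpark estimators is selected only through the `device` parameter. A minimal before/after sketch of the user-facing change, mirroring the test update in this commit (the worker count is illustrative):

    from xgboost.spark import SparkXGBClassifier

    # Before this commit (the keyword is no longer accepted):
    # clf = SparkXGBClassifier(use_gpu=True, num_workers=4)

    # After this commit: request GPU execution via `device`.
    clf = SparkXGBClassifier(device="cuda", num_workers=4)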

python-package/xgboost/spark/core.py

Lines changed: 1 addition & 15 deletions
@@ -123,7 +123,6 @@
     "qid_col",
     "repartition_random_shuffle",
     "pred_contrib_col",
-    "use_gpu",
     "launch_tracker_on_driver",
     "coll_cfg",
 ]
@@ -218,16 +217,6 @@ class _SparkXGBParams(
         ),
         TypeConverters.toString,
     )
-    use_gpu = Param(
-        Params._dummy(),
-        "use_gpu",
-        (
-            "Deprecated, use `device` instead. A boolean variable. Set use_gpu=true "
-            "if the executors are running on GPU instances. Currently, only one GPU per"
-            " task is supported."
-        ),
-        TypeConverters.toBoolean,
-    )
     force_repartition = Param(
         Params._dummy(),
         "force_repartition",
@@ -503,9 +492,7 @@ def _validate_params(self) -> None:
     def _run_on_gpu(self) -> bool:
         """If train or transform on the gpu according to the parameters"""

-        return use_cuda(self.getOrDefault(self.device)) or self.getOrDefault(
-            self.use_gpu
-        )
+        return use_cuda(self.getOrDefault(self.device))

     def _col_is_defined_not_empty(self, param: "Param[str]") -> bool:
         return self.isDefined(param) and self.getOrDefault(param) not in (None, "")
@@ -628,7 +615,6 @@ def __init__(self) -> None:
         self._setDefault(
             num_workers=1,
             device="cpu",
-            use_gpu=False,
             force_repartition=False,
             repartition_random_shuffle=False,
             feature_names=None,
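After this change `_run_on_gpu` consults only the `device` parameter: training or transform runs on the GPU exactly when `use_cuda(device)` is true, with no fallback to a separate boolean flag. A self-contained sketch of that decision (the stand-in predicate below is hypothetical; the exact device strings `use_cuda` accepts are an assumption):

    def run_on_gpu(device: str) -> bool:
        # Hypothetical stand-in for xgboost.spark's use_cuda() check; assumes CUDA
        # is requested via strings such as "cuda", "cuda:0", or "gpu".
        return device is not None and (device == "gpu" or device.startswith("cuda"))

    assert run_on_gpu("cuda")
    assert not run_on_gpu("cpu")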

python-package/xgboost/spark/estimator.py

Lines changed: 1 addition & 32 deletions
@@ -1,9 +1,8 @@
 """Xgboost pyspark integration submodule for estimator API."""

-# pylint: disable=fixme, protected-access, no-member, invalid-name
+# pylint: disable=protected-access, no-member, invalid-name
 # pylint: disable=unused-argument, too-many-locals

-import warnings
 from typing import Any, List, Optional, Type, Union

 import numpy as np
@@ -77,12 +76,6 @@ def set_param_attrs(attr_name: str, param: Param) -> None:
         set_param_attrs(name, param_obj)


-def _deprecated_use_gpu() -> None:
-    warnings.warn(
-        "`use_gpu` is deprecated since 2.0.0, use `device` instead", FutureWarning
-    )
-
-
 class SparkXGBRegressor(_SparkXGBEstimator):
     """SparkXGBRegressor is a PySpark ML estimator. It implements the XGBoost regression
     algorithm based on XGBoost python library, and it can be used in PySpark Pipeline
@@ -140,11 +133,6 @@ class SparkXGBRegressor(_SparkXGBEstimator):
     num_workers:
         How many XGBoost workers to be used to train.
         Each XGBoost worker corresponds to one spark task.
-    use_gpu:
-        .. deprecated:: 2.0.0
-
-        Use `device` instead.
-
     device:

         .. versionadded:: 2.0.0
@@ -214,7 +202,6 @@ def __init__( # pylint:disable=too-many-arguments
         weight_col: Optional[str] = None,
         base_margin_col: Optional[str] = None,
         num_workers: int = 1,
-        use_gpu: Optional[bool] = None,
         device: Optional[str] = None,
         force_repartition: bool = False,
         repartition_random_shuffle: bool = False,
@@ -225,8 +212,6 @@ def __init__( # pylint:disable=too-many-arguments
     ) -> None:
         super().__init__()
         input_kwargs = self._input_kwargs
-        if use_gpu:
-            _deprecated_use_gpu()
         self.setParams(**input_kwargs)

     @classmethod
@@ -327,11 +312,6 @@ class SparkXGBClassifier(_SparkXGBEstimator, HasProbabilityCol, HasRawPrediction
     num_workers:
         How many XGBoost workers to be used to train.
         Each XGBoost worker corresponds to one spark task.
-    use_gpu:
-        .. deprecated:: 2.0.0
-
-        Use `device` instead.
-
     device:

         .. versionadded:: 2.0.0
@@ -401,7 +381,6 @@ def __init__( # pylint:disable=too-many-arguments
         weight_col: Optional[str] = None,
         base_margin_col: Optional[str] = None,
         num_workers: int = 1,
-        use_gpu: Optional[bool] = None,
         device: Optional[str] = None,
         force_repartition: bool = False,
         repartition_random_shuffle: bool = False,
@@ -416,8 +395,6 @@ def __init__( # pylint:disable=too-many-arguments
         # binary or multinomial input dataset, and we need to remove the fixed default
         # param value as well to avoid causing ambiguity.
         input_kwargs = self._input_kwargs
-        if use_gpu:
-            _deprecated_use_gpu()
         self.setParams(**input_kwargs)
         self._setDefault(objective=None)

@@ -517,11 +494,6 @@ class SparkXGBRanker(_SparkXGBEstimator):
     num_workers:
         How many XGBoost workers to be used to train.
         Each XGBoost worker corresponds to one spark task.
-    use_gpu:
-        .. deprecated:: 2.0.0
-
-        Use `device` instead.
-
     device:

         .. versionadded:: 2.0.0
@@ -597,7 +569,6 @@ def __init__( # pylint:disable=too-many-arguments
         base_margin_col: Optional[str] = None,
         qid_col: Optional[str] = None,
         num_workers: int = 1,
-        use_gpu: Optional[bool] = None,
         device: Optional[str] = None,
         force_repartition: bool = False,
         repartition_random_shuffle: bool = False,
@@ -608,8 +579,6 @@ def __init__( # pylint:disable=too-many-arguments
     ) -> None:
         super().__init__()
         input_kwargs = self._input_kwargs
-        if use_gpu:
-            _deprecated_use_gpu()
         self.setParams(**input_kwargs)

     @classmethod
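The constructors of all three estimators drop the `use_gpu` keyword, so device placement is now configured the same way everywhere. A hedged construction sketch (column names and worker counts are illustrative):

    from xgboost.spark import SparkXGBClassifier, SparkXGBRanker, SparkXGBRegressor

    # One pattern for every estimator: choose the device explicitly.
    regressor = SparkXGBRegressor(device="cuda", num_workers=2, label_col="label")
    classifier = SparkXGBClassifier(device="cuda", num_workers=2, label_col="label")
    ranker = SparkXGBRanker(device="cuda", num_workers=2, qid_col="qid", label_col="label")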

python-package/xgboost/spark/params.py

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ class HasBaseMarginCol(Params):
 class HasFeaturesCols(Params):
     """
     Mixin for param features_cols: a list of feature column names.
-    This parameter is taken effect only when use_gpu is enabled.
+    This parameter is taken effect only when GPU is enabled.
     """

     features_cols = Param(
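The `features_cols` docstring now states the GPU requirement in terms of the device setting rather than the removed `use_gpu` flag: multiple raw feature columns only take effect when running on GPU. A usage sketch modeled on the GPU test below (column names are illustrative; it assumes a list passed as `features_col` is routed to `features_cols`):

    from xgboost.spark import SparkXGBClassifier

    # On GPU the estimator can read several numeric columns directly,
    # without first assembling them into a single vector column.
    clf = SparkXGBClassifier(
        features_col=["sepal_length", "sepal_width", "petal_length", "petal_width"],
        device="cuda",
        num_workers=2,
    )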

tests/test_distributed/test_gpu_with_spark/test_gpu_spark.py

Lines changed: 1 addition & 1 deletion
@@ -208,7 +208,7 @@ def test_cv_sparkxgb_classifier_feature_cols_with_gpu(spark_iris_dataset_feature
     assert f1 >= 0.97

     clf = SparkXGBClassifier(
-        features_col=feature_names, use_gpu=True, num_workers=num_workers
+        features_col=feature_names, device="cuda", num_workers=num_workers
     )
     grid = ParamGridBuilder().addGrid(clf.max_depth, [6, 8]).build()
     evaluator = MulticlassClassificationEvaluator(metricName="f1")
