Commit 0d3da98

Require isort on all Python files. (#8420)

1 parent bf8de22 commit 0d3da98

69 files changed: +278 -175 lines
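Every hunk below applies the same import-grouping pattern that isort enforces: standard-library imports first, third-party packages second, and xgboost in a trailing section of its own, each group alphabetized and separated by a blank line. The sketch below illustrates that layout using module names taken from the demos in this commit; it assumes xgboost is configured as the first-party section, since the actual isort configuration is not part of this diff.

# Sketch of the import order the diffs below converge on (assumed isort settings).
# Group 1: standard library, alphabetized.
import argparse
import os

# Group 2: third-party packages.
import numpy as np
from sklearn.datasets import load_svmlight_file

# Group 3: the project's own package, kept in a separate trailing group.
import xgboost as xgb

Compliance can be checked locally with something like `python -m isort --check-only <files>`; the exact CI invocation added by this commit is not shown in this excerpt.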

Diff for: demo/CLI/binary_classification/mknfold.py (+1, -1)

@@ -1,7 +1,7 @@
 #!/usr/bin/env python3

-import sys
 import random
+import sys

 if len(sys.argv) < 2:
     print ('Usage:<filename> <k> [nfold = 5]')

Diff for: demo/CLI/regression/mknfold.py (+1, -1)

@@ -1,7 +1,7 @@
 #!/usr/bin/env python3

-import sys
 import random
+import sys

 if len(sys.argv) < 2:
     print('Usage:<filename> <k> [nfold = 5]')

Diff for: demo/CLI/yearpredMSD/csv2libsvm.py (+1)

@@ -1,6 +1,7 @@
 #!/usr/bin/env python3

 import sys
+
 fo = open(sys.argv[2], 'w')

 for l in open(sys.argv[1]):

Diff for: demo/aft_survival/aft_survival_demo.py (+4, -2)

@@ -6,9 +6,11 @@
 """

 import os
-from sklearn.model_selection import ShuffleSplit
-import pandas as pd
+
 import numpy as np
+import pandas as pd
+from sklearn.model_selection import ShuffleSplit
+
 import xgboost as xgb

 # The Veterans' Administration Lung Cancer Trial

Diff for: demo/aft_survival/aft_survival_demo_with_optuna.py (+4, -3)

@@ -6,11 +6,12 @@
 using Optuna to tune hyperparameters

 """
-from sklearn.model_selection import ShuffleSplit
-import pandas as pd
 import numpy as np
-import xgboost as xgb
 import optuna
+import pandas as pd
+from sklearn.model_selection import ShuffleSplit
+
+import xgboost as xgb

 # The Veterans' Administration Lung Cancer Trial
 # The Statistical Analysis of Failure Time Data by Kalbfleisch J. and Prentice R (1980)

Diff for: demo/aft_survival/aft_survival_viz_demo.py (+2, -1)

@@ -6,9 +6,10 @@
 model starts out as a flat line and evolves into a step function in order to account for
 all ranged labels.
 """
+import matplotlib.pyplot as plt
 import numpy as np
+
 import xgboost as xgb
-import matplotlib.pyplot as plt

 plt.rcParams.update({'font.size': 13})

Diff for: demo/dask/cpu_survival.py (+6, -4)

@@ -4,12 +4,14 @@

 """

-import xgboost as xgb
 import os
-from xgboost.dask import DaskDMatrix
+
 import dask.dataframe as dd
-from dask.distributed import Client
-from dask.distributed import LocalCluster
+from dask.distributed import Client, LocalCluster
+from xgboost.dask import DaskDMatrix
+
+import xgboost as xgb
+

 def main(client):
     # Load an example survival data from CSV into a Dask data frame.

Diff for: demo/dask/cpu_training.py (+4, -4)

@@ -3,11 +3,11 @@
 ====================================

 """
-import xgboost as xgb
-from xgboost.dask import DaskDMatrix
-from dask.distributed import Client
-from dask.distributed import LocalCluster
 from dask import array as da
+from dask.distributed import Client, LocalCluster
+from xgboost.dask import DaskDMatrix
+
+import xgboost as xgb


 def main(client):

Diff for: demo/dask/dask_callbacks.py (+4, -4)

@@ -3,12 +3,12 @@
 ====================================
 """
 import numpy as np
-import xgboost as xgb
-from xgboost.dask import DaskDMatrix
-from dask.distributed import Client
-from dask.distributed import LocalCluster
+from dask.distributed import Client, LocalCluster
 from dask_ml.datasets import make_regression
 from dask_ml.model_selection import train_test_split
+from xgboost.dask import DaskDMatrix
+
+import xgboost as xgb


 def probability_for_going_backward(epoch):

Diff for: demo/dask/gpu_training.py (+4, -3)

@@ -2,14 +2,15 @@
 Example of training with Dask on GPU
 ====================================
 """
-from dask_cuda import LocalCUDACluster
 import dask_cudf
-from dask.distributed import Client
 from dask import array as da
 from dask import dataframe as dd
+from dask.distributed import Client
+from dask_cuda import LocalCUDACluster
+from xgboost.dask import DaskDMatrix
+
 import xgboost as xgb
 from xgboost import dask as dxgb
-from xgboost.dask import DaskDMatrix


 def using_dask_matrix(client: Client, X, y):

Diff for: demo/dask/sklearn_cpu_training.py (+2, -2)

@@ -2,9 +2,9 @@
 Use scikit-learn regressor interface with CPU histogram tree method
 ===================================================================
 """
-from dask.distributed import Client
-from dask.distributed import LocalCluster
 from dask import array as da
+from dask.distributed import Client, LocalCluster
+
 import xgboost

Diff for: demo/dask/sklearn_gpu_training.py (+3, -1)

@@ -3,10 +3,12 @@
 ===================================================================
 """

+from dask import array as da
 from dask.distributed import Client
+
 # It's recommended to use dask_cuda for GPU assignment
 from dask_cuda import LocalCUDACluster
-from dask import array as da
+
 import xgboost

Diff for: demo/gpu_acceleration/cover_type.py (+4, -2)

@@ -1,7 +1,9 @@
-import xgboost as xgb
+import time
+
 from sklearn.datasets import fetch_covtype
 from sklearn.model_selection import train_test_split
-import time
+
+import xgboost as xgb

 # Fetch dataset using sklearn
 cov = fetch_covtype()

Diff for: demo/guide-python/basic_walkthrough.py (+4, -3)

@@ -9,13 +9,14 @@
 See :doc:`/python/python_intro` and :doc:`/tutorials/index` for other references.

 """
-import numpy as np
-import pickle
-import xgboost as xgb
 import os
+import pickle

+import numpy as np
 from sklearn.datasets import load_svmlight_file

+import xgboost as xgb
+
 # Make sure the demo knows where to load the data.
 CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
 XGBOOST_ROOT_DIR = os.path.dirname(os.path.dirname(CURRENT_DIR))

Diff for: demo/guide-python/boost_from_prediction.py (+1, -1)

@@ -3,8 +3,8 @@
 =================================
 """
 import os
-import xgboost as xgb

+import xgboost as xgb

 CURRENT_DIR = os.path.dirname(__file__)
 dtrain = xgb.DMatrix(os.path.join(CURRENT_DIR, '../data/agaricus.txt.train'))

Diff for: demo/guide-python/callbacks.py (+6, -4)

@@ -4,14 +4,16 @@

 .. versionadded:: 1.3.0
 '''
-import xgboost as xgb
-import tempfile
+import argparse
 import os
+import tempfile
+
 import numpy as np
+from matplotlib import pyplot as plt
 from sklearn.datasets import load_breast_cancer
 from sklearn.model_selection import train_test_split
-from matplotlib import pyplot as plt
-import argparse
+
+import xgboost as xgb


 class Plotting(xgb.callback.TrainingCallback):

Diff for: demo/guide-python/continuation.py (+5, -3)

@@ -3,11 +3,13 @@
 ==============================
 """

-from sklearn.datasets import load_breast_cancer
-import xgboost
+import os
 import pickle
 import tempfile
-import os
+
+from sklearn.datasets import load_breast_cancer
+
+import xgboost


 def training_continuation(tmpdir: str, use_pickle: bool) -> None:

Diff for: demo/guide-python/cross_validation.py (+2)

@@ -3,7 +3,9 @@
 ===============================
 """
 import os
+
 import numpy as np
+
 import xgboost as xgb

 # load data in do training

Diff for: demo/guide-python/custom_rmsle.py (+6, -4)

@@ -14,14 +14,16 @@
 compare its performance with standard squared error.

 """
-import numpy as np
-import xgboost as xgb
-from typing import Tuple, Dict, List
-from time import time
 import argparse
+from time import time
+from typing import Dict, List, Tuple
+
 import matplotlib
+import numpy as np
 from matplotlib import pyplot as plt

+import xgboost as xgb
+
 # shape of generated data.
 kRows = 4096
 kCols = 16

Diff for: demo/guide-python/custom_softmax.py (+4, -2)

@@ -10,10 +10,12 @@

 '''

+import argparse
+
 import numpy as np
-import xgboost as xgb
 from matplotlib import pyplot as plt
-import argparse
+
+import xgboost as xgb

 np.random.seed(1994)
Diff for: demo/guide-python/evals_result.py (+1)

@@ -3,6 +3,7 @@
 ======================================================
 """
 import os
+
 import xgboost as xgb

 CURRENT_DIR = os.path.dirname(__file__)
CURRENT_DIR = os.path.dirname(__file__)

Diff for: demo/guide-python/external_memory.py

+5-3
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,13 @@
1212
1313
"""
1414
import os
15-
import xgboost
16-
from typing import Callable, List, Tuple
17-
from sklearn.datasets import make_regression
1815
import tempfile
16+
from typing import Callable, List, Tuple
17+
1918
import numpy as np
19+
from sklearn.datasets import make_regression
20+
21+
import xgboost
2022

2123

2224
def make_batches(

Diff for: demo/guide-python/feature_weights.py (+4, -2)

@@ -5,10 +5,12 @@
 .. versionadded:: 1.3.0
 '''

+import argparse
+
 import numpy as np
-import xgboost
 from matplotlib import pyplot as plt
+
+import xgboost


 def main(args):

Diff for: demo/guide-python/gamma_regression.py (+2, -1)

@@ -2,9 +2,10 @@
 Demo for gamma regression
 =========================
 """
-import xgboost as xgb
 import numpy as np

+import xgboost as xgb
+
 # this script demonstrates how to fit gamma regression model (with log link function)
 # in xgboost, before running the demo you need to generate the autoclaims dataset
 # by running gen_autoclaims.R located in xgboost/demo/data.

Diff for: demo/guide-python/generalized_linear_model.py (+2)

@@ -3,7 +3,9 @@
 ============
 """
 import os
+
 import xgboost as xgb
+
 ##
 # this script demonstrate how to fit generalized linear model in xgboost
 # basically, we are using linear model, instead of tree for our boosters

Diff for: demo/guide-python/multioutput_regression.py (+2, -1)

@@ -10,10 +10,11 @@
 """

 import argparse
-from typing import Dict, Tuple, List
+from typing import Dict, List, Tuple

 import numpy as np
 from matplotlib import pyplot as plt
+
 import xgboost as xgb

Diff for: demo/guide-python/predict_first_ntree.py (+3, -1)

@@ -3,10 +3,12 @@
 =========================================
 """
 import os
+
 import numpy as np
-import xgboost as xgb
 from sklearn.datasets import load_svmlight_file

+import xgboost as xgb
+
 CURRENT_DIR = os.path.dirname(__file__)
 train = os.path.join(CURRENT_DIR, "../data/agaricus.txt.train")
 test = os.path.join(CURRENT_DIR, "../data/agaricus.txt.test")

Diff for: demo/guide-python/predict_leaf_indices.py (+1)

@@ -3,6 +3,7 @@
 =============================
 """
 import os
+
 import xgboost as xgb

 # load data in do training

Diff for: demo/guide-python/quantile_data_iterator.py (+2, -1)

@@ -17,10 +17,11 @@

 '''

-import xgboost
 import cupy
 import numpy

+import xgboost
+
 COLS = 64
 ROWS_PER_BATCH = 1000 # data is splited by rows
 BATCHES = 32