From c58c21d0a78ebba64f7fcc079330142f154dd88c Mon Sep 17 00:00:00 2001
From: Jiaming Yuan
Date: Thu, 20 Feb 2025 03:00:19 +0800
Subject: [PATCH] [doc] Mention the integer type for some parameters.

Python type annotation is not changed. We can use np.int32, but it's just
one implementation of int32; there are others.
---
 R-package/R/xgb.train.R     | 6 +++---
 R-package/man/xgb.params.Rd | 6 +++---
 R-package/man/xgboost.Rd    | 6 +++---
 doc/parameter.rst           | 8 ++++----
 4 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/R-package/R/xgb.train.R b/R-package/R/xgb.train.R
index 3bfb5e350acc..305a7513da38 100644
--- a/R-package/R/xgb.train.R
+++ b/R-package/R/xgb.train.R
@@ -486,7 +486,7 @@ xgb.train <- function(params = xgb.params(), data, nrounds, evals = list(),
 #'   range: \eqn{[0, \infty)}
 #'
 #'   Note: should only pass one of `gamma` or `min_split_loss`. Both refer to the same parameter and there's thus no difference between one or the other.
-#' @param max_depth (for Tree Booster) (default=6)
+#' @param max_depth (for Tree Booster) (default=6, type=int32)
 #'   Maximum depth of a tree. Increasing this value will make the model more complex and more likely to overfit. 0 indicates no limit on depth. Beware that XGBoost aggressively consumes memory when training a deep tree. `"exact"` tree method requires non-zero value.
 #'
 #'   range: \eqn{[0, \infty)}
@@ -579,9 +579,9 @@ xgb.train <- function(params = xgb.params(), data, nrounds, evals = list(),
 #'   - Choices: `"depthwise"`, `"lossguide"`
 #'     - `"depthwise"`: split at nodes closest to the root.
 #'     - `"lossguide"`: split at nodes with highest loss change.
-#' @param max_leaves (for Tree Booster) (default=0)
+#' @param max_leaves (for Tree Booster) (default=0, type=int32)
 #'   Maximum number of nodes to be added. Not used by `"exact"` tree method.
-#' @param max_bin (for Tree Booster) (default=256)
+#' @param max_bin (for Tree Booster) (default=256, type=int32)
 #'   - Only used if `tree_method` is set to `"hist"` or `"approx"`.
 #'   - Maximum number of discrete bins to bucket continuous features.
 #'   - Increasing this number improves the optimality of splits at the cost of higher computation time.
diff --git a/R-package/man/xgb.params.Rd b/R-package/man/xgb.params.Rd
index e42d42fe810f..18a6eb8920db 100644
--- a/R-package/man/xgb.params.Rd
+++ b/R-package/man/xgb.params.Rd
@@ -137,7 +137,7 @@ range: \eqn{[0, \infty)}
 
 Note: should only pass one of \code{gamma} or \code{min_split_loss}. Both refer to the same parameter and there's thus no difference between one or the other.}
 
-\item{max_depth}{(for Tree Booster) (default=6)
+\item{max_depth}{(for Tree Booster) (default=6, type=int32)
 Maximum depth of a tree. Increasing this value will make the model more complex and more likely to overfit. 0 indicates no limit on depth. Beware that XGBoost aggressively consumes memory when training a deep tree. \code{"exact"} tree method requires non-zero value.
 
 range: \eqn{[0, \infty)}}
@@ -264,10 +264,10 @@ This is a parameter of the \code{"refresh"} updater. When this flag is 1, tree l
 }
 }}
 
-\item{max_leaves}{(for Tree Booster) (default=0)
+\item{max_leaves}{(for Tree Booster) (default=0, type=int32)
 Maximum number of nodes to be added. Not used by \code{"exact"} tree method.}
 
-\item{max_bin}{(for Tree Booster) (default=256)
+\item{max_bin}{(for Tree Booster) (default=256, type=int32)
 \itemize{
 \item Only used if \code{tree_method} is set to \code{"hist"} or \code{"approx"}.
 \item Maximum number of discrete bins to bucket continuous features.
diff --git a/R-package/man/xgboost.Rd b/R-package/man/xgboost.Rd
index 690019a99ae2..2c971d573ce6 100644
--- a/R-package/man/xgboost.Rd
+++ b/R-package/man/xgboost.Rd
@@ -169,7 +169,7 @@ The following values are \bold{NOT} supported by \code{xgboost}, but are support
 
 Note that the number of default boosting rounds here is not automatically tuned, and different problems will have vastly different optimal numbers of boosting rounds.}
 
-\item{max_depth}{(for Tree Booster) (default=6)
+\item{max_depth}{(for Tree Booster) (default=6, type=int32)
 Maximum depth of a tree. Increasing this value will make the model more complex and more likely to overfit. 0 indicates no limit on depth. Beware that XGBoost aggressively consumes memory when training a deep tree. \code{"exact"} tree method requires non-zero value.
 
 range: \eqn{[0, \infty)}}
@@ -365,14 +365,14 @@ for more information.}
 \item range: \eqn{[0, \infty)}
 }}
 
-\item{max_bin}{(for Tree Booster) (default=256)
+\item{max_bin}{(for Tree Booster) (default=256, type=int32)
 \itemize{
 \item Only used if \code{tree_method} is set to \code{"hist"} or \code{"approx"}.
 \item Maximum number of discrete bins to bucket continuous features.
 \item Increasing this number improves the optimality of splits at the cost of higher computation time.
 }}
 
-\item{max_leaves}{(for Tree Booster) (default=0)
+\item{max_leaves}{(for Tree Booster) (default=0, type=int32)
 Maximum number of nodes to be added. Not used by \code{"exact"} tree method.}
 
 \item{booster}{(default= \code{"gbtree"})
diff --git a/doc/parameter.rst b/doc/parameter.rst
index 5de89ea06433..bdb47fedce08 100644
--- a/doc/parameter.rst
+++ b/doc/parameter.rst
@@ -39,7 +39,7 @@ The following parameters can be set in the global scope, using :py:func:`xgboost
 ******************
 General Parameters
 ******************
-* ``booster`` [default= ``gbtree`` ]
+* ``booster`` [default= ``gbtree``]
 
   - Which booster to use. Can be ``gbtree``, ``gblinear`` or ``dart``; ``gbtree`` and ``dart`` use tree based models while ``gblinear`` uses linear functions.
 
@@ -90,7 +90,7 @@ Parameters for Tree Booster
   - Minimum loss reduction required to make a further partition on a leaf node of the tree. The larger ``gamma`` is, the more conservative the algorithm will be. Note that a tree where no splits were made might still contain a single terminal node with a non-zero score.
   - range: [0,∞]
 
-* ``max_depth`` [default=6]
+* ``max_depth`` [default=6, type=int32]
 
   - Maximum depth of a tree. Increasing this value will make the model more complex and more likely to overfit. 0 indicates no limit on depth. Beware that XGBoost aggressively consumes memory when training a deep tree. ``exact`` tree method requires non-zero value.
   - range: [0,∞]
@@ -198,11 +198,11 @@ Parameters for Tree Booster
     - ``depthwise``: split at nodes closest to the root.
     - ``lossguide``: split at nodes with highest loss change.
 
-* ``max_leaves`` [default=0]
+* ``max_leaves`` [default=0, type=int32]
 
   - Maximum number of nodes to be added. Not used by ``exact`` tree method.
 
-* ``max_bin``, [default=256]
+* ``max_bin``, [default=256, type=int32]
 
   - Only used if ``tree_method`` is set to ``hist`` or ``approx``.
  - Maximum number of discrete bins to bucket continuous features.
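
As a quick usage sketch (not part of the patch) of the point made in the commit message: the parameters now documented as int32 accept anything that converts cleanly to a 32-bit integer, so a plain Python int is fine and numpy.int32 is merely one possible implementation. The scikit-learn toy dataset below is assumed only to keep the example self-contained.

    # illustration only: passing the int32-typed tree parameters documented above
    import numpy as np
    import xgboost as xgb
    from sklearn.datasets import make_regression  # assumption: scikit-learn available for toy data

    X, y = make_regression(n_samples=200, n_features=10, random_state=0)
    dtrain = xgb.DMatrix(X, label=y)

    params = {
        "max_depth": 6,             # plain Python int works
        "max_leaves": np.int32(0),  # np.int32 also works, but is not required
        "max_bin": 256,
        "tree_method": "hist",      # max_bin only applies to "hist"/"approx"
    }
    booster = xgb.train(params, dtrain, num_boost_round=10)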