|
38 | 38 | #' (based on C++ code), it starts at 0 (as in C/C++ or Python) instead of 1 (usual in R). |
39 | 39 | #' |
40 | 40 | #' @examples |
41 | | -#' |
42 | | -#' # binomial classification using "gbtree": |
43 | | -#' data(agaricus.train, package = "xgboost") |
44 | | -#' |
45 | | -#' bst <- xgb.train( |
46 | | -#' data = xgb.DMatrix(agaricus.train$data, label = agaricus.train$label), |
47 | | -#' nrounds = 2, |
48 | | -#' params = xgb.params( |
49 | | -#' max_depth = 2, |
50 | | -#' nthread = 2, |
51 | | -#' objective = "binary:logistic" |
52 | | -#' ) |
| 41 | +#' # binary classification using "gbtree": |
| 42 | +#' data("ToothGrowth") |
| 43 | +#' x <- ToothGrowth[, c("len", "dose")] |
| 44 | +#' y <- ToothGrowth$supp |
| 45 | +#' model_tree_binary <- xgboost( |
| 46 | +#' x, y, |
| 47 | +#' nrounds = 5L, |
| 48 | +#' nthreads = 1L, |
| 49 | +#' booster = "gbtree", |
| 50 | +#' max_depth = 2L |
53 | 51 | #' ) |
54 | | -#' |
55 | | -#' xgb.importance(model = bst) |
56 | | -#' |
57 | | -#' # binomial classification using "gblinear": |
58 | | -#' bst <- xgb.train( |
59 | | -#' data = xgb.DMatrix(agaricus.train$data, label = agaricus.train$label), |
60 | | -#' nrounds = 20, |
61 | | -#' params = xgb.params( |
62 | | -#' booster = "gblinear", |
63 | | -#' learning_rate = 0.3, |
64 | | -#' nthread = 1, |
65 | | -#' objective = "binary:logistic" |
66 | | -#' ) |
| 52 | +#' xgb.importance(model_tree_binary) |
| 53 | +#' |
| 54 | +#' # binary classification using "gblinear": |
| 55 | +#' model_linear_binary <- xgboost(
| 56 | +#' x, y,
| 57 | +#' nrounds = 5L,
| 58 | +#' nthreads = 1L,
| 59 | +#' booster = "gblinear",
| 60 | +#' learning_rate = 0.3
67 | 61 | #' )
68 | | -#'
69 | | -#' xgb.importance(model = bst)
70 | | -#'
71 | | -#' # multiclass classification using "gbtree":
72 | | -#' nclass <- 3
73 | | -#' nrounds <- 10
74 | | -#' mbst <- xgb.train(
75 | | -#' data = xgb.DMatrix(
76 | | -#' as.matrix(iris[, -5]),
77 | | -#' label = as.numeric(iris$Species) - 1
78 | | -#' ),
79 | | -#' nrounds = nrounds,
80 | | -#' params = xgb.params(
81 | | -#' max_depth = 3,
82 | | -#' nthread = 2,
83 | | -#' objective = "multi:softprob",
84 | | -#' num_class = nclass
85 | | -#' )
| 62 | +#' xgb.importance(model_linear_binary)
| 63 | +#' |
| 64 | +#' # multi-class classification using "gbtree": |
| 65 | +#' data("iris") |
| 66 | +#' x <- iris[, c("Sepal.Length", "Sepal.Width", "Petal.Length", "Petal.Width")] |
| 67 | +#' y <- iris$Species |
| 68 | +#' model_tree_multi <- xgboost( |
| 69 | +#' x, y, |
| 70 | +#' nrounds = 5L, |
| 71 | +#' nthreads = 1L, |
| 72 | +#' booster = "gbtree", |
| 73 | +#' max_depth = 3L
86 | 74 | #' ) |
87 | | -#' |
88 | 75 | #' # all classes clumped together: |
89 | | -#' xgb.importance(model = mbst) |
90 | | -#' |
| 76 | +#' xgb.importance(model_tree_multi) |
91 | 77 | #' # inspect importances separately for each class: |
| 78 | +#' num_classes <- 3L |
| 79 | +#' nrounds <- 5L |
92 | 80 | #' xgb.importance( |
93 | | -#' model = mbst, trees = seq(from = 1, by = nclass, length.out = nrounds) |
| 81 | +#' model_tree_multi, trees = seq(from = 1, by = num_classes, length.out = nrounds) |
94 | 82 | #' ) |
95 | 83 | #' xgb.importance( |
96 | | -#' model = mbst, trees = seq(from = 2, by = nclass, length.out = nrounds) |
| 84 | +#' model_tree_multi, trees = seq(from = 2, by = num_classes, length.out = nrounds) |
97 | 85 | #' ) |
98 | 86 | #' xgb.importance( |
99 | | -#' model = mbst, trees = seq(from = 3, by = nclass, length.out = nrounds) |
| 87 | +#' model_tree_multi, trees = seq(from = 3, by = num_classes, length.out = nrounds) |
100 | 88 | #' ) |
101 | 89 | #' |
102 | | -#' # multiclass classification using "gblinear": |
103 | | -#' mbst <- xgb.train( |
104 | | -#' data = xgb.DMatrix( |
105 | | -#' scale(as.matrix(iris[, -5])), |
106 | | -#' label = as.numeric(iris$Species) - 1 |
107 | | -#' ), |
108 | | -#' nrounds = 15, |
109 | | -#' params = xgb.params( |
110 | | -#' booster = "gblinear", |
111 | | -#' learning_rate = 0.2, |
112 | | -#' nthread = 1, |
113 | | -#' objective = "multi:softprob", |
114 | | -#' num_class = nclass |
115 | | -#' ) |
| 90 | +#' # multi-class classification using "gblinear": |
| 91 | +#' model_linear_multi <- xgboost( |
| 92 | +#' x, y, |
| 93 | +#' nrounds = 5L, |
| 94 | +#' nthreads = 1L, |
| 95 | +#' booster = "gblinear", |
| 96 | +#' learning_rate = 0.2 |
116 | 97 | #' ) |
117 | | -#' |
118 | | -#' xgb.importance(model = mbst) |
119 | | -#' |
| 98 | +#' xgb.importance(model_linear_multi) |
120 | 99 | #' @export |
121 | 100 | xgb.importance <- function(model = NULL, feature_names = getinfo(model, "feature_name"), trees = NULL) { |
122 | 101 |
|
|
0 commit comments