
Commit 543b57f

[R] Prefer descriptive parameter names (#11102)
Parent: 874a690


54 files changed: +205 −170 lines (only a subset shown below)

R-package/R/callbacks.R

Lines changed: 20 additions & 12 deletions
@@ -960,17 +960,17 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
 #'   booster = "gblinear",
 #'   objective = "reg:logistic",
 #'   eval_metric = "auc",
-#'   lambda = 0.0003,
-#'   alpha = 0.0003,
+#'   reg_lambda = 0.0003,
+#'   reg_alpha = 0.0003,
 #'   nthread = nthread
 #' )
 #'
-#' # For 'shotgun', which is a default linear updater, using high eta values may result in
+#' # For 'shotgun', which is a default linear updater, using high learning_rate values may result in
 #' # unstable behaviour in some datasets. With this simple dataset, however, the high learning
 #' # rate does not break the convergence, but allows us to illustrate the typical pattern of
 #' # "stochastic explosion" behaviour of this lock-free algorithm at early boosting iterations.
 #' bst <- xgb.train(
-#'   c(param, list(eta = 1.)),
+#'   c(param, list(learning_rate = 1.)),
 #'   dtrain,
 #'   evals = list(tr = dtrain),
 #'   nrounds = 200,
@@ -987,7 +987,7 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
 #'   c(
 #'     param,
 #'     xgb.params(
-#'       eta = 0.8,
+#'       learning_rate = 0.8,
 #'       updater = "coord_descent",
 #'       feature_selector = "thrifty",
 #'       top_k = 1
@@ -1000,12 +1000,20 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
 #' )
 #' matplot(xgb.gblinear.history(bst), type = "l")
 #' # Componentwise boosting is known to have similar effect to Lasso regularization.
-#' # Try experimenting with various values of top_k, eta, nrounds,
+#' # Try experimenting with various values of top_k, learning_rate, nrounds,
 #' # as well as different feature_selectors.
 #'
 #' # For xgb.cv:
 #' bst <- xgb.cv(
-#'   c(param, list(eta = 0.8)),
+#'   c(
+#'     param,
+#'     xgb.params(
+#'       learning_rate = 0.8,
+#'       updater = "coord_descent",
+#'       feature_selector = "thrifty",
+#'       top_k = 1
+#'     )
+#'   ),
 #'   dtrain,
 #'   nfold = 5,
 #'   nrounds = 100,
@@ -1022,15 +1030,15 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
 #'   booster = "gblinear",
 #'   objective = "multi:softprob",
 #'   num_class = 3,
-#'   lambda = 0.0003,
-#'   alpha = 0.0003,
+#'   reg_lambda = 0.0003,
+#'   reg_alpha = 0.0003,
 #'   nthread = nthread
 #' )
 #'
 #' # For the default linear updater 'shotgun' it sometimes is helpful
-#' # to use smaller eta to reduce instability
+#' # to use smaller learning_rate to reduce instability
 #' bst <- xgb.train(
-#'   c(param, list(eta = 0.5)),
+#'   c(param, list(learning_rate = 0.5)),
 #'   dtrain,
 #'   evals = list(tr = dtrain),
 #'   nrounds = 50,
@@ -1044,7 +1052,7 @@ xgb.cb.cv.predict <- function(save_models = FALSE, outputmargin = FALSE) {
 #'
 #' # CV:
 #' bst <- xgb.cv(
-#'   c(param, list(eta = 0.5)),
+#'   c(param, list(learning_rate = 0.5)),
 #'   dtrain,
 #'   nfold = 5,
 #'   nrounds = 70,
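
Taken together, the renames in this file make the gblinear examples read directly in terms of regularization strength and step size. A minimal end-to-end sketch under the new names (assuming the rewritten R interface with xgb.params(); the coefficient-history callback is assumed here to be constructed as xgb.cb.gblinear.history(), since the constructor call itself is not shown in these hunks):

library(xgboost)
data(agaricus.train, package = "xgboost")
dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
param <- xgb.params(
  booster = "gblinear",
  objective = "binary:logistic",
  reg_lambda = 0.0003,  # descriptive alias for 'lambda' (L2 penalty)
  reg_alpha = 0.0003,   # descriptive alias for 'alpha' (L1 penalty)
  learning_rate = 0.8,  # descriptive alias for 'eta'
  nthread = 2
)
# Record per-iteration coefficients, then plot their trajectories
bst <- xgb.train(
  param,
  dtrain,
  evals = list(tr = dtrain),
  nrounds = 50,
  callbacks = list(xgb.cb.gblinear.history())  # assumed constructor name
)
matplot(xgb.gblinear.history(bst), type = "l")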

R-package/R/utils.R

Lines changed: 0 additions & 1 deletion
@@ -493,7 +493,6 @@ NULL
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = 2,
 #'     objective = "binary:logistic"
 #'   )
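
Since this hunk deletes eta = 1 without adding a replacement, the example now runs with the library's default step size (eta/learning_rate defaults to 0.3 in core XGBoost). A minimal sketch of the resulting call, with dtrain assumed to be an xgb.DMatrix as in the surrounding examples:

bst <- xgb.train(
  data = dtrain,
  nrounds = 2,
  params = xgb.params(
    max_depth = 2,
    nthread = 2,
    objective = "binary:logistic"
    # learning_rate is intentionally omitted; the default (0.3) applies
  )
)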

R-package/R/xgb.Booster.R

Lines changed: 2 additions & 8 deletions
@@ -267,7 +267,6 @@ xgb.get.handle <- function(object) {
 #'   nrounds = 5,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 0.5,
 #'     nthread = nthread,
 #'     objective = "binary:logistic"
 #'   )
@@ -312,7 +311,6 @@ xgb.get.handle <- function(object) {
 #'   nrounds = 10,
 #'   params = xgb.params(
 #'     max_depth = 4,
-#'     eta = 0.5,
 #'     nthread = 2,
 #'     subsample = 0.5,
 #'     objective = "multi:softprob",
@@ -336,7 +334,6 @@ xgb.get.handle <- function(object) {
 #'   nrounds = 10,
 #'   params = xgb.params(
 #'     max_depth = 4,
-#'     eta = 0.5,
 #'     nthread = 2,
 #'     subsample = 0.5,
 #'     objective = "multi:softmax",
@@ -671,7 +668,6 @@ validate.features <- function(bst, newdata) {
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = 2,
 #'     objective = "binary:logistic"
 #'   )
@@ -779,7 +775,6 @@ xgb.attributes <- function(object) {
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = nthread,
 #'     objective = "binary:logistic"
 #'   )
@@ -834,13 +829,13 @@ xgb.config <- function(object) {
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
+#'     learning_rate = 1,
 #'     nthread = 2,
 #'     objective = "binary:logistic"
 #'   )
 #' )
 #'
-#' xgb.model.parameters(bst) <- list(eta = 0.1)
+#' xgb.model.parameters(bst) <- list(learning_rate = 0.1)
 #'
 #' @rdname xgb.model.parameters
 #' @export
@@ -1285,7 +1280,6 @@ xgb.is.same.Booster <- function(obj1, obj2) {
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = 2,
 #'     objective = "binary:logistic"
 #'   )
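
The xgb.config hunk above also switches the in-place parameter setter to the new alias. A minimal sketch of that usage, mirroring the updated example (dtrain assumed as elsewhere):

bst <- xgb.train(
  data = dtrain,
  nrounds = 2,
  params = xgb.params(
    max_depth = 2,
    learning_rate = 1,
    nthread = 2,
    objective = "binary:logistic"
  )
)
# Update the step size on the already-trained booster in place
xgb.model.parameters(bst) <- list(learning_rate = 0.1)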

R-package/R/xgb.create.features.R

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@
 #' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
 #' dtest <- with(agaricus.test, xgb.DMatrix(data, label = label, nthread = 2))
 #'
-#' param <- list(max_depth = 2, eta = 1, objective = 'binary:logistic', nthread = 1)
+#' param <- list(max_depth = 2, learning_rate = 1, objective = 'binary:logistic', nthread = 1)
 #' nrounds = 4
 #'
 #' bst <- xgb.train(params = param, data = dtrain, nrounds = nrounds)

R-package/R/xgb.cv.R

Lines changed: 0 additions & 2 deletions
@@ -96,7 +96,6 @@
 #'   params = xgb.params(
 #'     nthread = 2,
 #'     max_depth = 3,
-#'     eta = 1,
 #'     objective = "binary:logistic"
 #'   ),
 #'   nfold = 5,
@@ -316,7 +315,6 @@ xgb.cv <- function(params = xgb.params(), data, nrounds, nfold,
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = 2,
 #'     objective = "binary:logistic"
 #'   )
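
Both xgb.cv hunks drop the explicit eta as well, so cross-validation likewise runs at the default learning rate. A minimal sketch of the updated call (agaricus dtrain assumed as in the package docs; nrounds = 2 is just an illustrative value):

cv <- xgb.cv(
  data = dtrain,
  nrounds = 2,
  nfold = 5,
  params = xgb.params(
    nthread = 2,
    max_depth = 3,
    objective = "binary:logistic"
  )
)
print(cv)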

R-package/R/xgb.dump.R

Lines changed: 0 additions & 1 deletion
@@ -34,7 +34,6 @@
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = 2,
 #'     objective = "binary:logistic"
 #'   )

R-package/R/xgb.importance.R

Lines changed: 2 additions & 4 deletions
@@ -47,7 +47,6 @@
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = 2,
 #'     objective = "binary:logistic"
 #'   )
@@ -61,7 +60,7 @@
 #'   nrounds = 20,
 #'   params = xgb.params(
 #'     booster = "gblinear",
-#'     eta = 0.3,
+#'     learning_rate = 0.3,
 #'     nthread = 1,
 #'     objective = "binary:logistic"
 #'   )
@@ -80,7 +79,6 @@
 #'   nrounds = nrounds,
 #'   params = xgb.params(
 #'     max_depth = 3,
-#'     eta = 0.2,
 #'     nthread = 2,
 #'     objective = "multi:softprob",
 #'     num_class = nclass
@@ -110,7 +108,7 @@
 #'   nrounds = 15,
 #'   params = xgb.params(
 #'     booster = "gblinear",
-#'     eta = 0.2,
+#'     learning_rate = 0.2,
 #'     nthread = 1,
 #'     objective = "multi:softprob",
 #'     num_class = nclass
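
For the gblinear examples the rename matters in the same way: with a linear booster, xgb.importance() reports importances derived from the learned coefficients, which the step size directly shapes. A minimal sketch mirroring the updated binary example (agaricus dtrain assumed):

bst <- xgb.train(
  data = dtrain,
  nrounds = 20,
  params = xgb.params(
    booster = "gblinear",
    learning_rate = 0.3,  # formerly 'eta'
    nthread = 1,
    objective = "binary:logistic"
  )
)
xgb.importance(model = bst)  # one weight per feature for linear boosters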

R-package/R/xgb.load.R

Lines changed: 0 additions & 1 deletion
@@ -35,7 +35,6 @@
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = nthread,
 #'     objective = "binary:logistic"
 #'   )

R-package/R/xgb.model.dt.tree.R

Lines changed: 0 additions & 1 deletion
@@ -47,7 +47,6 @@
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 2,
-#'     eta = 1,
 #'     nthread = nthread,
 #'     objective = "binary:logistic"
 #'   )

R-package/R/xgb.plot.importance.R

Lines changed: 0 additions & 1 deletion
@@ -55,7 +55,6 @@
 #'   nrounds = 2,
 #'   params = xgb.params(
 #'     max_depth = 3,
-#'     eta = 1,
 #'     nthread = nthread,
 #'     objective = "binary:logistic"
 #'   )
