[R-package] remove support for '...' in lgb.cv() #4860

Merged · 2 commits · Dec 6, 2021

R-package/R/lgb.cv.R (0 additions, 24 deletions)

```diff
@@ -43,18 +43,6 @@ CVBooster <- R6::R6Class(
 #' @param callbacks List of callback functions that are applied at each iteration.
 #' @param reset_data Boolean, setting it to TRUE (not the default value) will transform the booster model
 #'                   into a predictor model which frees up memory and the original datasets
-#' @param ... other parameters, see Parameters.rst for more information. A few key parameters:
-#'            \itemize{
-#'                \item{\code{boosting}: Boosting type. \code{"gbdt"}, \code{"rf"}, \code{"dart"} or \code{"goss"}.}
-#'                \item{\code{num_leaves}: Maximum number of leaves in one tree.}
-#'                \item{\code{max_depth}: Limit the max depth for tree model. This is used to deal with
-#'                                        overfit when #data is small. Tree still grow by leaf-wise.}
-#'                \item{\code{num_threads}: Number of threads for LightGBM. For the best speed, set this to
-#'                                          the number of real CPU cores(\code{parallel::detectCores(logical = FALSE)}),
-#'                                          not the number of threads (most CPU using hyper-threading to generate 2 threads
-#'                                          per CPU core).}
-#'            }
-#'            NOTE: As of v3.3.0, use of \code{...} is deprecated. Add parameters to \code{params} directly.
 #' @inheritSection lgb_shared_params Early Stopping
 #' @return a trained model \code{lgb.CVBooster}.
 #'
@@ -99,7 +87,6 @@ lgb.cv <- function(params = list()
                    , callbacks = list()
                    , reset_data = FALSE
                    , serializable = TRUE
-                   , ...
                    ) {
 
   if (nrounds <= 0L) {
@@ -115,23 +102,12 @@ lgb.cv <- function(params = list()
   }
 
   # Setup temporary variables
-  additional_params <- list(...)
-  params <- append(params, additional_params)
   params$verbose <- verbose
   params <- lgb.check.obj(params = params, obj = obj)
   params <- lgb.check.eval(params = params, eval = eval)
   fobj <- NULL
   eval_functions <- list(NULL)
 
-  if (length(additional_params) > 0L) {
-    warning(paste0(
-      "lgb.cv: Found the following passed through '...': "
-      , paste(names(additional_params), collapse = ", ")
-      , ". These will be used, but in future releases of lightgbm, this warning will become an error. "
-      , "Add these to 'params' instead. See ?lgb.cv for documentation on how to call this function."
-    ))
-  }
-
   # set some parameters, resolving the way they were passed in with other parameters
   # in `params`.
   # this ensures that the model stored with Booster$save() correctly represents
```

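The removed code shows the old compatibility path: anything captured by `...` was appended to `params`, with a warning that the pass-through would eventually become an error. With this PR the warning path is gone and every model parameter must be supplied through `params`. A minimal sketch of the new calling pattern, using the agaricus demo data bundled with the package (the specific parameter values here are illustrative only):

```r
library(lightgbm)

data(agaricus.train, package = "lightgbm")
dtrain <- lgb.Dataset(agaricus.train$data, label = agaricus.train$label)

# Old style (deprecated in v3.3.0, removed by this PR):
#   lgb.cv(params = list(objective = "binary"), data = dtrain,
#          nrounds = 5L, nfold = 3L, min_data = 1L, learning_rate = 1.0)

# New style: all tuning parameters live in 'params'.
cv_bst <- lgb.cv(
  params = list(
    objective = "binary"
    , metric = "binary_error"
    , min_data = 1L
    , learning_rate = 1.0
  )
  , data = dtrain
  , nrounds = 5L
  , nfold = 3L
)
print(cv_bst$best_iter)
```
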
R-package/man/lgb.cv.Rd (1 addition, 15 deletions)

Generated file; the diff is not rendered by default.

R-package/tests/testthat/test_basic.R (22 additions, 11 deletions)

```diff
@@ -347,30 +347,36 @@ context("lgb.cv()")
 
 test_that("cv works", {
   dtrain <- lgb.Dataset(train$data, label = train$label)
-  params <- list(objective = "regression", metric = "l2,l1")
+  params <- list(
+    objective = "regression"
+    , metric = "l2,l1"
+    , min_data = 1L
+    , learning_rate = 1.0
+  )
   bst <- lgb.cv(
     params
     , dtrain
     , 10L
     , nfold = 5L
-    , min_data = 1L
-    , learning_rate = 1.0
     , early_stopping_rounds = 10L
   )
   expect_false(is.null(bst$record_evals))
 })
 
 test_that("lgb.cv() rejects negative or 0 value passed to nrounds", {
   dtrain <- lgb.Dataset(train$data, label = train$label)
-  params <- list(objective = "regression", metric = "l2,l1")
+  params <- list(
+    objective = "regression"
+    , metric = "l2,l1"
+    , min_data = 1L
+  )
   for (nround_value in c(-10L, 0L)) {
     expect_error({
       bst <- lgb.cv(
         params
         , dtrain
         , nround_value
         , nfold = 5L
-        , min_data = 1L
       )
     }, "nrounds should be greater than zero")
   }
@@ -388,11 +394,14 @@ test_that("lgb.cv() throws an informative error is 'data' is not an lgb.Dataset
   for (val in bad_values) {
     expect_error({
       bst <- lgb.cv(
-        params = list(objective = "regression", metric = "l2,l1")
+        params = list(
+          objective = "regression"
+          , metric = "l2,l1"
+          , min_data = 1L
+        )
         , data = val
         , 10L
         , nfold = 5L
-        , min_data = 1L
       )
     }, regexp = "'label' must be provided for lgb.cv if 'data' is not an 'lgb.Dataset'", fixed = TRUE)
   }
@@ -409,11 +418,11 @@ test_that("lightgbm.cv() gives the correct best_score and best_iter for a metric
     data = dtrain
     , nfold = 5L
     , nrounds = nrounds
-    , num_leaves = 5L
     , params = list(
       objective = "binary"
       , metric = "auc,binary_error"
       , learning_rate = 1.5
+      , num_leaves = 5L
     )
   )
   expect_is(cv_bst, "lgb.CVBooster")
@@ -470,15 +479,18 @@ test_that("lgb.cv() fit on linearly-relatead data improves when using linear learners
 
 test_that("lgb.cv() respects showsd argument", {
   dtrain <- lgb.Dataset(train$data, label = train$label)
-  params <- list(objective = "regression", metric = "l2")
+  params <- list(
+    objective = "regression"
+    , metric = "l2"
+    , min_data = 1L
+  )
   nrounds <- 5L
   set.seed(708L)
   bst_showsd <- lgb.cv(
     params = params
     , data = dtrain
     , nrounds = nrounds
     , nfold = 3L
-    , min_data = 1L
     , showsd = TRUE
   )
   evals_showsd <- bst_showsd$record_evals[["valid"]][["l2"]]
@@ -488,7 +500,6 @@ test_that("lgb.cv() respects showsd argument", {
     , data = dtrain
     , nrounds = nrounds
     , nfold = 3L
-    , min_data = 1L
     , showsd = FALSE
   )
   evals_no_showsd <- bst_no_showsd$record_evals[["valid"]][["l2"]]
```

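Because `...` is removed from the signature entirely, the failure mode changes character: R itself now rejects stray top-level arguments before any lightgbm code runs. A sketch of the post-change behavior (this is plain R call semantics, not a lightgbm API; it assumes a lightgbm build that includes this PR):

```r
library(lightgbm)

data(agaricus.train, package = "lightgbm")
dtrain <- lgb.Dataset(agaricus.train$data, label = agaricus.train$label)

# 'min_data' is no longer absorbed by '...', so R raises an
# "unused argument" error instead of lgb.cv()'s old warning.
msg <- tryCatch(
  lgb.cv(
    params = list(objective = "binary")
    , data = dtrain
    , nrounds = 5L
    , nfold = 3L
    , min_data = 1L  # no longer a formal argument of lgb.cv()
  )
  , error = function(e) conditionMessage(e)
)
print(msg)  # expected: "unused argument (min_data = 1L)"
```
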
R-package/tests/testthat/test_learning_to_rank.R (2 additions, 2 deletions)

```diff
@@ -78,6 +78,8 @@ test_that("learning-to-rank with lgb.cv() works as expected", {
     , ndcg_at = ndcg_at
     , lambdarank_truncation_level = 3L
     , label_gain = "0,1,3"
+    , min_data = 1L
+    , learning_rate = 0.01
   )
   nfold <- 4L
   nrounds <- 10L
@@ -86,8 +88,6 @@ test_that("learning-to-rank with lgb.cv() works as expected", {
     , data = dtrain
     , nrounds = nrounds
     , nfold = nfold
-    , min_data = 1L
-    , learning_rate = 0.01
   )
   expect_is(cv_bst, "lgb.CVBooster")
   expect_equal(length(cv_bst$boosters), nfold)
```

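The same consolidation applies to ranking workloads: `min_data` and `learning_rate` move from top-level arguments into `params`, next to the lambdarank-specific settings. A hedged sketch of the resulting call shape on synthetic data (the real test uses its own fixtures; this assumes `lgb.Dataset()` accepts `label` and `group` as shown):

```r
library(lightgbm)

set.seed(708L)
X <- matrix(rnorm(400L), ncol = 4L)              # 100 rows, 4 features
y <- sample(0L:2L, size = 100L, replace = TRUE)  # graded relevance labels
dtrain <- lgb.Dataset(X, label = y, group = rep(10L, 10L))  # 10 queries of 10 docs

cv_bst <- lgb.cv(
  params = list(
    objective = "lambdarank"
    , metric = "ndcg"
    , ndcg_at = c(1L, 2L, 3L)
    , label_gain = "0,1,3"
    , min_data = 1L
    , learning_rate = 0.01
  )
  , data = dtrain
  , nrounds = 10L
  , nfold = 4L
)
print(length(cv_bst$boosters))  # one booster per fold
```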