From 64f15005040aafa528bbec7f873364d94b7e36a4 Mon Sep 17 00:00:00 2001
From: Nikita Titov
Date: Sat, 4 Sep 2021 20:03:39 +0300
Subject: [PATCH] [python] deprecate `silent` and standalone `verbose` args.
 Prefer global `verbose` param (#4577)

* deprecate `silent` and standalone `verbose` args. Prefer global `verbose` param

* simplify code

* Rephrase warning messages
---
 python-package/lightgbm/basic.py   | 24 +++++++++++++++++++-----
 python-package/lightgbm/dask.py    |  6 +++---
 python-package/lightgbm/engine.py  |  2 +-
 python-package/lightgbm/sklearn.py | 10 ++++++++--
 4 files changed, 31 insertions(+), 11 deletions(-)

diff --git a/python-package/lightgbm/basic.py b/python-package/lightgbm/basic.py
index 2bb4c367cc1f..3968c5076cff 100644
--- a/python-package/lightgbm/basic.py
+++ b/python-package/lightgbm/basic.py
@@ -1123,7 +1123,7 @@ class Dataset:
     """Dataset in LightGBM."""
 
     def __init__(self, data, label=None, reference=None,
-                 weight=None, group=None, init_score=None, silent=False,
+                 weight=None, group=None, init_score=None, silent='warn',
                  feature_name='auto', categorical_feature='auto', params=None,
                  free_raw_data=True):
         """Initialize Dataset.
@@ -1439,6 +1439,11 @@ def _lazy_init(self, data, label=None, reference=None,
                 _log_warning(f'{key} keyword has been found in `params` and will be ignored.\n'
                              f'Please use {key} argument of the Dataset constructor to pass this parameter.')
         # user can set verbose with params, it has higher priority
+        if silent != "warn":
+            _log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
+                         "Pass 'verbose' parameter via 'params' instead.")
+        else:
+            silent = False
         if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
             params["verbose"] = -1
         # get categorical features
@@ -1769,7 +1774,7 @@ def construct(self):
         return self
 
     def create_valid(self, data, label=None, weight=None, group=None,
-                     init_score=None, silent=False, params=None):
+                     init_score=None, silent='warn', params=None):
         """Create validation data align with current Dataset.
 
         Parameters
@@ -2462,7 +2467,7 @@ def _dump_text(self, filename):
 class Booster:
     """Booster in LightGBM."""
 
-    def __init__(self, params=None, train_set=None, model_file=None, model_str=None, silent=False):
+    def __init__(self, params=None, train_set=None, model_file=None, model_str=None, silent='warn'):
         """Initialize the Booster.
 
         Parameters
@@ -2488,6 +2493,11 @@ def __init__(self, params=None, train_set=None, model_file=None, model_str=None,
         self.best_score = {}
         params = {} if params is None else deepcopy(params)
         # user can set verbose with params, it has higher priority
+        if silent != 'warn':
+            _log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
+                         "Pass 'verbose' parameter via 'params' instead.")
+        else:
+            silent = False
         if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
             params["verbose"] = -1
         if train_set is not None:
@@ -2574,7 +2584,7 @@ def __init__(self, params=None, train_set=None, model_file=None, model_str=None,
             self.__num_class = out_num_class.value
             self.pandas_categorical = _load_pandas_categorical(file_name=model_file)
         elif model_str is not None:
-            self.model_from_string(model_str, not silent)
+            self.model_from_string(model_str, verbose="_silent_false")
         else:
             raise TypeError('Need at least one training dataset or model file or model string '
                             'to create Booster instance')
@@ -3255,7 +3265,7 @@ def shuffle_models(self, start_iteration=0, end_iteration=-1):
                 ctypes.c_int(end_iteration)))
         return self
 
-    def model_from_string(self, model_str, verbose=True):
+    def model_from_string(self, model_str, verbose='warn'):
         """Load Booster from a string.
 
         Parameters
@@ -3283,6 +3293,10 @@
         _safe_call(_LIB.LGBM_BoosterGetNumClasses(
             self.handle,
             ctypes.byref(out_num_class)))
+        if verbose in {'warn', '_silent_false'}:
+            verbose = verbose == 'warn'
+        else:
+            _log_warning("'verbose' argument is deprecated and will be removed in a future release of LightGBM.")
         if verbose:
             _log_info(f'Finished loading model, total used {int(out_num_iterations.value)} iterations')
         self.__num_class = out_num_class.value
diff --git a/python-package/lightgbm/dask.py b/python-package/lightgbm/dask.py
index 4bcb24a21399..2be4b7202a35 100644
--- a/python-package/lightgbm/dask.py
+++ b/python-package/lightgbm/dask.py
@@ -1108,7 +1108,7 @@ def __init__(
         reg_lambda: float = 0.,
         random_state: Optional[Union[int, np.random.RandomState]] = None,
         n_jobs: int = -1,
-        silent: bool = True,
+        silent: bool = "warn",
         importance_type: str = 'split',
         client: Optional[Client] = None,
         **kwargs: Any
@@ -1288,7 +1288,7 @@ def __init__(
         reg_lambda: float = 0.,
         random_state: Optional[Union[int, np.random.RandomState]] = None,
         n_jobs: int = -1,
-        silent: bool = True,
+        silent: bool = "warn",
         importance_type: str = 'split',
         client: Optional[Client] = None,
         **kwargs: Any
@@ -1448,7 +1448,7 @@ def __init__(
         reg_lambda: float = 0.,
         random_state: Optional[Union[int, np.random.RandomState]] = None,
         n_jobs: int = -1,
-        silent: bool = True,
+        silent: bool = "warn",
         importance_type: str = 'split',
         client: Optional[Client] = None,
         **kwargs: Any
diff --git a/python-package/lightgbm/engine.py b/python-package/lightgbm/engine.py
index 934e14eddde2..ac3108b4087e 100644
--- a/python-package/lightgbm/engine.py
+++ b/python-package/lightgbm/engine.py
@@ -299,7 +299,7 @@ def train(
     for dataset_name, eval_name, score, _ in evaluation_result_list:
         booster.best_score[dataset_name][eval_name] = score
     if not keep_training_booster:
-        booster.model_from_string(booster.model_to_string(), False).free_dataset()
+        booster.model_from_string(booster.model_to_string(), verbose='_silent_false').free_dataset()
     return booster
 
 
diff --git a/python-package/lightgbm/sklearn.py b/python-package/lightgbm/sklearn.py
index 0e195af6b1e8..c59582464f88 100644
--- a/python-package/lightgbm/sklearn.py
+++ b/python-package/lightgbm/sklearn.py
@@ -369,7 +369,7 @@ def __init__(
         reg_lambda: float = 0.,
         random_state: Optional[Union[int, np.random.RandomState]] = None,
         n_jobs: int = -1,
-        silent: bool = True,
+        silent: Union[bool, str] = 'warn',
         importance_type: str = 'split',
         **kwargs
     ):
@@ -590,7 +590,13 @@ def fit(self, X, y,
         evals_result = {}
         params = self.get_params()
         # user can set verbose with kwargs, it has higher priority
-        if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and self.silent:
+        if self.silent != "warn":
+            _log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
+                         "Pass 'verbose' parameter via keyword arguments instead.")
+            silent = self.silent
+        else:
+            silent = True
+        if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
             params['verbose'] = -1
         params.pop('silent', None)
         params.pop('importance_type', None)
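
Usage sketch (illustrative user code, not part of the patch): after this change, verbosity is routed through the global `verbose` parameter instead of the deprecated `silent` and standalone `verbose` arguments. The snippet below is a minimal migration example assuming a LightGBM build that includes this patch; the data and parameter values are invented for demonstration.

    import numpy as np
    import lightgbm as lgb

    X, y = np.random.rand(200, 5), np.random.rand(200)

    # Before (now triggers "'silent' argument is deprecated and will be
    # removed in a future release of LightGBM."):
    #     train_set = lgb.Dataset(X, y, silent=True)
    #     model = lgb.LGBMRegressor(silent=True)

    # After: pass 'verbose' via 'params' for Dataset/Booster/train ...
    train_set = lgb.Dataset(X, y, params={"verbose": -1})
    booster = lgb.train({"objective": "regression", "verbose": -1}, train_set)

    # ... and as a keyword argument for the scikit-learn wrappers, where
    # extra **kwargs are forwarded into the underlying LightGBM params.
    model = lgb.LGBMRegressor(verbose=-1).fit(X, y)

Note that `verbose=-1` suppresses everything except fatal errors, and that any of the `verbosity` aliases accepted in `params` takes priority over the deprecated arguments, which is why the warning only fires when a non-default `silent` value is passed.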