Skip to content

Commit

Permalink
[python] deprecate `silent` and standalone `verbose` args. Prefer global `verbose` param (#4577)
Browse files Browse the repository at this point in the history

* deprecate `silent` and standalone `verbose` args. Prefer global `verbose` param

* simplify code

* Rephrase warning messages
  • Loading branch information
StrikerRUS authored Sep 4, 2021
1 parent b7120d2 commit 64f1500
Show file tree
Hide file tree
Showing 4 changed files with 31 additions and 11 deletions.
24 changes: 19 additions & 5 deletions python-package/lightgbm/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -1123,7 +1123,7 @@ class Dataset:
"""Dataset in LightGBM."""

def __init__(self, data, label=None, reference=None,
weight=None, group=None, init_score=None, silent=False,
weight=None, group=None, init_score=None, silent='warn',
feature_name='auto', categorical_feature='auto', params=None,
free_raw_data=True):
"""Initialize Dataset.
Expand Down Expand Up @@ -1439,6 +1439,11 @@ def _lazy_init(self, data, label=None, reference=None,
_log_warning(f'{key} keyword has been found in `params` and will be ignored.\n'
f'Please use {key} argument of the Dataset constructor to pass this parameter.')
# user can set verbose with params, it has higher priority
if silent != "warn":
_log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
"Pass 'verbose' parameter via 'params' instead.")
else:
silent = False
if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
params["verbose"] = -1
# get categorical features
Expand Down Expand Up @@ -1769,7 +1774,7 @@ def construct(self):
return self

def create_valid(self, data, label=None, weight=None, group=None,
init_score=None, silent=False, params=None):
init_score=None, silent='warn', params=None):
"""Create validation data align with current Dataset.
Parameters
Expand Down Expand Up @@ -2462,7 +2467,7 @@ def _dump_text(self, filename):
class Booster:
"""Booster in LightGBM."""

def __init__(self, params=None, train_set=None, model_file=None, model_str=None, silent=False):
def __init__(self, params=None, train_set=None, model_file=None, model_str=None, silent='warn'):
"""Initialize the Booster.
Parameters
Expand All @@ -2488,6 +2493,11 @@ def __init__(self, params=None, train_set=None, model_file=None, model_str=None,
self.best_score = {}
params = {} if params is None else deepcopy(params)
# user can set verbose with params, it has higher priority
if silent != 'warn':
_log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
"Pass 'verbose' parameter via 'params' instead.")
else:
silent = False
if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
params["verbose"] = -1
if train_set is not None:
Expand Down Expand Up @@ -2574,7 +2584,7 @@ def __init__(self, params=None, train_set=None, model_file=None, model_str=None,
self.__num_class = out_num_class.value
self.pandas_categorical = _load_pandas_categorical(file_name=model_file)
elif model_str is not None:
self.model_from_string(model_str, not silent)
self.model_from_string(model_str, verbose="_silent_false")
else:
raise TypeError('Need at least one training dataset or model file or model string '
'to create Booster instance')
Expand Down Expand Up @@ -3255,7 +3265,7 @@ def shuffle_models(self, start_iteration=0, end_iteration=-1):
ctypes.c_int(end_iteration)))
return self

def model_from_string(self, model_str, verbose=True):
def model_from_string(self, model_str, verbose='warn'):
"""Load Booster from a string.
Parameters
Expand Down Expand Up @@ -3283,6 +3293,10 @@ def model_from_string(self, model_str, verbose=True):
_safe_call(_LIB.LGBM_BoosterGetNumClasses(
self.handle,
ctypes.byref(out_num_class)))
if verbose in {'warn', '_silent_false'}:
verbose = verbose == 'warn'
else:
_log_warning("'verbose' argument is deprecated and will be removed in a future release of LightGBM.")
if verbose:
_log_info(f'Finished loading model, total used {int(out_num_iterations.value)} iterations')
self.__num_class = out_num_class.value
Expand Down
6 changes: 3 additions & 3 deletions python-package/lightgbm/dask.py
Original file line number Diff line number Diff line change
Expand Up @@ -1108,7 +1108,7 @@ def __init__(
reg_lambda: float = 0.,
random_state: Optional[Union[int, np.random.RandomState]] = None,
n_jobs: int = -1,
silent: bool = True,
silent: bool = "warn",
importance_type: str = 'split',
client: Optional[Client] = None,
**kwargs: Any
Expand Down Expand Up @@ -1288,7 +1288,7 @@ def __init__(
reg_lambda: float = 0.,
random_state: Optional[Union[int, np.random.RandomState]] = None,
n_jobs: int = -1,
silent: bool = True,
silent: bool = "warn",
importance_type: str = 'split',
client: Optional[Client] = None,
**kwargs: Any
Expand Down Expand Up @@ -1448,7 +1448,7 @@ def __init__(
reg_lambda: float = 0.,
random_state: Optional[Union[int, np.random.RandomState]] = None,
n_jobs: int = -1,
silent: bool = True,
silent: bool = "warn",
importance_type: str = 'split',
client: Optional[Client] = None,
**kwargs: Any
Expand Down
2 changes: 1 addition & 1 deletion python-package/lightgbm/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,7 @@ def train(
for dataset_name, eval_name, score, _ in evaluation_result_list:
booster.best_score[dataset_name][eval_name] = score
if not keep_training_booster:
booster.model_from_string(booster.model_to_string(), False).free_dataset()
booster.model_from_string(booster.model_to_string(), verbose='_silent_false').free_dataset()
return booster


Expand Down
10 changes: 8 additions & 2 deletions python-package/lightgbm/sklearn.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,7 @@ def __init__(
reg_lambda: float = 0.,
random_state: Optional[Union[int, np.random.RandomState]] = None,
n_jobs: int = -1,
silent: bool = True,
silent: Union[bool, str] = 'warn',
importance_type: str = 'split',
**kwargs
):
Expand Down Expand Up @@ -590,7 +590,13 @@ def fit(self, X, y,
evals_result = {}
params = self.get_params()
# user can set verbose with kwargs, it has higher priority
if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and self.silent:
if self.silent != "warn":
_log_warning("'silent' argument is deprecated and will be removed in a future release of LightGBM. "
"Pass 'verbose' parameter via keyword arguments instead.")
silent = self.silent
else:
silent = True
if not any(verbose_alias in params for verbose_alias in _ConfigAliases.get("verbosity")) and silent:
params['verbose'] = -1
params.pop('silent', None)
params.pop('importance_type', None)
Expand Down

0 comments on commit 64f1500

Please sign in to comment.