Skip to content

Commit

Permalink
restore n_jobs=-2 and outer_bags=14 parameters
Browse files · Browse the repository at this point in the history
  • Loading branch information…
paulbkoch committed Jan 11, 2025
1 parent 4fe5db6 commit fbe7cf8
Showing 1 changed file with 12 additions and 12 deletions.
24 changes: 12 additions & 12 deletions python/interpret-core/interpret/glassbox/_ebm/_ebm.py
Original file line number Diff line number Diff line change
Expand Up @@ -2739,7 +2739,7 @@ class ExplainableBoostingClassifier(ClassifierMixin, EBMModel):
- Integer (1 <= validation_size): Count of samples to put in the validation sets
- Percentage (validation_size < 1.0): Percentage of the data to put in the validation sets
- 0: Turns off early stopping. Outer bags have no utility. Error bounds will be eliminated
outer_bags : int, default=16
outer_bags : int, default=14
Number of outer bags. Outer bags are used to generate error bounds and help with smoothing the graphs.
inner_bags : int, default=0
Number of inner bags. 0 turns off inner bagging.
Expand Down Expand Up @@ -2838,7 +2838,7 @@ class ExplainableBoostingClassifier(ClassifierMixin, EBMModel):
- -1: The partial response of the corresponding feature should be monotonically decreasing with respect to the target.
objective : str, default="log_loss"
The objective to optimize.
n_jobs : int, default=-1
n_jobs : int, default=-2
Number of jobs to run in parallel. Negative integers are interpreted as following joblib's formula
(n_cpus + 1 + n_jobs), just like scikit-learn. Eg: -2 means using all threads except 1.
random_state : int or None, default=42
Expand Down Expand Up @@ -2957,7 +2957,7 @@ def __init__(
exclude: Optional[Sequence[Union[int, str, Sequence[Union[int, str]]]]] = None,
# Ensemble
validation_size: Optional[Union[int, float]] = 0.15,
outer_bags: int = 16,
outer_bags: int = 14,
inner_bags: Optional[int] = 0,
# Boosting
learning_rate: float = 0.015,
Expand All @@ -2982,7 +2982,7 @@ def __init__(
monotone_constraints: Optional[Sequence[int]] = None,
objective: str = "log_loss",
# Overall
n_jobs: Optional[int] = -1,
n_jobs: Optional[int] = -2,
random_state: Optional[int] = 42,
):
super().__init__(
Expand Down Expand Up @@ -3126,7 +3126,7 @@ class ExplainableBoostingRegressor(RegressorMixin, EBMModel):
- Integer (1 <= validation_size): Count of samples to put in the validation sets
- Percentage (validation_size < 1.0): Percentage of the data to put in the validation sets
- 0: Turns off early stopping. Outer bags have no utility. Error bounds will be eliminated
outer_bags : int, default=16
outer_bags : int, default=14
Number of outer bags. Outer bags are used to generate error bounds and help with smoothing the graphs.
inner_bags : int, default=0
Number of inner bags. 0 turns off inner bagging.
Expand Down Expand Up @@ -3227,7 +3227,7 @@ class ExplainableBoostingRegressor(RegressorMixin, EBMModel):
The objective to optimize. Options include: "rmse",
"poisson_deviance", "tweedie_deviance:variance_power=1.5", "gamma_deviance",
"pseudo_huber:delta=1.0", "rmse_log" (rmse with a log link function)
n_jobs : int, default=-1
n_jobs : int, default=-2
Number of jobs to run in parallel. Negative integers are interpreted as following joblib's formula
(n_cpus + 1 + n_jobs), just like scikit-learn. Eg: -2 means using all threads except 1.
random_state : int or None, default=42
Expand Down Expand Up @@ -3344,7 +3344,7 @@ def __init__(
exclude: Optional[Sequence[Union[int, str, Sequence[Union[int, str]]]]] = None,
# Ensemble
validation_size: Optional[Union[int, float]] = 0.15,
outer_bags: int = 16,
outer_bags: int = 14,
inner_bags: Optional[int] = 0,
# Boosting
learning_rate: float = 0.04,
Expand All @@ -3369,7 +3369,7 @@ def __init__(
monotone_constraints: Optional[Sequence[int]] = None,
objective: str = "rmse",
# Overall
n_jobs: Optional[int] = -1,
n_jobs: Optional[int] = -2,
random_state: Optional[int] = 42,
):
super().__init__(
Expand Down Expand Up @@ -3474,7 +3474,7 @@ class DPExplainableBoostingClassifier(ClassifierMixin, EBMModel):
Total number of boosting rounds with n_terms boosting steps per round.
max_leaves : int, default=3
Maximum number of leaves allowed in each tree.
n_jobs : int, default=-1
n_jobs : int, default=-2
Number of jobs to run in parallel. Negative integers are interpreted as following joblib's formula
(n_cpus + 1 + n_jobs), just like scikit-learn. Eg: -2 means using all threads except 1.
random_state : int or None, default=None
Expand Down Expand Up @@ -3598,7 +3598,7 @@ def __init__(
# Trees
max_leaves: int = 3,
# Overall
n_jobs: Optional[int] = -1,
n_jobs: Optional[int] = -2,
random_state: Optional[int] = None,
# Differential Privacy
epsilon: float = 1.0,
Expand Down Expand Up @@ -3745,7 +3745,7 @@ class DPExplainableBoostingRegressor(RegressorMixin, EBMModel):
Total number of boosting rounds with n_terms boosting steps per round.
max_leaves : int, default=3
Maximum number of leaves allowed in each tree.
n_jobs : int, default=-1
n_jobs : int, default=-2
Number of jobs to run in parallel. Negative integers are interpreted as following joblib's formula
(n_cpus + 1 + n_jobs), just like scikit-learn. Eg: -2 means using all threads except 1.
random_state : int or None, default=None
Expand Down Expand Up @@ -3878,7 +3878,7 @@ def __init__(
# Trees
max_leaves: int = 3,
# Overall
n_jobs: Optional[int] = -1,
n_jobs: Optional[int] = -2,
random_state: Optional[int] = None,
# Differential Privacy
epsilon: float = 1.0,
Expand Down

0 comments on commit fbe7cf8

Please sign in to comment.