Skip to content

Commit

Permalink
Added manual logging statements (logging.info calls) for hyperparameter-optimization status
Browse files Browse the repository at this point in the history
  • Loading branch information
raptor419 committed Aug 13, 2024
1 parent 90b87fe commit 5df7cd0
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 0 deletions.
3 changes: 3 additions & 0 deletions streamline/modeling/basemodel.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ def optimize(self, x_train, y_train, n_trails, timeout, feature_names=None):

if not self.is_single:
optuna.logging.set_verbosity(optuna.logging.WARNING)
logging.info("Running Optuna Hyperparameter Optimization")
self.study = optuna.create_study(direction=self.metric_direction, sampler=self.sampler)
if self.model_name in ["Extreme Gradient Boosting", "Light Gradient Boosting"]:
pos_inst = sum(y_train)
Expand Down Expand Up @@ -124,6 +125,8 @@ def optimize(self, x_train, y_train, n_trails, timeout, feature_names=None):
self.params = best_trial.params
self.model = copy.deepcopy(self.model).set_params(**best_trial.params)
else:
logging.info("Only one set of hyperparameters")
logging.info("Skipping Optuna Hyperparameter Optimization")
self.params = copy.deepcopy(self.param_grid)
for key, value in self.param_grid.items():
self.params[key] = value[0]
Expand Down
2 changes: 2 additions & 0 deletions streamline/models/learning_based.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,8 @@ def __init__(self, cv_folds=3, scoring_metric='balanced_accuracy',
if len(self.param_grid['learning_iterations']) == 1 and len(self.param_grid['N']) == 1 and \
len(self.param_grid['nu']) == 1:
self.param_grid['rule_compaction'] = ['QRF', ]
logging.info("Printing Rule Compaction Parameters")
logging.info(str(self.param_grid['rule_compaction']))
self.param_grid['expert_knowledge'] = expert_knowledge
self.param_grid['random_state'] = [random_state, ]
self.small_name = "ExSTraCS"
Expand Down

0 comments on commit 5df7cd0

Please sign in to comment.