Gradient boosting - Adapt tests to version xgboost=2.0.0
PrimozGodec committed Sep 12, 2023
1 parent 24596fe commit 5ce3c81
Showing 3 changed files with 6 additions and 10 deletions.
12 changes: 4 additions & 8 deletions Orange/widgets/model/tests/test_owgradientboosting.py
@@ -159,8 +159,7 @@ def test_default_parameters_cls(self):
         model = booster(data)
         params = model.skl_model.get_params()
         booster_params = json.loads(model.skl_model.get_booster().save_config())
-        updater = booster_params["learner"]["gradient_booster"]["updater"]
-        tp = updater["grow_colmaker"]["train_param"]
+        tp = booster_params["learner"]["gradient_booster"]["tree_train_param"]
         self.assertEqual(params["n_estimators"], self.editor.n_estimators)
         self.assertEqual(
             round(float(tp["learning_rate"]), 1), self.editor.learning_rate
@@ -179,8 +178,7 @@ def test_default_parameters_reg(self):
         model = booster(data)
         params = model.skl_model.get_params()
         booster_params = json.loads(model.skl_model.get_booster().save_config())
-        updater = booster_params["learner"]["gradient_booster"]["updater"]
-        tp = updater["grow_colmaker"]["train_param"]
+        tp = booster_params["learner"]["gradient_booster"]["tree_train_param"]
         self.assertEqual(params["n_estimators"], self.editor.n_estimators)
         self.assertEqual(
             round(float(tp["learning_rate"]), 1), self.editor.learning_rate
@@ -224,8 +222,7 @@ def test_default_parameters_cls(self):
         model = booster(data)
         params = model.skl_model.get_params()
         booster_params = json.loads(model.skl_model.get_booster().save_config())
-        updater = booster_params["learner"]["gradient_booster"]["updater"]
-        tp = updater["grow_colmaker"]["train_param"]
+        tp = booster_params["learner"]["gradient_booster"]["tree_train_param"]
         self.assertEqual(params["n_estimators"], self.editor.n_estimators)
         self.assertEqual(
             round(float(tp["learning_rate"]), 1), self.editor.learning_rate
@@ -244,8 +241,7 @@ def test_default_parameters_reg(self):
         model = booster(data)
         params = model.skl_model.get_params()
         booster_params = json.loads(model.skl_model.get_booster().save_config())
-        updater = booster_params["learner"]["gradient_booster"]["updater"]
-        tp = updater["grow_colmaker"]["train_param"]
+        tp = booster_params["learner"]["gradient_booster"]["tree_train_param"]
         self.assertEqual(params["n_estimators"], self.editor.n_estimators)
         self.assertEqual(
             round(float(tp["learning_rate"]), 1), self.editor.learning_rate
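All four hunks track the same xgboost 2.0.0 change: in the JSON returned by Booster.save_config(), the tree training parameters moved from learner.gradient_booster.updater.grow_colmaker.train_param up to learner.gradient_booster.tree_train_param. A minimal sketch of reading the learning rate from either layout (the toy data and version check are illustrative assumptions, not part of this commit):

import json

import numpy as np
import xgboost as xgb

# Toy data, only so we have a fitted booster whose config we can inspect.
X = np.random.rand(32, 4)
y = np.random.randint(0, 2, 32)

model = xgb.XGBClassifier(n_estimators=10, learning_rate=0.3)
model.fit(X, y)
config = json.loads(model.get_booster().save_config())

gb = config["learner"]["gradient_booster"]
if int(xgb.__version__.split(".")[0]) >= 2:
    # xgboost >= 2.0.0: train params sit directly on the gradient booster
    tp = gb["tree_train_param"]
else:
    # xgboost < 2.0.0: nested under the grow_colmaker updater
    tp = gb["updater"]["grow_colmaker"]["train_param"]

# save_config() serializes values as strings, hence the float().
assert round(float(tp["learning_rate"]), 1) == 0.3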
2 changes: 1 addition & 1 deletion requirements-opt.txt
@@ -1,2 +1,2 @@
 catboost>=1.0.1
-xgboost>=1.5.0
+xgboost>=2.0.0
2 changes: 1 addition & 1 deletion tox.ini
@@ -66,7 +66,7 @@ deps =
     # oldest: pyyaml
     # oldest: openpyxl
     oldest: httpx==0.21.0
-    oldest: xgboost==1.5.0
+    oldest: xgboost==2.0.0
     oldest: catboost==1.0.1
 
 commands_pre =
