From ebc11c73cd3f8143b7c5eac36ed5e52948015448 Mon Sep 17 00:00:00 2001
From: Joppe Geluykens
Date: Fri, 7 Oct 2022 16:50:32 +0200
Subject: [PATCH] gbm: increase boosting_rounds_per_checkpoint to reduce
 evaluation overhead (#2612)

---
 ludwig/schema/trainer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ludwig/schema/trainer.py b/ludwig/schema/trainer.py
index 36d839af0d4..1dcfbd420f9 100644
--- a/ludwig/schema/trainer.py
+++ b/ludwig/schema/trainer.py
@@ -336,7 +336,7 @@ class GBMTrainerConfig(BaseTrainerConfig):
     )
 
     boosting_rounds_per_checkpoint: int = schema_utils.PositiveInteger(
-        default=10, description="Number of boosting rounds per checkpoint / evaluation round."
+        default=50, description="Number of boosting rounds per checkpoint / evaluation round."
     )
 
     # LightGBM core parameters (https://lightgbm.readthedocs.io/en/latest/Parameters.html)
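
For context, below is a minimal sketch of how a user could set this trainer parameter explicitly instead of relying on the schema default changed in this patch. It assumes the standard Ludwig Python API (LudwigModel) and a GBM model type; the feature names and dataset path are hypothetical and only boosting_rounds_per_checkpoint corresponds to the field touched here.

    # Sketch (assumed usage), not part of the patch: override the checkpoint
    # interval in a user config. Feature names and the dataset path are
    # hypothetical placeholders.
    from ludwig.api import LudwigModel

    config = {
        "model_type": "gbm",
        "input_features": [
            {"name": "age", "type": "number"},        # hypothetical feature
            {"name": "segment", "type": "category"},  # hypothetical feature
        ],
        "output_features": [{"name": "churn", "type": "binary"}],
        "trainer": {
            # Run evaluation every 50 boosting rounds (the new default)
            # instead of every 10 (the old default), reducing eval overhead.
            "boosting_rounds_per_checkpoint": 50,
        },
    }

    model = LudwigModel(config)
    # model.train(dataset="train.csv")  # hypothetical dataset path

Larger values trade less frequent evaluation (and thus less overhead per boosting round) for coarser-grained checkpointing and early-stopping signals.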