From 562d8d48644b79cc7f4af9c31dc1b7de5fdc5a95 Mon Sep 17 00:00:00 2001
From: =?utf8?q?J=C3=A9r=C3=B4me=20Benoit?=
Date: Thu, 8 Jan 2026 12:49:39 +0100
Subject: [PATCH] perf(quickadapter): optimize Optuna log scale for LightGBM
 and CatBoost hyperparameters

Apply a logarithmic sampling scale to regularization and tree complexity
parameters for improved hyperparameter search efficiency:

- LightGBM: add num_leaves to log scale (exponential tree growth)
- CatBoost: add l2_leaf_reg and random_strength to log scale
  (multiplicative effects)
- Revert bagging_temperature to linear scale (0 has special meaning:
  it disables the Bayesian bootstrap)

Log scale provides better exploration in low-value regions where these
parameters have the most impact, consistent with Optuna best practices
and industry standards (FLAML, XGBoost patterns).

Bump version to 3.10.6
---
 .../freqaimodels/QuickAdapterRegressorV3.py |  2 +-
 .../user_data/strategies/QuickAdapterV3.py  |  2 +-
 quickadapter/user_data/strategies/Utils.py  | 15 ++++++++++++---
 3 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
index a8b49b5..1c3cec2 100644
--- a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
+++ b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
@@ -87,7 +87,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
     https://github.com/sponsors/robcaulk
     """
 
-    version = "3.10.5"
+    version = "3.10.6"
 
     _TEST_SIZE: Final[float] = 0.1
 
diff --git a/quickadapter/user_data/strategies/QuickAdapterV3.py b/quickadapter/user_data/strategies/QuickAdapterV3.py
index 7f47761..3ed132f 100644
--- a/quickadapter/user_data/strategies/QuickAdapterV3.py
+++ b/quickadapter/user_data/strategies/QuickAdapterV3.py
@@ -110,7 +110,7 @@ class QuickAdapterV3(IStrategy):
     _PLOT_EXTREMA_MIN_EPS: Final[float] = 0.01
 
     def version(self) -> str:
-        return "3.10.5"
+        return "3.10.6"
 
     timeframe = "5m"
     timeframe_minutes = timeframe_to_minutes(timeframe)
diff --git a/quickadapter/user_data/strategies/Utils.py b/quickadapter/user_data/strategies/Utils.py
index a4e00bf..9be9a73 100644
--- a/quickadapter/user_data/strategies/Utils.py
+++ b/quickadapter/user_data/strategies/Utils.py
@@ -1612,7 +1612,9 @@ def zigzag(
     )
 
 
-Regressor = Literal["xgboost", "lightgbm", "histgradientboostingregressor", "ngboost", "catboost"]
+Regressor = Literal[
+    "xgboost", "lightgbm", "histgradientboostingregressor", "ngboost", "catboost"
+]
 REGRESSORS: Final[tuple[Regressor, ...]] = (
     "xgboost",
     "lightgbm",
@@ -2163,6 +2165,7 @@ def get_optuna_study_model_parameters(
     log_scaled_params = {
         "n_estimators",
         "learning_rate",
+        "num_leaves",
         "min_child_weight",
         "min_split_gain",
         "reg_alpha",
@@ -2184,7 +2187,7 @@ def get_optuna_study_model_parameters(
         ),
         # Tree structure
         "num_leaves": _optuna_suggest_int_from_range(
-            trial, "num_leaves", ranges["num_leaves"], min_val=2
+            trial, "num_leaves", ranges["num_leaves"], min_val=2, log=True
         ),
         # Leaf constraints
         "min_child_weight": trial.suggest_float(
@@ -2433,6 +2436,8 @@ def get_optuna_study_model_parameters(
     log_scaled_params = {
         "iterations",
         "learning_rate",
+        "l2_leaf_reg",
+        "random_strength",
     }
 
     ranges = _build_ranges(default_ranges, log_scaled_params)
@@ -2462,7 +2467,10 @@ def get_optuna_study_model_parameters(
         ),
         # Regularization
         "l2_leaf_reg": trial.suggest_float(
-            "l2_leaf_reg", ranges["l2_leaf_reg"][0], ranges["l2_leaf_reg"][1]
+            "l2_leaf_reg",
+            ranges["l2_leaf_reg"][0],
+            ranges["l2_leaf_reg"][1],
+            log=True,
         ),
         "model_size_reg": trial.suggest_float(
             "model_size_reg",
@@ -2475,6 +2483,7 @@ def get_optuna_study_model_parameters(
             "random_strength",
             ranges["random_strength"][0],
             ranges["random_strength"][1],
+            log=True,
         ),
         "rsm": trial.suggest_float(
             "rsm",
-- 
2.43.0
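Editorial note (not part of the patch): the snippet below is a minimal sketch of what log=True changes in Optuna's sampling, which is the behaviour the commit message relies on. The parameter name l2_leaf_reg comes from the patch, but the [1e-3, 10.0] range, the 500-trial budget, and the use of RandomSampler are illustrative assumptions, not the project's actual search ranges.

import optuna

# Keep Optuna quiet; we only care about the suggested values.
optuna.logging.set_verbosity(optuna.logging.WARNING)


def sample_l2_leaf_reg(log: bool, n_trials: int = 500) -> list[float]:
    """Collect the values Optuna suggests for a single float parameter."""
    values: list[float] = []

    def objective(trial: optuna.Trial) -> float:
        # Same call shape as in the patch; the range and trial count here are
        # illustrative assumptions, not the project's configured ranges.
        values.append(trial.suggest_float("l2_leaf_reg", 1e-3, 10.0, log=log))
        return 0.0  # constant objective: only the sampling scale matters here

    study = optuna.create_study(sampler=optuna.samplers.RandomSampler(seed=42))
    study.optimize(objective, n_trials=n_trials)
    return values


for name, log in (("linear", False), ("log", True)):
    values = sample_l2_leaf_reg(log)
    below_one = sum(v < 1.0 for v in values) / len(values)
    print(f"{name:>6} scale: {below_one:.0%} of suggestions fall below 1.0")

# Expected ballpark: ~10% of suggestions below 1.0 on a linear scale versus
# ~75% on a log scale, because [1e-3, 1.0] covers three of the four decades
# in [1e-3, 10.0].

This is also why bagging_temperature stays on a linear scale: suggest_float(..., log=True) requires a strictly positive lower bound, and 0, which disables the Bayesian bootstrap, must remain reachable.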