From: Jérôme Benoit
Date: Mon, 27 Jan 2025 11:40:55 +0000 (+0100)
Subject: perf: fine tune optuna search space
X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=267f2a3e0cd6d0b8e4df8d871e77825bb587c365;p=freqai-strategies.git

perf: fine tune optuna search space

Signed-off-by: Jérôme Benoit
---

diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index bbaccb3..19d117f 100644
--- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
@@ -104,6 +104,8 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             **{
                 "n_estimators": hp.get("n_estimators"),
                 "learning_rate": hp.get("learning_rate"),
+                "subsample": hp.get("subsample"),
+                "colsample_bytree": hp.get("colsample_bytree"),
                 "reg_alpha": hp.get("reg_alpha"),
                 "reg_lambda": hp.get("reg_lambda"),
             },
@@ -217,10 +219,12 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 def objective(trial, X, y, weights, X_test, y_test, params):
     study_params = {
         "objective": "rmse",
-        "n_estimators": trial.suggest_int("n_estimators", 100, 1000),
-        "learning_rate": trial.suggest_loguniform("learning_rate", 1e-8, 1.0),
-        "reg_alpha": trial.suggest_loguniform("reg_alpha", 1e-8, 10.0),
-        "reg_lambda": trial.suggest_loguniform("reg_lambda", 1e-8, 10.0),
+        "n_estimators": trial.suggest_int("n_estimators", 100, 800),
+        "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
+        "subsample": trial.suggest_float("subsample", 0.6, 1.0),
+        "colsample_bytree": trial.suggest_float("colsample_bytree", 0.6, 1.0),
+        "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
+        "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
     }
     params = {**params, **study_params}
     window = trial.suggest_int("train_period_candles", 1152, 17280, step=100)
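[Note, not part of the patch] Two things are going on in the LightGBM hunk above. First, trial.suggest_loguniform has been deprecated since Optuna 3.0 in favour of trial.suggest_float(..., log=True), so this is an API migration as well as a tuning change. Second, the space itself is narrowed: n_estimators is capped at 800, and learning_rate moves from the degenerate 1e-8..1.0 range, where most of the log-uniform mass yields models that barely learn, to a practical 1e-3..0.3 band, while row and column subsampling become new search dimensions. One LightGBM-specific caveat: subsample (alias bagging_fraction) only takes effect when subsample_freq (alias bagging_freq) is at least 1, which the patch leaves at its default of 0. The sketch below exercises the new space on synthetic data; only the suggest_* bounds come from the patch, and the data, the subsample_freq setting and the trial budget are illustrative.

import lightgbm as lgb
import numpy as np
import optuna

# Synthetic regression data, illustrative only.
rng = np.random.default_rng(0)
X = rng.normal(size=(2000, 16))
y = X[:, 0] * 2.0 + rng.normal(scale=0.1, size=2000)
X_train, X_test = X[:1500], X[1500:]
y_train, y_test = y[:1500], y[1500:]

def objective(trial):
    # Bounds mirror the patch; everything else here is illustrative.
    params = {
        "objective": "rmse",
        "n_estimators": trial.suggest_int("n_estimators", 100, 800),
        "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
        "subsample": trial.suggest_float("subsample", 0.6, 1.0),
        # Not in the patch: LightGBM ignores subsample unless subsample_freq >= 1.
        "subsample_freq": 1,
        "colsample_bytree": trial.suggest_float("colsample_bytree", 0.6, 1.0),
        "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
        "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
    }
    model = lgb.LGBMRegressor(**params)
    model.fit(X_train, y_train)
    pred = model.predict(X_test)
    return float(np.sqrt(np.mean((pred - y_test) ** 2)))  # RMSE, minimized below

study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=20)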
"reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True), + "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True), "callbacks": [ optuna.integration.XGBoostPruningCallback(trial, "validation_0-rmse") ],