Piment Noir Git Repositories - freqai-strategies.git/commitdiff
perf: fine-tune optuna search space
author    Jérôme Benoit <jerome.benoit@piment-noir.org>
          Mon, 27 Jan 2025 11:40:55 +0000 (12:40 +0100)
committer Jérôme Benoit <jerome.benoit@piment-noir.org>
          Mon, 27 Jan 2025 11:40:55 +0000 (12:40 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py

index bbaccb38fe69988ec2d6312d95c8189b092c161d..19d117f6eeb231d88e1e878981686c9122f4809c 100644
--- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
@@ -104,6 +104,8 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
                 **{
                     "n_estimators": hp.get("n_estimators"),
                     "learning_rate": hp.get("learning_rate"),
+                    "subsample": hp.get("subsample"),
+                    "colsample_bytree": hp.get("colsample_bytree"),
                     "reg_alpha": hp.get("reg_alpha"),
                     "reg_lambda": hp.get("reg_lambda"),
                 },
@@ -217,10 +219,12 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 def objective(trial, X, y, weights, X_test, y_test, params):
     study_params = {
         "objective": "rmse",
-        "n_estimators": trial.suggest_int("n_estimators", 100, 1000),
-        "learning_rate": trial.suggest_loguniform("learning_rate", 1e-8, 1.0),
-        "reg_alpha": trial.suggest_loguniform("reg_alpha", 1e-8, 10.0),
-        "reg_lambda": trial.suggest_loguniform("reg_lambda", 1e-8, 10.0),
+        "n_estimators": trial.suggest_int("n_estimators", 100, 800),
+        "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
+        "subsample": trial.suggest_float("subsample", 0.6, 1.0),
+        "colsample_bytree": trial.suggest_float("colsample_bytree", 0.6, 1.0),
+        "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
+        "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
     }
     params = {**params, **study_params}
     window = trial.suggest_int("train_period_candles", 1152, 17280, step=100)
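
For context: trial.suggest_loguniform was deprecated in Optuna v3.0 in favor of trial.suggest_float(..., log=True), which this hunk adopts while also narrowing the bounds to plausible regions (e.g. learning_rate shrinks from 1e-8..1.0 to 1e-3..0.3, avoiding trials with rates too small to converge within 800 estimators). A minimal standalone sketch of the new LightGBM search space follows; the synthetic data and study setup are illustrative assumptions, not part of this repository:

    import lightgbm as lgb
    import numpy as np
    import optuna
    from sklearn.metrics import mean_squared_error
    from sklearn.model_selection import train_test_split

    # Synthetic data for illustration only.
    rng = np.random.default_rng(42)
    X = rng.normal(size=(1000, 10))
    y = 2.0 * X[:, 0] + rng.normal(scale=0.1, size=1000)
    X_train, X_valid, y_train, y_valid = train_test_split(X, y, test_size=0.2)

    def objective(trial):
        params = {
            # "rmse" is a valid LightGBM alias for the L2 regression objective.
            "objective": "rmse",
            "n_estimators": trial.suggest_int("n_estimators", 100, 800),
            "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
            # Note: in LightGBM, subsample (bagging_fraction) only takes
            # effect when subsample_freq (bagging_freq) is > 0.
            "subsample": trial.suggest_float("subsample", 0.6, 1.0),
            "colsample_bytree": trial.suggest_float("colsample_bytree", 0.6, 1.0),
            "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
            "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
        }
        model = lgb.LGBMRegressor(**params)
        model.fit(X_train, y_train)
        preds = model.predict(X_valid)
        return mean_squared_error(y_valid, preds) ** 0.5  # RMSE, minimized below

    study = optuna.create_study(direction="minimize")
    study.optimize(objective, n_trials=20)
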
index 6c053d611b6b380b176e6bf0deb9ec3b0c2ac161..3128ab371113a78618a7e77016bbb7d85f94498b 100644
--- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
@@ -106,6 +106,8 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
                     "learning_rate": hp.get("learning_rate"),
                     "max_depth": hp.get("max_depth"),
                     "gamma": hp.get("gamma"),
+                    "subsample": hp.get("subsample"),
+                    "colsample_bytree": hp.get("colsample_bytree"),
                     "reg_alpha": hp.get("reg_alpha"),
                     "reg_lambda": hp.get("reg_lambda"),
                 },
@@ -220,12 +222,14 @@ def objective(trial, X, y, weights, X_test, y_test, params):
     study_params = {
         "objective": "reg:squarederror",
         "eval_metric": "rmse",
-        "n_estimators": trial.suggest_int("n_estimators", 100, 1000),
-        "learning_rate": trial.suggest_loguniform("learning_rate", 1e-8, 1.0),
-        "max_depth": trial.suggest_int("max_depth", 1, 10),
-        "gamma": trial.suggest_loguniform("gamma", 1e-8, 1.0),
-        "reg_alpha": trial.suggest_loguniform("reg_alpha", 1e-8, 10.0),
-        "reg_lambda": trial.suggest_loguniform("reg_lambda", 1e-8, 10.0),
+        "n_estimators": trial.suggest_int("n_estimators", 100, 800),
+        "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
+        "max_depth": trial.suggest_int("max_depth", 3, 12),
+        "gamma": trial.suggest_float("gamma", 0, 2),
+        "subsample": trial.suggest_float("subsample", 0.6, 1.0),
+        "colsample_bytree": trial.suggest_float("colsample_bytree", 0.6, 1.0),
+        "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
+        "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
         "callbacks": [
             optuna.integration.XGBoostPruningCallback(trial, "validation_0-rmse")
         ],
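
The XGBoost variant makes the same suggest_loguniform → suggest_float(log=True) migration, widens max_depth from 1..10 to 3..12, moves gamma from a log scale on 1e-8..1.0 to a linear 0..2, and keeps optuna.integration.XGBoostPruningCallback wired to "validation_0-rmse" (the RMSE reported for the first eval_set entry) so unpromising trials are stopped early. A minimal standalone sketch of how such a callback attaches to the new search space, assuming an XGBoost version (>= 1.6) where callbacks and eval_metric are constructor arguments, and with synthetic data in place of the repository's; note that in recent Optuna releases the integration lives in the separate optuna-integration package:

    import numpy as np
    import optuna
    import xgboost as xgb

    # Synthetic data for illustration only.
    rng = np.random.default_rng(0)
    X = rng.normal(size=(1000, 10))
    y = X[:, 0] - 0.5 * X[:, 1] + rng.normal(scale=0.1, size=1000)
    X_train, X_valid = X[:800], X[800:]
    y_train, y_valid = y[:800], y[800:]

    def objective(trial):
        params = {
            "objective": "reg:squarederror",
            "eval_metric": "rmse",
            "n_estimators": trial.suggest_int("n_estimators", 100, 800),
            "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
            "max_depth": trial.suggest_int("max_depth", 3, 12),
            "gamma": trial.suggest_float("gamma", 0, 2),
            "subsample": trial.suggest_float("subsample", 0.6, 1.0),
            "colsample_bytree": trial.suggest_float("colsample_bytree", 0.6, 1.0),
            "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
            "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
        }
        # "validation_0-rmse" names the metric of the first eval_set entry.
        pruning_cb = optuna.integration.XGBoostPruningCallback(
            trial, "validation_0-rmse"
        )
        model = xgb.XGBRegressor(**params, callbacks=[pruning_cb])
        model.fit(X_train, y_train, eval_set=[(X_valid, y_valid)], verbose=False)
        return model.evals_result()["validation_0"]["rmse"][-1]

    study = optuna.create_study(
        direction="minimize", pruner=optuna.pruners.MedianPruner()
    )
    study.optimize(objective, n_trials=20)
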