Piment Noir Git Repositories - freqai-strategies.git/commitdiff
fix(qav3): ensure disabling optuna_hyperopt matches v3 behavior
author    Jérôme Benoit <jerome.benoit@piment-noir.org>
          Sun, 26 Jan 2025 13:13:21 +0000 (14:13 +0100)
committer Jérôme Benoit <jerome.benoit@piment-noir.org>
          Sun, 26 Jan 2025 13:13:21 +0000 (14:13 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py

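Before this commit, the v3.5 adapters truncated the training set to a default 4032-candle window and rebuilt the parameter dict even when optuna_hyperopt was disabled, diverging from v3. The fix moves the window truncation and the hyperparameter merge inside the "if optuna_hyperopt:" branch, so the disabled path trains on the full dataset with the configured model_training_parameters. Below is a minimal, self-contained sketch of the fixed control flow; the synthetic data, the base_parameters dict, and the inline hp stand-in for study.best_params are illustrative assumptions, not code from this repository.

import numpy as np
import pandas as pd
from lightgbm import LGBMRegressor

# Synthetic stand-ins for the FreqAI-provided training data (hypothetical).
X = pd.DataFrame({"feature": np.random.rand(5000)})
y = pd.Series(np.random.rand(5000))
sample_weight = np.ones(len(y))

base_parameters = {"n_estimators": 100}  # stands in for self.model_training_parameters
optuna_hyperopt = False                  # the disabled case this commit fixes

model_training_parameters = base_parameters
if optuna_hyperopt:
    # In the real adapters, hp comes from study.best_params after study.optimize().
    hp = {"train_period_candles": 4032, "n_estimators": 400}
    window = hp.get("train_period_candles")
    X = X.tail(window)                   # truncation now happens only in this branch
    y = y.tail(window)
    sample_weight = sample_weight[-window:]
    model_training_parameters = {**base_parameters, "n_estimators": hp.get("n_estimators")}

# With hyperopt disabled, the full dataset and the configured parameters are
# used unchanged, restoring the v3 behavior named in the commit title.
model = LGBMRegressor(**model_training_parameters).fit(X, y, sample_weight=sample_weight)

Rebinding model_training_parameters only inside the branch also removes the params/else duplication the old code needed.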
index 301ac9eaf7e55e81123dc4c7cc36942d992c6124..f6cf01e01d7524103ab362fd997475582b556541 100644
@@ -54,9 +54,10 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
         sample_weight = data_dictionary["train_weights"]
 
+        model_training_parameters = self.model_training_parameters
+
         lgbm_model = self.get_init_model(dk.pair)
-        start = time.time()
-        hp = {}
+
         optuna_hyperopt: bool = (
             self.freqai_info.get("optuna_hyperopt", False)
             and self.freqai_info.get("data_split_parameters", {}).get(
@@ -64,7 +65,10 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             )
             > 0
         )
+
+        start = time.time()
         if optuna_hyperopt:
+            hp = {}
             pruner = optuna.pruners.MedianPruner(n_warmup_steps=5)
             study = optuna.create_study(pruner=pruner, direction="minimize")
             study.optimize(
@@ -90,12 +94,12 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
                 f"Optuna hyperopt {'best objective value':>20s} : {study.best_value}"
             )
 
-        window = hp.get("train_period_candles", 4032)
-        X = X.tail(window)
-        y = y.tail(window)
-        sample_weight = sample_weight[-window:]
-        if optuna_hyperopt:
-            params = {
+            window = hp.get("train_period_candles")
+            X = X.tail(window)
+            y = y.tail(window)
+            sample_weight = sample_weight[-window:]
+
+            model_training_parameters = {
                 **self.model_training_parameters,
                 **{
                     "n_estimators": hp.get("n_estimators"),
@@ -104,12 +108,10 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
                     "reg_lambda": hp.get("reg_lambda"),
                 },
             }
-        else:
-            params = self.model_training_parameters
 
-        logger.info(f"Model training parameters : {params}")
+        logger.info(f"Model training parameters : {model_training_parameters}")
 
-        model = LGBMRegressor(**params)
+        model = LGBMRegressor(**model_training_parameters)
 
         model.fit(
             X=X,
index 55a1e662d976dd0842495682d9b041fa28be640e..259acc76f171af7f74473b3999851e6092ecbf0f 100644
@@ -54,9 +54,10 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
         sample_weight = data_dictionary["train_weights"]
 
+        model_training_parameters = self.model_training_parameters
+
         xgb_model = self.get_init_model(dk.pair)
-        start = time.time()
-        hp = {}
+
         optuna_hyperopt: bool = (
             self.freqai_info.get("optuna_hyperopt", False)
             and self.freqai_info.get("data_split_parameters", {}).get(
@@ -64,7 +65,10 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             )
             > 0
         )
+
+        start = time.time()
         if optuna_hyperopt:
+            hp = {}
             pruner = optuna.pruners.MedianPruner(n_warmup_steps=5)
             study = optuna.create_study(pruner=pruner, direction="minimize")
             study.optimize(
@@ -90,13 +94,13 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
                 f"Optuna hyperopt {'best objective value':>20s} : {study.best_value}"
             )
 
-        window = hp.get("train_period_candles", 4032)
-        X = X.tail(window)
-        y = y.tail(window)
-        sample_weight = sample_weight[-window:]
-        if optuna_hyperopt:
-            params = {
-                **self.model_training_parameters,
+            window = hp.get("train_period_candles")
+            X = X.tail(window)
+            y = y.tail(window)
+            sample_weight = sample_weight[-window:]
+
+            model_training_parameters = {
+                **model_training_parameters,
                 **{
                     "n_estimators": hp.get("n_estimators"),
                     "learning_rate": hp.get("learning_rate"),
@@ -105,12 +109,10 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
                     "reg_lambda": hp.get("reg_lambda"),
                 },
             }
-        else:
-            params = self.model_training_parameters
 
-        logger.info(f"Model training parameters : {params}")
+        logger.info(f"Model training parameters : {model_training_parameters}")
 
-        model = XGBRegressor(**params)
+        model = XGBRegressor(**model_training_parameters)
 
         model.fit(
             X=X,