Piment Noir Git Repositories - freqai-strategies.git/commitdiff
feat(reforcexy): make hyperopt per pair tunable
author    Jérôme Benoit <jerome.benoit@piment-noir.org>
          Sun, 23 Feb 2025 17:15:23 +0000 (18:15 +0100)
committer Jérôme Benoit <jerome.benoit@piment-noir.org>
          Sun, 23 Feb 2025 17:15:23 +0000 (18:15 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
ReforceXY/user_data/config-template.json
ReforceXY/user_data/freqaimodels/ReforceXY.py
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py

ReforceXY/user_data/config-template.json
index 693d638cd0a3ed877b22402046bdbb133ed086d3..994fb94349a84c26807f4bc3afc40a1159b5d8c6 100644
       "max_no_improvement_evals": 0, // Maximum consecutive evaluations without a new best model
       "min_evals": 0, // Number of evaluations before start to count evaluations without improvements
       "check_envs": true, // Check that an environment follows Gym API
-      "plot_new_best": false // Enable tensorboard rollout plot upon finding a new best model
+      "plot_new_best": true // Enable tensorboard rollout plot upon finding a new best model
     },
     "rl_config_optuna": {
       "enabled": true, // Enable optuna hyperopt
+      "per_pair": false, // Enable per pair hyperopt
       "n_trials": 100,
       "n_startup_trials": 10,
       "timeout_hours": 0
ReforceXY/user_data/freqaimodels/ReforceXY.py
index d841540fe4d939d5c589080a0e5511abc8031047..b5d3e8487c6f7dc9410b53b64081f22fbe21bb3f 100644
@@ -84,6 +84,7 @@ class ReforceXY(BaseReinforcementLearningModel):
             },
             "rl_config_optuna": {
                 "enabled": false,                   // Enable optuna hyperopt
+                "per_pair: false,                   // Enable per pair hyperopt
                 "n_trials": 100,
                 "n_startup_trials": 10,
                 "timeout_hours": 0,
@@ -457,17 +458,18 @@ class ReforceXY(BaseReinforcementLearningModel):
         output = output.rolling(window=self.CONV_WIDTH).apply(_predict)
         return output
 
-    def get_storage(self, pair: str) -> BaseStorage:
+    def get_storage(self, pair: str | None = None) -> BaseStorage:
         """
         Get the storage for Optuna
         """
         storage_dir = str(self.full_path)
+        storage_filename = f"optuna-{pair.split('/')[0]}" if pair else "optuna"
         storage_backend = self.rl_config_optuna.get("storage", "sqlite")
         if storage_backend == "sqlite":
-            storage = f"sqlite:///{storage_dir}/optuna-{pair.split('/')[0]}.sqlite"
+            storage = f"sqlite:///{storage_dir}/{storage_filename}.sqlite"
         elif storage_backend == "file":
             storage = JournalStorage(
-                JournalFileBackend(f"{storage_dir}/optuna-{pair.split('/')[0]}.log")
+                JournalFileBackend(f"{storage_dir}/{storage_filename}.log")
             )
         return storage
 
@@ -478,8 +480,13 @@ class ReforceXY(BaseReinforcementLearningModel):
         Runs hyperparameter optimization using Optuna and
         returns the best hyperparameters found
         """
-        study_name = str(dk.pair)
-        storage = self.get_storage(dk.pair)
+        _, identifier = str(self.full_path).rsplit("/", 1)
+        if self.rl_config_optuna.get("per_pair", False):
+            study_name = f"{identifier}-{dk.pair}"
+            storage = self.get_storage(dk.pair)
+        else:
+            study_name = identifier
+            storage = self.get_storage()
         study: Study = create_study(
             study_name=study_name,
             sampler=TPESampler(
@@ -514,7 +521,7 @@ class ReforceXY(BaseReinforcementLearningModel):
 
         logger.info(
             "------------ Hyperopt results %s (%.2f secs) ------------",
-            dk.pair,
+            study_name,
             time_spent,
         )
         logger.info(
@@ -523,32 +530,49 @@ class ReforceXY(BaseReinforcementLearningModel):
         logger.info("Best trial params: %s", study.best_trial.params)
         logger.info("-------------------------------------------------------")
 
-        self.save_best_params(dk.pair, study.best_trial.params)
+        self.save_best_params(
+            study.best_trial.params,
+            dk.pair if self.rl_config_optuna.get("per_pair", False) else None,
+        )
 
         return study.best_trial.params
 
-    def save_best_params(self, pair: str, best_params: Dict) -> None:
+    def save_best_params(self, best_params: Dict, pair: str | None = None) -> None:
         """
         Save the best hyperparameters found during hyperparameter optimization
         """
-        best_params_path = Path(
-            self.full_path / f"hyperopt-best-params-{pair.split('/')[0]}.json"
+        best_params_filename = (
+            f"hyperopt-best-params-{pair.split('/')[0]}"
+            if pair
+            else "hyperopt-best-params"
         )
-        logger.info(f"{pair}: saving best params to %s JSON file", best_params_path)
+        best_params_path = Path(self.full_path / f"{best_params_filename}.json")
+        log_msg: str = (
+            f"{pair}: saving best params to {best_params_path} JSON file"
+            if pair
+            else f"saving best params to {best_params_path} JSON file"
+        )
+        logger.info(log_msg)
         with best_params_path.open("w", encoding="utf-8") as write_file:
             json.dump(best_params, write_file, indent=4)
 
-    def load_best_params(self, pair: str) -> Dict | None:
+    def load_best_params(self, pair: str | None = None) -> Dict | None:
         """
         Load the best hyperparameters found and saved during hyperparameter optimization
         """
-        best_params_path = Path(
-            self.full_path / f"hyperopt-best-params-{pair.split('/')[0]}.json"
+        best_params_filename = (
+            f"hyperopt-best-params-{pair.split('/')[0]}"
+            if pair
+            else "hyperopt-best-params.json"
+        )
+        best_params_path = Path(self.full_path / f"{best_params_filename}.json")
+        log_msg: str = (
+            f"{pair}: loading best params from {best_params_path} JSON file"
+            if pair
+            else f"loading best params from {best_params_path} JSON file"
         )
         if best_params_path.is_file():
-            logger.info(
-                f"{pair}: loading best params from %s JSON file", best_params_path
-            )
+            logger.info(log_msg)
             with best_params_path.open("r", encoding="utf-8") as read_file:
                 best_params = json.load(read_file)
             return best_params
@@ -584,9 +608,7 @@ class ReforceXY(BaseReinforcementLearningModel):
         else:
             tensorboard_log_path = None
 
-        logger.info(
-            "------------ Hyperopt trial %d %s ------------", trial.number, dk.pair
-        )
+        logger.info("------------ Hyperopt trial %d ------------", trial.number)
         logger.info("Trial %s params: %s", trial.number, params)
 
         model = self.MODELCLASS(
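
The best-params files follow the same convention: one file per pair when per_pair is enabled, a single shared file otherwise. A small sketch of the resulting layout mirroring save_best_params()/load_best_params() (hypothetical helper, model-directory path illustrative):

    from pathlib import Path

    def best_params_path(full_path: str, pair: str | None = None) -> Path:
        # Per-pair files keep the base currency in the name; the shared file
        # drops the suffix entirely.
        name = (
            f"hyperopt-best-params-{pair.split('/')[0]}" if pair else "hyperopt-best-params"
        )
        return Path(full_path) / f"{name}.json"

    print(best_params_path("user_data/models/ReforceXY", "ETH/USDT"))
    # user_data/models/ReforceXY/hyperopt-best-params-ETH.json
    print(best_params_path("user_data/models/ReforceXY"))
    # user_data/models/ReforceXY/hyperopt-best-params.json
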
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index 7ca72fba300bb74ba85d4649b2df8280f720aef9..3fd426969f2b284e456a8f56cbc99316c010a5a7 100644
@@ -253,13 +253,14 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
     def optuna_storage(self, pair: str) -> optuna.storages.BaseStorage:
         storage_dir = str(self.full_path)
+        storage_filename = f"optuna-{pair.split('/')[0]}"
         storage_backend = self.__optuna_config.get("storage", "file")
         if storage_backend == "sqlite":
-            storage = f"sqlite:///{storage_dir}/optuna-{pair.split('/')[0]}.sqlite"
+            storage = f"sqlite:///{storage_dir}/{storage_filename}.sqlite"
         elif storage_backend == "file":
             storage = optuna.storages.JournalStorage(
                 optuna.storages.journal.JournalFileBackend(
-                    f"{storage_dir}/optuna-{pair.split('/')[0]}.log"
+                    f"{storage_dir}/{storage_filename}.log"
                 )
             )
         return storage
@@ -307,8 +308,9 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
     ) -> tuple[Dict | None, float | None]:
+        _, identifier = str(self.full_path).rsplit("/", 1)
         study_namespace = "hp"
-        study_name = f"{study_namespace}-{pair}"
+        study_name = f"{identifier}-{study_namespace}-{pair}"
         storage = self.optuna_storage(pair)
         pruner = optuna.pruners.HyperbandPruner()
         self.optuna_study_delete(study_name, storage)
@@ -378,8 +380,9 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         test_weights,
         model_training_parameters,
     ) -> tuple[Dict | None, float | None]:
+        _, identifier = str(self.full_path).rsplit("/", 1)
         study_namespace = "period"
-        study_name = f"{study_namespace}-{pair}"
+        study_name = f"{identifier}-{study_namespace}-{pair}"
         storage = self.optuna_storage(pair)
         pruner = optuna.pruners.HyperbandPruner()
         self.optuna_study_delete(study_name, storage)
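
The QuickAdapter models keep their per-pair hp and period studies, but the study names are now prefixed with the FreqAI identifier. A brief sketch of the resulting names (helper and model-directory path are illustrative); the XGBoost variant below applies the same scheme:

    from pathlib import Path

    def quickadapter_study_name(full_path: str, namespace: str, pair: str) -> str:
        # The identifier is the last component of the model directory.
        identifier = Path(full_path).name
        return f"{identifier}-{namespace}-{pair}"

    print(quickadapter_study_name("user_data/models/quickadapter", "hp", "BTC/USDT"))
    # quickadapter-hp-BTC/USDT
    print(quickadapter_study_name("user_data/models/quickadapter", "period", "BTC/USDT"))
    # quickadapter-period-BTC/USDT
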
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index ebcb25b9f0a716b2daf64b731ac74f2e14839da3..bbc2b2848816862e901dd2bcdc77d820ca6f1b2c 100644
@@ -254,13 +254,14 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
     def optuna_storage(self, pair: str) -> optuna.storages.BaseStorage:
         storage_dir = str(self.full_path)
+        storage_filename = f"optuna-{pair.split('/')[0]}"
         storage_backend = self.__optuna_config.get("storage", "file")
         if storage_backend == "sqlite":
-            storage = f"sqlite:///{storage_dir}/optuna-{pair.split('/')[0]}.sqlite"
+            storage = f"sqlite:///{storage_dir}/{storage_filename}.sqlite"
         elif storage_backend == "file":
             storage = optuna.storages.JournalStorage(
                 optuna.storages.journal.JournalFileBackend(
-                    f"{storage_dir}/optuna-{pair.split('/')[0]}.log"
+                    f"{storage_dir}/{storage_filename}.log"
                 )
             )
         return storage
@@ -308,8 +309,9 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
     ) -> tuple[Dict | None, float | None]:
+        _, identifier = str(self.full_path).rsplit("/", 1)
         study_namespace = "hp"
-        study_name = f"{study_namespace}-{pair}"
+        study_name = f"{identifier}-{study_namespace}-{pair}"
         storage = self.optuna_storage(pair)
         pruner = optuna.pruners.HyperbandPruner()
         self.optuna_study_delete(study_name, storage)
@@ -379,8 +381,9 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         test_weights,
         model_training_parameters,
     ) -> tuple[Dict | None, float | None]:
+        _, identifier = str(self.full_path).rsplit("/", 1)
         study_namespace = "period"
-        study_name = f"{study_namespace}-{pair}"
+        study_name = f"{identifier}-{study_namespace}-{pair}"
         storage = self.optuna_storage(pair)
         pruner = optuna.pruners.HyperbandPruner()
         self.optuna_study_delete(study_name, storage)
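
Once a hyperopt run has completed, the studies can be inspected directly with Optuna. A hedged usage sketch, assuming the ReforceXY sqlite backend with per-pair hyperopt enabled (storage path and study name are illustrative):

    import optuna

    # Load an existing per-pair study and show its best parameters.
    storage = "sqlite:///user_data/models/ReforceXY/optuna-BTC.sqlite"
    study = optuna.load_study(study_name="ReforceXY-BTC/USDT", storage=storage)
    print(study.best_trial.params)
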