Piment Noir Git Repositories - freqai-strategies.git/commitdiff
fix(qav3): enqueue previously saved optuna best params
authorJérôme Benoit <jerome.benoit@piment-noir.org>
Wed, 19 Feb 2025 09:56:13 +0000 (10:56 +0100)
committerJérôme Benoit <jerome.benoit@piment-noir.org>
Wed, 19 Feb 2025 09:56:13 +0000 (10:56 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
ReforceXY/user_data/config-template.json
ReforceXY/user_data/freqaimodels/ReforceXY.py
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py

index cefcccf788536022c17543cee6cdb9b5b2cbe323..6ca8c0802043fb09ad4a64ea74bb24448deb1af7 100644 (file)
     // "live_retrain_hours": 0.5,
     "backtest_period_days": 2,
     "write_metrics_to_disk": false,
-    "identifier": "ReforceXY",
+    "identifier": "ReforceXY-PPO",
     "fit_live_predictions_candles": 300,
     "data_kitchen_thread_count": 6, // set to number of CPU threads / 4
     "track_performance": false,
index 0d9a5768bd185eca0acac95ada7d743d87b99208..6951f6f17c3409acab96a2cac23e1f37f090fd9a 100644 (file)
@@ -402,6 +402,22 @@ class ReforceXY(BaseReinforcementLearningModel):
         :param model: Any = the trained model used to inference the features.
         """
 
+        def _is_valid(action: int, position: float) -> bool:
+            """
+            Determine if the action is valid for the step
+            """
+            # Agent should only try to exit if it is in position
+            if action in (Actions.Short_exit.value, Actions.Long_exit.value):
+                if position not in (Positions.Short, Positions.Long):
+                    return False
+
+            # Agent should only try to enter if it is not in position
+            if action in (Actions.Short_enter.value, Actions.Long_enter.value):
+                if position != Positions.Neutral:
+                    return False
+
+            return True
+
         def _action_masks(position: float):
             return [_is_valid(action.value, position) for action in Actions]
 
@@ -502,14 +518,34 @@ class ReforceXY(BaseReinforcementLearningModel):
         )
         logger.info("-------------------------------------------------------")
 
+        self.save_best_params(dk, study.best_trial.params)
+
+        return self.optuna_trial_params[dk.pair][study.best_trial.number]
+
+    def save_best_params(self, dk: FreqaiDataKitchen, best_params: Dict) -> None:
+        """
+        Save the best hyperparameters found during hyperparameter optimization
+        """
         best_params_path = Path(
             dk.full_path / f"{dk.pair.split('/')[0]}_hyperopt_best_params.json"
         )
-        logger.info("dumping to %s JSON file", best_params_path)
+        logger.info("saving to %s JSON file", best_params_path)
         with best_params_path.open("w", encoding="utf-8") as write_file:
-            json.dump(study.best_trial.params, write_file, indent=4)
+            json.dump(best_params, write_file, indent=4)
 
-        return self.optuna_trial_params[dk.pair][study.best_trial.number]
+    def load_best_params(self, dk: FreqaiDataKitchen) -> Dict | None:
+        """
+        Load the best hyperparameters found and saved during hyperparameter optimization
+        """
+        best_params_path = Path(
+            dk.full_path / f"{dk.pair.split('/')[0]}_hyperopt_best_params.json"
+        )
+        if best_params_path.is_file():
+            logger.info("loading from %s JSON file", best_params_path)
+            with best_params_path.open("r", encoding="utf-8") as read_file:
+                best_params = json.load(read_file)
+            return best_params
+        return None
 
     def objective(
         self, trial: Trial, train_df, total_timesteps: int, dk: FreqaiDataKitchen
@@ -1374,20 +1410,3 @@ def sample_params_qrdqn(trial: Trial) -> Dict[str, Any]:
     n_quantiles = trial.suggest_int("n_quantiles", 5, 200)
     hyperparams["policy_kwargs"].update({"n_quantiles": n_quantiles})
     return hyperparams
-
-
-def _is_valid(action: int, position: float) -> bool:
-    """
-    Determine if the action is valid for the step
-    """
-    # Agent should only try to exit if it is in position
-    if action in (Actions.Short_exit.value, Actions.Long_exit.value):
-        if position not in (Positions.Short, Positions.Long):
-            return False
-
-    # Agent should only try to enter if it is not in position
-    if action in (Actions.Short_enter.value, Actions.Long_enter.value):
-        if position != Positions.Neutral:
-            return False
-
-    return True
index e6b18abbbd17fecbd286c1c2a5d419f90077ed68..91d37ec28a1f2d331034e3860258cb1f1161c144 100644 (file)
@@ -224,9 +224,9 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff
 
         dk.data["extra_returns_per_train"]["label_period_candles"] = (
-            self.__optuna_period_params.get(
-                pair, {}
-            ).get("label_period_candles", self.ft_params["label_period_candles"])
+            self.__optuna_period_params.get(pair, {}).get(
+                "label_period_candles", self.ft_params["label_period_candles"]
+            )
         )
         dk.data["extra_returns_per_train"]["hp_rmse"] = self.__optuna_hp_rmse.get(
             pair, {}
@@ -283,6 +283,19 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
                 pred_df, fit_live_predictions_candles, label_period_candles
             )
 
+    def optuna_hp_enqueue_previous_best_trial(
+        self,
+        dk: FreqaiDataKitchen,
+        study: optuna.study.Study,
+        previous_study: optuna.study.Study,
+    ) -> None:
+        if self.optuna_study_has_best_params(previous_study):
+            study.enqueue_trial(previous_study.best_params)
+        elif self.__optuna_hp_params.get(dk.pair):
+            study.enqueue_trial(self.__optuna_hp_params[dk.pair])
+        elif self.optuna_load_best_params(dk, "hp"):
+            study.enqueue_trial(self.optuna_load_best_params(dk, "hp"))
+
     def optuna_hp_optimize(
         self,
         dk: FreqaiDataKitchen,
@@ -307,10 +320,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             direction=optuna.study.StudyDirection.MINIMIZE,
             storage=storage,
         )
-        if self.optuna_study_has_best_params(previous_study):
-            study.enqueue_trial(previous_study.best_params)
-        elif self.__optuna_hp_params.get(dk.pair):
-            study.enqueue_trial(self.__optuna_hp_params[dk.pair])
+        self.optuna_hp_enqueue_previous_best_trial(dk, study, previous_study)
         start = time.time()
         try:
             study.optimize(
@@ -335,17 +345,26 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         time_spent = time.time() - start
         logger.info(f"Optuna hp hyperopt done ({time_spent:.2f} secs)")
 
-        hp_best_params_path = Path(
-            dk.full_path / f"{dk.pair.split('/')[0]}_optuna_hp_best_params.json"
-        )
-        with hp_best_params_path.open("w", encoding="utf-8") as write_file:
-            json.dump(study.best_params, write_file, indent=4)
         params = study.best_params
+        self.optuna_save_best_params(dk, "hp", params)
         # log params
         for key, value in {"rmse": study.best_value, **params}.items():
             logger.info(f"Optuna hp hyperopt | {key:>20s} : {value}")
         return params, study.best_value
 
+    def optuna_period_enqueue_previous_best_trial(
+        self,
+        dk: FreqaiDataKitchen,
+        study: optuna.study.Study,
+        previous_study: optuna.study.Study,
+    ) -> None:
+        if self.optuna_study_has_best_params(previous_study):
+            study.enqueue_trial(previous_study.best_params)
+        elif self.__optuna_period_params.get(dk.pair):
+            study.enqueue_trial(self.__optuna_period_params[dk.pair])
+        elif self.optuna_load_best_params(dk, "period"):
+            study.enqueue_trial(self.optuna_load_best_params(dk, "period"))
+
     def optuna_period_optimize(
         self,
         dk: FreqaiDataKitchen,
@@ -371,10 +390,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             direction=optuna.study.StudyDirection.MINIMIZE,
             storage=storage,
         )
-        if self.optuna_study_has_best_params(previous_study):
-            study.enqueue_trial(previous_study.best_params)
-        elif self.__optuna_period_params.get(dk.pair):
-            study.enqueue_trial(self.__optuna_period_params[dk.pair])
+        self.optuna_period_enqueue_previous_best_trial(dk, study, previous_study)
         start = time.time()
         try:
             study.optimize(
@@ -402,17 +418,35 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         time_spent = time.time() - start
         logger.info(f"Optuna period hyperopt done ({time_spent:.2f} secs)")
 
-        period_best_params_path = Path(
-            dk.full_path / f"{dk.pair.split('/')[0]}_optuna_period_best_params.json"
-        )
-        with period_best_params_path.open("w", encoding="utf-8") as write_file:
-            json.dump(study.best_params, write_file, indent=4)
         params = study.best_params
+        self.optuna_save_best_params(dk, "period", params)
         # log params
         for key, value in {"rmse": study.best_value, **params}.items():
             logger.info(f"Optuna period hyperopt | {key:>20s} : {value}")
         return params, study.best_value
 
+    def optuna_save_best_params(
+        self, dk: FreqaiDataKitchen, namespace: str, best_params: dict
+    ) -> None:
+        best_params_path = Path(
+            dk.full_path
+            / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+        )
+        with best_params_path.open("w", encoding="utf-8") as write_file:
+            json.dump(best_params, write_file, indent=4)
+
+    def optuna_load_best_params(
+        self, dk: FreqaiDataKitchen, namespace: str
+    ) -> dict | None:
+        best_params_path = Path(
+            dk.full_path
+            / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+        )
+        if best_params_path.is_file():
+            with best_params_path.open("r", encoding="utf-8") as read_file:
+                return json.load(read_file)
+        return None
+
     def optuna_study_load_and_cleanup(
         self, study_name: str, storage
     ) -> optuna.study.Study | None:
index 6af91384bd1dbba7708e8845342d4d3a1917d2da..6361907e0d76e8210a5fb4b7c98f1cb0fcf99e2b 100644 (file)
@@ -225,9 +225,9 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff
 
         dk.data["extra_returns_per_train"]["label_period_candles"] = (
-            self.__optuna_period_params.get(
-                pair, {}
-            ).get("label_period_candles", self.ft_params["label_period_candles"])
+            self.__optuna_period_params.get(pair, {}).get(
+                "label_period_candles", self.ft_params["label_period_candles"]
+            )
         )
         dk.data["extra_returns_per_train"]["hp_rmse"] = self.__optuna_hp_rmse.get(
             pair, {}
@@ -284,6 +284,19 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
                 pred_df, fit_live_predictions_candles, label_period_candles
             )
 
+    def optuna_hp_enqueue_previous_best_trial(
+        self,
+        dk: FreqaiDataKitchen,
+        study: optuna.study.Study,
+        previous_study: optuna.study.Study,
+    ) -> None:
+        if self.optuna_study_has_best_params(previous_study):
+            study.enqueue_trial(previous_study.best_params)
+        elif self.__optuna_hp_params.get(dk.pair):
+            study.enqueue_trial(self.__optuna_hp_params[dk.pair])
+        elif self.optuna_load_best_params(dk, "hp"):
+            study.enqueue_trial(self.optuna_load_best_params(dk, "hp"))
+
     def optuna_hp_optimize(
         self,
         dk: FreqaiDataKitchen,
@@ -308,10 +321,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             direction=optuna.study.StudyDirection.MINIMIZE,
             storage=storage,
         )
-        if self.optuna_study_has_best_params(previous_study):
-            study.enqueue_trial(previous_study.best_params)
-        elif self.__optuna_hp_params.get(dk.pair):
-            study.enqueue_trial(self.__optuna_hp_params[dk.pair])
+        self.optuna_hp_enqueue_previous_best_trial(dk, study, previous_study)
         start = time.time()
         try:
             study.optimize(
@@ -336,17 +346,26 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         time_spent = time.time() - start
         logger.info(f"Optuna hp hyperopt done ({time_spent:.2f} secs)")
 
-        hp_best_params_path = Path(
-            dk.full_path / f"{dk.pair.split('/')[0]}_optuna_hp_best_params.json"
-        )
-        with hp_best_params_path.open("w", encoding="utf-8") as write_file:
-            json.dump(study.best_params, write_file, indent=4)
         params = study.best_params
+        self.optuna_save_best_params(dk, "hp", params)
         # log params
         for key, value in {"rmse": study.best_value, **params}.items():
             logger.info(f"Optuna hp hyperopt | {key:>20s} : {value}")
         return params, study.best_value
 
+    def optuna_period_enqueue_previous_best_trial(
+        self,
+        dk: FreqaiDataKitchen,
+        study: optuna.study.Study,
+        previous_study: optuna.study.Study,
+    ) -> None:
+        if self.optuna_study_has_best_params(previous_study):
+            study.enqueue_trial(previous_study.best_params)
+        elif self.__optuna_period_params.get(dk.pair):
+            study.enqueue_trial(self.__optuna_period_params[dk.pair])
+        elif self.optuna_load_best_params(dk, "period"):
+            study.enqueue_trial(self.optuna_load_best_params(dk, "period"))
+
     def optuna_period_optimize(
         self,
         dk: FreqaiDataKitchen,
@@ -372,10 +391,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             direction=optuna.study.StudyDirection.MINIMIZE,
             storage=storage,
         )
-        if self.optuna_study_has_best_params(previous_study):
-            study.enqueue_trial(previous_study.best_params)
-        elif self.__optuna_period_params.get(dk.pair):
-            study.enqueue_trial(self.__optuna_period_params[dk.pair])
+        self.optuna_period_enqueue_previous_best_trial(dk, study, previous_study)
         start = time.time()
         try:
             study.optimize(
@@ -403,17 +419,35 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         time_spent = time.time() - start
         logger.info(f"Optuna period hyperopt done ({time_spent:.2f} secs)")
 
-        period_best_params_path = Path(
-            dk.full_path / f"{dk.pair.split('/')[0]}_optuna_period_best_params.json"
-        )
-        with period_best_params_path.open("w", encoding="utf-8") as write_file:
-            json.dump(study.best_params, write_file, indent=4)
         params = study.best_params
+        self.optuna_save_best_params(dk, "period", params)
         # log params
         for key, value in {"rmse": study.best_value, **params}.items():
             logger.info(f"Optuna period hyperopt | {key:>20s} : {value}")
         return params, study.best_value
 
+    def optuna_save_best_params(
+        self, dk: FreqaiDataKitchen, namespace: str, best_params: dict
+    ) -> None:
+        best_params_path = Path(
+            dk.full_path
+            / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+        )
+        with best_params_path.open("w", encoding="utf-8") as write_file:
+            json.dump(best_params, write_file, indent=4)
+
+    def optuna_load_best_params(
+        self, dk: FreqaiDataKitchen, namespace: str
+    ) -> dict | None:
+        best_params_path = Path(
+            dk.full_path
+            / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+        )
+        if best_params_path.is_file():
+            with best_params_path.open("r", encoding="utf-8") as read_file:
+                return json.load(read_file)
+        return None
+
     def optuna_study_load_and_cleanup(
         self, study_name: str, storage
     ) -> optuna.study.Study | None: