Piment Noir Git Repositories - freqai-strategies.git/commitdiff
refactor!: align file namespace
author: Jérôme Benoit <jerome.benoit@piment-noir.org>
Fri, 21 Feb 2025 17:03:59 +0000 (18:03 +0100)
committer: Jérôme Benoit <jerome.benoit@piment-noir.org>
Fri, 21 Feb 2025 17:03:59 +0000 (18:03 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
ReforceXY/user_data/freqaimodels/ReforceXY.py
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py

index acd1d477aa017ac5714b0ee2741f6fa327a0170d..0d063da392a7560579cdac6be90fe478179ae124 100644 (file)
@@ -339,15 +339,17 @@ class ReforceXY(BaseReinforcementLearningModel):
         logger.info("%s params: %s", self.model_type, model_params)
 
         if self.activate_tensorboard:
-            tb_path = Path(self.full_path / "tensorboard" / dk.pair.split("/")[0])
+            tensorboard_log_path = Path(
+                self.full_path / "tensorboard" / dk.pair.split("/")[0]
+            )
         else:
-            tb_path = None
+            tensorboard_log_path = None
 
         if dk.pair not in self.dd.model_dictionary or not self.continual_learning:
             model = self.MODELCLASS(
                 self.policy_type,
                 self.train_env,
-                tensorboard_log=tb_path,
+                tensorboard_log=tensorboard_log_path,
                 **model_params,
             )
         else:
@@ -523,7 +525,7 @@ class ReforceXY(BaseReinforcementLearningModel):
         Save the best hyperparameters found during hyperparameter optimization
         """
         best_params_path = Path(
-            self.full_path / f"{pair.split('/')[0]}_hyperopt_best_params.json"
+            self.full_path / f"hyperopt-best-params-{pair.split('/')[0]}.json"
         )
         logger.info(f"{pair}: saving best params to %s JSON file", best_params_path)
         with best_params_path.open("w", encoding="utf-8") as write_file:
@@ -534,7 +536,7 @@ class ReforceXY(BaseReinforcementLearningModel):
         Load the best hyperparameters found and saved during hyperparameter optimization
         """
         best_params_path = Path(
-            self.full_path / f"{pair.split('/')[0]}_hyperopt_best_params.json"
+            self.full_path / f"hyperopt-best-params-{pair.split('/')[0]}.json"
         )
         if best_params_path.is_file():
             logger.info(
@@ -563,9 +565,13 @@ class ReforceXY(BaseReinforcementLearningModel):
         params = {**self.model_training_parameters, **params}
 
         nan_encountered = False
-        tensorboard_log_path = Path(
-            self.full_path / "tensorboard" / dk.pair.split("/")[0]
-        )
+
+        if self.activate_tensorboard:
+            tensorboard_log_path = Path(
+                self.full_path / "tensorboard" / dk.pair.split("/")[0]
+            )
+        else:
+            tensorboard_log_path = None
 
         logger.info(
             "------------ Hyperopt trial %d %s ------------", trial.number, dk.pair
index b1f316cbb0739ceb051393954e90b5f80740ed5e..f213deca48e35bfdb1e7fa695691bc25b7d3499a 100644 (file)
@@ -430,14 +430,14 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         self, pair: str, namespace: str, best_params: Dict
     ) -> None:
         best_params_path = Path(
-            self.full_path / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+            self.full_path / f"optuna-{namespace}-best-params-{pair.split('/')[0]}.json"
         )
         with best_params_path.open("w", encoding="utf-8") as write_file:
             json.dump(best_params, write_file, indent=4)
 
     def optuna_load_best_params(self, pair: str, namespace: str) -> Dict | None:
         best_params_path = Path(
-            self.full_path / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+            self.full_path / f"optuna-{namespace}-best-params-{pair.split('/')[0]}.json"
         )
         if best_params_path.is_file():
             with best_params_path.open("r", encoding="utf-8") as read_file:
index caff5fc2fd2f611e8bf37a29394344441799cc02..8e4a3a209a520be49891a17c9e8c7bacf4320ef2 100644 (file)
@@ -431,14 +431,14 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         self, pair: str, namespace: str, best_params: Dict
     ) -> None:
         best_params_path = Path(
-            self.full_path / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+            self.full_path / f"optuna-{namespace}-best-params-{pair.split('/')[0]}.json"
         )
         with best_params_path.open("w", encoding="utf-8") as write_file:
             json.dump(best_params, write_file, indent=4)
 
     def optuna_load_best_params(self, pair: str, namespace: str) -> Dict | None:
         best_params_path = Path(
-            self.full_path / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+            self.full_path / f"optuna-{namespace}-best-params-{pair.split('/')[0]}.json"
         )
         if best_params_path.is_file():
             with best_params_path.open("r", encoding="utf-8") as read_file: