Piment Noir Git Repositories - freqai-strategies.git/commitdiff
refactor(qav3): refine typing
authorJérôme Benoit <jerome.benoit@piment-noir.org>
Thu, 20 Feb 2025 18:21:37 +0000 (19:21 +0100)
committerJérôme Benoit <jerome.benoit@piment-noir.org>
Thu, 20 Feb 2025 18:21:37 +0000 (19:21 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
ReforceXY/user_data/config-template.json
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py

index 6ca8c0802043fb09ad4a64ea74bb24448deb1af7..ab6634552b8586a6fe1f0f9c45c39cddbe09cac0 100644 (file)
       "shuffle": false
     },
     "model_training_parameters": {
-      // "device": "cuda",
+      "device": "auto",
       "verbose": 1
     },
     "rl_config": {
index a5e12c798c11ef2ead5ea49220d53596cd310b0a..17c90c3eb7d4d41ab3eb7a317e5c58d292e7830b 100644 (file)
@@ -52,10 +52,10 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             and self.__optuna_config.get("enabled", False)
             and self.data_split_parameters.get("test_size", TEST_SIZE) > 0
         )
-        self.__optuna_hp_rmse: dict[str, float] = {}
-        self.__optuna_period_rmse: dict[str, float] = {}
-        self.__optuna_hp_params: dict[str, dict] = {}
-        self.__optuna_period_params: dict[str, dict] = {}
+        self.__optuna_hp_rmse: Dict[str, float] = {}
+        self.__optuna_period_rmse: Dict[str, float] = {}
+        self.__optuna_hp_params: Dict[str, Dict] = {}
+        self.__optuna_period_params: Dict[str, Dict] = {}
         for pair in self.pairs:
             self.__optuna_hp_rmse[pair] = -1
             self.__optuna_period_rmse[pair] = -1
@@ -299,7 +299,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         X_test,
         y_test,
         test_weights,
-    ) -> tuple[dict | None, float | None]:
+    ) -> tuple[Dict | None, float | None]:
         study_name = f"hp-{dk.pair}"
         storage = self.optuna_storage(dk)
         pruner = optuna.pruners.HyperbandPruner()
@@ -370,7 +370,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
         model_training_parameters,
-    ) -> tuple[dict | None, float | None]:
+    ) -> tuple[Dict | None, float | None]:
         study_name = f"period-{dk.pair}"
         storage = self.optuna_storage(dk)
         pruner = optuna.pruners.HyperbandPruner()
@@ -422,7 +422,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         return params, study.best_value
 
     def optuna_save_best_params(
-        self, dk: FreqaiDataKitchen, namespace: str, best_params: dict
+        self, dk: FreqaiDataKitchen, namespace: str, best_params: Dict
     ) -> None:
         best_params_path = Path(
             dk.full_path
@@ -433,7 +433,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
     def optuna_load_best_params(
         self, dk: FreqaiDataKitchen, namespace: str
-    ) -> dict | None:
+    ) -> Dict | None:
         best_params_path = Path(
             dk.full_path
             / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
index 924790fcb3edf9e5199c4074744a4638c00ee99d..90011db585bd17ba1d92f795c78d4c60b72b1c8b 100644 (file)
@@ -52,10 +52,10 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             and self.__optuna_config.get("enabled", False)
             and self.data_split_parameters.get("test_size", TEST_SIZE) > 0
         )
-        self.__optuna_hp_rmse: dict[str, float] = {}
-        self.__optuna_period_rmse: dict[str, float] = {}
-        self.__optuna_hp_params: dict[str, dict] = {}
-        self.__optuna_period_params: dict[str, dict] = {}
+        self.__optuna_hp_rmse: Dict[str, float] = {}
+        self.__optuna_period_rmse: Dict[str, float] = {}
+        self.__optuna_hp_params: Dict[str, Dict] = {}
+        self.__optuna_period_params: Dict[str, Dict] = {}
         for pair in self.pairs:
             self.__optuna_hp_rmse[pair] = -1
             self.__optuna_period_rmse[pair] = -1
@@ -300,7 +300,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         X_test,
         y_test,
         test_weights,
-    ) -> tuple[dict | None, float | None]:
+    ) -> tuple[Dict | None, float | None]:
         study_name = f"hp-{dk.pair}"
         storage = self.optuna_storage(dk)
         pruner = optuna.pruners.HyperbandPruner()
@@ -371,7 +371,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
         model_training_parameters,
-    ) -> tuple[dict | None, float | None]:
+    ) -> tuple[Dict | None, float | None]:
         study_name = f"period-{dk.pair}"
         storage = self.optuna_storage(dk)
         pruner = optuna.pruners.HyperbandPruner()
@@ -423,7 +423,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         return params, study.best_value
 
     def optuna_save_best_params(
-        self, dk: FreqaiDataKitchen, namespace: str, best_params: dict
+        self, dk: FreqaiDataKitchen, namespace: str, best_params: Dict
     ) -> None:
         best_params_path = Path(
             dk.full_path
@@ -434,7 +434,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
     def optuna_load_best_params(
         self, dk: FreqaiDataKitchen, namespace: str
-    ) -> dict | None:
+    ) -> Dict | None:
         best_params_path = Path(
             dk.full_path
             / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"