From: Jérôme Benoit
Date: Thu, 20 Feb 2025 18:21:37 +0000 (+0100)
Subject: refactor(qav3): refine typing
X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=0d971a4c575975ebf4e97ea09b7206518466d154;p=freqai-strategies.git

refactor(qav3): refine typing

Signed-off-by: Jérôme Benoit
---
diff --git a/ReforceXY/user_data/config-template.json b/ReforceXY/user_data/config-template.json
index 6ca8c08..ab66345 100644
--- a/ReforceXY/user_data/config-template.json
+++ b/ReforceXY/user_data/config-template.json
@@ -151,7 +151,7 @@
             "shuffle": false
         },
         "model_training_parameters": {
-            // "device": "cuda",
+            "device": "auto",
             "verbose": 1
         },
         "rl_config": {
diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index a5e12c7..17c90c3 100644
--- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
@@ -52,10 +52,10 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             and self.__optuna_config.get("enabled", False)
             and self.data_split_parameters.get("test_size", TEST_SIZE) > 0
         )
-        self.__optuna_hp_rmse: dict[str, float] = {}
-        self.__optuna_period_rmse: dict[str, float] = {}
-        self.__optuna_hp_params: dict[str, dict] = {}
-        self.__optuna_period_params: dict[str, dict] = {}
+        self.__optuna_hp_rmse: Dict[str, float] = {}
+        self.__optuna_period_rmse: Dict[str, float] = {}
+        self.__optuna_hp_params: Dict[str, Dict] = {}
+        self.__optuna_period_params: Dict[str, Dict] = {}
         for pair in self.pairs:
             self.__optuna_hp_rmse[pair] = -1
             self.__optuna_period_rmse[pair] = -1
@@ -299,7 +299,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         X_test,
         y_test,
         test_weights,
-    ) -> tuple[dict | None, float | None]:
+    ) -> tuple[Dict | None, float | None]:
         study_name = f"hp-{dk.pair}"
         storage = self.optuna_storage(dk)
         pruner = optuna.pruners.HyperbandPruner()
@@ -370,7 +370,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
         model_training_parameters,
-    ) -> tuple[dict | None, float | None]:
+    ) -> tuple[Dict | None, float | None]:
         study_name = f"period-{dk.pair}"
         storage = self.optuna_storage(dk)
         pruner = optuna.pruners.HyperbandPruner()
@@ -422,7 +422,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         return params, study.best_value

     def optuna_save_best_params(
-        self, dk: FreqaiDataKitchen, namespace: str, best_params: dict
+        self, dk: FreqaiDataKitchen, namespace: str, best_params: Dict
     ) -> None:
         best_params_path = Path(
             dk.full_path
@@ -433,7 +433,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):

     def optuna_load_best_params(
         self, dk: FreqaiDataKitchen, namespace: str
-    ) -> dict | None:
+    ) -> Dict | None:
         best_params_path = Path(
             dk.full_path
             / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index 924790f..90011db 100644
--- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
@@ -52,10 +52,10 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             and self.__optuna_config.get("enabled", False)
             and self.data_split_parameters.get("test_size", TEST_SIZE) > 0
         )
-        self.__optuna_hp_rmse: dict[str, float] = {}
-        self.__optuna_period_rmse: dict[str, float] = {}
-        self.__optuna_hp_params: dict[str, dict] = {}
-        self.__optuna_period_params: dict[str, dict] = {}
+        self.__optuna_hp_rmse: Dict[str, float] = {}
+        self.__optuna_period_rmse: Dict[str, float] = {}
+        self.__optuna_hp_params: Dict[str, Dict] = {}
+        self.__optuna_period_params: Dict[str, Dict] = {}
         for pair in self.pairs:
             self.__optuna_hp_rmse[pair] = -1
             self.__optuna_period_rmse[pair] = -1
@@ -300,7 +300,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         X_test,
         y_test,
         test_weights,
-    ) -> tuple[dict | None, float | None]:
+    ) -> tuple[Dict | None, float | None]:
         study_name = f"hp-{dk.pair}"
         storage = self.optuna_storage(dk)
         pruner = optuna.pruners.HyperbandPruner()
@@ -371,7 +371,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
         model_training_parameters,
-    ) -> tuple[dict | None, float | None]:
+    ) -> tuple[Dict | None, float | None]:
         study_name = f"period-{dk.pair}"
         storage = self.optuna_storage(dk)
         pruner = optuna.pruners.HyperbandPruner()
@@ -423,7 +423,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         return params, study.best_value

     def optuna_save_best_params(
-        self, dk: FreqaiDataKitchen, namespace: str, best_params: dict
+        self, dk: FreqaiDataKitchen, namespace: str, best_params: Dict
     ) -> None:
         best_params_path = Path(
             dk.full_path
@@ -434,7 +434,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):

     def optuna_load_best_params(
         self, dk: FreqaiDataKitchen, namespace: str
-    ) -> dict | None:
+    ) -> Dict | None:
         best_params_path = Path(
             dk.full_path
             / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
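Note on the typing change above: `Dict[...]` (unlike the built-in `dict[...]` generics it replaces) only resolves if `Dict` is imported from `typing`, and the diff does not touch the import block of either QuickAdapterV35 module, so the sketch below assumes that import is already present there. The `Dict | None` union syntax kept from the diff additionally assumes Python 3.10+ (or postponed annotation evaluation). The sketch uses illustrative module-level names standing in for the name-mangled instance attributes in the models.

```python
from typing import Dict

# Stand-ins for the per-pair Optuna state kept by the QuickAdapterV35 models
# (the real code uses instance attributes such as self.__optuna_hp_params).
optuna_hp_rmse: Dict[str, float] = {}
optuna_hp_params: Dict[str, Dict] = {}


def optuna_load_best_params(pair: str) -> Dict | None:
    # Return the cached best hyperparameters for the pair, or None when absent.
    return optuna_hp_params.get(pair)
```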