From 326ba7a45781c86b343ff9485c6c59d9026752ff Mon Sep 17 00:00:00 2001
From: =?utf8?q?J=C3=A9r=C3=B4me=20Benoit?=
Date: Wed, 19 Feb 2025 14:41:00 +0100
Subject: [PATCH] fix(qav3): fix optuna storage building
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

Signed-off-by: Jérôme Benoit
---
 .../LightGBMRegressorQuickAdapterV35.py | 19 ++++++-------------
 .../XGBoostRegressorQuickAdapterV35.py  | 18 +++++-------------
 2 files changed, 11 insertions(+), 26 deletions(-)

diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index 91d37ec..ccf5a2b 100644
--- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
@@ -224,9 +224,9 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff
 
         dk.data["extra_returns_per_train"]["label_period_candles"] = (
-            self.__optuna_period_params.get(pair, {}).get(
-                "label_period_candles", self.ft_params["label_period_candles"]
-            )
+            self.__optuna_period_params.get(
+                pair, {}
+            ).get("label_period_candles", self.ft_params["label_period_candles"])
         )
         dk.data["extra_returns_per_train"]["hp_rmse"] = self.__optuna_hp_rmse.get(
             pair, {}
@@ -249,14 +249,12 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         storage_dir = str(dk.full_path)
         storage_backend = self.__optuna_config.get("storage", "file")
         if storage_backend == "sqlite":
-            sqlite_path = sanitize_path(
-                f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.sqlite"
-            )
-            storage = f"sqlite:///{sqlite_path}"
+            storage = f"sqlite:///{storage_dir}/optuna-{dk.pair.split('/')[0]}.sqlite"
+            logging.info(f"Optuna storage: {storage}")
         elif storage_backend == "file":
             storage = optuna.storages.JournalStorage(
                 optuna.storages.journal.JournalFileBackend(
-                    sanitize_path(f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.log")
+                    f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.log"
                 )
             )
         return storage
@@ -622,11 +620,6 @@ def hp_objective(
     return error
 
 
-def sanitize_path(path: str) -> str:
-    allowed = re.compile(r"[^A-Za-z0-9 _\-\.\(\)]")
-    return allowed.sub("_", path)
-
-
 def smooth_max(series: pd.Series, beta=1.0) -> float:
     return np.log(np.sum(np.exp(beta * series))) / beta
 
diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index 6361907..c559cb8 100644
--- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
@@ -225,9 +225,9 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff
 
         dk.data["extra_returns_per_train"]["label_period_candles"] = (
-            self.__optuna_period_params.get(pair, {}).get(
-                "label_period_candles", self.ft_params["label_period_candles"]
-            )
+            self.__optuna_period_params.get(
+                pair, {}
+            ).get("label_period_candles", self.ft_params["label_period_candles"])
         )
         dk.data["extra_returns_per_train"]["hp_rmse"] = self.__optuna_hp_rmse.get(
             pair, {}
@@ -250,14 +250,11 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         storage_dir = str(dk.full_path)
         storage_backend = self.__optuna_config.get("storage", "file")
         if storage_backend == "sqlite":
-            sqlite_path = sanitize_path(
-                f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.sqlite"
-            )
-            storage = f"sqlite:///{sqlite_path}"
+            storage = f"sqlite:///{storage_dir}/optuna-{dk.pair.split('/')[0]}.sqlite"
         elif storage_backend == "file":
             storage = optuna.storages.JournalStorage(
                 optuna.storages.journal.JournalFileBackend(
-                    sanitize_path(f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.log")
+                    f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.log"
                 )
             )
         return storage
@@ -628,11 +625,6 @@ def hp_objective(
     return error
 
 
-def sanitize_path(path: str) -> str:
-    allowed = re.compile(r"[^A-Za-z0-9 _\-\.\(\)]")
-    return allowed.sub("_", path)
-
-
 def smooth_max(series: pd.Series, beta=1.0) -> float:
     return np.log(np.sum(np.exp(beta * series))) / beta
 
-- 
2.43.0