From 6ddf6cc8b4da631d058e164ef68c19a10482d390 Mon Sep 17 00:00:00 2001
From: =?utf8?q?J=C3=A9r=C3=B4me=20Benoit?=
Date: Thu, 20 Feb 2025 22:54:38 +0100
Subject: [PATCH] feat(qav3): make hyperopt params persistent
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

Signed-off-by: Jérôme Benoit
---
 ReforceXY/user_data/freqaimodels/ReforceXY.py |  31 +++---
 .../LightGBMRegressorQuickAdapterV35.py       |  89 ++++++++++---------
 .../XGBoostRegressorQuickAdapterV35.py        |  89 ++++++++++---------
 3 files changed, 106 insertions(+), 103 deletions(-)

diff --git a/ReforceXY/user_data/freqaimodels/ReforceXY.py b/ReforceXY/user_data/freqaimodels/ReforceXY.py
index 65a5d30..d12c85a 100644
--- a/ReforceXY/user_data/freqaimodels/ReforceXY.py
+++ b/ReforceXY/user_data/freqaimodels/ReforceXY.py
@@ -20,7 +20,7 @@ from optuna.exceptions import ExperimentalWarning
 from optuna.pruners import HyperbandPruner
 from optuna.samplers import TPESampler
 from optuna.study import Study, StudyDirection
-from optuna.storages import JournalStorage
+from optuna.storages import JournalStorage, BaseStorage
 from optuna.storages.journal import JournalFileBackend
 from pandas import DataFrame, concat, merge
 from sb3_contrib.common.maskable.callbacks import MaskableEvalCallback
@@ -99,6 +99,7 @@ class ReforceXY(BaseReinforcementLearningModel):
 
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
+        self.pairs = self.config.get("exchange", {}).get("pair_whitelist")
         self.is_maskable: bool = (
             self.model_type == "MaskablePPO"
         )  # Enable action masking
@@ -125,6 +126,8 @@ class ReforceXY(BaseReinforcementLearningModel):
             "n_startup_trials", 10
         )
         self.optuna_trial_params: Dict[str, list] = {}
+        for pair in self.pairs:
+            self.optuna_trial_params[pair] = []
         self.optuna_callback: Optional[MaskableTrialEvalCallback] = None
         self.unset_unsupported()
 
@@ -339,7 +342,7 @@ class ReforceXY(BaseReinforcementLearningModel):
         logger.info("%s params: %s", self.model_type, model_params)
 
         if self.activate_tensorboard:
-            tb_path = Path(dk.full_path / "tensorboard" / dk.pair.split("/")[0])
+            tb_path = Path(self.full_path / "tensorboard" / dk.pair.split("/")[0])
         else:
             tb_path = None
 
@@ -450,17 +453,17 @@ class ReforceXY(BaseReinforcementLearningModel):
         output = output.rolling(window=self.CONV_WIDTH).apply(_predict)
         return output
 
-    def get_storage(self, dk: FreqaiDataKitchen):
+    def get_storage(self, pair: str) -> BaseStorage:
         """
         Get the storage for Optuna
         """
-        storage_dir = str(dk.full_path)
+        storage_dir = str(self.full_path)
         storage_backend = self.rl_config_optuna.get("storage", "sqlite")
         if storage_backend == "sqlite":
-            storage = f"sqlite:///{storage_dir}/optuna-{dk.pair.split('/')[0]}.sqlite"
+            storage = f"sqlite:///{storage_dir}/optuna-{pair.split('/')[0]}.sqlite"
         elif storage_backend == "file":
             storage = JournalStorage(
-                JournalFileBackend(f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.log")
+                JournalFileBackend(f"{storage_dir}/optuna-{pair.split('/')[0]}.log")
             )
         return storage
 
@@ -470,7 +473,7 @@ class ReforceXY(BaseReinforcementLearningModel):
         returns the best hyperparameters found
         """
         study_name = str(dk.pair)
-        storage = self.get_storage(dk)
+        storage = self.get_storage(dk.pair)
         study: Study = create_study(
             study_name=study_name,
             sampler=TPESampler(
@@ -517,27 +520,27 @@ class ReforceXY(BaseReinforcementLearningModel):
         )
         logger.info("-------------------------------------------------------")
 
-        self.save_best_params(dk, study.best_trial.params)
+        self.save_best_params(dk.pair, study.best_trial.params)
 
         return self.optuna_trial_params[dk.pair][study.best_trial.number]
 
-    def save_best_params(self, dk: FreqaiDataKitchen, best_params: Dict) -> None:
+    def save_best_params(self, pair: str, best_params: Dict) -> None:
         """
         Save the best hyperparameters found during hyperparameter optimization
         """
         best_params_path = Path(
-            dk.full_path / f"{dk.pair.split('/')[0]}_hyperopt_best_params.json"
+            self.full_path / f"{pair.split('/')[0]}_hyperopt_best_params.json"
         )
         logger.info("saving to %s JSON file", best_params_path)
         with best_params_path.open("w", encoding="utf-8") as write_file:
             json.dump(best_params, write_file, indent=4)
 
-    def load_best_params(self, dk: FreqaiDataKitchen) -> Dict | None:
+    def load_best_params(self, pair: str) -> Dict | None:
         """
         Load the best hyperparameters found and saved during hyperparameter optimization
         """
         best_params_path = Path(
-            dk.full_path / f"{dk.pair.split('/')[0]}_hyperopt_best_params.json"
+            self.full_path / f"{pair.split('/')[0]}_hyperopt_best_params.json"
         )
         if best_params_path.is_file():
             logger.info("loading from %s JSON file", best_params_path)
@@ -565,15 +568,13 @@ class ReforceXY(BaseReinforcementLearningModel):
         nan_encountered = False
 
         tensorboard_log_path = Path(
-            dk.full_path / "tensorboard" / dk.pair.split("/")[0]
+            self.full_path / "tensorboard" / dk.pair.split("/")[0]
         )
         logger.info(
             "------------ Hyperopt trial %d %s ------------", trial.number, dk.pair
         )
         logger.info("Trial %s params: %s", trial.number, params)
 
-        if dk.pair not in self.optuna_trial_params:
-            self.optuna_trial_params[dk.pair] = []
         self.optuna_trial_params[dk.pair].append(params)
 
         model = self.MODELCLASS(
diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index 17c90c3..5d909b3 100644
--- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
@@ -59,8 +59,18 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         for pair in self.pairs:
             self.__optuna_hp_rmse[pair] = -1
             self.__optuna_period_rmse[pair] = -1
-            self.__optuna_hp_params[pair] = {}
-            self.__optuna_period_params[pair] = {}
+            self.__optuna_hp_params[pair] = (
+                self.optuna_load_best_params(pair, "hp") or {}
+            )
+            self.__optuna_period_params[pair] = (
+                self.optuna_load_best_params(pair, "period") or {}
+            )
+            self.freqai_info["feature_parameters"][pair] = {}
+            self.freqai_info["feature_parameters"][pair]["label_period_candles"] = (
+                self.__optuna_period_params[pair].get(
+                    "label_period_candles", self.ft_params["label_period_candles"]
+                )
+            )
 
     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
@@ -84,7 +94,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         start = time.time()
         if self.__optuna_hyperopt:
             optuna_hp_params, optuna_hp_rmse = self.optuna_hp_optimize(
-                dk, X, y, train_weights, X_test, y_test, test_weights
+                dk.pair, X, y, train_weights, X_test, y_test, test_weights
             )
             if optuna_hp_params:
                 self.__optuna_hp_params[dk.pair] = optuna_hp_params
@@ -96,7 +106,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
                 self.__optuna_hp_rmse[dk.pair] = optuna_hp_rmse
 
             optuna_period_params, optuna_period_rmse = self.optuna_period_optimize(
-                dk,
+                dk.pair,
                 X,
                 y,
                 train_weights,
@@ -126,8 +136,6 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             test_weights = test_weights[-test_window:]
 
         # FIXME: find a better way to propagate optuna computed params to strategy
-        if dk.pair not in self.freqai_info["feature_parameters"]:
-            self.freqai_info["feature_parameters"][dk.pair] = {}
         self.freqai_info["feature_parameters"][dk.pair][
             "label_period_candles"
         ] = self.__optuna_period_params[dk.pair].get("label_period_candles")
@@ -176,12 +184,9 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             dk.data["extra_returns_per_train"][MINIMA_THRESHOLD_COLUMN] = -2
             dk.data["extra_returns_per_train"][MAXIMA_THRESHOLD_COLUMN] = 2
         else:
-            if self.__optuna_hyperopt:
-                label_period_candles = self.__optuna_period_params.get(pair, {}).get(
-                    "label_period_candles", self.ft_params["label_period_candles"]
-                )
-            else:
-                label_period_candles = self.ft_params["label_period_candles"]
+            label_period_candles = self.__optuna_period_params.get(pair, {}).get(
+                "label_period_candles", self.ft_params["label_period_candles"]
+            )
             min_pred, max_pred = self.min_max_pred(
                 pred_df_full,
                 num_candles,
@@ -242,15 +247,15 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
         return eval_set, eval_weights
 
-    def optuna_storage(self, dk: FreqaiDataKitchen):
-        storage_dir = str(dk.full_path)
+    def optuna_storage(self, pair: str) -> optuna.storages.BaseStorage:
+        storage_dir = str(self.full_path)
         storage_backend = self.__optuna_config.get("storage", "file")
         if storage_backend == "sqlite":
-            storage = f"sqlite:///{storage_dir}/optuna-{dk.pair.split('/')[0]}.sqlite"
+            storage = f"sqlite:///{storage_dir}/optuna-{pair.split('/')[0]}.sqlite"
         elif storage_backend == "file":
             storage = optuna.storages.JournalStorage(
                 optuna.storages.journal.JournalFileBackend(
-                    f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.log"
+                    f"{storage_dir}/optuna-{pair.split('/')[0]}.log"
                 )
             )
         return storage
@@ -279,20 +284,20 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
     def optuna_hp_enqueue_previous_best_trial(
         self,
-        dk: FreqaiDataKitchen,
+        pair: str,
         study: optuna.study.Study,
         previous_study: optuna.study.Study,
     ) -> None:
         if self.optuna_study_has_best_params(previous_study):
             study.enqueue_trial(previous_study.best_params)
-        elif self.__optuna_hp_params.get(dk.pair):
-            study.enqueue_trial(self.__optuna_hp_params[dk.pair])
-        elif self.optuna_load_best_params(dk, "hp"):
-            study.enqueue_trial(self.optuna_load_best_params(dk, "hp"))
+        elif self.__optuna_hp_params.get(pair):
+            study.enqueue_trial(self.__optuna_hp_params[pair])
+        elif self.optuna_load_best_params(pair, "hp"):
+            study.enqueue_trial(self.optuna_load_best_params(pair, "hp"))
 
     def optuna_hp_optimize(
         self,
-        dk: FreqaiDataKitchen,
+        pair: str,
         X,
         y,
         train_weights,
@@ -300,8 +305,8 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
     ) -> tuple[Dict | None, float | None]:
-        study_name = f"hp-{dk.pair}"
-        storage = self.optuna_storage(dk)
+        study_name = f"hp-{pair}"
+        storage = self.optuna_storage(pair)
         pruner = optuna.pruners.HyperbandPruner()
         previous_study = self.optuna_study_load(study_name, storage)
         self.optuna_study_delete(study_name, storage)
@@ -315,7 +320,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             direction=optuna.study.StudyDirection.MINIMIZE,
             storage=storage,
         )
-        self.optuna_hp_enqueue_previous_best_trial(dk, study, previous_study)
+        self.optuna_hp_enqueue_previous_best_trial(pair, study, previous_study)
         start = time.time()
         try:
             study.optimize(
@@ -341,7 +346,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         logger.info(f"Optuna hp hyperopt done ({time_spent:.2f} secs)")
 
         params = study.best_params
-        self.optuna_save_best_params(dk, "hp", params)
+        self.optuna_save_best_params(pair, "hp", params)
         # log params
         for key, value in {"rmse": study.best_value, **params}.items():
             logger.info(f"Optuna hp hyperopt | {key:>20s} : {value}")
@@ -349,20 +354,20 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
     def optuna_period_enqueue_previous_best_trial(
         self,
-        dk: FreqaiDataKitchen,
+        pair: str,
         study: optuna.study.Study,
         previous_study: optuna.study.Study,
     ) -> None:
         if self.optuna_study_has_best_params(previous_study):
             study.enqueue_trial(previous_study.best_params)
-        elif self.__optuna_period_params.get(dk.pair):
-            study.enqueue_trial(self.__optuna_period_params[dk.pair])
-        elif self.optuna_load_best_params(dk, "period"):
-            study.enqueue_trial(self.optuna_load_best_params(dk, "period"))
+        elif self.__optuna_period_params.get(pair):
+            study.enqueue_trial(self.__optuna_period_params[pair])
+        elif self.optuna_load_best_params(pair, "period"):
+            study.enqueue_trial(self.optuna_load_best_params(pair, "period"))
 
     def optuna_period_optimize(
         self,
-        dk: FreqaiDataKitchen,
+        pair: str,
         X,
         y,
         train_weights,
@@ -371,8 +376,8 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         test_weights,
         model_training_parameters,
     ) -> tuple[Dict | None, float | None]:
-        study_name = f"period-{dk.pair}"
-        storage = self.optuna_storage(dk)
+        study_name = f"period-{pair}"
+        storage = self.optuna_storage(pair)
         pruner = optuna.pruners.HyperbandPruner()
         previous_study = self.optuna_study_load(study_name, storage)
         self.optuna_study_delete(study_name, storage)
@@ -386,7 +391,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             direction=optuna.study.StudyDirection.MINIMIZE,
             storage=storage,
         )
-        self.optuna_period_enqueue_previous_best_trial(dk, study, previous_study)
+        self.optuna_period_enqueue_previous_best_trial(pair, study, previous_study)
         start = time.time()
         try:
             study.optimize(
@@ -415,28 +420,24 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         logger.info(f"Optuna period hyperopt done ({time_spent:.2f} secs)")
 
         params = study.best_params
-        self.optuna_save_best_params(dk, "period", params)
+        self.optuna_save_best_params(pair, "period", params)
         # log params
         for key, value in {"rmse": study.best_value, **params}.items():
             logger.info(f"Optuna period hyperopt | {key:>20s} : {value}")
         return params, study.best_value
 
     def optuna_save_best_params(
-        self, dk: FreqaiDataKitchen, namespace: str, best_params: Dict
+        self, pair: str, namespace: str, best_params: Dict
     ) -> None:
         best_params_path = Path(
-            dk.full_path
-            / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+            self.full_path / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
         )
         with best_params_path.open("w", encoding="utf-8") as write_file:
             json.dump(best_params, write_file, indent=4)
 
-    def optuna_load_best_params(
-        self, dk: FreqaiDataKitchen, namespace: str
-    ) -> Dict | None:
+    def optuna_load_best_params(self, pair: str, namespace: str) -> Dict | None:
         best_params_path = Path(
-            dk.full_path
-            / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+            self.full_path / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
         )
         if best_params_path.is_file():
             with best_params_path.open("r", encoding="utf-8") as read_file:
diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index 90011db..872bfa5 100644
--- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
@@ -59,8 +59,18 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         for pair in self.pairs:
             self.__optuna_hp_rmse[pair] = -1
             self.__optuna_period_rmse[pair] = -1
-            self.__optuna_hp_params[pair] = {}
-            self.__optuna_period_params[pair] = {}
+            self.__optuna_hp_params[pair] = (
+                self.optuna_load_best_params(pair, "hp") or {}
+            )
+            self.__optuna_period_params[pair] = (
+                self.optuna_load_best_params(pair, "period") or {}
+            )
+            self.freqai_info["feature_parameters"][pair] = {}
+            self.freqai_info["feature_parameters"][pair]["label_period_candles"] = (
+                self.__optuna_period_params[pair].get(
+                    "label_period_candles", self.ft_params["label_period_candles"]
+                )
+            )
 
     def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
@@ -84,7 +94,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         start = time.time()
         if self.__optuna_hyperopt:
             optuna_hp_params, optuna_hp_rmse = self.optuna_hp_optimize(
-                dk, X, y, train_weights, X_test, y_test, test_weights
+                dk.pair, X, y, train_weights, X_test, y_test, test_weights
             )
             if optuna_hp_params:
                 self.__optuna_hp_params[dk.pair] = optuna_hp_params
@@ -96,7 +106,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
                 self.__optuna_hp_rmse[dk.pair] = optuna_hp_rmse
 
             optuna_period_params, optuna_period_rmse = self.optuna_period_optimize(
-                dk,
+                dk.pair,
                 X,
                 y,
                 train_weights,
@@ -126,8 +136,6 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             test_weights = test_weights[-test_window:]
 
         # FIXME: find a better way to propagate optuna computed params to strategy
-        if dk.pair not in self.freqai_info["feature_parameters"]:
-            self.freqai_info["feature_parameters"][dk.pair] = {}
         self.freqai_info["feature_parameters"][dk.pair][
             "label_period_candles"
         ] = self.__optuna_period_params[dk.pair].get("label_period_candles")
@@ -177,12 +185,9 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             dk.data["extra_returns_per_train"][MINIMA_THRESHOLD_COLUMN] = -2
             dk.data["extra_returns_per_train"][MAXIMA_THRESHOLD_COLUMN] = 2
         else:
-            if self.__optuna_hyperopt:
-                label_period_candles = self.__optuna_period_params.get(pair, {}).get(
-                    "label_period_candles", self.ft_params["label_period_candles"]
-                )
-            else:
-                label_period_candles = self.ft_params["label_period_candles"]
+            label_period_candles = self.__optuna_period_params.get(pair, {}).get(
+                "label_period_candles", self.ft_params["label_period_candles"]
+            )
             min_pred, max_pred = self.min_max_pred(
                 pred_df_full,
                 num_candles,
@@ -243,15 +248,15 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
         return eval_set, eval_weights
 
-    def optuna_storage(self, dk: FreqaiDataKitchen):
-        storage_dir = str(dk.full_path)
+    def optuna_storage(self, pair: str) -> optuna.storages.BaseStorage:
+        storage_dir = str(self.full_path)
         storage_backend = self.__optuna_config.get("storage", "file")
         if storage_backend == "sqlite":
-            storage = f"sqlite:///{storage_dir}/optuna-{dk.pair.split('/')[0]}.sqlite"
+            storage = f"sqlite:///{storage_dir}/optuna-{pair.split('/')[0]}.sqlite"
         elif storage_backend == "file":
             storage = optuna.storages.JournalStorage(
                 optuna.storages.journal.JournalFileBackend(
-                    f"{storage_dir}/optuna-{dk.pair.split('/')[0]}.log"
+                    f"{storage_dir}/optuna-{pair.split('/')[0]}.log"
                 )
             )
         return storage
@@ -280,20 +285,20 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
     def optuna_hp_enqueue_previous_best_trial(
         self,
-        dk: FreqaiDataKitchen,
+        pair: str,
         study: optuna.study.Study,
         previous_study: optuna.study.Study,
     ) -> None:
         if self.optuna_study_has_best_params(previous_study):
             study.enqueue_trial(previous_study.best_params)
-        elif self.__optuna_hp_params.get(dk.pair):
-            study.enqueue_trial(self.__optuna_hp_params[dk.pair])
-        elif self.optuna_load_best_params(dk, "hp"):
-            study.enqueue_trial(self.optuna_load_best_params(dk, "hp"))
+        elif self.__optuna_hp_params.get(pair):
+            study.enqueue_trial(self.__optuna_hp_params[pair])
+        elif self.optuna_load_best_params(pair, "hp"):
+            study.enqueue_trial(self.optuna_load_best_params(pair, "hp"))
 
     def optuna_hp_optimize(
         self,
-        dk: FreqaiDataKitchen,
+        pair: str,
         X,
         y,
         train_weights,
@@ -301,8 +306,8 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
     ) -> tuple[Dict | None, float | None]:
-        study_name = f"hp-{dk.pair}"
-        storage = self.optuna_storage(dk)
+        study_name = f"hp-{pair}"
+        storage = self.optuna_storage(pair)
         pruner = optuna.pruners.HyperbandPruner()
         previous_study = self.optuna_study_load(study_name, storage)
         self.optuna_study_delete(study_name, storage)
@@ -316,7 +321,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             direction=optuna.study.StudyDirection.MINIMIZE,
             storage=storage,
         )
-        self.optuna_hp_enqueue_previous_best_trial(dk, study, previous_study)
+        self.optuna_hp_enqueue_previous_best_trial(pair, study, previous_study)
         start = time.time()
         try:
             study.optimize(
@@ -342,7 +347,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         logger.info(f"Optuna hp hyperopt done ({time_spent:.2f} secs)")
 
         params = study.best_params
-        self.optuna_save_best_params(dk, "hp", params)
+        self.optuna_save_best_params(pair, "hp", params)
         # log params
         for key, value in {"rmse": study.best_value, **params}.items():
             logger.info(f"Optuna hp hyperopt | {key:>20s} : {value}")
@@ -350,20 +355,20 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
     def optuna_period_enqueue_previous_best_trial(
         self,
-        dk: FreqaiDataKitchen,
+        pair: str,
         study: optuna.study.Study,
         previous_study: optuna.study.Study,
     ) -> None:
         if self.optuna_study_has_best_params(previous_study):
             study.enqueue_trial(previous_study.best_params)
-        elif self.__optuna_period_params.get(dk.pair):
-            study.enqueue_trial(self.__optuna_period_params[dk.pair])
-        elif self.optuna_load_best_params(dk, "period"):
-            study.enqueue_trial(self.optuna_load_best_params(dk, "period"))
+        elif self.__optuna_period_params.get(pair):
+            study.enqueue_trial(self.__optuna_period_params[pair])
+        elif self.optuna_load_best_params(pair, "period"):
+            study.enqueue_trial(self.optuna_load_best_params(pair, "period"))
 
     def optuna_period_optimize(
         self,
-        dk: FreqaiDataKitchen,
+        pair: str,
         X,
         y,
         train_weights,
@@ -372,8 +377,8 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         test_weights,
         model_training_parameters,
    ) -> tuple[Dict | None, float | None]:
-        study_name = f"period-{dk.pair}"
-        storage = self.optuna_storage(dk)
+        study_name = f"period-{pair}"
+        storage = self.optuna_storage(pair)
         pruner = optuna.pruners.HyperbandPruner()
         previous_study = self.optuna_study_load(study_name, storage)
         self.optuna_study_delete(study_name, storage)
@@ -387,7 +392,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             direction=optuna.study.StudyDirection.MINIMIZE,
             storage=storage,
         )
-        self.optuna_period_enqueue_previous_best_trial(dk, study, previous_study)
+        self.optuna_period_enqueue_previous_best_trial(pair, study, previous_study)
         start = time.time()
         try:
             study.optimize(
@@ -416,28 +421,24 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         logger.info(f"Optuna period hyperopt done ({time_spent:.2f} secs)")
 
         params = study.best_params
-        self.optuna_save_best_params(dk, "period", params)
+        self.optuna_save_best_params(pair, "period", params)
         # log params
         for key, value in {"rmse": study.best_value, **params}.items():
             logger.info(f"Optuna period hyperopt | {key:>20s} : {value}")
         return params, study.best_value
 
     def optuna_save_best_params(
-        self, dk: FreqaiDataKitchen, namespace: str, best_params: Dict
+        self, pair: str, namespace: str, best_params: Dict
    ) -> None:
         best_params_path = Path(
-            dk.full_path
-            / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+            self.full_path / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
         )
         with best_params_path.open("w", encoding="utf-8") as write_file:
             json.dump(best_params, write_file, indent=4)
 
-    def optuna_load_best_params(
-        self, dk: FreqaiDataKitchen, namespace: str
-    ) -> Dict | None:
+    def optuna_load_best_params(self, pair: str, namespace: str) -> Dict | None:
         best_params_path = Path(
-            dk.full_path
-            / f"{dk.pair.split('/')[0]}_optuna_{namespace}_best_params.json"
+            self.full_path / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
         )
         if best_params_path.is_file():
             with best_params_path.open("r", encoding="utf-8") as read_file:
-- 
2.43.0
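
For review context: the persistence this patch introduces boils down to a
per-pair, per-namespace JSON round-trip under the model directory, reloaded at
construction time so hyperopt results survive bot restarts. The sketch below
is a minimal standalone illustration of that pattern, not code from the patch;
`models_dir` is a temporary stand-in for FreqAI's `self.full_path`, and the
pair names are illustrative.

    import json
    from pathlib import Path
    from tempfile import mkdtemp
    from typing import Dict, Optional

    models_dir = Path(mkdtemp())  # stand-in for self.full_path (illustrative)

    def save_best_params(pair: str, namespace: str, best_params: Dict) -> None:
        # One file per pair and namespace, e.g. BTC_optuna_hp_best_params.json
        path = models_dir / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
        with path.open("w", encoding="utf-8") as write_file:
            json.dump(best_params, write_file, indent=4)

    def load_best_params(pair: str, namespace: str) -> Optional[Dict]:
        # Returns None when nothing has been persisted yet, so callers can
        # fall back to defaults with `load_best_params(...) or {}`.
        path = models_dir / f"{pair.split('/')[0]}_optuna_{namespace}_best_params.json"
        if path.is_file():
            with path.open("r", encoding="utf-8") as read_file:
                return json.load(read_file)
        return None

    save_best_params("BTC/USDT", "period", {"label_period_candles": 24})
    assert load_best_params("BTC/USDT", "period") == {"label_period_candles": 24}
    assert load_best_params("ETH/USDT", "period") is None  # nothing saved yet

Because the files are keyed by base currency and outlive the process, the
constructors can seed both the in-memory param caches and the Optuna study
queues (via `enqueue_trial`) from the last known best trial instead of
starting each hyperopt cold.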