From: Jérôme Benoit
Date: Mon, 17 Feb 2025 20:13:29 +0000 (+0100)
Subject: refactor(reforcexy): display optuna optimization runtime
X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=5b7accb51a4576fcd46e47468985e96748ecaf2f;p=freqai-strategies.git

refactor(reforcexy): display optuna optimization runtime

Signed-off-by: Jérôme Benoit
---

diff --git a/ReforceXY/user_data/freqaimodels/ReforceXY.py b/ReforceXY/user_data/freqaimodels/ReforceXY.py
index 5536de1..c0b9aee 100644
--- a/ReforceXY/user_data/freqaimodels/ReforceXY.py
+++ b/ReforceXY/user_data/freqaimodels/ReforceXY.py
@@ -3,6 +3,7 @@ import gc
 import json
 import logging
 import warnings
+import time
 from enum import Enum
 from pathlib import Path
 from typing import Any, Callable, Dict, Optional, Type, Tuple
@@ -474,6 +475,7 @@ class ReforceXY(BaseReinforcementLearningModel):
             storage=storage,
             load_if_exists=True,
         )
+        start = time.time()
         try:
             study.optimize(
                 lambda trial: self.objective(trial, train_df, total_timesteps, dk),
@@ -489,8 +491,13 @@ class ReforceXY(BaseReinforcementLearningModel):
             )
         except KeyboardInterrupt:
             pass
+        time_spent = time.time() - start
 
-        logger.info("------------ Hyperopt results %s ------------", dk.pair)
+        logger.info(
+            "------------ Hyperopt results %s (%.2f secs) ------------",
+            dk.pair,
+            time_spent,
+        )
         logger.info(
             "Best trial: %s. Score: %s", study.best_trial.number, study.best_trial.value
         )
@@ -498,7 +505,7 @@ class ReforceXY(BaseReinforcementLearningModel):
             "Best trial params: %s",
             self.optuna_trial_params[dk.pair][study.best_trial.number],
         )
-        logger.info("---------------------------------------------")
+        logger.info("-------------------------------------------------------")
 
         best_params_path = Path(
             dk.full_path / f"{dk.pair.split('/')[0]}_hyperopt_best_params.json"
diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index 8e71c3c..e6b18ab 100644
--- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
@@ -224,9 +224,9 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff
 
         dk.data["extra_returns_per_train"]["label_period_candles"] = (
-            self.__optuna_period_params.get(pair, {}).get(
-                "label_period_candles", self.ft_params["label_period_candles"]
-            )
+            self.__optuna_period_params.get(
+                pair, {}
+            ).get("label_period_candles", self.ft_params["label_period_candles"])
         )
         dk.data["extra_returns_per_train"]["hp_rmse"] = self.__optuna_hp_rmse.get(
             pair, {}
@@ -417,14 +417,14 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         self, study_name: str, storage
     ) -> optuna.study.Study | None:
         try:
-            previous_study = optuna.load_study(study_name=study_name, storage=storage)
+            study = optuna.load_study(study_name=study_name, storage=storage)
         except Exception:
-            previous_study = None
+            study = None
         try:
             optuna.delete_study(study_name=study_name, storage=storage)
         except Exception:
             pass
-        return previous_study
+        return study
 
     def optuna_study_has_best_params(self, study: optuna.study.Study | None) -> bool:
         if not study:
diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index 65e1ffe..6af9138 100644
--- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
@@ -225,9 +225,9 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff
 
         dk.data["extra_returns_per_train"]["label_period_candles"] = (
-            self.__optuna_period_params.get(pair, {}).get(
-                "label_period_candles", self.ft_params["label_period_candles"]
-            )
+            self.__optuna_period_params.get(
+                pair, {}
+            ).get("label_period_candles", self.ft_params["label_period_candles"])
        )
         dk.data["extra_returns_per_train"]["hp_rmse"] = self.__optuna_hp_rmse.get(
             pair, {}
@@ -418,14 +418,14 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         self, study_name: str, storage
     ) -> optuna.study.Study | None:
         try:
-            previous_study = optuna.load_study(study_name=study_name, storage=storage)
+            study = optuna.load_study(study_name=study_name, storage=storage)
         except Exception:
-            previous_study = None
+            study = None
         try:
             optuna.delete_study(study_name=study_name, storage=storage)
         except Exception:
             pass
-        return previous_study
+        return study
 
     def optuna_study_has_best_params(self, study: optuna.study.Study | None) -> bool:
         if not study:
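
Note (not part of the patch): below is a minimal standalone sketch of the timing pattern the ReforceXY hunks introduce, wrapping study.optimize() in time.time() calls and logging the elapsed seconds even when the run is stopped with Ctrl-C. The study name and the toy objective are illustrative assumptions, not code from this repository.

import logging
import time

import optuna

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def objective(trial: optuna.Trial) -> float:
    # Hypothetical toy objective: minimize (x - 2)^2.
    x = trial.suggest_float("x", -10.0, 10.0)
    return (x - 2.0) ** 2


study = optuna.create_study(direction="minimize", study_name="example-study")
start = time.time()
try:
    study.optimize(objective, n_trials=20)
except KeyboardInterrupt:
    # As in the patch, an interrupted run still reports the time spent so far.
    pass
time_spent = time.time() - start

logger.info(
    "------------ Hyperopt results %s (%.2f secs) ------------",
    study.study_name,
    time_spent,
)
logger.info(
    "Best trial: %s. Score: %s", study.best_trial.number, study.best_trial.value
)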