Piment Noir Git Repositories - freqai-strategies.git/commitdiff
refactor(reforcexy): display optuna optimization runtime
author Jérôme Benoit <jerome.benoit@sap.com>
Mon, 17 Feb 2025 20:13:29 +0000 (21:13 +0100)
committer Jérôme Benoit <jerome.benoit@sap.com>
Mon, 17 Feb 2025 20:13:29 +0000 (21:13 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@sap.com>
ReforceXY/user_data/freqaimodels/ReforceXY.py
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
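
For reference, the ReforceXY.py hunks below amount to wrapping the study.optimize() call with wall-clock timing and reporting the elapsed seconds in the results banner. A minimal standalone sketch of the same pattern, assuming a toy objective and study setup (both are illustrative placeholders, not the model's real per-trial RL training):

import logging
import time

import optuna

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def objective(trial: optuna.Trial) -> float:
    # Placeholder objective; ReforceXY's real one trains an RL model per trial.
    x = trial.suggest_float("x", -10.0, 10.0)
    return x * x


study = optuna.create_study(direction="minimize")
start = time.time()
try:
    study.optimize(objective, n_trials=20)
except KeyboardInterrupt:
    pass
time_spent = time.time() - start

logger.info("------------ Hyperopt results (%.2f secs) ------------", time_spent)
logger.info("Best trial: %s. Score: %s", study.best_trial.number, study.best_trial.value)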

ReforceXY/user_data/freqaimodels/ReforceXY.py
index 5536de14646fd84360b9af337375ade8d7c4db79..c0b9aee3127f4e9ec886d59810bec82151ee4dda 100644
@@ -3,6 +3,7 @@ import gc
 import json
 import logging
 import warnings
+import time
 from enum import Enum
 from pathlib import Path
 from typing import Any, Callable, Dict, Optional, Type, Tuple
@@ -474,6 +475,7 @@ class ReforceXY(BaseReinforcementLearningModel):
             storage=storage,
             load_if_exists=True,
         )
+        start = time.time()
         try:
             study.optimize(
                 lambda trial: self.objective(trial, train_df, total_timesteps, dk),
@@ -489,8 +491,13 @@ class ReforceXY(BaseReinforcementLearningModel):
             )
         except KeyboardInterrupt:
             pass
+        time_spent = time.time() - start
 
-        logger.info("------------ Hyperopt results %s ------------", dk.pair)
+        logger.info(
+            "------------ Hyperopt results %s (%.2f secs) ------------",
+            dk.pair,
+            time_spent,
+        )
         logger.info(
             "Best trial: %s. Score: %s", study.best_trial.number, study.best_trial.value
         )
@@ -498,7 +505,7 @@ class ReforceXY(BaseReinforcementLearningModel):
             "Best trial params: %s",
             self.optuna_trial_params[dk.pair][study.best_trial.number],
         )
-        logger.info("---------------------------------------------")
+        logger.info("-------------------------------------------------------")
 
         best_params_path = Path(
             dk.full_path / f"{dk.pair.split('/')[0]}_hyperopt_best_params.json"
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index 8e71c3c5d320698acc535c8d7e4fe10aff5cacc6..e6b18abbbd17fecbd286c1c2a5d419f90077ed68 100644
@@ -224,9 +224,9 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff
 
         dk.data["extra_returns_per_train"]["label_period_candles"] = (
-            self.__optuna_period_params.get(pair, {}).get(
-                "label_period_candles", self.ft_params["label_period_candles"]
-            )
+            self.__optuna_period_params.get(
+                pair, {}
+            ).get("label_period_candles", self.ft_params["label_period_candles"])
         )
         dk.data["extra_returns_per_train"]["hp_rmse"] = self.__optuna_hp_rmse.get(
             pair, {}
@@ -417,14 +417,14 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         self, study_name: str, storage
     ) -> optuna.study.Study | None:
         try:
-            previous_study = optuna.load_study(study_name=study_name, storage=storage)
+            study = optuna.load_study(study_name=study_name, storage=storage)
         except Exception:
-            previous_study = None
+            study = None
         try:
             optuna.delete_study(study_name=study_name, storage=storage)
         except Exception:
             pass
-        return previous_study
+        return study
 
     def optuna_study_has_best_params(self, study: optuna.study.Study | None) -> bool:
         if not study:
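
For context, the helper renamed in this hunk (and identically in the XGBoost variant below) loads the previous Optuna study, deletes it from storage, and returns whatever it managed to load (None if nothing was stored). A minimal standalone sketch of that load-then-delete pattern; the wrapper name, study name, and SQLite storage URL are illustrative placeholders:

import optuna


def load_and_delete_previous_study(
    study_name: str, storage: str
) -> optuna.study.Study | None:
    # Load the existing study if present in storage, otherwise fall back to None.
    try:
        study = optuna.load_study(study_name=study_name, storage=storage)
    except Exception:
        study = None
    # Drop it from storage so the next hyperopt run starts from a clean slate.
    try:
        optuna.delete_study(study_name=study_name, storage=storage)
    except Exception:
        pass
    return study


# Illustrative usage; names are placeholders.
previous_study = load_and_delete_previous_study("hp-BTC", "sqlite:///optuna.sqlite")
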
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index 65e1ffe91dba53736bcf3a689cbfef941fb22ecb..6af91384bd1dbba7708e8845342d4d3a1917d2da 100644
@@ -225,9 +225,9 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff
 
         dk.data["extra_returns_per_train"]["label_period_candles"] = (
-            self.__optuna_period_params.get(pair, {}).get(
-                "label_period_candles", self.ft_params["label_period_candles"]
-            )
+            self.__optuna_period_params.get(
+                pair, {}
+            ).get("label_period_candles", self.ft_params["label_period_candles"])
         )
         dk.data["extra_returns_per_train"]["hp_rmse"] = self.__optuna_hp_rmse.get(
             pair, {}
@@ -418,14 +418,14 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         self, study_name: str, storage
     ) -> optuna.study.Study | None:
         try:
-            previous_study = optuna.load_study(study_name=study_name, storage=storage)
+            study = optuna.load_study(study_name=study_name, storage=storage)
         except Exception:
-            previous_study = None
+            study = None
         try:
             optuna.delete_study(study_name=study_name, storage=storage)
         except Exception:
             pass
-        return previous_study
+        return study
 
     def optuna_study_has_best_params(self, study: optuna.study.Study | None) -> bool:
         if not study: