Piment Noir Git Repositories - freqai-strategies.git/commitdiff
fix: improve optuna error handling
author    Jérôme Benoit <jerome.benoit@piment-noir.org>
          Sat, 15 Mar 2025 16:10:23 +0000 (17:10 +0100)
committer Jérôme Benoit <jerome.benoit@piment-noir.org>
          Sat, 15 Mar 2025 16:10:23 +0000 (17:10 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
ReforceXY/user_data/freqaimodels/ReforceXY.py
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
quickadapter/user_data/strategies/QuickAdapterV3.py

diff --git a/ReforceXY/user_data/freqaimodels/ReforceXY.py b/ReforceXY/user_data/freqaimodels/ReforceXY.py
index e381af9ecb506a63351ff69191da904bdf932bf4..c500b060d4da9d0babba5f5938ed5ae6b757bc32 100644
@@ -347,7 +347,13 @@ class ReforceXY(BaseReinforcementLearningModel):
 
         start = time.time()
         if self.hyperopt:
-            model_params = self.study(train_df, total_timesteps, dk)
+            best_trial_params = self.study(train_df, total_timesteps, dk)
+            if best_trial_params is None:
+                logger.error(
+                    "Hyperopt failed. Using default configured model params instead."
+                )
+                best_trial_params = self.get_model_params()
+            model_params = best_trial_params
         else:
             model_params = self.get_model_params()
         logger.info("%s params: %s", self.model_type, model_params)
@@ -495,9 +501,22 @@ class ReforceXY(BaseReinforcementLearningModel):
             )
         return storage
 
+    def study_has_best_trial_params(self, study: Study | None) -> bool:
+        if not study:
+            return False
+        try:
+            _ = study.best_trial.params
+            return True
+        # file backend storage raises KeyError
+        except KeyError:
+            return False
+        # sqlite backend storage raises ValueError
+        except ValueError:
+            return False
+
     def study(
         self, train_df: DataFrame, total_timesteps: int, dk: FreqaiDataKitchen
-    ) -> Dict:
+    ) -> Dict | None:
         """
         Runs hyperparameter optimization using Optuna and
         returns the best hyperparameters found merged with the user defined parameters
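The new study_has_best_trial_params helper exists because study.best_trial raises on a study with no completed trials, and the exception type depends on the storage backend (the commit's comments note KeyError for the file backend and ValueError for SQLite). A minimal sketch of the failure mode it probes for, assuming an in-memory Optuna study:

    import optuna

    study = optuna.create_study()  # no trials have completed yet
    try:
        _ = study.best_trial.params
    except ValueError as e:
        # In-memory (and SQLite) storage raises ValueError when no trial has
        # completed; per the comments above, file-backed storage raises KeyError.
        print(f"no best trial yet: {e}")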
@@ -526,6 +545,7 @@ class ReforceXY(BaseReinforcementLearningModel):
             storage=storage,
             load_if_exists=True,
         )
+        hyperopt_failed = False
         start = time.time()
         try:
             study.optimize(
@@ -542,65 +562,95 @@ class ReforceXY(BaseReinforcementLearningModel):
             )
         except KeyboardInterrupt:
             pass
+        except Exception as e:
+            time_spent = time.time() - start
+            logger.error(
+                f"Hyperopt {study_name} failed ({time_spent:.2f} secs): {e}",
+                exc_info=True,
+            )
+            hyperopt_failed = True
         time_spent = time.time() - start
+        if self.study_has_best_trial_params(study) is False:
+            logger.error(
+                f"Hyperopt {study_name} failed ({time_spent:.2f} secs): no study best trial params found"
+            )
+            hyperopt_failed = True
+
+        if hyperopt_failed:
+            best_trial_params = self.load_best_trial_params(
+                dk.pair if self.rl_config_optuna.get("per_pair", False) else None
+            )
+            if best_trial_params is None:
+                logger.error(
+                    f"Hyperopt {study_name} failed ({time_spent:.2f} secs): no previously saved best trial params found"
+                )
+                return None
+        else:
+            best_trial_params = study.best_trial.params
 
         logger.info(
-            "------------ Hyperopt results %s (%.2f secs) ------------",
+            "------------ Hyperopt %s results (%.2f secs) ------------",
             study_name,
             time_spent,
         )
         logger.info(
             "Best trial: %s. Score: %s", study.best_trial.number, study.best_trial.value
         )
-        logger.info("Best trial params: %s", study.best_trial.params)
+        logger.info("Best trial params: %s", best_trial_params)
         logger.info("-------------------------------------------------------")
 
-        self.save_best_params(
-            study.best_trial.params,
+        self.save_best_trial_params(
+            best_trial_params,
             dk.pair if self.rl_config_optuna.get("per_pair", False) else None,
         )
 
-        return {**self.model_training_parameters, **study.best_trial.params}
+        return {**self.model_training_parameters, **best_trial_params}
 
-    def save_best_params(self, best_params: Dict, pair: str | None = None) -> None:
+    def save_best_trial_params(
+        self, best_trial_params: Dict, pair: str | None = None
+    ) -> None:
         """
-        Save the best hyperparameters found during hyperparameter optimization
+        Save the best trial hyperparameters found during hyperparameter optimization
         """
-        best_params_filename = (
+        best_trial_params_filename = (
             f"hyperopt-best-params-{pair.split('/')[0]}"
             if pair
             else "hyperopt-best-params"
         )
-        best_params_path = Path(self.full_path / f"{best_params_filename}.json")
+        best_trial_params_path = Path(
+            self.full_path / f"{best_trial_params_filename}.json"
+        )
         log_msg: str = (
-            f"{pair}: saving best params to {best_params_path} JSON file"
+            f"{pair}: saving best params to {best_trial_params_path} JSON file"
             if pair
-            else f"saving best params to {best_params_path} JSON file"
+            else f"saving best params to {best_trial_params_path} JSON file"
         )
         logger.info(log_msg)
-        with best_params_path.open("w", encoding="utf-8") as write_file:
-            json.dump(best_params, write_file, indent=4)
+        with best_trial_params_path.open("w", encoding="utf-8") as write_file:
+            json.dump(best_trial_params, write_file, indent=4)
 
-    def load_best_params(self, pair: str | None = None) -> Dict | None:
+    def load_best_trial_params(self, pair: str | None = None) -> Dict | None:
         """
-        Load the best hyperparameters found and saved during hyperparameter optimization
+        Load the best trial hyperparameters found and saved during hyperparameter optimization
         """
-        best_params_filename = (
+        best_trial_params_filename = (
             f"hyperopt-best-params-{pair.split('/')[0]}"
             if pair
             else "hyperopt-best-params"
         )
-        best_params_path = Path(self.full_path / f"{best_params_filename}.json")
+        best_trial_params_path = Path(
+            self.full_path / f"{best_trial_params_filename}.json"
+        )
         log_msg: str = (
-            f"{pair}: loading best params from {best_params_path} JSON file"
+            f"{pair}: loading best params from {best_trial_params_path} JSON file"
             if pair
-            else f"loading best params from {best_params_path} JSON file"
+            else f"loading best params from {best_trial_params_path} JSON file"
         )
-        if best_params_path.is_file():
+        if best_trial_params_path.is_file():
             logger.info(log_msg)
-            with best_params_path.open("r", encoding="utf-8") as read_file:
-                best_params = json.load(read_file)
-            return best_params
+            with best_trial_params_path.open("r", encoding="utf-8") as read_file:
+                best_trial_params = json.load(read_file)
+            return best_trial_params
         return None
 
     def objective(
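The renamed save_best_trial_params/load_best_trial_params pair is what the hyperopt_failed branch above falls back on: best trial parameters are persisted as JSON in the model directory, one file per pair when per_pair is set (keyed by the base currency) or a single shared file otherwise. A minimal sketch of the round-trip, where the directory and the BTC/USDT pair are illustrative stand-ins:

    import json
    from pathlib import Path

    full_path = Path("user_data/models/example")  # hypothetical model directory
    full_path.mkdir(parents=True, exist_ok=True)
    pair = "BTC/USDT"  # illustrative pair

    # Save: the per-pair file name keeps only the base currency ("BTC").
    params_path = full_path / f"hyperopt-best-params-{pair.split('/')[0]}.json"
    with params_path.open("w", encoding="utf-8") as write_file:
        json.dump({"learning_rate": 0.0003}, write_file, indent=4)

    # Load: returns None when no parameters were ever saved.
    best_trial_params = (
        json.loads(params_path.read_text(encoding="utf-8"))
        if params_path.is_file()
        else None
    )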
diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index e595f7b410734958e6f19e88265a2864cf28e713..bea7910cce5bc7fdd75c209618ff69c303839c44 100644
@@ -338,11 +338,18 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
                 gc_after_trial=True,
             )
         except Exception as e:
+            time_spent = time.time() - start
             logger.error(
-                f"Optuna {study_namespace} hyperopt failed: {e}", exc_info=True
+                f"Optuna {study_namespace} hyperopt failed ({time_spent:.2f} secs): {e}",
+                exc_info=True,
             )
             return None, None
         time_spent = time.time() - start
+        if self.optuna_study_has_best_params(study) is False:
+            logger.error(
+                f"Optuna {study_namespace} hyperopt failed ({time_spent:.2f} secs): no study best params found"
+            )
+            return None, None
         logger.info(f"Optuna {study_namespace} hyperopt done ({time_spent:.2f} secs)")
 
         params = study.best_params
@@ -420,11 +427,18 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
                 gc_after_trial=True,
             )
         except Exception as e:
+            time_spent = time.time() - start
             logger.error(
-                f"Optuna {study_namespace} hyperopt failed: {e}", exc_info=True
+                f"Optuna {study_namespace} hyperopt failed ({time_spent:.2f} secs): {e}",
+                exc_info=True,
             )
             return None, None
         time_spent = time.time() - start
+        if self.optuna_study_has_best_params(study) is False:
+            logger.error(
+                f"Optuna {study_namespace} hyperopt failed ({time_spent:.2f} secs): no study best params found"
+            )
+            return None, None
         logger.info(f"Optuna {study_namespace} hyperopt done ({time_spent:.2f} secs)")
 
         params = study.best_params
@@ -583,7 +597,7 @@ def period_objective(
     )
     y_pred = model.predict(X_test)
 
-    min_label_period_candles: int = fit_live_predictions_candles // 20
+    min_label_period_candles: int = max(fit_live_predictions_candles // 20, 20)
     max_label_period_candles: int = max(
         fit_live_predictions_candles // 6, min_label_period_candles
     )
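The floor on min_label_period_candles prevents degenerate search spaces for small horizons: with the old formula, fit_live_predictions_candles = 100 gave a minimum of 100 // 20 = 5 candles, whereas the new expression clamps it to at least 20; the max_label_period_candles formula then keeps the upper bound at or above it. Worked through with that illustrative value:

    fit_live_predictions_candles = 100  # illustrative value
    min_label_period_candles = max(fit_live_predictions_candles // 20, 20)  # 20 (was 5)
    max_label_period_candles = max(
        fit_live_predictions_candles // 6, min_label_period_candles
    )  # max(16, 20) == 20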
@@ -600,12 +614,18 @@ def period_objective(
     y_test = y_test.iloc[-label_windows_length:].to_numpy()
     test_weights = test_weights[-label_windows_length:]
     y_pred = y_pred[-label_windows_length:]
-    y_test = [y_test[i : i + label_window] for i in range(0, len(y_test), label_window)]
+    y_test = [
+        y_test[i : i + label_window]
+        for i in range(0, label_windows_length, label_window)
+    ]
     test_weights = [
         test_weights[i : i + label_window]
-        for i in range(0, len(test_weights), label_window)
+        for i in range(0, label_windows_length, label_window)
+    ]
+    y_pred = [
+        y_pred[i : i + label_window]
+        for i in range(0, label_windows_length, label_window)
     ]
-    y_pred = [y_pred[i : i + label_window] for i in range(0, len(y_pred), label_window)]
 
     errors = [
         sklearn.metrics.root_mean_squared_error(y_t, y_p, sample_weight=t_w)
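The windowing comprehensions previously ranged over each array's own length; since all three arrays were just truncated to label_windows_length, the rewrite changes no behavior, but ranging over the one shared bound makes the invariant explicit: y_test, test_weights, and y_pred split into the same number of full label_window-sized chunks. A minimal sketch with illustrative sizes, assuming label_windows_length is the largest multiple of label_window that fits (as the truncation suggests):

    import numpy as np

    label_window = 24                        # illustrative window size
    y_pred = np.random.rand(100)             # illustrative predictions
    label_windows_length = (len(y_pred) // label_window) * label_window  # 96

    y_pred = y_pred[-label_windows_length:]
    windows = [
        y_pred[i : i + label_window]
        for i in range(0, label_windows_length, label_window)
    ]
    assert all(len(w) == label_window for w in windows)  # full windows only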
diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index cb475a49bbc9e228801587d67725fdbb65b1a268..e2cabdd47262033f05c757acd3ec3bc123372b8d 100644
@@ -341,11 +341,18 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
                 gc_after_trial=True,
             )
         except Exception as e:
+            time_spent = time.time() - start
             logger.error(
-                f"Optuna {study_namespace} hyperopt failed: {e}", exc_info=True
+                f"Optuna {study_namespace} hyperopt failed ({time_spent:.2f} secs): {e}",
+                exc_info=True,
             )
             return None, None
         time_spent = time.time() - start
+        if self.optuna_study_has_best_params(study) is False:
+            logger.error(
+                f"Optuna {study_namespace} hyperopt failed ({time_spent:.2f} secs): no study best params found"
+            )
+            return None, None
         logger.info(f"Optuna {study_namespace} hyperopt done ({time_spent:.2f} secs)")
 
         params = study.best_params
@@ -423,11 +430,18 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
                 gc_after_trial=True,
             )
         except Exception as e:
+            time_spent = time.time() - start
             logger.error(
-                f"Optuna {study_namespace} hyperopt failed: {e}", exc_info=True
+                f"Optuna {study_namespace} hyperopt failed ({time_spent:.2f} secs): {e}",
+                exc_info=True,
             )
             return None, None
         time_spent = time.time() - start
+        if self.optuna_study_has_best_params(study) is False:
+            logger.error(
+                f"Optuna {study_namespace} hyperopt failed ({time_spent:.2f} secs): no study best params found"
+            )
+            return None, None
         logger.info(f"Optuna {study_namespace} hyperopt done ({time_spent:.2f} secs)")
 
         params = study.best_params
@@ -591,7 +605,7 @@ def period_objective(
     )
     y_pred = model.predict(X_test)
 
-    min_label_period_candles: int = fit_live_predictions_candles // 20
+    min_label_period_candles: int = max(fit_live_predictions_candles // 20, 20)
     max_label_period_candles: int = max(
         fit_live_predictions_candles // 6, min_label_period_candles
     )
@@ -608,12 +622,18 @@ def period_objective(
     y_test = y_test.iloc[-label_windows_length:].to_numpy()
     test_weights = test_weights[-label_windows_length:]
     y_pred = y_pred[-label_windows_length:]
-    y_test = [y_test[i : i + label_window] for i in range(0, len(y_test), label_window)]
+    y_test = [
+        y_test[i : i + label_window]
+        for i in range(0, label_windows_length, label_window)
+    ]
     test_weights = [
         test_weights[i : i + label_window]
-        for i in range(0, len(test_weights), label_window)
+        for i in range(0, label_windows_length, label_window)
+    ]
+    y_pred = [
+        y_pred[i : i + label_window]
+        for i in range(0, label_windows_length, label_window)
     ]
-    y_pred = [y_pred[i : i + label_window] for i in range(0, len(y_pred), label_window)]
 
     errors = [
         sklearn.metrics.root_mean_squared_error(y_t, y_p, sample_weight=t_w)
diff --git a/quickadapter/user_data/strategies/QuickAdapterV3.py b/quickadapter/user_data/strategies/QuickAdapterV3.py
index 8235af639f786bdb61f201225e09ff611e20b688..b120341570c1a7acf93c6541fda980fc94b4a4e8 100644
@@ -393,6 +393,9 @@ class QuickAdapterV3(IStrategy):
                     num_shorts += 1
                 elif "long" in trade.enter_tag:
                     num_longs += 1
+            total_open_trades = num_longs + num_shorts
+            if total_open_trades >= self.config.get("max_open_trades"):
+                return False
             if (side == "long" and num_longs >= max_open_trades_per_side) or (
                 side == "short" and num_shorts >= max_open_trades_per_side
             ):
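The added guard rejects a new entry once the combined long and short count reaches the configured max_open_trades, before the per-side check runs. A condensed sketch of the resulting two-level check, assuming a non-negative integer max_open_trades in the config (freqtrade's -1 convention for unlimited trades is not special-cased by this guard):

    def can_enter(side: str, num_longs: int, num_shorts: int,
                  max_open_trades: int, max_open_trades_per_side: int) -> bool:
        # Global cap across both sides (assumes max_open_trades >= 0).
        if num_longs + num_shorts >= max_open_trades:
            return False
        # Per-side cap, as in the surrounding confirm_trade_entry logic.
        if side == "long" and num_longs >= max_open_trades_per_side:
            return False
        if side == "short" and num_shorts >= max_open_trades_per_side:
            return False
        return True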
@@ -412,7 +415,9 @@ class QuickAdapterV3(IStrategy):
         if max_open_trades < 0:
             return -1
         if self.is_short_allowed():
-            return (max_open_trades + 1) // 2
+            if max_open_trades % 2 == 1:
+                max_open_trades += 1
+            return max_open_trades // 2
         else:
             return max_open_trades
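For the non-negative totals that reach this branch, the rewrite is arithmetically identical to the previous (max_open_trades + 1) // 2 ceiling division; rounding an odd total up to the next even number before halving simply spells out the intent of splitting slots evenly between sides. A quick equivalence check:

    def per_side(max_open_trades: int) -> int:
        # Round an odd total up so longs and shorts get an equal share.
        if max_open_trades % 2 == 1:
            max_open_trades += 1
        return max_open_trades // 2

    assert all(per_side(n) == (n + 1) // 2 for n in range(100))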