Piment Noir Git Repositories - freqai-strategies.git/commitdiff
fix: ensure different hyperopt seed per trial
author Jérôme Benoit <jerome.benoit@piment-noir.org>
Sun, 21 Sep 2025 12:31:54 +0000 (14:31 +0200)
committer Jérôme Benoit <jerome.benoit@piment-noir.org>
Sun, 21 Sep 2025 12:31:54 +0000 (14:31 +0200)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
ReforceXY/user_data/freqaimodels/ReforceXY.py
quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
quickadapter/user_data/strategies/Utils.py

ReforceXY/user_data/freqaimodels/ReforceXY.py
index dedd96d1b473e286c5f3cebed852fb7dc9910c2d..bcad2a8cb3adcfeafbcbdd4710675c3e3780eaff 100644 (file)
@@ -963,6 +963,7 @@ class ReforceXY(BaseReinforcementLearningModel):
 
         # Ensure that the sampled parameters take precedence
         params = deepmerge(self.get_model_params(), params)
+        params["seed"] = params.get("seed", 42) + trial.number
 
         logger.info("Trial %s params: %s", trial.number, params)
 
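The hunk above makes each Optuna trial derive its own seed from the sampled parameters: the base seed (42 by default) is offset by trial.number, so two trials with identical hyperparameters no longer share the same random state while the study remains reproducible. A minimal standalone sketch of the same pattern, on a toy objective rather than the repository's ReforceXY code, assuming only optuna is installed:

import random

import optuna


def objective(trial: optuna.trial.Trial) -> float:
    # Same pattern as above: base seed + trial.number gives a per-trial seed.
    params = {"x": trial.suggest_float("x", -10.0, 10.0)}
    params["seed"] = params.get("seed", 42) + trial.number
    rng = random.Random(params["seed"])
    # Toy score: quadratic bowl plus seed-dependent noise.
    return (params["x"] - 2.0) ** 2 + rng.uniform(-0.01, 0.01)


study = optuna.create_study(
    direction="minimize", sampler=optuna.samplers.TPESampler(seed=42)
)
study.optimize(objective, n_trials=10)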
quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
index 79cc03a80643c00f2f47de55b4ba96404f1332a3..2468a7f575bc535a0c174311bfa892fd94fe1e49 100644 (file)
@@ -1450,6 +1450,7 @@ def train_objective(
         eval_weights=[test_weights],
         model_training_parameters=model_training_parameters,
         callbacks=get_optuna_callbacks(trial, regressor),
+        trial=trial,
     )
     y_pred = model.predict(X_test)
 
@@ -1485,6 +1486,7 @@ def hp_objective(
         eval_weights=[test_weights],
         model_training_parameters=model_training_parameters,
         callbacks=get_optuna_callbacks(trial, regressor),
+        trial=trial,
     )
     y_pred = model.predict(X_test)
 
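Both train_objective and hp_objective now forward the live Optuna trial to fit_regressor instead of adjusting seeds themselves; the Utils.py hunks below then offset the regressor's seed key (random_state for XGBoost, seed for LightGBM) by trial.number. A minimal sketch of that plumbing with hypothetical names (fit_model stands in for fit_regressor), assuming optuna is installed:

from typing import Any, Optional

import optuna


def fit_model(
    model_training_parameters: dict[str, Any],
    trial: Optional[optuna.trial.Trial] = None,
) -> dict[str, Any]:
    # Copy so the caller's dict is left untouched, then derive the per-trial seed.
    params = dict(model_training_parameters)
    if params.get("seed") is None:
        params["seed"] = 1
    if trial is not None:
        params["seed"] += trial.number
    return params  # stands in for a fitted model


def hp_objective(trial: optuna.trial.Trial) -> float:
    params = {"learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True)}
    fitted = fit_model(params, trial=trial)  # trial forwarded, as in the hunks above
    return fitted["learning_rate"]  # placeholder score

With this split, trial 0 keeps the configured base seed and trial 3 trains with base seed + 3, so re-running the study reproduces the same sequence of seeds.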
quickadapter/user_data/strategies/Utils.py
index f50bc3eeb5bdefc0b89a19feef0080957cf93b5f..47c2376628869f645c471ac7064789fcc29f12dd 100644 (file)
@@ -742,6 +742,7 @@ def fit_regressor(
     model_training_parameters: dict[str, Any],
     init_model: Any = None,
     callbacks: Optional[list[Callable]] = None,
+    trial: Optional[optuna.trial.Trial] = None,
 ) -> Any:
     if regressor == "xgboost":
         from xgboost import XGBRegressor
@@ -749,6 +750,11 @@ def fit_regressor(
         if model_training_parameters.get("random_state") is None:
             model_training_parameters["random_state"] = 1
 
+        if trial is not None:
+            model_training_parameters["random_state"] = (
+                model_training_parameters["random_state"] + trial.number
+            )
+
         model = XGBRegressor(
             objective="reg:squarederror",
             eval_metric="rmse",
@@ -769,6 +775,11 @@ def fit_regressor(
         if model_training_parameters.get("seed") is None:
             model_training_parameters["seed"] = 1
 
+        if trial is not None:
+            model_training_parameters["seed"] = (
+                model_training_parameters["seed"] + trial.number
+            )
+
         model = LGBMRegressor(objective="regression", **model_training_parameters)
         model.fit(
             X=X,