From: Jérôme Benoit
Date: Sun, 21 Sep 2025 12:31:54 +0000 (+0200)
Subject: fix: ensure different hyperopt seed per trial
X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=868ec7c4defcbc5a7453d2c8a6b43f0b0b8c8441;p=freqai-strategies.git

fix: ensure different hyperopt seed per trial

Signed-off-by: Jérôme Benoit
---

diff --git a/ReforceXY/user_data/freqaimodels/ReforceXY.py b/ReforceXY/user_data/freqaimodels/ReforceXY.py
index dedd96d..bcad2a8 100644
--- a/ReforceXY/user_data/freqaimodels/ReforceXY.py
+++ b/ReforceXY/user_data/freqaimodels/ReforceXY.py
@@ -963,6 +963,7 @@ class ReforceXY(BaseReinforcementLearningModel):
 
         # Ensure that the sampled parameters take precedence
         params = deepmerge(self.get_model_params(), params)
+        params["seed"] = params.get("seed", 42) + trial.number
         logger.info("Trial %s params: %s", trial.number, params)
 
 
diff --git a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
index 79cc03a..2468a7f 100644
--- a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
+++ b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
@@ -1450,6 +1450,7 @@ def train_objective(
         eval_weights=[test_weights],
         model_training_parameters=model_training_parameters,
         callbacks=get_optuna_callbacks(trial, regressor),
+        trial=trial,
     )
 
     y_pred = model.predict(X_test)
@@ -1485,6 +1486,7 @@ def hp_objective(
         eval_weights=[test_weights],
         model_training_parameters=model_training_parameters,
         callbacks=get_optuna_callbacks(trial, regressor),
+        trial=trial,
     )
 
     y_pred = model.predict(X_test)
diff --git a/quickadapter/user_data/strategies/Utils.py b/quickadapter/user_data/strategies/Utils.py
index f50bc3e..47c2376 100644
--- a/quickadapter/user_data/strategies/Utils.py
+++ b/quickadapter/user_data/strategies/Utils.py
@@ -742,6 +742,7 @@ def fit_regressor(
     model_training_parameters: dict[str, Any],
     init_model: Any = None,
     callbacks: Optional[list[Callable]] = None,
+    trial: Optional[optuna.trial.Trial] = None,
 ) -> Any:
     if regressor == "xgboost":
         from xgboost import XGBRegressor
@@ -749,6 +750,11 @@ def fit_regressor(
         if model_training_parameters.get("random_state") is None:
             model_training_parameters["random_state"] = 1
 
+        if trial is not None:
+            model_training_parameters["random_state"] = (
+                model_training_parameters["random_state"] + trial.number
+            )
+
         model = XGBRegressor(
             objective="reg:squarederror",
             eval_metric="rmse",
@@ -769,6 +775,11 @@ def fit_regressor(
         if model_training_parameters.get("seed") is None:
             model_training_parameters["seed"] = 1
 
+        if trial is not None:
+            model_training_parameters["seed"] = (
+                model_training_parameters["seed"] + trial.number
+            )
+
         model = LGBMRegressor(objective="regression", **model_training_parameters)
         model.fit(
             X=X,