From: Jérôme Benoit Date: Sun, 26 Jan 2025 17:35:41 +0000 (+0100) Subject: feat(qav3): expose more optuna tunables X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=1d1452cd1e0c8983631bcc76889e6176acc8b22d;p=freqai-strategies.git feat(qav3): expose more optuna tunables Signed-off-by: Jérôme Benoit --- diff --git a/quickadapter/user_data/config-template.json b/quickadapter/user_data/config-template.json index 692a353..de65213 100644 --- a/quickadapter/user_data/config-template.json +++ b/quickadapter/user_data/config-template.json @@ -130,6 +130,9 @@ "data_kitchen_thread_count": 6, // set to number of CPU threads / 4 "weibull_outlier_threshold": 0.999, "optuna_hyperopt": true, + "optuna_hyperopt_trials": 36, + "optuna_hyperopt_timeout": 3600, + "optuna_hyperopt_jobs": 1, "extra_returns_per_train": { "DI_value_param1": 0, "DI_value_param2": 0, diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py index dec8de6..91cf269 100644 --- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py +++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py @@ -80,8 +80,9 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel): y_test, self.model_training_parameters, ), - n_trials=N_TRIALS, - n_jobs=1, + n_trials=self.freqai_info.get("optuna_hyperopt_trials", N_TRIALS), + n_jobs=self.freqai_info.get("optuna_hyperopt_jobs", 1), + timeout=self.freqai_info.get("optuna_hyperopt_timeout", 7200), ) hp = study.best_params diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py index f9cecc3..082c0bd 100644 --- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py +++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py @@ -80,8 +80,9 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel): y_test, self.model_training_parameters, ), - n_trials=N_TRIALS, - n_jobs=1, + n_trials=self.freqai_info.get("optuna_hyperopt_trials", N_TRIALS), + n_jobs=self.freqai_info.get("optuna_hyperopt_jobs", 1), + timeout=self.freqai_info.get("optuna_hyperopt_timeout", 7200), ) hp = study.best_params @@ -103,6 +104,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel): **{ "n_estimators": hp.get("n_estimators"), "learning_rate": hp.get("learning_rate"), + "max_depth": hp.get("max_depth"), "gamma": hp.get("gamma"), "reg_alpha": hp.get("reg_alpha"), "reg_lambda": hp.get("reg_lambda"), @@ -220,6 +222,7 @@ def objective(trial, X, y, weights, X_test, y_test, params): "eval_metric": "rmse", "n_estimators": trial.suggest_int("n_estimators", 100, 1000), "learning_rate": trial.suggest_loguniform("learning_rate", 1e-8, 1.0), + "max_depth": trial.suggest_int("max_depth", 1, 10), "gamma": trial.suggest_loguniform("gamma", 1e-8, 1.0), "reg_alpha": trial.suggest_loguniform("reg_alpha", 1e-8, 10.0), "reg_lambda": trial.suggest_loguniform("reg_lambda", 1e-8, 10.0),