From 493e918a25270b15eaf379618295da17918568e1 Mon Sep 17 00:00:00 2001
From: =?utf8?q?J=C3=A9r=C3=B4me=20Benoit?=
Date: Tue, 28 Jan 2025 23:31:07 +0100
Subject: [PATCH] refactor: code cleanup
MIME-Version: 1.0
Content-Type: text/plain; charset=utf8
Content-Transfer-Encoding: 8bit

Signed-off-by: Jérôme Benoit
---
 .../LightGBMRegressorQuickAdapterV35.py          |  7 ++-----
 .../XGBoostRegressorQuickAdapterV35.py           | 16 ++++++++--------
 2 files changed, 10 insertions(+), 13 deletions(-)

diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index 37c69a8..3316c6c 100644
--- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
@@ -53,8 +53,6 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
         lgbm_model = self.get_init_model(dk.pair)
 
-        logger.info(f"Model training parameters : {self.model_training_parameters}")
-
         model = LGBMRegressor(**self.model_training_parameters)
 
         optuna_hyperopt: bool = (
@@ -222,7 +220,7 @@ def objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
         test_weights = test_weights[-test_window:]
 
     # Fit the model
-    model = LGBMRegressor(**params)
+    model = LGBMRegressor(objective="rmse", **params)
     model.fit(
         X=X,
         y=y,
@@ -241,7 +239,6 @@ def objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
 
 def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
     study_params = {
-        "objective": "rmse",
         "n_estimators": trial.suggest_int("n_estimators", 100, 800),
         "num_leaves": trial.suggest_int("num_leaves", 2, 256),
         "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
@@ -254,7 +251,7 @@ def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, param
     params = {**params, **study_params}
 
     # Fit the model
-    model = LGBMRegressor(**params)
+    model = LGBMRegressor(objective="rmse", **params)
     model.fit(
         X=X,
         y=y,
diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index 3233c5c..6464920 100644
--- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
@@ -53,8 +53,6 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
         xgb_model = self.get_init_model(dk.pair)
 
-        logger.info(f"Model training parameters : {self.model_training_parameters}")
-
         model = XGBRegressor(**self.model_training_parameters)
 
         optuna_hyperopt: bool = (
@@ -243,8 +241,6 @@ def objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
 
 def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
     study_params = {
-        "objective": "reg:squarederror",
-        "eval_metric": "rmse",
         "n_estimators": trial.suggest_int("n_estimators", 100, 800),
         "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
         "max_depth": trial.suggest_int("max_depth", 3, 12),
@@ -253,14 +249,18 @@ def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, param
         "colsample_bytree": trial.suggest_float("colsample_bytree", 0.6, 1.0),
         "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
         "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
-        "callbacks": [
-            optuna.integration.XGBoostPruningCallback(trial, "validation_0-rmse")
-        ],
     }
     params = {**params, **study_params}
 
     # Fit the model
-    model = XGBRegressor(**params)
+    model = XGBRegressor(
+        objective="reg:squarederror",
+        eval_metric="rmse",
+        callbacks=[
+            optuna.integration.XGBoostPruningCallback(trial, "validation_0-rmse")
+        ],
+        **params,
+    )
     model.fit(
         X=X,
         y=y,
-- 
2.43.0