Piment Noir Git Repositories - freqai-strategies.git/commitdiff
refactor: code cleanup
authorJérôme Benoit <jerome.benoit@piment-noir.org>
Tue, 28 Jan 2025 22:31:07 +0000 (23:31 +0100)
committerJérôme Benoit <jerome.benoit@piment-noir.org>
Tue, 28 Jan 2025 22:31:07 +0000 (23:31 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py

index 37c69a8267cce69b88fb2af6e8822d22f33a54cc..3316c6c432d97c86964b4bd9eecabfc2c012ca0a 100644 (file)
@@ -53,8 +53,6 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
         lgbm_model = self.get_init_model(dk.pair)
 
-        logger.info(f"Model training parameters : {self.model_training_parameters}")
-
         model = LGBMRegressor(**self.model_training_parameters)
 
         optuna_hyperopt: bool = (
@@ -222,7 +220,7 @@ def objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
     test_weights = test_weights[-test_window:]
 
     # Fit the model
-    model = LGBMRegressor(**params)
+    model = LGBMRegressor(objective="rmse", **params)
     model.fit(
         X=X,
         y=y,
@@ -241,7 +239,6 @@ def objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
 
 def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
     study_params = {
-        "objective": "rmse",
         "n_estimators": trial.suggest_int("n_estimators", 100, 800),
         "num_leaves": trial.suggest_int("num_leaves", 2, 256),
         "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
@@ -254,7 +251,7 @@ def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, param
     params = {**params, **study_params}
 
     # Fit the model
-    model = LGBMRegressor(**params)
+    model = LGBMRegressor(objective="rmse", **params)
     model.fit(
         X=X,
         y=y,
index 3233c5c393d8dc7c3b41e5ad772b8b0fed547312..646492031f11afc07da6aa86a891bf9f967fc252 100644 (file)
@@ -53,8 +53,6 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
         xgb_model = self.get_init_model(dk.pair)
 
-        logger.info(f"Model training parameters : {self.model_training_parameters}")
-
         model = XGBRegressor(**self.model_training_parameters)
 
         optuna_hyperopt: bool = (
@@ -243,8 +241,6 @@ def objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
 
 def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
     study_params = {
-        "objective": "reg:squarederror",
-        "eval_metric": "rmse",
         "n_estimators": trial.suggest_int("n_estimators", 100, 800),
         "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
         "max_depth": trial.suggest_int("max_depth", 3, 12),
@@ -253,14 +249,18 @@ def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, param
         "colsample_bytree": trial.suggest_float("colsample_bytree", 0.6, 1.0),
         "reg_alpha": trial.suggest_float("reg_alpha", 1e-8, 10.0, log=True),
         "reg_lambda": trial.suggest_float("reg_lambda", 1e-8, 10.0, log=True),
-        "callbacks": [
-            optuna.integration.XGBoostPruningCallback(trial, "validation_0-rmse")
-        ],
     }
     params = {**params, **study_params}
 
     # Fit the model
-    model = XGBRegressor(**params)
+    model = XGBRegressor(
+        objective="reg:squarederror",
+        eval_metric="rmse",
+        callbacks=[
+            optuna.integration.XGBoostPruningCallback(trial, "validation_0-rmse")
+        ],
+        **params,
+    )
     model.fit(
         X=X,
         y=y,