Piment Noir Git Repositories - freqai-strategies.git/commitdiff
fix(quickadapter): preserve rsm parameter for CatBoost GPU pairwise modes (#37)
authorJérôme Benoit <jerome.benoit@piment-noir.org>
Fri, 9 Jan 2026 12:48:49 +0000 (13:48 +0100)
committerGitHub <noreply@github.com>
Fri, 9 Jan 2026 12:48:49 +0000 (13:48 +0100)
* fix(quickadapter): preserve rsm parameter for CatBoost GPU pairwise modes

The previous fix unconditionally removed the rsm parameter when using GPU,
but according to CatBoost documentation, rsm IS supported on GPU for
pairwise loss functions (PairLogit and PairLogitPairwise).

This commit refines the logic to only remove rsm for non-pairwise modes
on GPU, allowing users to benefit from rsm optimization when using
pairwise ranking loss functions.

Reference: https://github.com/catboost/catboost/issues/983

* refactor: rename constant and remove comments

* refactor(quickadapter): define _CATBOOST_GPU_RSM_LOSS_FUNCTIONS as global constant

- Define _CATBOOST_GPU_RSM_LOSS_FUNCTIONS as a reusable global constant
- Remove duplicate definitions in fit_regressor() and get_optuna_study_model_parameters()
- Improves maintainability: single source of truth for GPU rsm compatibility
- Ensures consistency between runtime logic and Optuna hyperparameter search

* chore: bump version to 3.10.8

Includes:
- CatBoost GPU rsm parameter fix for pairwise loss functions
- Optuna hyperparameter search optimization for rsm parameter
- Global constant for GPU rsm compatibility

quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
quickadapter/user_data/strategies/QuickAdapterV3.py
quickadapter/user_data/strategies/Utils.py

index e1b6225e026c599480a618c246bb0ecfb4a83586..03249cc4803750ec14264b8a7cd65e7b36e0905a 100644 (file)
@@ -87,7 +87,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
     https://github.com/sponsors/robcaulk
     """
 
-    version = "3.10.7"
+    version = "3.10.8"
 
     _TEST_SIZE: Final[float] = 0.1
 
index a5a0854fa3ed7990d27b5bb0c7d7eef64f4e5bb1..583c076f39fb45dc9733d07bea6c26ad1a33e1bb 100644 (file)
@@ -110,7 +110,7 @@ class QuickAdapterV3(IStrategy):
     _PLOT_EXTREMA_MIN_EPS: Final[float] = 0.01
 
     def version(self) -> str:
-        return "3.10.7"
+        return "3.10.8"
 
     timeframe = "5m"
     timeframe_minutes = timeframe_to_minutes(timeframe)
index 65d87ffa0155e4bca5ca53d8836ddba969cfff2e..13f87a152373e00328ed366530282bd821fd8327 100644 (file)
@@ -1627,6 +1627,11 @@ RegressorCallback = Union[Callable[..., Any], XGBoostTrainingCallback]
 
 _EARLY_STOPPING_ROUNDS_DEFAULT: Final[int] = 50
 
+_CATBOOST_GPU_RSM_LOSS_FUNCTIONS: Final[tuple[str, ...]] = (
+    "PairLogit",
+    "PairLogitPairwise",
+)
+
 
 def get_ngboost_dist(dist_name: str) -> type:
     from ngboost.distns import Exponential, Laplace, LogNormal, Normal, T
@@ -1875,10 +1880,12 @@ def fit_regressor(
         model_training_parameters.setdefault("loss_function", "RMSE")
 
         task_type = model_training_parameters.get("task_type", "CPU")
+        loss_function = model_training_parameters.get("loss_function", "RMSE")
         if task_type == "GPU":
             model_training_parameters.setdefault("max_ctr_complexity", 4)
             model_training_parameters.pop("n_jobs", None)
-            model_training_parameters.pop("rsm", None)
+            if loss_function not in _CATBOOST_GPU_RSM_LOSS_FUNCTIONS:
+                model_training_parameters.pop("rsm", None)
         else:
             n_jobs = model_training_parameters.pop("n_jobs", None)
             if n_jobs is not None:
@@ -1903,14 +1910,8 @@ def fit_regressor(
             )
 
         pruning_callback = None
-        if (
-            trial is not None
-            and has_eval_set
-            and task_type != "GPU"
-        ):
-            pruning_callback = optuna.integration.CatBoostPruningCallback(
-                trial, "RMSE"
-            )
+        if trial is not None and has_eval_set and task_type != "GPU":
+            pruning_callback = optuna.integration.CatBoostPruningCallback(trial, "RMSE")
             fit_callbacks.append(pruning_callback)
 
         model = CatBoostRegressor(**model_training_parameters)
@@ -2547,12 +2548,15 @@ def get_optuna_study_model_parameters(
                 ranges["random_strength"][1],
                 log=True,
             ),
-            "rsm": trial.suggest_float(
+        }
+
+        loss_function = model_training_parameters.get("loss_function", "RMSE")
+        if task_type == "CPU" or loss_function in _CATBOOST_GPU_RSM_LOSS_FUNCTIONS:
+            params["rsm"] = trial.suggest_float(
                 "rsm",
                 ranges["rsm"][0],
                 ranges["rsm"][1],
-            ),
-        }
+            )
 
         if bootstrap_type == "Bayesian":
             params["bagging_temperature"] = trial.suggest_float(