Piment Noir Git Repositories - freqai-strategies.git/commitdiff
fix(qav3): handle NaN with imputation in extrema weighting computation pipeline
authorJérôme Benoit <jerome.benoit@piment-noir.org>
Sat, 13 Dec 2025 11:51:50 +0000 (12:51 +0100)
committerJérôme Benoit <jerome.benoit@piment-noir.org>
Sat, 13 Dec 2025 11:51:50 +0000 (12:51 +0100)
When normalizing extrema weights, NaN values can lead to issues in the downstream computation pipeline; impute non-finite weights with the median of the finite ones (falling back to the default extrema weight) before standardization.
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
quickadapter/user_data/config-template.json
quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
quickadapter/user_data/strategies/QuickAdapterV3.py
quickadapter/user_data/strategies/Utils.py

index 38b90b2b84f887a23c71a47d1b73fdbbc913daa9..9f0e89f889902a599c77941afdb1333e00dad24e 100644 (file)
     "extrema_smoothing": {
       "method": "kaiser",
       "window": 5,
-      "beta": 15.0
+      "beta": 12.0
     },
     "predictions_extrema": {
       "thresholds_smoothing": "isodata"
index 43dad13607bec3a52aef9034c48f58e4d1a748d1..65cb63132028acd7c8af36837898f0a1b34e5ca9 100644 (file)
@@ -73,7 +73,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
     https://github.com/sponsors/robcaulk
     """
 
-    version = "3.7.129"
+    version = "3.7.130"
 
     _SQRT_2: Final[float] = np.sqrt(2.0)
 
@@ -364,6 +364,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
         self._optuna_train_params: dict[str, dict[str, Any]] = {}
         self._optuna_label_params: dict[str, dict[str, Any]] = {}
         self._optuna_label_candle_pool_full_cache: dict[int, list[int]] = {}
+        self._optuna_label_shuffle_rng = random.Random(self._optuna_config.get("seed"))
         self.init_optuna_label_candle_pool()
         self._optuna_label_candle: dict[str, int] = {}
         self._optuna_label_candles: dict[str, int] = {}
@@ -502,7 +503,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
         if len(optuna_label_candle_pool_full) == 0:
             raise RuntimeError("Failed to initialize optuna label candle pool full")
         self._optuna_label_candle_pool = optuna_label_candle_pool_full
-        random.shuffle(self._optuna_label_candle_pool)
+        self._optuna_label_shuffle_rng.shuffle(self._optuna_label_candle_pool)
         if len(self._optuna_label_candle_pool) == 0:
             raise RuntimeError("Failed to initialize optuna label candle pool")
 
@@ -541,8 +542,10 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
             - set(self._optuna_label_candle.values())
         )
         if len(optuna_label_available_candles) > 0:
-            self._optuna_label_candle_pool.extend(optuna_label_available_candles)
-            random.shuffle(self._optuna_label_candle_pool)
+            self._optuna_label_candle_pool.extend(
+                sorted(optuna_label_available_candles)
+            )
+            self._optuna_label_shuffle_rng.shuffle(self._optuna_label_candle_pool)
 
     def fit(
         self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs
index e20c041daeaa478d4a8d5088625279035f487b74..52da129ac4ea1a94f7f8747d9a1b72de3d584034 100644 (file)
@@ -106,7 +106,7 @@ class QuickAdapterV3(IStrategy):
     _TRADING_MODES: Final[tuple[TradingMode, ...]] = ("spot", "margin", "futures")
 
     def version(self) -> str:
-        return "3.3.179"
+        return "3.3.180"
 
     timeframe = "5m"
 
index 8a9109f815755ed50daddf3fb39570b4362bfcb5..823a4cfd55713826af00b096e703d212d7fe399f 100644 (file)
@@ -477,6 +477,28 @@ def _normalize_rank(
     return (ranks - 1) / (n - 1)
 
 
+def _impute_weights(
+    weights: NDArray[np.floating],
+    *,
+    finite_mask: NDArray[np.bool_] | None = None,
+    default_weight: float = DEFAULT_EXTREMA_WEIGHT,
+) -> NDArray[np.floating]:
+    weights = weights.astype(float, copy=False)
+    if finite_mask is None:
+        finite_mask = np.isfinite(weights)
+
+    if not finite_mask.any():
+        return np.full_like(weights, default_weight, dtype=float)
+
+    median_weight = np.nanmedian(weights[finite_mask])
+    if not np.isfinite(median_weight):
+        median_weight = default_weight
+
+    weights_out = weights.astype(float, copy=True)
+    weights_out[~finite_mask] = median_weight
+    return weights_out
+
+
 def normalize_weights(
     weights: NDArray[np.floating],
     # Phase 1: Standardization
@@ -505,15 +527,19 @@ def normalize_weights(
     if weights.size == 0:
         return weights
 
-    weights_out = np.full_like(weights, DEFAULT_EXTREMA_WEIGHT, dtype=float)
-
     weights_finite_mask = np.isfinite(weights)
     if not weights_finite_mask.any():
-        return weights_out
+        return np.full_like(weights, DEFAULT_EXTREMA_WEIGHT, dtype=float)
+
+    weights = _impute_weights(
+        weights,
+        finite_mask=weights_finite_mask,
+        default_weight=DEFAULT_EXTREMA_WEIGHT,
+    )
 
     # Phase 1: Standardization
     standardized_weights = standardize_weights(
-        weights[weights_finite_mask],
+        weights,
         method=standardization,
         robust_quantiles=robust_quantiles,
         mmad_scaling_factor=mmad_scaling_factor,
@@ -547,9 +573,10 @@ def normalize_weights(
             normalized_weights
         )
 
-    weights_out[weights_finite_mask] = normalized_weights
-    weights_out[~np.isfinite(weights_out)] = DEFAULT_EXTREMA_WEIGHT
-    return weights_out
+    if not np.isfinite(normalized_weights).all():
+        return np.full_like(weights, DEFAULT_EXTREMA_WEIGHT, dtype=float)
+
+    return normalized_weights
 
 
 def calculate_extrema_weights(