From c7ad9b81e16896b5cf1895fed84e7a3eb66b4bf7 Mon Sep 17 00:00:00 2001 From: =?utf8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Sat, 13 Dec 2025 12:51:50 +0100 Subject: [PATCH] fix(qav3): handle NaN with imputation in extrema weighting computation pipeline MIME-Version: 1.0 Content-Type: text/plain; charset=utf8 Content-Transfer-Encoding: 8bit When normalizing extrema weights, NaN values can lead to issues in the downstream standardization and smoothing steps; impute non-finite weights with the median of the finite values (falling back to the default extrema weight) before normalization. Signed-off-by: Jérôme Benoit --- quickadapter/user_data/config-template.json | 2 +- .../freqaimodels/QuickAdapterRegressorV3.py | 11 +++-- .../user_data/strategies/QuickAdapterV3.py | 2 +- quickadapter/user_data/strategies/Utils.py | 41 +++++++++++++++---- 4 files changed, 43 insertions(+), 13 deletions(-) diff --git a/quickadapter/user_data/config-template.json b/quickadapter/user_data/config-template.json index 38b90b2..9f0e89f 100644 --- a/quickadapter/user_data/config-template.json +++ b/quickadapter/user_data/config-template.json @@ -131,7 +131,7 @@ "extrema_smoothing": { "method": "kaiser", "window": 5, - "beta": 15.0 + "beta": 12.0 }, "predictions_extrema": { "thresholds_smoothing": "isodata" diff --git a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py index 43dad13..65cb631 100644 --- a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py +++ b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py @@ -73,7 +73,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): https://github.com/sponsors/robcaulk """ - version = "3.7.129" + version = "3.7.130" _SQRT_2: Final[float] = np.sqrt(2.0) @@ -364,6 +364,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): self._optuna_train_params: dict[str, dict[str, Any]] = {} self._optuna_label_params: dict[str, dict[str, Any]] = {} self._optuna_label_candle_pool_full_cache: dict[int, list[int]] = {} + self._optuna_label_shuffle_rng = random.Random(self._optuna_config.get("seed")) self.init_optuna_label_candle_pool() 
self._optuna_label_candle: dict[str, int] = {} self._optuna_label_candles: dict[str, int] = {} @@ -502,7 +503,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): if len(optuna_label_candle_pool_full) == 0: raise RuntimeError("Failed to initialize optuna label candle pool full") self._optuna_label_candle_pool = optuna_label_candle_pool_full - random.shuffle(self._optuna_label_candle_pool) + self._optuna_label_shuffle_rng.shuffle(self._optuna_label_candle_pool) if len(self._optuna_label_candle_pool) == 0: raise RuntimeError("Failed to initialize optuna label candle pool") @@ -541,8 +542,10 @@ class QuickAdapterRegressorV3(BaseRegressionModel): - set(self._optuna_label_candle.values()) ) if len(optuna_label_available_candles) > 0: - self._optuna_label_candle_pool.extend(optuna_label_available_candles) - random.shuffle(self._optuna_label_candle_pool) + self._optuna_label_candle_pool.extend( + sorted(optuna_label_available_candles) + ) + self._optuna_label_shuffle_rng.shuffle(self._optuna_label_candle_pool) def fit( self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs diff --git a/quickadapter/user_data/strategies/QuickAdapterV3.py b/quickadapter/user_data/strategies/QuickAdapterV3.py index e20c041..52da129 100644 --- a/quickadapter/user_data/strategies/QuickAdapterV3.py +++ b/quickadapter/user_data/strategies/QuickAdapterV3.py @@ -106,7 +106,7 @@ class QuickAdapterV3(IStrategy): _TRADING_MODES: Final[tuple[TradingMode, ...]] = ("spot", "margin", "futures") def version(self) -> str: - return "3.3.179" + return "3.3.180" timeframe = "5m" diff --git a/quickadapter/user_data/strategies/Utils.py b/quickadapter/user_data/strategies/Utils.py index 8a9109f..823a4cf 100644 --- a/quickadapter/user_data/strategies/Utils.py +++ b/quickadapter/user_data/strategies/Utils.py @@ -477,6 +477,28 @@ def _normalize_rank( return (ranks - 1) / (n - 1) +def _impute_weights( + weights: NDArray[np.floating], + *, + finite_mask: NDArray[np.bool_] | None = None, + 
default_weight: float = DEFAULT_EXTREMA_WEIGHT, +) -> NDArray[np.floating]: + weights = weights.astype(float, copy=False) + if finite_mask is None: + finite_mask = np.isfinite(weights) + + if not finite_mask.any(): + return np.full_like(weights, default_weight, dtype=float) + + median_weight = np.nanmedian(weights[finite_mask]) + if not np.isfinite(median_weight): + median_weight = default_weight + + weights_out = weights.astype(float, copy=True) + weights_out[~finite_mask] = median_weight + return weights_out + + def normalize_weights( weights: NDArray[np.floating], # Phase 1: Standardization @@ -505,15 +527,19 @@ def normalize_weights( if weights.size == 0: return weights - weights_out = np.full_like(weights, DEFAULT_EXTREMA_WEIGHT, dtype=float) - weights_finite_mask = np.isfinite(weights) if not weights_finite_mask.any(): - return weights_out + return np.full_like(weights, DEFAULT_EXTREMA_WEIGHT, dtype=float) + + weights = _impute_weights( + weights, + finite_mask=weights_finite_mask, + default_weight=DEFAULT_EXTREMA_WEIGHT, + ) # Phase 1: Standardization standardized_weights = standardize_weights( - weights[weights_finite_mask], + weights, method=standardization, robust_quantiles=robust_quantiles, mmad_scaling_factor=mmad_scaling_factor, @@ -547,9 +573,10 @@ def normalize_weights( normalized_weights ) - weights_out[weights_finite_mask] = normalized_weights - weights_out[~np.isfinite(weights_out)] = DEFAULT_EXTREMA_WEIGHT - return weights_out + if not np.isfinite(normalized_weights).all(): + return np.full_like(weights, DEFAULT_EXTREMA_WEIGHT, dtype=float) + + return normalized_weights def calculate_extrema_weights( -- 2.43.0