From: Jérôme Benoit
Date: Wed, 19 Nov 2025 20:19:55 +0000 (+0100)
Subject: feat(qav3): weighted reversals labeling (#17)
X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=608a8590fc4ed4aa9c19250799517a96e6feb5de;p=freqai-strategies.git

feat(qav3): weighted reversals labeling (#17)

* feat(qav3): weighted reversals labeling

Signed-off-by: Jérôme Benoit

* refactor: cleanup extrema tunables

Signed-off-by: Jérôme Benoit

* docs: refine README

Signed-off-by: Jérôme Benoit

* fix: address review comments

Signed-off-by: Jérôme Benoit

* docs: refine README.md

Signed-off-by: Jérôme Benoit

* Apply suggestion from @Copilot

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* refactor: consolidate extrema tunables implementation

Signed-off-by: Jérôme Benoit

* Apply suggestion from @Copilot

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Apply suggestion from @Copilot

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Apply suggestion from @Copilot

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Apply suggestion from @Copilot

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Apply suggestion from @Copilot

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* perf: smooth extrema once

Signed-off-by: Jérôme Benoit

* refactor: cleanup

Signed-off-by: Jérôme Benoit

---------

Signed-off-by: Jérôme Benoit
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---

diff --git a/README.md b/README.md
index 29d74a2..d90a7cd 100644
--- a/README.md
+++ b/README.md
@@ -48,13 +48,17 @@ docker compose up -d --build
 | reversal_confirmation.lookback_period | 0 | int >= 0 | Prior confirming candles; 0 = none. |
 | reversal_confirmation.decay_ratio | 0.5 | float (0,1] | Geometric per-candle relaxation factor. |
 | reversal_confirmation.min_natr_ratio_percent | 0.0095 | float [0,1] | Lower bound fraction for volatility adjusted reversal threshold. |
-| reversal_confirmation.max_natr_ratio_percent | 0.25 | float [0,1] | Upper bound fraction (>= lower bound) for volatility adjusted reversal threshold. |
+| reversal_confirmation.max_natr_ratio_percent | 0.2 | float [0,1] | Upper bound fraction (>= lower bound) for volatility adjusted reversal threshold. |
 | _Regressor model_ | | | |
 | freqai.regressor | `xgboost` | enum {`xgboost`,`lightgbm`} | Machine learning regressor algorithm. |
 | _Extrema smoothing_ | | | |
-| freqai.extrema_smoothing | `gaussian` | enum {`gaussian`,`kaiser`,`triang`,`smm`,`sma`} | Extrema smoothing kernel (smm=simple moving median, sma=simple moving average). |
-| freqai.extrema_smoothing_window | 5 | int >= 1 | Window size for extrema smoothing. |
-| freqai.extrema_smoothing_beta | 8.0 | float > 0 | Kaiser kernel shape parameter. |
+| freqai.extrema_smoothing.method | `gaussian` | enum {`gaussian`,`kaiser`,`triang`,`smm`,`sma`} | Extrema smoothing kernel (smm=simple moving median, sma=simple moving average). |
+| freqai.extrema_smoothing.window | 5 | int >= 3 | Window size for extrema smoothing. |
+| freqai.extrema_smoothing.beta | 8.0 | float > 0 | Kaiser kernel shape parameter. |
+| _Extrema weighting_ | | | |
+| freqai.extrema_weighting.strategy | `none` | enum {`none`,`pivot_threshold`} | Weighting strategy applied before smoothing. |
+| freqai.extrema_weighting.normalization | `minmax` | enum {`minmax`,`l1`,`none`} | Normalization method for weights. |
+| freqai.extrema_weighting.gamma | 1.0 | float (0,10] | Contrast exponent applied after normalization (>1 emphasizes extremes, 0<gamma<1 de-emphasizes). |
 | freqai.feature_parameters.label_period_candles | … | int >= 1 | Zigzag labeling NATR horizon. |
 | freqai.feature_parameters.min_label_period_candles | 12 | int >= 1 | Minimum labeling NATR horizon used for reversals labeling HPO. |
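The table above documents the new nested tunables that replace the flat `freqai.extrema_smoothing*` keys. A minimal sketch of the corresponding configuration fragment, using only the key names and defaults documented above and expressed as the Python dict the strategy reads through `freqai_info` (the surrounding config layout is illustrative):

```python
# Hypothetical "freqai" section slice; values are the documented defaults,
# except "strategy", which is switched on here to enable pivot-threshold weighting.
freqai_info = {
    "extrema_smoothing": {
        "method": "gaussian",  # one of: gaussian, kaiser, triang, smm, sma
        "window": 5,           # int >= 3
        "beta": 8.0,           # only used by the kaiser kernel
    },
    "extrema_weighting": {
        "strategy": "pivot_threshold",  # "none" disables weighting
        "normalization": "minmax",      # or "l1" / "none"
        "gamma": 1.0,                   # contrast exponent in (0, 10]
    },
}
```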
diff --git a/ReforceXY/user_data/freqaimodels/ReforceXY.py b/ReforceXY/user_data/freqaimodels/ReforceXY.py
index da734e3..47b2cfa 100644
--- a/ReforceXY/user_data/freqaimodels/ReforceXY.py
+++ b/ReforceXY/user_data/freqaimodels/ReforceXY.py
@@ -2374,8 +2374,9 @@ class MyRLEnv(Base5ActionRLEnv):
         strategy_fn = strategies.get(exit_attenuation_mode, None)
         if strategy_fn is None:
             logger.debug(
-                "Unknown exit_attenuation_mode '%s'; defaulting to linear. Valid modes: %s",
+                "Unknown exit_attenuation_mode '%s'; defaulting to %s. Valid modes: %s",
                 exit_attenuation_mode,
+                ReforceXY._EXIT_ATTENUATION_MODES[2],  # "linear"
                 ", ".join(ReforceXY._EXIT_ATTENUATION_MODES),
             )
             strategy_fn = _linear
@@ -2384,9 +2385,10 @@ class MyRLEnv(Base5ActionRLEnv):
             factor = strategy_fn(factor, effective_dr, model_reward_parameters)
         except Exception as e:
             logger.warning(
-                "exit_attenuation_mode '%s' failed (%r); fallback linear (effective_dr=%.5f)",
+                "exit_attenuation_mode '%s' failed (%r); fallback to %s (effective_dr=%.5f)",
                 exit_attenuation_mode,
                 e,
+                ReforceXY._EXIT_ATTENUATION_MODES[2],  # "linear"
                 effective_dr,
             )
             factor = _linear(factor, effective_dr, model_reward_parameters)
diff --git a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
index 26dca00..055dc07 100644
--- a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
+++ b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py
@@ -66,7 +66,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
     https://github.com/sponsors/robcaulk
     """
 
-    version = "3.7.121"
+    version = "3.7.122"
 
     _SQRT_2: Final[float] = np.sqrt(2.0)
 
@@ -731,7 +731,6 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
                 f"Unsupported extrema selection method: {extrema_selection}. "
                 f"Supported methods are {', '.join(self._EXTREMA_SELECTION_METHODS)}"
             )
-        extrema_selection: ExtremaSelectionMethod = extrema_selection
         thresholds_smoothing = str(
             self.freqai_info.get("prediction_thresholds_smoothing", "mean")
         )
@@ -1119,7 +1118,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel):
             raise ValueError("label_weights must contain only finite values")
         if np.any(np_weights < 0):
             raise ValueError("label_weights values must be non-negative")
-        label_weights_sum = np.sum(np_weights)
+        label_weights_sum = np.sum(np.abs(np_weights))
         if np.isclose(label_weights_sum, 0.0):
             raise ValueError("label_weights sum cannot be zero")
         np_weights = np_weights / label_weights_sum
diff --git a/quickadapter/user_data/strategies/QuickAdapterV3.py b/quickadapter/user_data/strategies/QuickAdapterV3.py
index 00e0832..2590693 100644
--- a/quickadapter/user_data/strategies/QuickAdapterV3.py
+++ b/quickadapter/user_data/strategies/QuickAdapterV3.py
@@ -5,7 +5,16 @@ import logging
 import math
 from functools import cached_property, lru_cache, reduce
 from pathlib import Path
-from typing import Any, Callable, ClassVar, Final, Literal, Optional, Sequence, Tuple
+from typing import (
+    Any,
+    Callable,
+    ClassVar,
+    Final,
+    Literal,
+    Optional,
+    Sequence,
+    Tuple,
+)
 
 import numpy as np
 import pandas_ta as pta
@@ -19,6 +28,11 @@ from scipy.stats import t
 from technical.pivots_points import pivots_points
 
 from Utils import (
+    DEFAULTS_EXTREMA_SMOOTHING,
+    DEFAULTS_EXTREMA_WEIGHTING,
+    NORMALIZATION_TYPES,
+    SMOOTHING_METHODS,
+    WEIGHT_STRATEGIES,
     TrendDirection,
     alligator,
     bottom_change_percent,
@@ -29,6 +43,7 @@ from Utils import (
     get_callable_sha256,
     get_distance,
     get_label_defaults,
+    get_weighted_extrema,
     get_zl_ma_fn,
     non_zero_diff,
     price_retracement_percent,
@@ -88,7 +103,7 @@ class QuickAdapterV3(IStrategy):
     _TRADING_MODES: Final[tuple[TradingMode, ...]] = ("spot", "margin", "futures")
 
     def version(self) -> str:
-        return "3.3.171"
+        return "3.3.172"
 
     timeframe = "5m"
 
@@ -108,7 +123,7 @@ class QuickAdapterV3(IStrategy):
         "lookback_period": 0,
         "decay_ratio": 0.5,
         "min_natr_ratio_percent": 0.0095,
-        "max_natr_ratio_percent": 0.25,
+        "max_natr_ratio_percent": 0.2,
     }
 
     position_adjustment_enable = True
@@ -590,7 +605,7 @@ class QuickAdapterV3(IStrategy):
         pair = str(metadata.get("pair"))
         label_period_candles = self.get_label_period_candles(pair)
         label_natr_ratio = self.get_label_natr_ratio(pair)
-        pivots_indices, _, pivots_directions, _ = zigzag(
+        pivots_indices, _, pivots_directions, pivots_thresholds = zigzag(
             dataframe,
             natr_period=label_period_candles,
             natr_ratio=label_natr_ratio,
@@ -614,11 +629,79 @@ class QuickAdapterV3(IStrategy):
             logger.info(
                 f"{pair}: labeled {len(pivots_indices)} extrema (label_period={QuickAdapterV3.td_format(label_period)} / {label_period_candles=} / {label_natr_ratio=:.2f})"
             )
+
+        extrema_smoothing_config = self.freqai_info.get("extrema_smoothing", {})
+        if not isinstance(extrema_smoothing_config, dict):
+            extrema_smoothing_config = {}
+
+        smoothing_method = str(
+            extrema_smoothing_config.get("method", DEFAULTS_EXTREMA_SMOOTHING["method"])
+        )
+        if smoothing_method not in SMOOTHING_METHODS:
+            logger.warning(
+                f"{pair}: invalid extrema_smoothing method '{smoothing_method}', using default '{SMOOTHING_METHODS[0]}'"
+            )
+            smoothing_method = SMOOTHING_METHODS[0]
+        smoothing_window = int(
+            extrema_smoothing_config.get("window", DEFAULTS_EXTREMA_SMOOTHING["window"])
+        )
+        smoothing_beta = float(
+            extrema_smoothing_config.get("beta", DEFAULTS_EXTREMA_SMOOTHING["beta"])
+        )
+
+        extrema_weighting_config = self.freqai_info.get("extrema_weighting", {})
+        if not isinstance(extrema_weighting_config, dict):
+            extrema_weighting_config = {}
+
+        weighting_strategy = str(
+            extrema_weighting_config.get(
+                "strategy", DEFAULTS_EXTREMA_WEIGHTING["strategy"]
+            )
+        )
+        if weighting_strategy not in WEIGHT_STRATEGIES:
+            logger.warning(
+                f"{pair}: invalid extrema_weighting strategy '{weighting_strategy}', using default '{WEIGHT_STRATEGIES[0]}'"
+            )
+            weighting_strategy = WEIGHT_STRATEGIES[0]
+        weighting_normalization = str(
+            extrema_weighting_config.get(
+                "normalization", DEFAULTS_EXTREMA_WEIGHTING["normalization"]
+            )
+        )
+        if weighting_normalization not in NORMALIZATION_TYPES:
+            logger.warning(
+                f"{pair}: invalid extrema_weighting normalization '{weighting_normalization}', using default '{NORMALIZATION_TYPES[0]}'"
+            )
+            weighting_normalization = NORMALIZATION_TYPES[0]
+        weighting_gamma = extrema_weighting_config.get(
+            "gamma", DEFAULTS_EXTREMA_WEIGHTING["gamma"]
+        )
+        if (
+            not isinstance(weighting_gamma, (int, float))
+            or not np.isfinite(weighting_gamma)
+            or not (0 < float(weighting_gamma) <= 10.0)
+        ):
+            logger.warning(
+                f"{pair}: invalid extrema_weighting gamma {weighting_gamma}, must be a finite number in (0, 10], using default 1.0"
+            )
+            weighting_gamma = 1.0
+        else:
+            weighting_gamma = float(weighting_gamma)
+
+        weighted_extrema, _ = get_weighted_extrema(
+            extrema=dataframe[EXTREMA_COLUMN],
+            indices=pivots_indices,
+            weights=np.array(pivots_thresholds),
+            strategy=weighting_strategy,
+            normalization=weighting_normalization,
+            gamma=weighting_gamma,
+        )
+
         dataframe[EXTREMA_COLUMN] = smooth_extrema(
-            dataframe[EXTREMA_COLUMN],
-            str(self.freqai_info.get("extrema_smoothing", "gaussian")),
-            int(self.freqai_info.get("extrema_smoothing_window", 5)),
-            float(self.freqai_info.get("extrema_smoothing_beta", 8.0)),
+            weighted_extrema,
+            smoothing_method,
+            smoothing_window,
+            smoothing_beta,
         )
         if debug:
             extrema = dataframe[EXTREMA_COLUMN]
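A hedged usage sketch of the labeling pipeline wired up above, run on synthetic data; it assumes the patched `Utils.py` (introduced in the next diff) is importable and that the pivot thresholds returned by `zigzag` are plain floats:

```python
import numpy as np
import pandas as pd

from Utils import get_weighted_extrema, smooth_extrema

# Synthetic extrema column: zeros everywhere except at a few labeled pivots.
extrema = pd.Series(0.0, index=range(100))
pivot_indices = [10, 35, 60, 85]
extrema.loc[pivot_indices] = [-1.0, 1.0, -1.0, 1.0]
# Per-pivot thresholds (e.g. NATR-based) act as the raw weights.
pivot_thresholds = np.array([0.012, 0.020, 0.008, 0.031])

weighted, weights = get_weighted_extrema(
    extrema=extrema,
    indices=pivot_indices,
    weights=pivot_thresholds,
    strategy="pivot_threshold",
    normalization="minmax",
    gamma=1.0,
)
# Weighting happens first, smoothing second, matching the strategy code above.
smoothed = smooth_extrema(weighted, method="gaussian", window=5, beta=8.0)
```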
diff --git a/quickadapter/user_data/strategies/Utils.py b/quickadapter/user_data/strategies/Utils.py
index bc4661e..5149bd4 100644
--- a/quickadapter/user_data/strategies/Utils.py
+++ b/quickadapter/user_data/strategies/Utils.py
@@ -17,6 +17,31 @@ from technical import qtpylib
 
 T = TypeVar("T", pd.Series, float)
 
+WEIGHT_STRATEGIES = ("none", "pivot_threshold")
+WeightStrategy = Literal["none", "pivot_threshold"]
+
+NORMALIZATION_TYPES = ("minmax", "l1", "none")
+NormalizationType = Literal["minmax", "l1", "none"]
+
+SMOOTHING_METHODS = ("gaussian", "kaiser", "triang", "smm", "sma")
+SmoothingKernel = Literal["gaussian", "kaiser", "triang"]
+SmoothingMethod = Literal["gaussian", "kaiser", "triang", "smm", "sma"]
+
+
+DEFAULTS_EXTREMA_SMOOTHING = {
+    "method": SMOOTHING_METHODS[0],  # "gaussian"
+    "window": 5,
+    "beta": 8.0,
+}
+
+DEFAULTS_EXTREMA_WEIGHTING = {
+    "normalization": NORMALIZATION_TYPES[0],  # "minmax"
+    "gamma": 1.0,
+    "strategy": WEIGHT_STRATEGIES[0],  # "none"
+}
+
+DEFAULT_EXTREMA_WEIGHT = 1.0
+
 
 def get_distance(p1: T, p2: T) -> T:
     return abs(p1 - p2)
@@ -49,15 +74,15 @@ def get_gaussian_std(window: int) -> float:
 @lru_cache(maxsize=8)
 def _calculate_coeffs(
     window: int,
-    win_type: Literal["gaussian", "kaiser", "triang"],
+    win_type: SmoothingKernel,
     std: float,
     beta: float,
 ) -> NDArray[np.floating]:
-    if win_type == "gaussian":
+    if win_type == SMOOTHING_METHODS[0]:  # "gaussian"
         coeffs = sp.signal.windows.gaussian(M=window, std=std, sym=True)
-    elif win_type == "kaiser":
+    elif win_type == SMOOTHING_METHODS[1]:  # "kaiser"
         coeffs = sp.signal.windows.kaiser(M=window, beta=beta, sym=True)
-    elif win_type == "triang":
+    elif win_type == SMOOTHING_METHODS[2]:  # "triang"
         coeffs = sp.signal.windows.triang(M=window, sym=True)
     else:
         raise ValueError(f"Unknown window type: {win_type}")
@@ -67,14 +92,14 @@ def _calculate_coeffs(
 def zero_phase(
     series: pd.Series,
     window: int,
-    win_type: Literal["gaussian", "kaiser", "triang"],
+    win_type: SmoothingKernel,
     std: float,
     beta: float,
 ) -> pd.Series:
     if len(series) == 0:
         return series
     if len(series) < window:
-        raise ValueError("Series length must be greater than or equal to window size")
+        return series
     values = series.to_numpy()
     b = _calculate_coeffs(window=window, win_type=win_type, std=std, beta=beta)
     a = 1.0
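For reference, a minimal standalone sketch of the zero-phase smoothing idea behind `zero_phase` (forward-backward FIR filtering with normalized window coefficients); the helper below is illustrative only, not the project's implementation, and the Gaussian parameters are assumptions:

```python
import pandas as pd
import scipy.signal as sps


def zero_phase_gaussian(series: pd.Series, window: int = 5, std: float = 1.0) -> pd.Series:
    # Symmetric Gaussian FIR taps, normalized so the filter has unit DC gain.
    coeffs = sps.windows.gaussian(M=window, std=std, sym=True)
    coeffs = coeffs / coeffs.sum()
    if len(series) < window:
        # Mirror the patched guard: too-short inputs pass through unchanged.
        return series
    # filtfilt runs the filter forward then backward, cancelling phase lag;
    # cap padlen so short-but-valid series do not trip filtfilt's edge padding.
    padlen = min(3 * window, len(series) - 1)
    smoothed = sps.filtfilt(coeffs, [1.0], series.to_numpy(), padlen=padlen)
    return pd.Series(smoothed, index=series.index)
```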
@@ -83,39 +108,163 @@
 def smooth_extrema(
-    series: pd.Series, method: str, window: int, beta: float
+    series: pd.Series,
+    method: SmoothingMethod = DEFAULTS_EXTREMA_SMOOTHING["method"],
+    window: int = DEFAULTS_EXTREMA_SMOOTHING["window"],
+    beta: float = DEFAULTS_EXTREMA_SMOOTHING["beta"],
 ) -> pd.Series:
+    if window < 3:
+        window = 3
+    if beta <= 0 or not np.isfinite(beta):
+        beta = 1.0
+
     std = get_gaussian_std(window)
     odd_window = get_odd_window(window)
-    smoothing_methods: dict[str, pd.Series] = {
-        "gaussian": zero_phase(
+
+    if method == SMOOTHING_METHODS[0]:  # "gaussian"
+        return zero_phase(
             series=series,
             window=window,
-            win_type="gaussian",
+            win_type=SMOOTHING_METHODS[0],
             std=std,
             beta=beta,
-        ),
-        "kaiser": zero_phase(
+        )
+    elif method == SMOOTHING_METHODS[1]:  # "kaiser"
+        return zero_phase(
             series=series,
             window=window,
-            win_type="kaiser",
+            win_type=SMOOTHING_METHODS[1],
             std=std,
             beta=beta,
-        ),
-        "triang": zero_phase(
+        )
+    elif method == SMOOTHING_METHODS[2]:  # "triang"
+        return zero_phase(
             series=series,
             window=window,
-            win_type="triang",
+            win_type=SMOOTHING_METHODS[2],
             std=std,
             beta=beta,
-        ),
-        "smm": series.rolling(window=odd_window, center=True).median(),
-        "sma": series.rolling(window=odd_window, center=True).mean(),
-    }
-    return smoothing_methods.get(
-        method,
-        smoothing_methods["gaussian"],
-    )
+        )
+    elif method == SMOOTHING_METHODS[3]:  # "smm" (Simple Moving Median)
+        return series.rolling(window=odd_window, center=True).median()
+    elif method == SMOOTHING_METHODS[4]:  # "sma" (Simple Moving Average)
+        return series.rolling(window=odd_window, center=True).mean()
+    else:
+        return zero_phase(
+            series=series,
+            window=window,
+            win_type=SMOOTHING_METHODS[0],
+            std=std,
+            beta=beta,
+        )
+
+
+def normalize_weights(
+    weights: NDArray[np.floating],
+    normalization: NormalizationType = DEFAULTS_EXTREMA_WEIGHTING["normalization"],
+    gamma: float = DEFAULTS_EXTREMA_WEIGHTING["gamma"],
+) -> NDArray[np.floating]:
+    if weights.size == 0:
+        return weights
+    if normalization == NORMALIZATION_TYPES[2]:  # "none"
+        return weights
+
+    if normalization == NORMALIZATION_TYPES[0]:  # "minmax"
+        weights = weights.astype(float, copy=False)
+        if np.isnan(weights).any():
+            return np.full_like(weights, 1.0, dtype=float)
+        w_min = np.min(weights)
+        w_max = np.max(weights)
+        if not (np.isfinite(w_min) and np.isfinite(w_max)):
+            return np.full_like(weights, 1.0, dtype=float)
+        w_range = w_max - w_min
+        if np.isclose(w_range, 0.0):
+            return np.full_like(weights, 1.0, dtype=float)
+        normalized_weights = (weights - w_min) / w_range
+        if np.isnan(normalized_weights).any():
+            return np.full_like(weights, 1.0, dtype=float)
+        if gamma != 1.0 and np.isfinite(gamma) and gamma > 0:
+            normalized_weights = np.power(normalized_weights, gamma)
+        if np.isnan(normalized_weights).any():
+            return np.full_like(weights, 1.0, dtype=float)
+        return normalized_weights
+
+    if normalization == NORMALIZATION_TYPES[1]:  # "l1"
+        weights_sum = np.sum(np.abs(weights))
+        if weights_sum <= 0 or not np.isfinite(weights_sum):
+            return np.full_like(weights, 1.0, dtype=float)
+        normalized_weights = weights / weights_sum
+        if np.isnan(normalized_weights).any():
+            return np.full_like(weights, 1.0, dtype=float)
+        if gamma != 1.0 and np.isfinite(gamma) and gamma > 0:
+            normalized_weights = np.power(normalized_weights, gamma)
+        if np.isnan(normalized_weights).any():
+            return np.full_like(weights, 1.0, dtype=float)
+        return normalized_weights
+
+    raise ValueError(f"Unknown normalization method: {normalization}")
+
+
+def calculate_extrema_weights(
+    series: pd.Series,
+    indices: list[int],
+    weights: NDArray[np.floating],
+    normalization: NormalizationType = DEFAULTS_EXTREMA_WEIGHTING["normalization"],
+    gamma: float = DEFAULTS_EXTREMA_WEIGHTING["gamma"],
+) -> pd.Series:
+    if len(indices) == 0 or len(weights) == 0:
+        return pd.Series(float(DEFAULT_EXTREMA_WEIGHT), index=series.index)
+
+    if len(indices) != len(weights):
+        raise ValueError(
+            f"Length mismatch: {len(indices)} indices but {len(weights)} weights"
+        )
+
+    normalized_weights = normalize_weights(weights, normalization, gamma)
+
+    if normalized_weights.size == 0 or np.allclose(
+        normalized_weights, normalized_weights[0]
+    ):
+        normalized_weights = np.full_like(
+            normalized_weights, float(DEFAULT_EXTREMA_WEIGHT)
+        )
+
+    weights_series = pd.Series(float(DEFAULT_EXTREMA_WEIGHT), index=series.index)
+    mask = pd.Index(indices).isin(series.index)
+    normalized_weights = normalized_weights[mask]
+    valid_indices = [idx for idx, is_valid in zip(indices, mask) if is_valid]
+    if len(valid_indices) > 0:
+        weights_series.loc[valid_indices] = normalized_weights
+    return weights_series
+
+
+def get_weighted_extrema(
+    extrema: pd.Series,
+    indices: list[int],
+    weights: NDArray[np.floating],
+    strategy: WeightStrategy = DEFAULTS_EXTREMA_WEIGHTING["strategy"],
+    normalization: NormalizationType = DEFAULTS_EXTREMA_WEIGHTING["normalization"],
+    gamma: float = DEFAULTS_EXTREMA_WEIGHTING["gamma"],
+) -> tuple[pd.Series, pd.Series]:
+    default_weights = pd.Series(float(DEFAULT_EXTREMA_WEIGHT), index=extrema.index)
+    if (
+        len(indices) == 0 or len(weights) == 0 or strategy == WEIGHT_STRATEGIES[0]
+    ):  # "none"
+        return extrema, default_weights
+
+    if strategy == WEIGHT_STRATEGIES[1]:  # "pivot_threshold"
+        extrema_weights = calculate_extrema_weights(
+            series=extrema,
+            indices=indices,
+            weights=weights,
+            normalization=normalization,
+            gamma=gamma,
+        )
+        if np.allclose(extrema_weights, DEFAULT_EXTREMA_WEIGHT):
+            return extrema, default_weights
+        return extrema * extrema_weights, extrema_weights
+
+    raise ValueError(f"Unknown weight strategy: {strategy}")
 
 
 def get_callable_sha256(fn: Callable) -> str:
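As a quick sanity check of the weighting helpers added above (assuming the patched `Utils.py` is importable): min-max normalization maps the raw pivot thresholds onto [0, 1], and `gamma` then reshapes the contrast:

```python
import numpy as np

from Utils import normalize_weights

raw = np.array([0.008, 0.012, 0.020, 0.031])  # e.g. per-pivot NATR thresholds

minmax = normalize_weights(raw, normalization="minmax", gamma=1.0)
# -> approximately [0.0, 0.174, 0.522, 1.0]

sharpened = normalize_weights(raw, normalization="minmax", gamma=2.0)
# gamma > 1 pushes mid-range weights down, emphasizing the largest moves
# -> approximately [0.0, 0.030, 0.272, 1.0]
```

Note that min-max maps the smallest threshold to 0, so with `pivot_threshold` weighting the weakest pivot's extrema value is scaled to zero before smoothing.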