Piment Noir Git Repositories - freqai-strategies.git/commitdiff
feat(qav3): add more extrema weighting tunables
authorJérôme Benoit <jerome.benoit@piment-noir.org>
Fri, 21 Nov 2025 16:57:21 +0000 (17:57 +0100)
committerJérôme Benoit <jerome.benoit@piment-noir.org>
Fri, 21 Nov 2025 16:57:21 +0000 (17:57 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
README.md
quickadapter/user_data/strategies/QuickAdapterV3.py
quickadapter/user_data/strategies/Utils.py

index d2b5f806f1d59da6912677c9e7997c283bfef862..f124648e4a8ab46538d9e18cfb812a25f6386e56 100644 (file)
--- a/README.md
+++ b/README.md
@@ -60,6 +60,8 @@ docker compose up -d --build
 | freqai.extrema_weighting.normalization               | `minmax`          | enum {`minmax`,`zscore`,`l1`,`l2`,`robust`,`softmax`,`tanh`,`rank`,`none`}                                                       | Normalization method for weights.                                                                                                                                                                          |
 | freqai.extrema_weighting.gamma                       | 1.0               | float (0,10]                                                                                                                     | Contrast exponent applied after normalization (>1 emphasizes extremes, 0<gamma<1 softens).                                                                                                                 |
 | freqai.extrema_weighting.softmax_temperature         | 1.0               | float > 0                                                                                                                        | Temperature parameter for softmax normalization (lower values sharpen distribution, higher values flatten it).                                                                                             |
+| freqai.extrema_weighting.tanh_scale                  | 1.0               | float > 0                                                                                                                        | Scale factor applied to the z-scores before the tanh transformation in tanh normalization (higher values make the tanh curve steeper, lower values make it gentler).                                       |
+| freqai.extrema_weighting.tanh_gain                   | 0.5               | float > 0                                                                                                                        | Gain factor applied after the tanh transformation in tanh normalization, replacing the fixed 0.5 scaling; the resulting weights lie in the open interval (0, 2*gain).                                      |
 | freqai.extrema_weighting.robust_quantiles            | [0.25, 0.75]      | list[float] where 0 <= q_low < q_high <= 1                                                                                       | Quantile range for robust normalization.                                                                                                                                                                   |
 | freqai.extrema_weighting.rank_method                 | `average`         | enum {`average`,`min`,`max`,`dense`,`ordinal`}                                                                                   | Ranking method for rank normalization.                                                                                                                                                                     |
 | _Feature parameters_                                 |                   |                                                                                                                                  |                                                                                                                                                                                                            |
index 517cb58d719bd5f79f778b161262d92e7d817df4..72215485ba05d08530f8ce29a6dd7fd823c0b253 100644 (file)
@@ -616,7 +616,7 @@ class QuickAdapterV3(IStrategy):
             or not (0 < float(weighting_gamma) <= 10.0)
         ):
             logger.warning(
-                f"{pair}: invalid extrema_weighting gamma {weighting_gamma}, must be a finite number in (0, 10], using default 1.0"
+                f"{pair}: invalid extrema_weighting gamma {weighting_gamma}, must be a finite number in (0, 10], using default {DEFAULTS_EXTREMA_WEIGHTING['gamma']}"
             )
             weighting_gamma = DEFAULTS_EXTREMA_WEIGHTING["gamma"]
         else:
@@ -631,7 +631,7 @@ class QuickAdapterV3(IStrategy):
             or weighting_softmax_temperature <= 0
         ):
             logger.warning(
-                f"{pair}: invalid extrema_weighting softmax_temperature {weighting_softmax_temperature}, must be > 0, using default 1.0"
+                f"{pair}: invalid extrema_weighting softmax_temperature {weighting_softmax_temperature}, must be > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['softmax_temperature']}"
             )
             weighting_softmax_temperature = DEFAULTS_EXTREMA_WEIGHTING[
                 "softmax_temperature"
@@ -652,7 +652,7 @@ class QuickAdapterV3(IStrategy):
             or weighting_robust_quantiles[0] >= weighting_robust_quantiles[1]
         ):
             logger.warning(
-                f"{pair}: invalid extrema_weighting robust_quantiles {weighting_robust_quantiles}, must be (q_low, q_high) with 0 <= q_low < q_high <= 1, using default (0.25, 0.75)"
+                f"{pair}: invalid extrema_weighting robust_quantiles {weighting_robust_quantiles}, must be (q_low, q_high) with 0 <= q_low < q_high <= 1, using default {DEFAULTS_EXTREMA_WEIGHTING['robust_quantiles']}"
             )
             weighting_robust_quantiles = DEFAULTS_EXTREMA_WEIGHTING["robust_quantiles"]
         else:
@@ -672,11 +672,43 @@ class QuickAdapterV3(IStrategy):
             )
             weighting_rank_method = RANK_METHODS[0]
 
+        weighting_tanh_scale = extrema_weighting.get(
+            "tanh_scale", DEFAULTS_EXTREMA_WEIGHTING["tanh_scale"]
+        )
+        if (
+            not isinstance(weighting_tanh_scale, (int, float))
+            or not np.isfinite(weighting_tanh_scale)
+            or weighting_tanh_scale <= 0
+        ):
+            logger.warning(
+                f"{pair}: invalid extrema_weighting tanh_scale {weighting_tanh_scale}, must be > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['tanh_scale']}"
+            )
+            weighting_tanh_scale = DEFAULTS_EXTREMA_WEIGHTING["tanh_scale"]
+        else:
+            weighting_tanh_scale = float(weighting_tanh_scale)
+
+        weighting_tanh_gain = extrema_weighting.get(
+            "tanh_gain", DEFAULTS_EXTREMA_WEIGHTING["tanh_gain"]
+        )
+        if (
+            not isinstance(weighting_tanh_gain, (int, float))
+            or not np.isfinite(weighting_tanh_gain)
+            or weighting_tanh_gain <= 0
+        ):
+            logger.warning(
+                f"{pair}: invalid extrema_weighting tanh_gain {weighting_tanh_gain}, must be > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['tanh_gain']}"
+            )
+            weighting_tanh_gain = DEFAULTS_EXTREMA_WEIGHTING["tanh_gain"]
+        else:
+            weighting_tanh_gain = float(weighting_tanh_gain)
+
         return {
             "strategy": weighting_strategy,
             "normalization": weighting_normalization,
             "gamma": weighting_gamma,
             "softmax_temperature": weighting_softmax_temperature,
+            "tanh_scale": weighting_tanh_scale,
+            "tanh_gain": weighting_tanh_gain,
             "robust_quantiles": weighting_robust_quantiles,
             "rank_method": weighting_rank_method,
         }
@@ -763,6 +795,8 @@ class QuickAdapterV3(IStrategy):
             normalization=extrema_weighting_params["normalization"],
             gamma=extrema_weighting_params["gamma"],
             softmax_temperature=extrema_weighting_params["softmax_temperature"],
+            tanh_scale=extrema_weighting_params["tanh_scale"],
+            tanh_gain=extrema_weighting_params["tanh_gain"],
             robust_quantiles=extrema_weighting_params["robust_quantiles"],
             rank_method=extrema_weighting_params["rank_method"],
         )
index 20b86e891a0714d97a72420e612c251f390e736d..2f0ad92fa57150b43c4f60db3eb3d3e4e85364bc 100644 (file)
@@ -67,6 +67,8 @@ DEFAULTS_EXTREMA_WEIGHTING: Final[dict[str, Any]] = {
     "gamma": 1.0,
     "strategy": WEIGHT_STRATEGIES[0],  # "none"
     "softmax_temperature": 1.0,
+    "tanh_scale": 1.0,
+    "tanh_gain": 0.5,
     "robust_quantiles": (0.25, 0.75),
     "rank_method": RANK_METHODS[0],  # "average"
 }
@@ -300,13 +302,15 @@ def _normalize_softmax(
     return sp.special.softmax(weights)
 
 
-def _normalize_tanh(weights: NDArray[np.floating]) -> NDArray[np.floating]:
+def _normalize_tanh(
+    weights: NDArray[np.floating], scale: float = 1.0, gain: float = 0.5
+) -> NDArray[np.floating]:
     weights = weights.astype(float, copy=False)
     if np.isnan(weights).any():
         return np.full_like(weights, float(DEFAULT_EXTREMA_WEIGHT), dtype=float)
 
     z_scores = _normalize_zscore(weights, rescale_to_unit_range=False)
-    normalized_weights = 0.5 * (np.tanh(z_scores) + 1.0)
+    normalized_weights = gain * (np.tanh(scale * z_scores) + 1.0)
     return normalized_weights
 
 
@@ -331,6 +335,8 @@ def normalize_weights(
     normalization: NormalizationType = DEFAULTS_EXTREMA_WEIGHTING["normalization"],
     gamma: float = DEFAULTS_EXTREMA_WEIGHTING["gamma"],
     softmax_temperature: float = DEFAULTS_EXTREMA_WEIGHTING["softmax_temperature"],
+    tanh_scale: float = DEFAULTS_EXTREMA_WEIGHTING["tanh_scale"],
+    tanh_gain: float = DEFAULTS_EXTREMA_WEIGHTING["tanh_gain"],
     robust_quantiles: tuple[float, float] = DEFAULTS_EXTREMA_WEIGHTING[
         "robust_quantiles"
     ],
@@ -364,7 +370,7 @@ def normalize_weights(
         )
 
     elif normalization == NORMALIZATION_TYPES[6]:  # "tanh"
-        normalized_weights = _normalize_tanh(weights)
+        normalized_weights = _normalize_tanh(weights, scale=tanh_scale, gain=tanh_gain)
 
     elif normalization == NORMALIZATION_TYPES[7]:  # "rank"
         normalized_weights = _normalize_rank(weights, method=rank_method)
@@ -390,6 +396,8 @@ def calculate_extrema_weights(
     normalization: NormalizationType = DEFAULTS_EXTREMA_WEIGHTING["normalization"],
     gamma: float = DEFAULTS_EXTREMA_WEIGHTING["gamma"],
     softmax_temperature: float = DEFAULTS_EXTREMA_WEIGHTING["softmax_temperature"],
+    tanh_scale: float = DEFAULTS_EXTREMA_WEIGHTING["tanh_scale"],
+    tanh_gain: float = DEFAULTS_EXTREMA_WEIGHTING["tanh_gain"],
     robust_quantiles: tuple[float, float] = DEFAULTS_EXTREMA_WEIGHTING[
         "robust_quantiles"
     ],
@@ -408,6 +416,8 @@ def calculate_extrema_weights(
         normalization,
         gamma,
         softmax_temperature,
+        tanh_scale,
+        tanh_gain,
         robust_quantiles,
         rank_method,
     )
@@ -436,6 +446,8 @@ def get_weighted_extrema(
     normalization: NormalizationType = DEFAULTS_EXTREMA_WEIGHTING["normalization"],
     gamma: float = DEFAULTS_EXTREMA_WEIGHTING["gamma"],
     softmax_temperature: float = DEFAULTS_EXTREMA_WEIGHTING["softmax_temperature"],
+    tanh_scale: float = DEFAULTS_EXTREMA_WEIGHTING["tanh_scale"],
+    tanh_gain: float = DEFAULTS_EXTREMA_WEIGHTING["tanh_gain"],
     robust_quantiles: tuple[float, float] = DEFAULTS_EXTREMA_WEIGHTING[
         "robust_quantiles"
     ],
@@ -455,6 +467,8 @@ def get_weighted_extrema(
             normalization=normalization,
             gamma=gamma,
             softmax_temperature=softmax_temperature,
+            tanh_scale=tanh_scale,
+            tanh_gain=tanh_gain,
             robust_quantiles=robust_quantiles,
             rank_method=rank_method,
         )