From: Jérôme Benoit Date: Mon, 5 Jan 2026 12:13:14 +0000 (+0100) Subject: refactor(quickadapter): harmonize log and error messages across codebase X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=6423d4a09143156d9138a5a2f17fc308fe1e705d;p=freqai-strategies.git refactor(quickadapter): harmonize log and error messages across codebase Standardize error and log messages for consistency and clarity: - Standardize 29 ValueError messages with 'Invalid {param} value {value}' format - Harmonize 35 warning messages with fallback defaults ('using default'/'using uniform') - Replace {trade.pair} with {pair} in 33 log messages for consistent context - Ensure all 7 exception handlers use exc_info=True for complete stack traces - Normalize punctuation and capitalization in validation messages This improves debugging experience and maintains uniform message patterns throughout the QuickAdapter, Utils, and ExtremaWeightingTransformer modules. --- diff --git a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py index 34cf60c..6b67b31 100644 --- a/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py +++ b/quickadapter/user_data/freqaimodels/QuickAdapterRegressorV3.py @@ -341,14 +341,14 @@ class QuickAdapterRegressorV3(BaseRegressionModel): return float(p) if (np.isfinite(p) and p > 0) else None if not np.isfinite(p): - msg = f"Invalid {ctx} {p!r}: must be finite" + msg = f"Invalid {ctx} value {p!r}: must be finite" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using default") return None if p <= 0: - msg = f"Invalid {ctx} {p!r}: must be > 0" + msg = f"Invalid {ctx} value {p!r}: must be > 0" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using default") @@ -395,14 +395,14 @@ class QuickAdapterRegressorV3(BaseRegressionModel): return float(q) if (np.isfinite(q) and 0.0 <= q <= 1.0) else None if not np.isfinite(q): - msg = f"Invalid {ctx} {q!r}: must 
be finite" + msg = f"Invalid {ctx} value {q!r}: must be finite" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using default") return None if q < 0.0 or q > 1.0: - msg = f"Invalid {ctx} {q!r}: must be in [0, 1]" + msg = f"Invalid {ctx} value {q!r}: must be in [0, 1]" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using default") @@ -420,7 +420,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): return float(p) if np.isfinite(p) else None if not np.isfinite(p): - msg = f"Invalid {ctx} {p!r}: must be finite" + msg = f"Invalid {ctx} value {p!r}: must be finite" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using default") @@ -438,7 +438,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): if mode == "none": return None - msg = f"Invalid {ctx} {metric!r}: does not support custom weights" + msg = f"Invalid {ctx} value {metric!r}: does not support custom weights" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using uniform weights") @@ -498,14 +498,14 @@ class QuickAdapterRegressorV3(BaseRegressionModel): return uniform_weights if not np.all(np.isfinite(np_weights)): - msg = f"Invalid {ctx}: contains non-finite values" + msg = f"Invalid {ctx} value: contains non-finite values" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using uniform weights") return uniform_weights if np.any(np_weights < 0): - msg = f"Invalid {ctx}: contains negative values" + msg = f"Invalid {ctx} value: contains negative values" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using uniform weights") @@ -513,7 +513,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): weights_sum = np.nansum(np_weights) if np.isclose(weights_sum, 0.0): - msg = f"Invalid {ctx}: sum is zero" + msg = f"Invalid {ctx} value: sum is zero" if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using uniform weights") @@ -537,7 +537,9 @@ class QuickAdapterRegressorV3(BaseRegressionModel): 
if mode == "none": return default - msg = f"Invalid {ctx} {value!r}. Supported: {', '.join(valid_options)}" + msg = ( + f"Invalid {ctx} {value!r}: supported values are {', '.join(valid_options)}" + ) if mode == "raise": raise ValueError(msg) logger.warning(f"{msg}, using {default!r}") @@ -684,7 +686,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) if not isinstance(n_neighbors, int) or n_neighbors < 1: raise ValueError( - f"Invalid label_density_n_neighbors {n_neighbors!r}: must be int >= 1" + f"Invalid label_density_n_neighbors value {n_neighbors!r}: must be int >= 1" ) config["n_neighbors"] = n_neighbors @@ -821,7 +823,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): label_frequency_candles = default_label_frequency_candles else: logger.warning( - f"Invalid label_frequency_candles {label_frequency_candles!r}: only 'auto' is supported for string values, using default {default_label_frequency_candles!r}" + f"Invalid label_frequency_candles value {label_frequency_candles!r}: only 'auto' is supported for string values, using default {default_label_frequency_candles!r}" ) label_frequency_candles = default_label_frequency_candles elif isinstance(label_frequency_candles, (int, float)): @@ -829,12 +831,12 @@ class QuickAdapterRegressorV3(BaseRegressionModel): label_frequency_candles = int(label_frequency_candles) else: logger.warning( - f"Invalid label_frequency_candles {label_frequency_candles!r}: must be in range [2, 10000], using default {default_label_frequency_candles!r}" + f"Invalid label_frequency_candles value {label_frequency_candles!r}: must be in range [2, 10000], using default {default_label_frequency_candles!r}" ) label_frequency_candles = default_label_frequency_candles else: logger.warning( - f"Invalid label_frequency_candles {label_frequency_candles!r}: expected int, float, or 'auto', using default {default_label_frequency_candles!r}" + f"Invalid label_frequency_candles value {label_frequency_candles!r}: expected int, float, or 'auto', 
using default {default_label_frequency_candles!r}" ) label_frequency_candles = default_label_frequency_candles @@ -1174,8 +1176,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): params = self._optuna_label_params.get(pair) else: raise ValueError( - f"Invalid namespace {namespace!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}" + f"Invalid namespace value {namespace!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}" ) return params @@ -1188,8 +1190,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): self._optuna_label_params[pair] = params else: raise ValueError( - f"Invalid namespace {namespace!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}" + f"Invalid namespace value {namespace!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}" ) def get_optuna_value(self, pair: str, namespace: OptunaNamespace) -> float: @@ -1197,8 +1199,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): value = self._optuna_hp_value.get(pair) else: raise ValueError( - f"Invalid namespace {namespace!r}. " - f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[0]!r}" # "hp" + f"Invalid namespace value {namespace!r}: " + f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[0]!r}" # "hp" ) return value @@ -1209,8 +1211,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): self._optuna_hp_value[pair] = value else: raise ValueError( - f"Invalid namespace {namespace!r}. " - f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[0]!r}" # "hp" + f"Invalid namespace value {namespace!r}: " + f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[0]!r}" # "hp" ) def get_optuna_values( @@ -1220,8 +1222,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): values = self._optuna_label_values.get(pair) else: raise ValueError( - f"Invalid namespace {namespace!r}. 
" - f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label" + f"Invalid namespace value {namespace!r}: " + f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label" ) return values @@ -1232,8 +1234,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): self._optuna_label_values[pair] = values else: raise ValueError( - f"Invalid namespace {namespace!r}. " - f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label" + f"Invalid namespace value {namespace!r}: " + f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label" ) def init_optuna_label_candle_pool(self) -> None: @@ -1402,12 +1404,12 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) -> None: if namespace not in {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}: # "label" raise ValueError( - f"Invalid namespace {namespace!r}. " - f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label" + f"Invalid namespace value {namespace!r}: " + f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label" ) if not callable(callback): raise ValueError( - f"Invalid callback {type(callback).__name__!r}: must be callable" + f"Invalid callback value {type(callback).__name__!r}: must be callable" ) self._optuna_label_candles[pair] += 1 if pair not in self._optuna_label_incremented_pairs: @@ -1729,8 +1731,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): pred_minima = pred_extrema[pred_extrema < -eps] else: raise ValueError( - f"Invalid extrema_selection {extrema_selection!r}. 
" - f"Supported: {', '.join(QuickAdapterRegressorV3._EXTREMA_SELECTION_METHODS)}" + f"Invalid extrema_selection value {extrema_selection!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._EXTREMA_SELECTION_METHODS)}" ) return pred_minima, pred_maxima @@ -1771,7 +1773,7 @@ class QuickAdapterRegressorV3(BaseRegressionModel): keep_extrema_fraction: float = 1.0, ) -> tuple[float, float]: if alpha < 0: - raise ValueError(f"Invalid alpha {alpha!r}: must be >= 0") + raise ValueError(f"Invalid alpha value {alpha!r}: must be >= 0") pred_minima, pred_maxima = QuickAdapterRegressorV3.get_pred_min_max( pred_extrema, extrema_selection, keep_extrema_fraction ) @@ -1824,8 +1826,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): threshold_func = getattr(skimage.filters, f"threshold_{method}") except AttributeError: raise ValueError( - f"Invalid skimage threshold method {method!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._SKIMAGE_THRESHOLD_METHODS)}" + f"Invalid skimage threshold method value {method!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._SKIMAGE_THRESHOLD_METHODS)}" ) min_func = QuickAdapterRegressorV3.apply_skimage_threshold @@ -2002,8 +2004,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) raise ValueError( - f"Invalid distance_metric {distance_metric!r} for {QuickAdapterRegressorV3._DISTANCE_METHODS[0]}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METRICS)}" + f"Invalid distance_metric value {distance_metric!r} for {QuickAdapterRegressorV3._DISTANCE_METHODS[0]}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METRICS)}" ) @staticmethod @@ -2166,8 +2168,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) else: raise ValueError( - f"Invalid distance_metric {distance_metric!r} for {QuickAdapterRegressorV3._DISTANCE_METHODS[1]}. 
" - f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METRICS)}" + f"Invalid distance_metric value {distance_metric!r} for {QuickAdapterRegressorV3._DISTANCE_METHODS[1]}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METRICS)}" ) denominator = dist_to_ideal + dist_to_anti_ideal @@ -2247,8 +2249,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) else: raise ValueError( - f"Invalid trial_selection_method {trial_selection_method!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}" + f"Invalid trial_selection_method value {trial_selection_method!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}" ) min_score_position = np.nanargmin(scores) @@ -2322,8 +2324,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) else: raise ValueError( - f"Invalid selection_method {selection_method!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}" + f"Invalid selection_method value {selection_method!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}" ) ordered_cluster_indices = np.argsort(cluster_center_scores) @@ -2381,8 +2383,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) else: raise ValueError( - f"Invalid selection_method {selection_method!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}" + f"Invalid selection_method value {selection_method!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}" ) best_medoid_score_position = np.nanargmin(medoid_scores) best_medoid_index = medoid_indices[best_medoid_score_position] @@ -2407,8 +2409,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): else: raise ValueError( - f"Invalid cluster_method {cluster_method!r}. 
" - f"Supported: {', '.join(QuickAdapterRegressorV3._CLUSTER_METHODS)}" + f"Invalid cluster_method value {cluster_method!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._CLUSTER_METHODS)}" ) @staticmethod @@ -2487,8 +2489,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): return np.nanmax(neighbor_distances, axis=1) else: raise ValueError( - f"Invalid aggregation {aggregation!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._DENSITY_AGGREGATIONS)}" + f"Invalid aggregation value {aggregation!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._DENSITY_AGGREGATIONS)}" ) @staticmethod @@ -2700,8 +2702,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): not in QuickAdapterRegressorV3._density_aggregations_set() ): raise ValueError( - f"Invalid aggregation in label_config {knn_aggregation!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._DENSITY_AGGREGATIONS)}" + f"Invalid aggregation value in label_config {knn_aggregation!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._DENSITY_AGGREGATIONS)}" ) knn_aggregation_param = label_config["aggregation_param"] return QuickAdapterRegressorV3._knn_based_selection( @@ -2725,8 +2727,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) raise ValueError( - f"Invalid label_method {selection_method!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._SELECTION_METHODS)}" + f"Invalid label_method value {selection_method!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._SELECTION_METHODS)}" ) def _get_multi_objective_study_best_trial( @@ -2734,8 +2736,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) -> Optional[optuna.trial.FrozenTrial]: if namespace not in {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}: # "label" raise ValueError( - f"Invalid namespace {namespace!r}. 
" - f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label" + f"Invalid namespace value {namespace!r}: " + f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label" ) n_objectives = len(study.directions) if n_objectives < 2: @@ -2926,8 +2928,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) else: raise ValueError( - f"Invalid optuna storage_backend {storage_backend!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_STORAGE_BACKENDS)}" + f"Invalid optuna storage_backend value {storage_backend!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_STORAGE_BACKENDS)}" ) return storage @@ -2969,8 +2971,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) else: raise ValueError( - f"Invalid optuna sampler {sampler!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_SAMPLERS)}" + f"Invalid optuna sampler value {sampler!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_SAMPLERS)}" ) @lru_cache(maxsize=8) @@ -2993,8 +2995,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): ) else: raise ValueError( - f"Invalid namespace {namespace!r}. " - f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}" + f"Invalid namespace value {namespace!r}: " + f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}" ) def optuna_create_study( @@ -3037,8 +3039,8 @@ class QuickAdapterRegressorV3(BaseRegressionModel): samplers, sampler = self.optuna_samplers_by_namespace(namespace) if sampler not in set(samplers): raise ValueError( - f"Invalid optuna {namespace} sampler {sampler!r}. 
" - f"Supported: {', '.join(samplers)}" + f"Invalid optuna {namespace} sampler value {sampler!r}: " + f"supported values are {', '.join(samplers)}" ) try: diff --git a/quickadapter/user_data/strategies/ExtremaWeightingTransformer.py b/quickadapter/user_data/strategies/ExtremaWeightingTransformer.py index c4ba318..695d7b4 100644 --- a/quickadapter/user_data/strategies/ExtremaWeightingTransformer.py +++ b/quickadapter/user_data/strategies/ExtremaWeightingTransformer.py @@ -182,8 +182,8 @@ class ExtremaWeightingTransformer(BaseTransform): scaler_attr = self._STANDARDIZATION_SCALERS.get(method) if scaler_attr is None: raise ValueError( - f"Invalid standardization {method!r}. " - f"Supported: {', '.join(STANDARDIZATION_TYPES)}" + f"Invalid standardization value {method!r}: " + f"supported values are {', '.join(STANDARDIZATION_TYPES)}" ) scaler = getattr(self, scaler_attr, None) if scaler is None: @@ -204,8 +204,8 @@ class ExtremaWeightingTransformer(BaseTransform): scaler_attr = self._NORMALIZATION_SCALERS.get(method) if scaler_attr is None: raise ValueError( - f"Invalid normalization {method!r}. " - f"Supported: {', '.join(NORMALIZATION_TYPES)}" + f"Invalid normalization value {method!r}: " + f"supported values are {', '.join(NORMALIZATION_TYPES)}" ) scaler = getattr(self, scaler_attr, None) if scaler is None: @@ -238,8 +238,8 @@ class ExtremaWeightingTransformer(BaseTransform): scaler_attr = self._STANDARDIZATION_SCALERS.get(method) if scaler_attr is None: raise ValueError( - f"Invalid standardization {method!r}. " - f"Supported: {', '.join(STANDARDIZATION_TYPES)}" + f"Invalid standardization value {method!r}: " + f"supported values are {', '.join(STANDARDIZATION_TYPES)}" ) scaler = getattr(self, scaler_attr, None) if scaler is None: @@ -260,8 +260,8 @@ class ExtremaWeightingTransformer(BaseTransform): scaler_attr = self._NORMALIZATION_SCALERS.get(method) if scaler_attr is None: raise ValueError( - f"Invalid normalization {method!r}. 
" - f"Supported: {', '.join(NORMALIZATION_TYPES)}" + f"Invalid normalization value {method!r}: " + f"supported values are {', '.join(NORMALIZATION_TYPES)}" ) scaler = getattr(self, scaler_attr, None) if scaler is None: @@ -308,7 +308,7 @@ class ExtremaWeightingTransformer(BaseTransform): return raise ValueError( - f"Invalid standardization {method!r}. Supported: {', '.join(STANDARDIZATION_TYPES)}" + f"Invalid standardization value {method!r}: supported values are {', '.join(STANDARDIZATION_TYPES)}" ) def _fit_normalization(self, values: NDArray[np.floating]) -> None: @@ -329,7 +329,7 @@ class ExtremaWeightingTransformer(BaseTransform): return raise ValueError( - f"Invalid normalization {method!r}. Supported: {', '.join(NORMALIZATION_TYPES)}" + f"Invalid normalization value {method!r}: supported values are {', '.join(NORMALIZATION_TYPES)}" ) def fit( diff --git a/quickadapter/user_data/strategies/QuickAdapterV3.py b/quickadapter/user_data/strategies/QuickAdapterV3.py index 83b0dd6..7b2da16 100644 --- a/quickadapter/user_data/strategies/QuickAdapterV3.py +++ b/quickadapter/user_data/strategies/QuickAdapterV3.py @@ -308,7 +308,7 @@ class QuickAdapterV3(IStrategy): method = extrema_smoothing.get("method", DEFAULTS_EXTREMA_SMOOTHING["method"]) if method not in set(SMOOTHING_METHODS): logger.warning( - f"Invalid extrema_smoothing method {method!r}, supported: {', '.join(SMOOTHING_METHODS)}, using default {SMOOTHING_METHODS[0]!r}" + f"Invalid extrema_smoothing method value {method!r}: supported values are {', '.join(SMOOTHING_METHODS)}, using default {SMOOTHING_METHODS[0]!r}" ) method = SMOOTHING_METHODS[0] @@ -323,14 +323,14 @@ class QuickAdapterV3(IStrategy): ) if not isinstance(window_candles, int) or window_candles < 3: logger.warning( - f"Invalid extrema_smoothing window_candles {window_candles!r}: must be an integer >= 3, using default {DEFAULTS_EXTREMA_SMOOTHING['window_candles']!r}" + f"Invalid extrema_smoothing window_candles value {window_candles!r}: must be an 
integer >= 3, using default {DEFAULTS_EXTREMA_SMOOTHING['window_candles']!r}" ) window_candles = int(DEFAULTS_EXTREMA_SMOOTHING["window_candles"]) beta = extrema_smoothing.get("beta", DEFAULTS_EXTREMA_SMOOTHING["beta"]) if not isinstance(beta, (int, float)) or not np.isfinite(beta) or beta <= 0: logger.warning( - f"Invalid extrema_smoothing beta {beta!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_SMOOTHING['beta']!r}" + f"Invalid extrema_smoothing beta value {beta!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_SMOOTHING['beta']!r}" ) beta = DEFAULTS_EXTREMA_SMOOTHING["beta"] @@ -339,21 +339,21 @@ class QuickAdapterV3(IStrategy): ) if not isinstance(polyorder, int) or polyorder < 1: logger.warning( - f"Invalid extrema_smoothing polyorder {polyorder!r}: must be an integer >= 1, using default {DEFAULTS_EXTREMA_SMOOTHING['polyorder']!r}" + f"Invalid extrema_smoothing polyorder value {polyorder!r}: must be an integer >= 1, using default {DEFAULTS_EXTREMA_SMOOTHING['polyorder']!r}" ) polyorder = DEFAULTS_EXTREMA_SMOOTHING["polyorder"] mode = str(extrema_smoothing.get("mode", DEFAULTS_EXTREMA_SMOOTHING["mode"])) if mode not in set(SMOOTHING_MODES): logger.warning( - f"Invalid extrema_smoothing mode {mode!r}, supported: {', '.join(SMOOTHING_MODES)}, using default {SMOOTHING_MODES[0]!r}" + f"Invalid extrema_smoothing mode value {mode!r}: supported values are {', '.join(SMOOTHING_MODES)}, using default {SMOOTHING_MODES[0]!r}" ) mode = SMOOTHING_MODES[0] sigma = extrema_smoothing.get("sigma", DEFAULTS_EXTREMA_SMOOTHING["sigma"]) if not isinstance(sigma, (int, float)) or sigma <= 0 or not np.isfinite(sigma): logger.warning( - f"Invalid extrema_smoothing sigma {sigma!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_SMOOTHING['sigma']!r}" + f"Invalid extrema_smoothing sigma value {sigma!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_SMOOTHING['sigma']!r}" ) sigma = DEFAULTS_EXTREMA_SMOOTHING["sigma"] @@ 
-380,9 +380,9 @@ class QuickAdapterV3(IStrategy): ) if trade_price_target_method not in set(TRADE_PRICE_TARGETS): logger.warning( - f"Invalid trade_price_target_method {trade_price_target_method!r}. " - f"Supported: {', '.join(TRADE_PRICE_TARGETS)}. " - f"Using default {TRADE_PRICE_TARGETS[0]!r}" + f"Invalid trade_price_target_method value {trade_price_target_method!r}: " + f"supported values are {', '.join(TRADE_PRICE_TARGETS)}, " + f"using default {TRADE_PRICE_TARGETS[0]!r}" ) trade_price_target_method = TRADE_PRICE_TARGETS[0] return str(trade_price_target_method) @@ -437,7 +437,7 @@ class QuickAdapterV3(IStrategy): if not isinstance(lookback_period_candles, int) or lookback_period_candles < 0: logger.warning( - f"Invalid reversal_confirmation lookback_period_candles {lookback_period_candles!r}: must be >= 0, using default {QuickAdapterV3.default_reversal_confirmation['lookback_period_candles']!r}" + f"Invalid reversal_confirmation lookback_period_candles value {lookback_period_candles!r}: must be >= 0, using default {QuickAdapterV3.default_reversal_confirmation['lookback_period_candles']!r}" ) lookback_period_candles = QuickAdapterV3.default_reversal_confirmation[ "lookback_period_candles" @@ -447,7 +447,7 @@ class QuickAdapterV3(IStrategy): 0.0 < decay_fraction <= 1.0 ): logger.warning( - f"Invalid reversal_confirmation decay_fraction {decay_fraction!r}: must be in range (0, 1], using default {QuickAdapterV3.default_reversal_confirmation['decay_fraction']!r}" + f"Invalid reversal_confirmation decay_fraction value {decay_fraction!r}: must be in range (0, 1], using default {QuickAdapterV3.default_reversal_confirmation['decay_fraction']!r}" ) decay_fraction = QuickAdapterV3.default_reversal_confirmation[ "decay_fraction" @@ -841,7 +841,7 @@ class QuickAdapterV3(IStrategy): def get_label_natr_multiplier_fraction(self, pair: str, fraction: float) -> float: if not isinstance(fraction, float) or not (0.0 <= fraction <= 1.0): raise ValueError( - f"Invalid fraction 
{fraction!r}: must be a float in range [0, 1]" + f"Invalid fraction value {fraction!r}: must be a float in range [0, 1]" ) return self.get_label_natr_multiplier(pair) * fraction @@ -861,7 +861,7 @@ class QuickAdapterV3(IStrategy): return pattern.format(**duration) except (KeyError, ValueError) as e: raise ValueError( - f"Invalid pattern {pattern!r}: failed to format with {e!r}" + f"Invalid pattern value {pattern!r}: failed to format with {e!r}" ) def set_freqai_targets( @@ -1187,8 +1187,8 @@ class QuickAdapterV3(IStrategy): ) if trade_price_target_method_fn is None: raise ValueError( - f"Invalid trade_price_target_method {self.trade_price_target_method!r}. " - f"Supported: {', '.join(TRADE_PRICE_TARGETS)}" + f"Invalid trade_price_target_method value {self.trade_price_target_method!r}: " + f"supported values are {', '.join(TRADE_PRICE_TARGETS)}" ) return trade_price_target_method_fn() @@ -1217,7 +1217,7 @@ class QuickAdapterV3(IStrategy): ) -> Optional[float]: if not (0.0 <= natr_multiplier_fraction <= 1.0): raise ValueError( - f"Invalid natr_multiplier_fraction {natr_multiplier_fraction!r}: must be in range [0, 1]" + f"Invalid natr_multiplier_fraction value {natr_multiplier_fraction!r}: must be in range [0, 1]" ) trade_duration_candles = self.get_trade_duration_candles(df, trade) if not QuickAdapterV3.is_trade_duration_valid(trade_duration_candles): @@ -1246,7 +1246,7 @@ class QuickAdapterV3(IStrategy): ) -> Optional[float]: if not (0.0 <= natr_multiplier_fraction <= 1.0): raise ValueError( - f"Invalid natr_multiplier_fraction {natr_multiplier_fraction!r}: must be in range [0, 1]" + f"Invalid natr_multiplier_fraction value {natr_multiplier_fraction!r}: must be in range [0, 1]" ) trade_duration_candles = self.get_trade_duration_candles(df, trade) if not QuickAdapterV3.is_trade_duration_valid(trade_duration_candles): @@ -1270,7 +1270,7 @@ class QuickAdapterV3(IStrategy): callback: Callable[[], None], ) -> None: if not callable(callback): - raise ValueError(f"Invalid 
callback {callback!r}: must be callable") + raise ValueError(f"Invalid callback value {callback!r}: must be callable") timestamp = int(current_time.timestamp()) candle_duration_secs = max(1, int(self._candle_duration_secs)) candle_start_secs = (timestamp // candle_duration_secs) * candle_duration_secs @@ -1454,6 +1454,7 @@ class QuickAdapterV3(IStrategy): current_exit_profit: float, **kwargs, ) -> Optional[float] | tuple[Optional[float], Optional[str]]: + pair = trade.pair if trade.has_open_orders: return None @@ -1462,7 +1463,7 @@ class QuickAdapterV3(IStrategy): return None df, _ = self.dp.get_analyzed_dataframe( - pair=trade.pair, timeframe=self.config.get("timeframe") + pair=pair, timeframe=self.config.get("timeframe") ) if df.empty: return None @@ -1482,10 +1483,10 @@ class QuickAdapterV3(IStrategy): ) if not trade_partial_exit: self.throttle_callback( - pair=trade.pair, + pair=pair, current_time=current_time, callback=lambda: logger.info( - f"[{trade.pair}] Trade {trade.trade_direction} stage {trade_exit_stage} | " + f"[{pair}] Trade {trade.trade_direction} stage {trade_exit_stage} | " f"Take Profit: {format_number(trade_take_profit_price)}, Rate: {format_number(current_rate)}" ), ) @@ -1503,7 +1504,7 @@ class QuickAdapterV3(IStrategy): initial_trade_partial_stake_amount = trade_partial_stake_amount trade_partial_stake_amount = trade.stake_amount - min_stake logger.info( - f"[{trade.pair}] Trade {trade.trade_direction} stage {trade_exit_stage} | " + f"[{pair}] Trade {trade.trade_direction} stage {trade_exit_stage} | " f"Partial stake amount adjusted from {format_number(initial_trade_partial_stake_amount)} to {format_number(trade_partial_stake_amount)} to respect min_stake {format_number(min_stake)}" ) return ( @@ -1599,8 +1600,8 @@ class QuickAdapterV3(IStrategy): ) else: raise ValueError( - f"Invalid interpolation_direction {interpolation_direction!r}. 
" - f"Supported: {', '.join(QuickAdapterV3._INTERPOLATION_DIRECTIONS)}" + f"Invalid interpolation_direction value {interpolation_direction!r}: " + f"supported values are {', '.join(QuickAdapterV3._INTERPOLATION_DIRECTIONS)}" ) candle_deviation = ( candle_label_natr_value / 100.0 @@ -1673,7 +1674,7 @@ class QuickAdapterV3(IStrategy): candle_threshold = base_price * (1 - current_deviation) else: raise ValueError( - f"Invalid side {side!r}, supported: {', '.join(QuickAdapterV3._TRADE_DIRECTIONS)}" + f"Invalid side value {side!r}: supported values are {', '.join(QuickAdapterV3._TRADE_DIRECTIONS)}" ) self._candle_threshold_cache[cache_key] = candle_threshold return self._candle_threshold_cache[cache_key] @@ -2257,8 +2258,8 @@ class QuickAdapterV3(IStrategy): return False else: raise ValueError( - f"Invalid trading_mode {trading_mode!r}. " - f"Supported: {', '.join(QuickAdapterV3._TRADING_MODES)}" + f"Invalid trading_mode value {trading_mode!r}: " + f"supported values are {', '.join(QuickAdapterV3._TRADING_MODES)}" ) def leverage( diff --git a/quickadapter/user_data/strategies/Utils.py b/quickadapter/user_data/strategies/Utils.py index c342359..51d904f 100644 --- a/quickadapter/user_data/strategies/Utils.py +++ b/quickadapter/user_data/strategies/Utils.py @@ -108,7 +108,7 @@ def get_extrema_weighting_config( strategy = extrema_weighting.get("strategy", DEFAULTS_EXTREMA_WEIGHTING["strategy"]) if strategy not in set(WEIGHT_STRATEGIES): logger.warning( - f"Invalid extrema_weighting strategy {strategy!r}, supported: {', '.join(WEIGHT_STRATEGIES)}, using default {WEIGHT_STRATEGIES[0]!r}" + f"Invalid extrema_weighting strategy value {strategy!r}: supported values are {', '.join(WEIGHT_STRATEGIES)}, using default {WEIGHT_STRATEGIES[0]!r}" ) strategy = WEIGHT_STRATEGIES[0] metric_coefficients = extrema_weighting.get( @@ -116,7 +116,7 @@ def get_extrema_weighting_config( ) if not isinstance(metric_coefficients, dict): logger.warning( - f"Invalid extrema_weighting 
metric_coefficients {metric_coefficients!r}: must be a mapping, using default {DEFAULTS_EXTREMA_WEIGHTING['metric_coefficients']!r}" + f"Invalid extrema_weighting metric_coefficients value {metric_coefficients!r}: must be a mapping, using default {DEFAULTS_EXTREMA_WEIGHTING['metric_coefficients']!r}" ) metric_coefficients = DEFAULTS_EXTREMA_WEIGHTING["metric_coefficients"] elif invalid_keys := set(metric_coefficients.keys()) - set(COMBINED_METRICS): @@ -132,7 +132,7 @@ def get_extrema_weighting_config( ) if aggregation not in set(COMBINED_AGGREGATIONS): logger.warning( - f"Invalid extrema_weighting aggregation {aggregation!r}, supported: {', '.join(COMBINED_AGGREGATIONS)}, using default {DEFAULTS_EXTREMA_WEIGHTING['aggregation']!r}" + f"Invalid extrema_weighting aggregation value {aggregation!r}: supported values are {', '.join(COMBINED_AGGREGATIONS)}, using default {DEFAULTS_EXTREMA_WEIGHTING['aggregation']!r}" ) aggregation = DEFAULTS_EXTREMA_WEIGHTING["aggregation"] @@ -142,7 +142,7 @@ def get_extrema_weighting_config( ) if standardization not in set(STANDARDIZATION_TYPES): logger.warning( - f"Invalid extrema_weighting standardization {standardization!r}, supported: {', '.join(STANDARDIZATION_TYPES)}, using default {STANDARDIZATION_TYPES[0]!r}" + f"Invalid extrema_weighting standardization value {standardization!r}: supported values are {', '.join(STANDARDIZATION_TYPES)}, using default {STANDARDIZATION_TYPES[0]!r}" ) standardization = STANDARDIZATION_TYPES[0] @@ -159,7 +159,7 @@ def get_extrema_weighting_config( or robust_quantiles[0] >= robust_quantiles[1] ): logger.warning( - f"Invalid extrema_weighting robust_quantiles {robust_quantiles!r}: must be (q1, q3) with 0 <= q1 < q3 <= 1, using default {DEFAULTS_EXTREMA_WEIGHTING['robust_quantiles']!r}" + f"Invalid extrema_weighting robust_quantiles value {robust_quantiles!r}: must be (q1, q3) with 0 <= q1 < q3 <= 1, using default {DEFAULTS_EXTREMA_WEIGHTING['robust_quantiles']!r}" ) robust_quantiles = 
DEFAULTS_EXTREMA_WEIGHTING["robust_quantiles"] else: @@ -177,7 +177,7 @@ def get_extrema_weighting_config( or mmad_scaling_factor <= 0 ): logger.warning( - f"Invalid extrema_weighting mmad_scaling_factor {mmad_scaling_factor!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['mmad_scaling_factor']!r}" + f"Invalid extrema_weighting mmad_scaling_factor value {mmad_scaling_factor!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['mmad_scaling_factor']!r}" ) mmad_scaling_factor = DEFAULTS_EXTREMA_WEIGHTING["mmad_scaling_factor"] @@ -187,7 +187,7 @@ def get_extrema_weighting_config( ) if normalization not in set(NORMALIZATION_TYPES): logger.warning( - f"Invalid extrema_weighting normalization {normalization!r}, supported: {', '.join(NORMALIZATION_TYPES)}, using default {NORMALIZATION_TYPES[0]!r}" + f"Invalid extrema_weighting normalization value {normalization!r}: supported values are {', '.join(NORMALIZATION_TYPES)}, using default {NORMALIZATION_TYPES[0]!r}" ) normalization = NORMALIZATION_TYPES[0] @@ -212,7 +212,7 @@ def get_extrema_weighting_config( or minmax_range[0] >= minmax_range[1] ): logger.warning( - f"Invalid extrema_weighting minmax_range {minmax_range!r}: must be (min, max) with min < max, using default {DEFAULTS_EXTREMA_WEIGHTING['minmax_range']!r}" + f"Invalid extrema_weighting minmax_range value {minmax_range!r}: must be (min, max) with min < max, using default {DEFAULTS_EXTREMA_WEIGHTING['minmax_range']!r}" ) minmax_range = DEFAULTS_EXTREMA_WEIGHTING["minmax_range"] else: @@ -230,7 +230,7 @@ def get_extrema_weighting_config( or sigmoid_scale <= 0 ): logger.warning( - f"Invalid extrema_weighting sigmoid_scale {sigmoid_scale!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['sigmoid_scale']!r}" + f"Invalid extrema_weighting sigmoid_scale value {sigmoid_scale!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['sigmoid_scale']!r}" ) sigmoid_scale = 
DEFAULTS_EXTREMA_WEIGHTING["sigmoid_scale"] @@ -242,7 +242,7 @@ def get_extrema_weighting_config( or not (0 < gamma <= 10.0) ): logger.warning( - f"Invalid extrema_weighting gamma {gamma!r}: must be in range (0, 10], using default {DEFAULTS_EXTREMA_WEIGHTING['gamma']!r}" + f"Invalid extrema_weighting gamma value {gamma!r}: must be in range (0, 10], using default {DEFAULTS_EXTREMA_WEIGHTING['gamma']!r}" ) gamma = DEFAULTS_EXTREMA_WEIGHTING["gamma"] @@ -300,7 +300,7 @@ def non_zero_diff(s1: pd.Series, s2: pd.Series) -> pd.Series: @lru_cache(maxsize=8) def get_odd_window(window: int) -> int: if window < 1: - raise ValueError(f"Invalid window {window!r}: must be > 0") + raise ValueError(f"Invalid window value {window!r}: must be > 0") return window if window % 2 == 1 else window + 1 @@ -334,8 +334,8 @@ def _calculate_coeffs( coeffs = sp.signal.windows.triang(M=window, sym=True) else: raise ValueError( - f"Invalid window type {win_type!r}. " - f"Supported: {', '.join(SMOOTHING_KERNELS)}" + f"Invalid window type value {win_type!r}: " + f"supported values are {', '.join(SMOOTHING_KERNELS)}" ) return coeffs / np.sum(coeffs) @@ -486,7 +486,7 @@ def _build_weights_array( if len(indices) != weights.size: raise ValueError( - f"Invalid indices/weights: length mismatch, got {len(indices)} indices but {weights.size} weights" + f"Invalid indices/weights values: length mismatch, got {len(indices)} indices but {weights.size} weights" ) weights_array = np.full(n_extrema, default_weight, dtype=float) @@ -531,7 +531,7 @@ def _aggregate_metrics( ) else: raise ValueError( - f"Invalid aggregation {aggregation!r}. Supported: {', '.join(COMBINED_AGGREGATIONS)}" + f"Invalid aggregation value {aggregation!r}: supported values are {', '.join(COMBINED_AGGREGATIONS)}" ) @@ -634,8 +634,8 @@ def compute_extrema_weights( else: raise ValueError( - f"Invalid extrema weighting strategy {strategy!r}. 
" - f"Supported: {', '.join(WEIGHT_STRATEGIES)}" + f"Invalid extrema weighting strategy value {strategy!r}: " + f"supported values are {', '.join(WEIGHT_STRATEGIES)}" ) weights = _impute_weights( @@ -702,7 +702,7 @@ def get_weighted_extrema( def get_callable_sha256(fn: Callable[..., Any]) -> str: if not callable(fn): - raise ValueError(f"Invalid fn {type(fn).__name__!r}: must be callable") + raise ValueError(f"Invalid fn value {type(fn).__name__!r}: must be callable") code = getattr(fn, "__code__", None) if code is None and isinstance(fn, functools.partial): fn = fn.func @@ -715,7 +715,7 @@ def get_callable_sha256(fn: Callable[..., Any]) -> str: code = getattr(fn.__call__, "__code__", None) if code is None: raise ValueError( - f"Invalid fn: unable to retrieve code object, got {type(fn).__name__!r}" + f"Invalid fn value: unable to retrieve code object, got {type(fn).__name__!r}" ) return hashlib.sha256(code.co_code).hexdigest() @@ -775,7 +775,7 @@ def top_change_percent(dataframe: pd.DataFrame, period: int) -> pd.Series: :return: The top change percentage series """ if period < 1: - raise ValueError(f"Invalid period {period!r}: must be >= 1") + raise ValueError(f"Invalid period value {period!r}: must be >= 1") previous_close_top = ( dataframe.get("close").rolling(period, min_periods=period).max().shift(1) @@ -793,7 +793,7 @@ def bottom_change_percent(dataframe: pd.DataFrame, period: int) -> pd.Series: :return: The bottom change percentage series """ if period < 1: - raise ValueError(f"Invalid period {period!r}: must be >= 1") + raise ValueError(f"Invalid period value {period!r}: must be >= 1") previous_close_bottom = ( dataframe.get("close").rolling(period, min_periods=period).min().shift(1) @@ -812,7 +812,7 @@ def price_retracement_percent(dataframe: pd.DataFrame, period: int) -> pd.Series :return: Retracement percentage series """ if period < 1: - raise ValueError(f"Invalid period {period!r}: must be >= 1") + raise ValueError(f"Invalid period value {period!r}: 
must be >= 1") previous_close_low = ( dataframe.get("close").rolling(period, min_periods=period).min().shift(1) @@ -894,7 +894,7 @@ def _fractal_dimension( ) -> float: """Original fractal dimension computation implementation per Ehlers' paper.""" if period % 2 != 0: - raise ValueError(f"Invalid period {period!r}: must be even") + raise ValueError(f"Invalid period value {period!r}: must be even") half_period = period // 2 @@ -923,7 +923,7 @@ def frama(df: pd.DataFrame, period: int = 16, zero_lag: bool = False) -> pd.Seri Original FRAMA implementation per Ehlers' paper with optional zero lag. """ if period % 2 != 0: - raise ValueError(f"Invalid period {period!r}: must be even") + raise ValueError(f"Invalid period value {period!r}: must be even") n = len(df) @@ -965,7 +965,7 @@ def smma(series: pd.Series, period: int, zero_lag=False, offset=0) -> pd.Series: https://www.sierrachart.com/index.php?page=doc/StudiesReference.php&ID=173&Name=Moving_Average_-_Smoothed """ if period <= 0: - raise ValueError(f"Invalid period {period!r}: must be > 0") + raise ValueError(f"Invalid period value {period!r}: must be > 0") n = len(series) if n < period: return pd.Series(index=series.index, dtype=float) @@ -1755,7 +1755,7 @@ def fit_regressor( ) else: raise ValueError( - f"Invalid regressor {regressor!r}. Supported: {', '.join(REGRESSORS)}" + f"Invalid regressor value {regressor!r}: supported values are {', '.join(REGRESSORS)}" ) return model @@ -1806,7 +1806,7 @@ def get_optuna_study_model_parameters( ) -> dict[str, Any]: if regressor not in set(REGRESSORS): raise ValueError( - f"Invalid regressor {regressor!r}. Supported: {', '.join(REGRESSORS)}" + f"Invalid regressor value {regressor!r}: supported values are {', '.join(REGRESSORS)}" ) if not isinstance(space_fraction, (int, float)) or not ( 0.0 <= space_fraction <= 1.0 @@ -2159,16 +2159,18 @@ def get_optuna_study_model_parameters( else: raise ValueError( - f"Invalid regressor {regressor!r}. 
Supported: {', '.join(REGRESSORS)}" + f"Invalid regressor value {regressor!r}: supported values are {', '.join(REGRESSORS)}" ) @lru_cache(maxsize=128) def largest_divisor_to_step(integer: int, step: int) -> Optional[int]: if not isinstance(integer, int) or integer <= 0: - raise ValueError(f"Invalid integer {integer!r}: must be a positive integer") + raise ValueError( + f"Invalid integer value {integer!r}: must be a positive integer" + ) if not isinstance(step, int) or step <= 0: - raise ValueError(f"Invalid step {step!r}: must be a positive integer") + raise ValueError(f"Invalid step value {step!r}: must be a positive integer") if step == 1 or integer % step == 0: return integer @@ -2275,7 +2277,7 @@ def round_to_step(value: float | int, step: int) -> int: if not isinstance(value, (int, float)): raise ValueError(f"Invalid value {value!r}: must be an integer or float") if not isinstance(step, int) or step <= 0: - raise ValueError(f"Invalid step {step!r}: must be a positive integer") + raise ValueError(f"Invalid step value {step!r}: must be a positive integer") if isinstance(value, (int, np.integer)): q, r = divmod(value, step) twice_r = r * 2 @@ -2294,7 +2296,7 @@ def ceil_to_step(value: float | int, step: int) -> int: if not isinstance(value, (int, float)): raise ValueError(f"Invalid value {value!r}: must be an integer or float") if not isinstance(step, int) or step <= 0: - raise ValueError(f"Invalid step {step!r}: must be a positive integer") + raise ValueError(f"Invalid step value {step!r}: must be a positive integer") if isinstance(value, (int, np.integer)): return int(-(-int(value) // step) * step) if not np.isfinite(value): @@ -2307,7 +2309,7 @@ def floor_to_step(value: float | int, step: int) -> int: if not isinstance(value, (int, float)): raise ValueError(f"Invalid value {value!r}: must be an integer or float") if not isinstance(step, int) or step <= 0: - raise ValueError(f"Invalid step {step!r}: must be a positive integer") + raise ValueError(f"Invalid step 
value {step!r}: must be a positive integer") if isinstance(value, (int, np.integer)): return int((int(value) // step) * step) if not np.isfinite(value):