return float(p) if (np.isfinite(p) and p > 0) else None
if not np.isfinite(p):
- msg = f"Invalid {ctx} {p!r}: must be finite"
+ msg = f"Invalid {ctx} value {p!r}: must be finite"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using default")
return None
if p <= 0:
- msg = f"Invalid {ctx} {p!r}: must be > 0"
+ msg = f"Invalid {ctx} value {p!r}: must be > 0"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using default")
return float(q) if (np.isfinite(q) and 0.0 <= q <= 1.0) else None
if not np.isfinite(q):
- msg = f"Invalid {ctx} {q!r}: must be finite"
+ msg = f"Invalid {ctx} value {q!r}: must be finite"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using default")
return None
if q < 0.0 or q > 1.0:
- msg = f"Invalid {ctx} {q!r}: must be in [0, 1]"
+ msg = f"Invalid {ctx} value {q!r}: must be in [0, 1]"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using default")
return float(p) if np.isfinite(p) else None
if not np.isfinite(p):
- msg = f"Invalid {ctx} {p!r}: must be finite"
+ msg = f"Invalid {ctx} value {p!r}: must be finite"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using default")
if mode == "none":
return None
- msg = f"Invalid {ctx} {metric!r}: does not support custom weights"
+ msg = f"Invalid {ctx} value {metric!r}: does not support custom weights"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using uniform weights")
return uniform_weights
if not np.all(np.isfinite(np_weights)):
- msg = f"Invalid {ctx}: contains non-finite values"
+ msg = f"Invalid {ctx} value: contains non-finite values"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using uniform weights")
return uniform_weights
if np.any(np_weights < 0):
- msg = f"Invalid {ctx}: contains negative values"
+ msg = f"Invalid {ctx} value: contains negative values"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using uniform weights")
weights_sum = np.nansum(np_weights)
if np.isclose(weights_sum, 0.0):
- msg = f"Invalid {ctx}: sum is zero"
+ msg = f"Invalid {ctx} value: sum is zero"
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using uniform weights")
if mode == "none":
return default
- msg = f"Invalid {ctx} {value!r}. Supported: {', '.join(valid_options)}"
+ msg = (
+ f"Invalid {ctx} value {value!r}: supported values are {', '.join(valid_options)}"
+ )
if mode == "raise":
raise ValueError(msg)
logger.warning(f"{msg}, using {default!r}")
)
if not isinstance(n_neighbors, int) or n_neighbors < 1:
raise ValueError(
- f"Invalid label_density_n_neighbors {n_neighbors!r}: must be int >= 1"
+ f"Invalid label_density_n_neighbors value {n_neighbors!r}: must be int >= 1"
)
config["n_neighbors"] = n_neighbors
label_frequency_candles = default_label_frequency_candles
else:
logger.warning(
- f"Invalid label_frequency_candles {label_frequency_candles!r}: only 'auto' is supported for string values, using default {default_label_frequency_candles!r}"
+ f"Invalid label_frequency_candles value {label_frequency_candles!r}: only 'auto' is supported for string values, using default {default_label_frequency_candles!r}"
)
label_frequency_candles = default_label_frequency_candles
elif isinstance(label_frequency_candles, (int, float)):
label_frequency_candles = int(label_frequency_candles)
else:
logger.warning(
- f"Invalid label_frequency_candles {label_frequency_candles!r}: must be in range [2, 10000], using default {default_label_frequency_candles!r}"
+ f"Invalid label_frequency_candles value {label_frequency_candles!r}: must be in range [2, 10000], using default {default_label_frequency_candles!r}"
)
label_frequency_candles = default_label_frequency_candles
else:
logger.warning(
- f"Invalid label_frequency_candles {label_frequency_candles!r}: expected int, float, or 'auto', using default {default_label_frequency_candles!r}"
+ f"Invalid label_frequency_candles value {label_frequency_candles!r}: expected int, float, or 'auto', using default {default_label_frequency_candles!r}"
)
label_frequency_candles = default_label_frequency_candles
params = self._optuna_label_params.get(pair)
else:
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}"
)
return params
self._optuna_label_params[pair] = params
else:
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}"
)
def get_optuna_value(self, pair: str, namespace: OptunaNamespace) -> float:
value = self._optuna_hp_value.get(pair)
else:
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[0]!r}" # "hp"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[0]!r}" # "hp"
)
return value
self._optuna_hp_value[pair] = value
else:
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[0]!r}" # "hp"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[0]!r}" # "hp"
)
def get_optuna_values(
values = self._optuna_label_values.get(pair)
else:
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label"
)
return values
self._optuna_label_values[pair] = values
else:
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label"
)
def init_optuna_label_candle_pool(self) -> None:
) -> None:
if namespace not in {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}: # "label"
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label"
)
if not callable(callback):
raise ValueError(
- f"Invalid callback {type(callback).__name__!r}: must be callable"
+ f"Invalid callback value {type(callback).__name__!r}: must be callable"
)
self._optuna_label_candles[pair] += 1
if pair not in self._optuna_label_incremented_pairs:
pred_minima = pred_extrema[pred_extrema < -eps]
else:
raise ValueError(
- f"Invalid extrema_selection {extrema_selection!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._EXTREMA_SELECTION_METHODS)}"
+ f"Invalid extrema_selection value {extrema_selection!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._EXTREMA_SELECTION_METHODS)}"
)
return pred_minima, pred_maxima
keep_extrema_fraction: float = 1.0,
) -> tuple[float, float]:
if alpha < 0:
- raise ValueError(f"Invalid alpha {alpha!r}: must be >= 0")
+ raise ValueError(f"Invalid alpha value {alpha!r}: must be >= 0")
pred_minima, pred_maxima = QuickAdapterRegressorV3.get_pred_min_max(
pred_extrema, extrema_selection, keep_extrema_fraction
)
threshold_func = getattr(skimage.filters, f"threshold_{method}")
except AttributeError:
raise ValueError(
- f"Invalid skimage threshold method {method!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._SKIMAGE_THRESHOLD_METHODS)}"
+ f"Invalid skimage threshold method value {method!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._SKIMAGE_THRESHOLD_METHODS)}"
)
min_func = QuickAdapterRegressorV3.apply_skimage_threshold
)
raise ValueError(
- f"Invalid distance_metric {distance_metric!r} for {QuickAdapterRegressorV3._DISTANCE_METHODS[0]}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METRICS)}"
+ f"Invalid distance_metric value {distance_metric!r} for {QuickAdapterRegressorV3._DISTANCE_METHODS[0]}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METRICS)}"
)
@staticmethod
)
else:
raise ValueError(
- f"Invalid distance_metric {distance_metric!r} for {QuickAdapterRegressorV3._DISTANCE_METHODS[1]}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METRICS)}"
+ f"Invalid distance_metric value {distance_metric!r} for {QuickAdapterRegressorV3._DISTANCE_METHODS[1]}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METRICS)}"
)
denominator = dist_to_ideal + dist_to_anti_ideal
)
else:
raise ValueError(
- f"Invalid trial_selection_method {trial_selection_method!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}"
+ f"Invalid trial_selection_method value {trial_selection_method!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}"
)
min_score_position = np.nanargmin(scores)
)
else:
raise ValueError(
- f"Invalid selection_method {selection_method!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}"
+ f"Invalid selection_method value {selection_method!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}"
)
ordered_cluster_indices = np.argsort(cluster_center_scores)
)
else:
raise ValueError(
- f"Invalid selection_method {selection_method!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}"
+ f"Invalid selection_method value {selection_method!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._DISTANCE_METHODS)}"
)
best_medoid_score_position = np.nanargmin(medoid_scores)
best_medoid_index = medoid_indices[best_medoid_score_position]
else:
raise ValueError(
- f"Invalid cluster_method {cluster_method!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._CLUSTER_METHODS)}"
+ f"Invalid cluster_method value {cluster_method!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._CLUSTER_METHODS)}"
)
@staticmethod
return np.nanmax(neighbor_distances, axis=1)
else:
raise ValueError(
- f"Invalid aggregation {aggregation!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._DENSITY_AGGREGATIONS)}"
+ f"Invalid aggregation value {aggregation!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._DENSITY_AGGREGATIONS)}"
)
@staticmethod
not in QuickAdapterRegressorV3._density_aggregations_set()
):
raise ValueError(
- f"Invalid aggregation in label_config {knn_aggregation!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._DENSITY_AGGREGATIONS)}"
+ f"Invalid aggregation value in label_config {knn_aggregation!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._DENSITY_AGGREGATIONS)}"
)
knn_aggregation_param = label_config["aggregation_param"]
return QuickAdapterRegressorV3._knn_based_selection(
)
raise ValueError(
- f"Invalid label_method {selection_method!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._SELECTION_METHODS)}"
+ f"Invalid label_method value {selection_method!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._SELECTION_METHODS)}"
)
def _get_multi_objective_study_best_trial(
) -> Optional[optuna.trial.FrozenTrial]:
if namespace not in {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}: # "label"
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {QuickAdapterRegressorV3._OPTUNA_NAMESPACES[1]}" # "label"
)
n_objectives = len(study.directions)
if n_objectives < 2:
)
else:
raise ValueError(
- f"Invalid optuna storage_backend {storage_backend!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_STORAGE_BACKENDS)}"
+ f"Invalid optuna storage_backend value {storage_backend!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_STORAGE_BACKENDS)}"
)
return storage
)
else:
raise ValueError(
- f"Invalid optuna sampler {sampler!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_SAMPLERS)}"
+ f"Invalid optuna sampler value {sampler!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_SAMPLERS)}"
)
@lru_cache(maxsize=8)
)
else:
raise ValueError(
- f"Invalid namespace {namespace!r}. "
- f"Supported: {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}"
+ f"Invalid namespace value {namespace!r}: "
+ f"supported values are {', '.join(QuickAdapterRegressorV3._OPTUNA_NAMESPACES)}"
)
def optuna_create_study(
samplers, sampler = self.optuna_samplers_by_namespace(namespace)
if sampler not in set(samplers):
raise ValueError(
- f"Invalid optuna {namespace} sampler {sampler!r}. "
- f"Supported: {', '.join(samplers)}"
+ f"Invalid optuna {namespace} sampler value {sampler!r}: "
+ f"supported values are {', '.join(samplers)}"
)
try:
scaler_attr = self._STANDARDIZATION_SCALERS.get(method)
if scaler_attr is None:
raise ValueError(
- f"Invalid standardization {method!r}. "
- f"Supported: {', '.join(STANDARDIZATION_TYPES)}"
+ f"Invalid standardization value {method!r}: "
+ f"supported values are {', '.join(STANDARDIZATION_TYPES)}"
)
scaler = getattr(self, scaler_attr, None)
if scaler is None:
scaler_attr = self._NORMALIZATION_SCALERS.get(method)
if scaler_attr is None:
raise ValueError(
- f"Invalid normalization {method!r}. "
- f"Supported: {', '.join(NORMALIZATION_TYPES)}"
+ f"Invalid normalization value {method!r}: "
+ f"supported values are {', '.join(NORMALIZATION_TYPES)}"
)
scaler = getattr(self, scaler_attr, None)
if scaler is None:
scaler_attr = self._STANDARDIZATION_SCALERS.get(method)
if scaler_attr is None:
raise ValueError(
- f"Invalid standardization {method!r}. "
- f"Supported: {', '.join(STANDARDIZATION_TYPES)}"
+ f"Invalid standardization value {method!r}: "
+ f"supported values are {', '.join(STANDARDIZATION_TYPES)}"
)
scaler = getattr(self, scaler_attr, None)
if scaler is None:
scaler_attr = self._NORMALIZATION_SCALERS.get(method)
if scaler_attr is None:
raise ValueError(
- f"Invalid normalization {method!r}. "
- f"Supported: {', '.join(NORMALIZATION_TYPES)}"
+ f"Invalid normalization value {method!r}: "
+ f"supported values are {', '.join(NORMALIZATION_TYPES)}"
)
scaler = getattr(self, scaler_attr, None)
if scaler is None:
return
raise ValueError(
- f"Invalid standardization {method!r}. Supported: {', '.join(STANDARDIZATION_TYPES)}"
+ f"Invalid standardization value {method!r}: supported values are {', '.join(STANDARDIZATION_TYPES)}"
)
def _fit_normalization(self, values: NDArray[np.floating]) -> None:
return
raise ValueError(
- f"Invalid normalization {method!r}. Supported: {', '.join(NORMALIZATION_TYPES)}"
+ f"Invalid normalization value {method!r}: supported values are {', '.join(NORMALIZATION_TYPES)}"
)
def fit(
method = extrema_smoothing.get("method", DEFAULTS_EXTREMA_SMOOTHING["method"])
if method not in set(SMOOTHING_METHODS):
logger.warning(
- f"Invalid extrema_smoothing method {method!r}, supported: {', '.join(SMOOTHING_METHODS)}, using default {SMOOTHING_METHODS[0]!r}"
+ f"Invalid extrema_smoothing method value {method!r}: supported values are {', '.join(SMOOTHING_METHODS)}, using default {SMOOTHING_METHODS[0]!r}"
)
method = SMOOTHING_METHODS[0]
)
if not isinstance(window_candles, int) or window_candles < 3:
logger.warning(
- f"Invalid extrema_smoothing window_candles {window_candles!r}: must be an integer >= 3, using default {DEFAULTS_EXTREMA_SMOOTHING['window_candles']!r}"
+ f"Invalid extrema_smoothing window_candles value {window_candles!r}: must be an integer >= 3, using default {DEFAULTS_EXTREMA_SMOOTHING['window_candles']!r}"
)
window_candles = int(DEFAULTS_EXTREMA_SMOOTHING["window_candles"])
beta = extrema_smoothing.get("beta", DEFAULTS_EXTREMA_SMOOTHING["beta"])
if not isinstance(beta, (int, float)) or not np.isfinite(beta) or beta <= 0:
logger.warning(
- f"Invalid extrema_smoothing beta {beta!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_SMOOTHING['beta']!r}"
+ f"Invalid extrema_smoothing beta value {beta!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_SMOOTHING['beta']!r}"
)
beta = DEFAULTS_EXTREMA_SMOOTHING["beta"]
)
if not isinstance(polyorder, int) or polyorder < 1:
logger.warning(
- f"Invalid extrema_smoothing polyorder {polyorder!r}: must be an integer >= 1, using default {DEFAULTS_EXTREMA_SMOOTHING['polyorder']!r}"
+ f"Invalid extrema_smoothing polyorder value {polyorder!r}: must be an integer >= 1, using default {DEFAULTS_EXTREMA_SMOOTHING['polyorder']!r}"
)
polyorder = DEFAULTS_EXTREMA_SMOOTHING["polyorder"]
mode = str(extrema_smoothing.get("mode", DEFAULTS_EXTREMA_SMOOTHING["mode"]))
if mode not in set(SMOOTHING_MODES):
logger.warning(
- f"Invalid extrema_smoothing mode {mode!r}, supported: {', '.join(SMOOTHING_MODES)}, using default {SMOOTHING_MODES[0]!r}"
+ f"Invalid extrema_smoothing mode value {mode!r}: supported values are {', '.join(SMOOTHING_MODES)}, using default {SMOOTHING_MODES[0]!r}"
)
mode = SMOOTHING_MODES[0]
sigma = extrema_smoothing.get("sigma", DEFAULTS_EXTREMA_SMOOTHING["sigma"])
if not isinstance(sigma, (int, float)) or sigma <= 0 or not np.isfinite(sigma):
logger.warning(
- f"Invalid extrema_smoothing sigma {sigma!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_SMOOTHING['sigma']!r}"
+ f"Invalid extrema_smoothing sigma value {sigma!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_SMOOTHING['sigma']!r}"
)
sigma = DEFAULTS_EXTREMA_SMOOTHING["sigma"]
)
if trade_price_target_method not in set(TRADE_PRICE_TARGETS):
logger.warning(
- f"Invalid trade_price_target_method {trade_price_target_method!r}. "
- f"Supported: {', '.join(TRADE_PRICE_TARGETS)}. "
- f"Using default {TRADE_PRICE_TARGETS[0]!r}"
+ f"Invalid trade_price_target_method value {trade_price_target_method!r}: "
+ f"supported values are {', '.join(TRADE_PRICE_TARGETS)}, "
+ f"using default {TRADE_PRICE_TARGETS[0]!r}"
)
trade_price_target_method = TRADE_PRICE_TARGETS[0]
return str(trade_price_target_method)
if not isinstance(lookback_period_candles, int) or lookback_period_candles < 0:
logger.warning(
- f"Invalid reversal_confirmation lookback_period_candles {lookback_period_candles!r}: must be >= 0, using default {QuickAdapterV3.default_reversal_confirmation['lookback_period_candles']!r}"
+ f"Invalid reversal_confirmation lookback_period_candles value {lookback_period_candles!r}: must be >= 0, using default {QuickAdapterV3.default_reversal_confirmation['lookback_period_candles']!r}"
)
lookback_period_candles = QuickAdapterV3.default_reversal_confirmation[
"lookback_period_candles"
0.0 < decay_fraction <= 1.0
):
logger.warning(
- f"Invalid reversal_confirmation decay_fraction {decay_fraction!r}: must be in range (0, 1], using default {QuickAdapterV3.default_reversal_confirmation['decay_fraction']!r}"
+ f"Invalid reversal_confirmation decay_fraction value {decay_fraction!r}: must be in range (0, 1], using default {QuickAdapterV3.default_reversal_confirmation['decay_fraction']!r}"
)
decay_fraction = QuickAdapterV3.default_reversal_confirmation[
"decay_fraction"
def get_label_natr_multiplier_fraction(self, pair: str, fraction: float) -> float:
if not isinstance(fraction, float) or not (0.0 <= fraction <= 1.0):
raise ValueError(
- f"Invalid fraction {fraction!r}: must be a float in range [0, 1]"
+ f"Invalid fraction value {fraction!r}: must be a float in range [0, 1]"
)
return self.get_label_natr_multiplier(pair) * fraction
return pattern.format(**duration)
except (KeyError, ValueError) as e:
raise ValueError(
- f"Invalid pattern {pattern!r}: failed to format with {e!r}"
+ f"Invalid pattern value {pattern!r}: failed to format with {e!r}"
)
def set_freqai_targets(
)
if trade_price_target_method_fn is None:
raise ValueError(
- f"Invalid trade_price_target_method {self.trade_price_target_method!r}. "
- f"Supported: {', '.join(TRADE_PRICE_TARGETS)}"
+ f"Invalid trade_price_target_method value {self.trade_price_target_method!r}: "
+ f"supported values are {', '.join(TRADE_PRICE_TARGETS)}"
)
return trade_price_target_method_fn()
) -> Optional[float]:
if not (0.0 <= natr_multiplier_fraction <= 1.0):
raise ValueError(
- f"Invalid natr_multiplier_fraction {natr_multiplier_fraction!r}: must be in range [0, 1]"
+ f"Invalid natr_multiplier_fraction value {natr_multiplier_fraction!r}: must be in range [0, 1]"
)
trade_duration_candles = self.get_trade_duration_candles(df, trade)
if not QuickAdapterV3.is_trade_duration_valid(trade_duration_candles):
) -> Optional[float]:
if not (0.0 <= natr_multiplier_fraction <= 1.0):
raise ValueError(
- f"Invalid natr_multiplier_fraction {natr_multiplier_fraction!r}: must be in range [0, 1]"
+ f"Invalid natr_multiplier_fraction value {natr_multiplier_fraction!r}: must be in range [0, 1]"
)
trade_duration_candles = self.get_trade_duration_candles(df, trade)
if not QuickAdapterV3.is_trade_duration_valid(trade_duration_candles):
callback: Callable[[], None],
) -> None:
if not callable(callback):
- raise ValueError(f"Invalid callback {callback!r}: must be callable")
+ raise ValueError(f"Invalid callback value {callback!r}: must be callable")
timestamp = int(current_time.timestamp())
candle_duration_secs = max(1, int(self._candle_duration_secs))
candle_start_secs = (timestamp // candle_duration_secs) * candle_duration_secs
current_exit_profit: float,
**kwargs,
) -> Optional[float] | tuple[Optional[float], Optional[str]]:
+ pair = trade.pair
if trade.has_open_orders:
return None
return None
df, _ = self.dp.get_analyzed_dataframe(
- pair=trade.pair, timeframe=self.config.get("timeframe")
+ pair=pair, timeframe=self.config.get("timeframe")
)
if df.empty:
return None
)
if not trade_partial_exit:
self.throttle_callback(
- pair=trade.pair,
+ pair=pair,
current_time=current_time,
callback=lambda: logger.info(
- f"[{trade.pair}] Trade {trade.trade_direction} stage {trade_exit_stage} | "
+ f"[{pair}] Trade {trade.trade_direction} stage {trade_exit_stage} | "
f"Take Profit: {format_number(trade_take_profit_price)}, Rate: {format_number(current_rate)}"
),
)
initial_trade_partial_stake_amount = trade_partial_stake_amount
trade_partial_stake_amount = trade.stake_amount - min_stake
logger.info(
- f"[{trade.pair}] Trade {trade.trade_direction} stage {trade_exit_stage} | "
+ f"[{pair}] Trade {trade.trade_direction} stage {trade_exit_stage} | "
f"Partial stake amount adjusted from {format_number(initial_trade_partial_stake_amount)} to {format_number(trade_partial_stake_amount)} to respect min_stake {format_number(min_stake)}"
)
return (
)
else:
raise ValueError(
- f"Invalid interpolation_direction {interpolation_direction!r}. "
- f"Supported: {', '.join(QuickAdapterV3._INTERPOLATION_DIRECTIONS)}"
+ f"Invalid interpolation_direction value {interpolation_direction!r}: "
+ f"supported values are {', '.join(QuickAdapterV3._INTERPOLATION_DIRECTIONS)}"
)
candle_deviation = (
candle_label_natr_value / 100.0
candle_threshold = base_price * (1 - current_deviation)
else:
raise ValueError(
- f"Invalid side {side!r}, supported: {', '.join(QuickAdapterV3._TRADE_DIRECTIONS)}"
+ f"Invalid side value {side!r}: supported values are {', '.join(QuickAdapterV3._TRADE_DIRECTIONS)}"
)
self._candle_threshold_cache[cache_key] = candle_threshold
return self._candle_threshold_cache[cache_key]
return False
else:
raise ValueError(
- f"Invalid trading_mode {trading_mode!r}. "
- f"Supported: {', '.join(QuickAdapterV3._TRADING_MODES)}"
+ f"Invalid trading_mode value {trading_mode!r}: "
+ f"supported values are {', '.join(QuickAdapterV3._TRADING_MODES)}"
)
def leverage(
strategy = extrema_weighting.get("strategy", DEFAULTS_EXTREMA_WEIGHTING["strategy"])
if strategy not in set(WEIGHT_STRATEGIES):
logger.warning(
- f"Invalid extrema_weighting strategy {strategy!r}, supported: {', '.join(WEIGHT_STRATEGIES)}, using default {WEIGHT_STRATEGIES[0]!r}"
+ f"Invalid extrema_weighting strategy value {strategy!r}: supported values are {', '.join(WEIGHT_STRATEGIES)}, using default {WEIGHT_STRATEGIES[0]!r}"
)
strategy = WEIGHT_STRATEGIES[0]
metric_coefficients = extrema_weighting.get(
)
if not isinstance(metric_coefficients, dict):
logger.warning(
- f"Invalid extrema_weighting metric_coefficients {metric_coefficients!r}: must be a mapping, using default {DEFAULTS_EXTREMA_WEIGHTING['metric_coefficients']!r}"
+ f"Invalid extrema_weighting metric_coefficients value {metric_coefficients!r}: must be a mapping, using default {DEFAULTS_EXTREMA_WEIGHTING['metric_coefficients']!r}"
)
metric_coefficients = DEFAULTS_EXTREMA_WEIGHTING["metric_coefficients"]
elif invalid_keys := set(metric_coefficients.keys()) - set(COMBINED_METRICS):
)
if aggregation not in set(COMBINED_AGGREGATIONS):
logger.warning(
- f"Invalid extrema_weighting aggregation {aggregation!r}, supported: {', '.join(COMBINED_AGGREGATIONS)}, using default {DEFAULTS_EXTREMA_WEIGHTING['aggregation']!r}"
+ f"Invalid extrema_weighting aggregation value {aggregation!r}: supported values are {', '.join(COMBINED_AGGREGATIONS)}, using default {DEFAULTS_EXTREMA_WEIGHTING['aggregation']!r}"
)
aggregation = DEFAULTS_EXTREMA_WEIGHTING["aggregation"]
)
if standardization not in set(STANDARDIZATION_TYPES):
logger.warning(
- f"Invalid extrema_weighting standardization {standardization!r}, supported: {', '.join(STANDARDIZATION_TYPES)}, using default {STANDARDIZATION_TYPES[0]!r}"
+ f"Invalid extrema_weighting standardization value {standardization!r}: supported values are {', '.join(STANDARDIZATION_TYPES)}, using default {STANDARDIZATION_TYPES[0]!r}"
)
standardization = STANDARDIZATION_TYPES[0]
or robust_quantiles[0] >= robust_quantiles[1]
):
logger.warning(
- f"Invalid extrema_weighting robust_quantiles {robust_quantiles!r}: must be (q1, q3) with 0 <= q1 < q3 <= 1, using default {DEFAULTS_EXTREMA_WEIGHTING['robust_quantiles']!r}"
+ f"Invalid extrema_weighting robust_quantiles value {robust_quantiles!r}: must be (q1, q3) with 0 <= q1 < q3 <= 1, using default {DEFAULTS_EXTREMA_WEIGHTING['robust_quantiles']!r}"
)
robust_quantiles = DEFAULTS_EXTREMA_WEIGHTING["robust_quantiles"]
else:
or mmad_scaling_factor <= 0
):
logger.warning(
- f"Invalid extrema_weighting mmad_scaling_factor {mmad_scaling_factor!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['mmad_scaling_factor']!r}"
+ f"Invalid extrema_weighting mmad_scaling_factor value {mmad_scaling_factor!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['mmad_scaling_factor']!r}"
)
mmad_scaling_factor = DEFAULTS_EXTREMA_WEIGHTING["mmad_scaling_factor"]
)
if normalization not in set(NORMALIZATION_TYPES):
logger.warning(
- f"Invalid extrema_weighting normalization {normalization!r}, supported: {', '.join(NORMALIZATION_TYPES)}, using default {NORMALIZATION_TYPES[0]!r}"
+ f"Invalid extrema_weighting normalization value {normalization!r}: supported values are {', '.join(NORMALIZATION_TYPES)}, using default {NORMALIZATION_TYPES[0]!r}"
)
normalization = NORMALIZATION_TYPES[0]
or minmax_range[0] >= minmax_range[1]
):
logger.warning(
- f"Invalid extrema_weighting minmax_range {minmax_range!r}: must be (min, max) with min < max, using default {DEFAULTS_EXTREMA_WEIGHTING['minmax_range']!r}"
+ f"Invalid extrema_weighting minmax_range value {minmax_range!r}: must be (min, max) with min < max, using default {DEFAULTS_EXTREMA_WEIGHTING['minmax_range']!r}"
)
minmax_range = DEFAULTS_EXTREMA_WEIGHTING["minmax_range"]
else:
or sigmoid_scale <= 0
):
logger.warning(
- f"Invalid extrema_weighting sigmoid_scale {sigmoid_scale!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['sigmoid_scale']!r}"
+ f"Invalid extrema_weighting sigmoid_scale value {sigmoid_scale!r}: must be a finite number > 0, using default {DEFAULTS_EXTREMA_WEIGHTING['sigmoid_scale']!r}"
)
sigmoid_scale = DEFAULTS_EXTREMA_WEIGHTING["sigmoid_scale"]
or not (0 < gamma <= 10.0)
):
logger.warning(
- f"Invalid extrema_weighting gamma {gamma!r}: must be in range (0, 10], using default {DEFAULTS_EXTREMA_WEIGHTING['gamma']!r}"
+ f"Invalid extrema_weighting gamma value {gamma!r}: must be in range (0, 10], using default {DEFAULTS_EXTREMA_WEIGHTING['gamma']!r}"
)
gamma = DEFAULTS_EXTREMA_WEIGHTING["gamma"]
@lru_cache(maxsize=8)
def get_odd_window(window: int) -> int:
if window < 1:
- raise ValueError(f"Invalid window {window!r}: must be > 0")
+ raise ValueError(f"Invalid window value {window!r}: must be > 0")
return window if window % 2 == 1 else window + 1
coeffs = sp.signal.windows.triang(M=window, sym=True)
else:
raise ValueError(
- f"Invalid window type {win_type!r}. "
- f"Supported: {', '.join(SMOOTHING_KERNELS)}"
+ f"Invalid window type value {win_type!r}: "
+ f"supported values are {', '.join(SMOOTHING_KERNELS)}"
)
return coeffs / np.sum(coeffs)
if len(indices) != weights.size:
raise ValueError(
- f"Invalid indices/weights: length mismatch, got {len(indices)} indices but {weights.size} weights"
+ f"Invalid indices/weights values: length mismatch, got {len(indices)} indices but {weights.size} weights"
)
weights_array = np.full(n_extrema, default_weight, dtype=float)
)
else:
raise ValueError(
- f"Invalid aggregation {aggregation!r}. Supported: {', '.join(COMBINED_AGGREGATIONS)}"
+ f"Invalid aggregation value {aggregation!r}: supported values are {', '.join(COMBINED_AGGREGATIONS)}"
)
else:
raise ValueError(
- f"Invalid extrema weighting strategy {strategy!r}. "
- f"Supported: {', '.join(WEIGHT_STRATEGIES)}"
+ f"Invalid extrema weighting strategy value {strategy!r}: "
+ f"supported values are {', '.join(WEIGHT_STRATEGIES)}"
)
weights = _impute_weights(
def get_callable_sha256(fn: Callable[..., Any]) -> str:
if not callable(fn):
- raise ValueError(f"Invalid fn {type(fn).__name__!r}: must be callable")
+ raise ValueError(f"Invalid fn value {type(fn).__name__!r}: must be callable")
code = getattr(fn, "__code__", None)
if code is None and isinstance(fn, functools.partial):
fn = fn.func
code = getattr(fn.__call__, "__code__", None)
if code is None:
raise ValueError(
- f"Invalid fn: unable to retrieve code object, got {type(fn).__name__!r}"
+ f"Invalid fn value: unable to retrieve code object, got {type(fn).__name__!r}"
)
return hashlib.sha256(code.co_code).hexdigest()
:return: The top change percentage series
"""
if period < 1:
- raise ValueError(f"Invalid period {period!r}: must be >= 1")
+ raise ValueError(f"Invalid period value {period!r}: must be >= 1")
previous_close_top = (
dataframe.get("close").rolling(period, min_periods=period).max().shift(1)
:return: The bottom change percentage series
"""
if period < 1:
- raise ValueError(f"Invalid period {period!r}: must be >= 1")
+ raise ValueError(f"Invalid period value {period!r}: must be >= 1")
previous_close_bottom = (
dataframe.get("close").rolling(period, min_periods=period).min().shift(1)
:return: Retracement percentage series
"""
if period < 1:
- raise ValueError(f"Invalid period {period!r}: must be >= 1")
+ raise ValueError(f"Invalid period value {period!r}: must be >= 1")
previous_close_low = (
dataframe.get("close").rolling(period, min_periods=period).min().shift(1)
) -> float:
"""Original fractal dimension computation implementation per Ehlers' paper."""
if period % 2 != 0:
- raise ValueError(f"Invalid period {period!r}: must be even")
+ raise ValueError(f"Invalid period value {period!r}: must be even")
half_period = period // 2
Original FRAMA implementation per Ehlers' paper with optional zero lag.
"""
if period % 2 != 0:
- raise ValueError(f"Invalid period {period!r}: must be even")
+ raise ValueError(f"Invalid period value {period!r}: must be even")
n = len(df)
https://www.sierrachart.com/index.php?page=doc/StudiesReference.php&ID=173&Name=Moving_Average_-_Smoothed
"""
if period <= 0:
- raise ValueError(f"Invalid period {period!r}: must be > 0")
+ raise ValueError(f"Invalid period value {period!r}: must be > 0")
n = len(series)
if n < period:
return pd.Series(index=series.index, dtype=float)
)
else:
raise ValueError(
- f"Invalid regressor {regressor!r}. Supported: {', '.join(REGRESSORS)}"
+ f"Invalid regressor value {regressor!r}: supported values are {', '.join(REGRESSORS)}"
)
return model
) -> dict[str, Any]:
if regressor not in set(REGRESSORS):
raise ValueError(
- f"Invalid regressor {regressor!r}. Supported: {', '.join(REGRESSORS)}"
+ f"Invalid regressor value {regressor!r}: supported values are {', '.join(REGRESSORS)}"
)
if not isinstance(space_fraction, (int, float)) or not (
0.0 <= space_fraction <= 1.0
else:
raise ValueError(
- f"Invalid regressor {regressor!r}. Supported: {', '.join(REGRESSORS)}"
+ f"Invalid regressor value {regressor!r}: supported values are {', '.join(REGRESSORS)}"
)
@lru_cache(maxsize=128)
def largest_divisor_to_step(integer: int, step: int) -> Optional[int]:
if not isinstance(integer, int) or integer <= 0:
- raise ValueError(f"Invalid integer {integer!r}: must be a positive integer")
+ raise ValueError(
+ f"Invalid integer value {integer!r}: must be a positive integer"
+ )
if not isinstance(step, int) or step <= 0:
- raise ValueError(f"Invalid step {step!r}: must be a positive integer")
+ raise ValueError(f"Invalid step value {step!r}: must be a positive integer")
if step == 1 or integer % step == 0:
return integer
if not isinstance(value, (int, float)):
raise ValueError(f"Invalid value {value!r}: must be an integer or float")
if not isinstance(step, int) or step <= 0:
- raise ValueError(f"Invalid step {step!r}: must be a positive integer")
+ raise ValueError(f"Invalid step value {step!r}: must be a positive integer")
if isinstance(value, (int, np.integer)):
q, r = divmod(value, step)
twice_r = r * 2
if not isinstance(value, (int, float)):
raise ValueError(f"Invalid value {value!r}: must be an integer or float")
if not isinstance(step, int) or step <= 0:
- raise ValueError(f"Invalid step {step!r}: must be a positive integer")
+ raise ValueError(f"Invalid step value {step!r}: must be a positive integer")
if isinstance(value, (int, np.integer)):
return int(-(-int(value) // step) * step)
if not np.isfinite(value):
if not isinstance(value, (int, float)):
raise ValueError(f"Invalid value {value!r}: must be an integer or float")
if not isinstance(step, int) or step <= 0:
- raise ValueError(f"Invalid step {step!r}: must be a positive integer")
+ raise ValueError(f"Invalid step value {step!r}: must be a positive integer")
if isinstance(value, (int, np.integer)):
return int((int(value) // step) * step)
if not np.isfinite(value):