Raises:
ValueError: If no trading pairs are configured
"""
- n_pairs = len(self.pairs)
- default_label_frequency_candles = max(2, 2 * n_pairs)
+ default_label_frequency_candles = max(2, 2 * len(self.pairs))
label_frequency_candles = self.config.get("feature_parameters", {}).get(
"label_frequency_candles"
cache_key = label_frequency_candles
if cache_key not in self._optuna_label_candle_pool_full_cache:
half_label_frequency_candles = int(label_frequency_candles / 2)
- min_offset = -half_label_frequency_candles
- max_offset = half_label_frequency_candles
self._optuna_label_candle_pool_full_cache[cache_key] = [
max(1, label_frequency_candles + offset)
- for offset in range(min_offset, max_offset + 1)
+ for offset in range(
+ -half_label_frequency_candles, half_label_frequency_candles + 1
+ )
]
return copy.deepcopy(self._optuna_label_candle_pool_full_cache[cache_key])
fit_live_predictions_candles: int,
label_period_candles: int,
) -> tuple[float, float]:
- label_period_cycles = fit_live_predictions_candles / label_period_candles
- thresholds_candles = max(2, int(label_period_cycles)) * label_period_candles
+ thresholds_candles = (
+ max(2, int(fit_live_predictions_candles / label_period_candles))
+ * label_period_candles
+ )
pred_extrema = pred_df.get(EXTREMA_COLUMN).iloc[-thresholds_candles:].copy()
"label_natr_ratio", min_label_natr_ratio, max_label_natr_ratio, step=0.05
)
- label_period_cycles = fit_live_predictions_candles / label_period_candles
- df = df.iloc[-(max(2, int(label_period_cycles)) * label_period_candles) :]
+ df = df.iloc[
+ -(
+ max(2, int(fit_live_predictions_candles / label_period_candles))
+ * label_period_candles
+ ) :
+ ]
if df.empty:
return 0, 0.0, 0.0
if n == 0:
return (0, None)
dates = df.get("date")
- last_date = dates.iloc[-1] if dates is not None and not dates.empty else None
- return (n, last_date)
+ return (n, dates.iloc[-1] if dates is not None and not dates.empty else None)
def _init_reversal_confirmation_defaults(self) -> None:
reversal_confirmation = self.config.get("reversal_confirmation", {})
zero_lag=True,
normalize=True,
)
- psar = ta.SAR(dataframe, acceleration=0.02, maximum=0.2)
- dataframe["%-diff_to_psar"] = closes - psar
+ dataframe["%-diff_to_psar"] = closes - ta.SAR(
+ dataframe, acceleration=0.02, maximum=0.2
+ )
kc = pta.kc(
highs,
lows,
current_date = dates.iloc[-1]
if isna(current_date):
return None
- trade_duration_minutes = (current_date - entry_date).total_seconds() / 60.0
return int(
- trade_duration_minutes / timeframe_to_minutes(self.config.get("timeframe"))
+ ((current_date - entry_date).total_seconds() / 60.0)
+ / timeframe_to_minutes(self.config.get("timeframe"))
)
@staticmethod
max_weight: float = 1.0,
weighting_exponent: float = 1.5,
) -> float:
- normalized_distance_from_center = abs(quantile - 0.5) * 2.0
return (
min_weight
+ (max_weight - min_weight)
- * normalized_distance_from_center**weighting_exponent
+ * (abs(quantile - 0.5) * 2.0) ** weighting_exponent
)
entry_weight = calculate_weight(entry_quantile)
def get_trade_exit_stage(trade: Trade) -> int:
n_open_orders = 0
if trade.has_open_orders:
- exit_side = "buy" if trade.is_short else "sell"
n_open_orders = sum(
- 1 for open_order in trade.open_orders if open_order.side == exit_side
+ 1
+ for open_order in trade.open_orders
+ if open_order.side == ("buy" if trade.is_short else "sell")
)
return trade.nr_of_successful_exits + n_open_orders
timestamp = int(current_time.timestamp())
candle_duration_secs = max(1, int(self._candle_duration_secs))
candle_start_secs = (timestamp // candle_duration_secs) * candle_duration_secs
- callback_hash = get_callable_sha256(callback)
- key = hashlib.sha256(f"{pair}\x00{callback_hash}".encode()).hexdigest()
+ key = hashlib.sha256(
+ f"{pair}\x00{get_callable_sha256(callback)}".encode()
+ ).hexdigest()
if candle_start_secs != self.last_candle_start_secs.get(key):
self.last_candle_start_secs[key] = candle_start_secs
try:
sigma_total = sigma_global + sigma_recent
if sigma_total <= 0:
return alpha_base
- ratio = sigma_global / sigma_total
- alpha_vol = alpha_base * (ratio**gamma)
- return max(min_alpha, alpha_vol)
+ return max(min_alpha, alpha_base * ((sigma_global / sigma_total) ** gamma))
alpha_v = volatility_adjusted_alpha(
alpha_len, std_v_global, std_v_recent, min_alpha=min_alpha
if scale <= 0 or not np.isfinite(scale):
scale = 1.0
- scaled = scale * weights
- return sp.special.expit(scaled)
+ return sp.special.expit(scale * weights)
def _normalize_minmax(
w_range = w_max - w_min
if np.isclose(w_range, 0.0):
- range_midpoint = midpoint(range[0], range[1])
- return np.full_like(weights, range_midpoint, dtype=float)
+ return np.full_like(weights, midpoint(range[0], range[1]), dtype=float)
- normalized = (weights - w_min) / w_range
- return range[0] + normalized * (range[1] - range[0])
+ return range[0] + ((weights - w_min) / w_range) * (range[1] - range[0])
def _normalize_l1(weights: NDArray[np.floating]) -> NDArray[np.floating]:
weights_sum = np.sum(np.abs(weights))
if weights_sum <= 0 or not np.isfinite(weights_sum):
return np.full_like(weights, float(DEFAULT_EXTREMA_WEIGHT), dtype=float)
- normalized_weights = weights / weights_sum
- return normalized_weights
+ return weights / weights_sum
def _normalize_l2(weights: NDArray[np.floating]) -> NDArray[np.floating]:
if l2_norm <= 0 or not np.isfinite(l2_norm):
return np.full_like(weights, float(DEFAULT_EXTREMA_WEIGHT), dtype=float)
- normalized_weights = weights / l2_norm
- return normalized_weights
+ return weights / l2_norm
def _normalize_softmax(
if n <= 1:
return np.full_like(weights, float(DEFAULT_EXTREMA_WEIGHT), dtype=float)
- normalized_weights = (ranks - 1) / (n - 1)
- return normalized_weights
+ return (ranks - 1) / (n - 1)
def normalize_weights(
for param, (default_min, default_max) in default_ranges.items():
center_value = model_training_best_parameters.get(param)
- if center_value is None:
- center_value = midpoint(default_min, default_max)
- elif not isinstance(center_value, (int, float)) or not np.isfinite(
+ if center_value is None:
+     center_value = midpoint(default_min, default_max)
+ if not isinstance(center_value, (int, float)) or not np.isfinite(
center_value
):
continue