Piment Noir Git Repositories - freqai-strategies.git/commitdiff
refactor(qav3): typing
authorJérôme Benoit <jerome.benoit@piment-noir.org>
Tue, 11 Feb 2025 16:35:06 +0000 (17:35 +0100)
committerJérôme Benoit <jerome.benoit@piment-noir.org>
Tue, 11 Feb 2025 16:35:06 +0000 (17:35 +0100)
Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py

index 8889ebba4902291b8d666ca82002b4738642a4a5..e299b4baa81e289a1851b0e935b4961afac084e1 100644 (file)
@@ -242,7 +242,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
 
 def min_max_pred(
     pred_df: pd.DataFrame, fit_live_predictions_candles: int, label_period_candles: int
-):
+) -> tuple[float, float]:
     beta = 10.0
     extrema = pred_df.tail(label_period_candles)["&s-extrema"]
     min_pred = smooth_min(extrema, beta=beta)
@@ -253,7 +253,7 @@ def min_max_pred(
 
 def __min_max_pred(
     pred_df: pd.DataFrame, fit_live_predictions_candles: int, label_period_candles: int
-):
+) -> tuple[float, float]:
     pred_df_sorted = (
         pred_df.select_dtypes(exclude=["object"])
         .copy()
@@ -280,7 +280,7 @@ def objective(
     fit_live_predictions_candles,
     candles_step,
     params,
-):
+) -> float:
     min_train_window: int = 600
     max_train_window: int = (
         len(X) if len(X) > min_train_window else (min_train_window + len(X))
@@ -333,7 +333,9 @@ def objective(
     return error
 
 
-def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
+def hp_objective(
+    trial, X, y, train_weights, X_test, y_test, test_weights, params
+) -> float:
     study_params = {
         "n_estimators": trial.suggest_int("n_estimators", 100, 800),
         "num_leaves": trial.suggest_int("num_leaves", 2, 256),
@@ -369,9 +371,9 @@ def sanitize_path(path: str) -> str:
     return allowed.sub("_", path)
 
 
-def smooth_max(series, beta=1.0):
+def smooth_max(series: pd.Series, beta=1.0) -> float:
     return np.log(np.sum(np.exp(beta * series))) / beta
 
 
-def smooth_min(series, beta=1.0):
+def smooth_min(series: pd.Series, beta=1.0) -> float:
     return -np.log(np.sum(np.exp(-beta * series))) / beta
index 0ba230535f5b994bbcb01768e0e9d7d84c3d8c39..b5de25057da5c090ab8f0e5f5ed688c9809a0474 100644 (file)
@@ -245,7 +245,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
 
 def min_max_pred(
     pred_df: pd.DataFrame, fit_live_predictions_candles: int, label_period_candles: int
-):
+) -> tuple[float, float]:
     beta = 10.0
     extrema = pred_df.tail(label_period_candles)["&s-extrema"]
     min_pred = smooth_min(extrema, beta=beta)
@@ -256,7 +256,7 @@ def min_max_pred(
 
 def __min_max_pred(
     pred_df: pd.DataFrame, fit_live_predictions_candles: int, label_period_candles: int
-):
+) -> tuple[float, float]:
     pred_df_sorted = (
         pred_df.select_dtypes(exclude=["object"])
         .copy()
@@ -283,7 +283,7 @@ def objective(
     fit_live_predictions_candles,
     candles_step,
     params,
-):
+) -> float:
     min_train_window: int = 600
     max_train_window: int = (
         len(X) if len(X) > min_train_window else (min_train_window + len(X))
@@ -341,7 +341,9 @@ def objective(
     return error
 
 
-def hp_objective(trial, X, y, train_weights, X_test, y_test, test_weights, params):
+def hp_objective(
+    trial, X, y, train_weights, X_test, y_test, test_weights, params
+) -> float:
     study_params = {
         "n_estimators": trial.suggest_int("n_estimators", 100, 800),
         "learning_rate": trial.suggest_float("learning_rate", 1e-3, 0.3, log=True),
@@ -382,9 +384,9 @@ def sanitize_path(path: str) -> str:
     return allowed.sub("_", path)
 
 
-def smooth_max(series, beta=1.0):
+def smooth_max(series: pd.Series, beta=1.0) -> float:
     return np.log(np.sum(np.exp(beta * series))) / beta
 
 
-def smooth_min(series, beta=1.0):
+def smooth_min(series: pd.Series, beta=1.0) -> float:
     return -np.log(np.sum(np.exp(-beta * series))) / beta