From: Jérôme Benoit Date: Mon, 3 Feb 2025 15:03:35 +0000 (+0100) Subject: refactor: use freqtrade training defaults X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=47af573d2a8bd4cc488e94c6192f990971812cc0;p=freqai-strategies.git refactor: use freqtrade training defaults Signed-off-by: Jérôme Benoit --- diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py index afab949..b7fbef8 100644 --- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py +++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py @@ -12,7 +12,7 @@ import sklearn import warnings N_TRIALS = 36 -TEST_SIZE = 0.25 +TEST_SIZE = 0.1 warnings.simplefilter(action="ignore", category=FutureWarning) @@ -57,19 +57,13 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel): optuna_hyperopt: bool = ( self.freqai_info.get("optuna_hyperopt", False) - and self.freqai_info.get("data_split_parameters", {}).get( - "test_size", TEST_SIZE - ) - > 0 + and self.data_split_parameters.get("test_size", TEST_SIZE) > 0 ) start = time.time() if optuna_hyperopt: pruner = optuna.pruners.MedianPruner(n_warmup_steps=5) - study = optuna.create_study( - pruner=pruner, - direction="minimize", - ) + study = optuna.create_study(pruner=pruner, direction="minimize") study.optimize( lambda trial: objective( trial, @@ -156,9 +150,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel): pred_df_sorted[col] = pred_df_sorted[col].sort_values( ascending=False, ignore_index=True ) - frequency = num_candles / ( - self.freqai_info["feature_parameters"]["label_period_candles"] * 2 - ) + frequency = num_candles / (self.ft_params["label_period_candles"] * 2) max_pred = pred_df_sorted.iloc[: int(frequency)].mean() min_pred = pred_df_sorted.iloc[-int(frequency) :].mean() dk.data["extra_returns_per_train"]["&s-maxima_sort_threshold"] = max_pred[ @@ -194,12 +186,7 @@ class 
LightGBMRegressorQuickAdapterV35(BaseRegressionModel): dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff def eval_set_and_weights(self, X_test, y_test, test_weights): - if ( - self.freqai_info.get("data_split_parameters", {}).get( - "test_size", TEST_SIZE - ) - == 0 - ): + if self.data_split_parameters.get("test_size", TEST_SIZE) == 0: eval_set = None eval_weights = None else: @@ -210,7 +197,15 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel): def objective( - trial, X, y, train_weights, X_test, y_test, test_weights, candles_step, params + trial, + X, + y, + train_weights, + X_test, + y_test, + test_weights, + candles_step, + params, ): train_window = trial.suggest_int( "train_period_candles", 1152, 17280, step=candles_step diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py index 1d347c8..3b34711 100644 --- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py +++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py @@ -12,7 +12,7 @@ import sklearn import warnings N_TRIALS = 36 -TEST_SIZE = 0.25 +TEST_SIZE = 0.1 warnings.simplefilter(action="ignore", category=FutureWarning) @@ -57,10 +57,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel): optuna_hyperopt: bool = ( self.freqai_info.get("optuna_hyperopt", False) - and self.freqai_info.get("data_split_parameters", {}).get( - "test_size", TEST_SIZE - ) - > 0 + and self.data_split_parameters.get("test_size", TEST_SIZE) > 0 ) start = time.time() @@ -153,9 +150,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel): pred_df_sorted[col] = pred_df_sorted[col].sort_values( ascending=False, ignore_index=True ) - frequency = num_candles / ( - self.freqai_info["feature_parameters"]["label_period_candles"] * 2 - ) + frequency = num_candles / (self.ft_params["label_period_candles"] * 2) max_pred = pred_df_sorted.iloc[: 
int(frequency)].mean() min_pred = pred_df_sorted.iloc[-int(frequency) :].mean() dk.data["extra_returns_per_train"]["&s-maxima_sort_threshold"] = max_pred[ @@ -191,12 +186,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel): dk.data["extra_returns_per_train"]["DI_cutoff"] = cutoff def eval_set_and_weights(self, X_test, y_test, test_weights): - if ( - self.freqai_info.get("data_split_parameters", {}).get( - "test_size", TEST_SIZE - ) - == 0 - ): + if self.data_split_parameters.get("test_size", TEST_SIZE) == 0: eval_set = None eval_weights = None else: @@ -207,7 +197,15 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel): def objective( - trial, X, y, train_weights, X_test, y_test, test_weights, candles_step, params + trial, + X, + y, + train_weights, + X_test, + y_test, + test_weights, + candles_step, + params, ): train_window = trial.suggest_int( "train_period_candles", 1152, 17280, step=candles_step