From: Jérôme Benoit
Date: Thu, 13 Mar 2025 10:45:22 +0000 (+0100)
Subject: fix(qav3): remove buggy zero_phase_gaussian for now
X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=5088d5f86164a70e14a1db97c17d4db9fbb7fa49;p=freqai-strategies.git

fix(qav3): remove buggy zero_phase_gaussian for now

Signed-off-by: Jérôme Benoit
---
diff --git a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
index 5862667..65d2f25 100644
--- a/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/LightGBMRegressorQuickAdapterV35.py
@@ -1,6 +1,6 @@
 import logging
 import json
-from typing import Any, Dict
+from typing import Any
 from pathlib import Path

 from lightgbm import LGBMRegressor
@@ -33,7 +33,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
     experiments, with a an objective of helping the community make smarter choices in their
     ML journey.

-    This strategy is experimental (as with all strategies released to sponsors). Do *not* expect
+    This freqaimodel is experimental (as with all models released to sponsors). Do *not* expect
     returns. The goal is to demonstrate gratitude to people who support the project and to help
     them find a good starting point for their own creativity.

@@ -55,10 +55,10 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
             and self.__optuna_config.get("enabled", False)
             and self.data_split_parameters.get("test_size", TEST_SIZE) > 0
         )
-        self.__optuna_hp_rmse: Dict[str, float] = {}
-        self.__optuna_period_rmse: Dict[str, float] = {}
-        self.__optuna_hp_params: Dict[str, Dict] = {}
-        self.__optuna_period_params: Dict[str, Dict] = {}
+        self.__optuna_hp_rmse: dict[str, float] = {}
+        self.__optuna_period_rmse: dict[str, float] = {}
+        self.__optuna_hp_params: dict[str, dict] = {}
+        self.__optuna_period_params: dict[str, dict] = {}
         for pair in self.pairs:
             self.__optuna_hp_rmse[pair] = -1
             self.__optuna_period_rmse[pair] = -1
@@ -75,7 +75,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
                 )
             )

-    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
+    def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
         :param data_dictionary: the dictionary constructed by DataHandler to hold
@@ -300,7 +300,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         X_test,
         y_test,
         test_weights,
-    ) -> tuple[Dict, float] | tuple[None, None]:
+    ) -> tuple[dict, float] | tuple[None, None]:
         identifier = self.freqai_info.get("identifier")
         study_namespace = "hp"
         study_name = f"{identifier}-{study_namespace}-{pair}"
@@ -379,7 +379,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
         model_training_parameters,
-    ) -> tuple[Dict, float] | tuple[None, None]:
+    ) -> tuple[dict, float] | tuple[None, None]:
         identifier = self.freqai_info.get("identifier")
         study_namespace = "period"
         study_name = f"{identifier}-{study_namespace}-{pair}"
@@ -441,7 +441,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         return params, study.best_value

     def optuna_save_best_params(
-        self, pair: str, namespace: str, best_params: Dict
+        self, pair: str, namespace: str, best_params: dict
     ) -> None:
         best_params_path = Path(
             self.full_path / f"optuna-{namespace}-best-params-{pair.split('/')[0]}.json"
@@ -449,7 +449,7 @@ class LightGBMRegressorQuickAdapterV35(BaseRegressionModel):
         with best_params_path.open("w", encoding="utf-8") as write_file:
             json.dump(best_params, write_file, indent=4)

-    def optuna_load_best_params(self, pair: str, namespace: str) -> Dict | None:
+    def optuna_load_best_params(self, pair: str, namespace: str) -> dict | None:
         best_params_path = Path(
             self.full_path / f"optuna-{namespace}-best-params-{pair.split('/')[0]}.json"
         )
diff --git a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
index 20a5a16..eeaecba 100644
--- a/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
+++ b/quickadapter/user_data/freqaimodels/XGBoostRegressorQuickAdapterV35.py
@@ -1,6 +1,6 @@
 import logging
 import json
-from typing import Any, Dict
+from typing import Any
 from pathlib import Path

 from xgboost import XGBRegressor
@@ -33,7 +33,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
     experiments, with a an objective of helping the community make smarter choices in their
     ML journey.

-    This strategy is experimental (as with all strategies released to sponsors). Do *not* expect
+    This freqaimodel is experimental (as with all models released to sponsors). Do *not* expect
     returns. The goal is to demonstrate gratitude to people who support the project and to help
     them find a good starting point for their own creativity.

@@ -55,10 +55,10 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
             and self.__optuna_config.get("enabled", False)
             and self.data_split_parameters.get("test_size", TEST_SIZE) > 0
         )
-        self.__optuna_hp_rmse: Dict[str, float] = {}
-        self.__optuna_period_rmse: Dict[str, float] = {}
-        self.__optuna_hp_params: Dict[str, Dict] = {}
-        self.__optuna_period_params: Dict[str, Dict] = {}
+        self.__optuna_hp_rmse: dict[str, float] = {}
+        self.__optuna_period_rmse: dict[str, float] = {}
+        self.__optuna_hp_params: dict[str, dict] = {}
+        self.__optuna_period_params: dict[str, dict] = {}
         for pair in self.pairs:
             self.__optuna_hp_rmse[pair] = -1
             self.__optuna_period_rmse[pair] = -1
@@ -75,7 +75,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
                 )
             )

-    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
+    def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
         """
         User sets up the training and test data to fit their desired model here
         :param data_dictionary: the dictionary constructed by DataHandler to hold
@@ -303,7 +303,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         X_test,
         y_test,
         test_weights,
-    ) -> tuple[Dict, float] | tuple[None, None]:
+    ) -> tuple[dict, float] | tuple[None, None]:
         identifier = self.freqai_info.get("identifier")
         study_namespace = "hp"
         study_name = f"{identifier}-{study_namespace}-{pair}"
@@ -382,7 +382,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         y_test,
         test_weights,
         model_training_parameters,
-    ) -> tuple[Dict, float] | tuple[None, None]:
+    ) -> tuple[dict, float] | tuple[None, None]:
         identifier = self.freqai_info.get("identifier")
         study_namespace = "period"
         study_name = f"{identifier}-{study_namespace}-{pair}"
@@ -444,7 +444,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         return params, study.best_value

     def optuna_save_best_params(
-        self, pair: str, namespace: str, best_params: Dict
+        self, pair: str, namespace: str, best_params: dict
     ) -> None:
         best_params_path = Path(
             self.full_path / f"optuna-{namespace}-best-params-{pair.split('/')[0]}.json"
@@ -452,7 +452,7 @@ class XGBoostRegressorQuickAdapterV35(BaseRegressionModel):
         with best_params_path.open("w", encoding="utf-8") as write_file:
             json.dump(best_params, write_file, indent=4)

-    def optuna_load_best_params(self, pair: str, namespace: str) -> Dict | None:
+    def optuna_load_best_params(self, pair: str, namespace: str) -> dict | None:
         best_params_path = Path(
             self.full_path / f"optuna-{namespace}-best-params-{pair.split('/')[0]}.json"
         )
diff --git a/quickadapter/user_data/strategies/QuickAdapterV3.py b/quickadapter/user_data/strategies/QuickAdapterV3.py
index 5ab3fcf..7c4cb54 100644
--- a/quickadapter/user_data/strategies/QuickAdapterV3.py
+++ b/quickadapter/user_data/strategies/QuickAdapterV3.py
@@ -10,7 +10,7 @@ from freqtrade.strategy.interface import IStrategy
 from technical.pivots_points import pivots_points
 from technical.indicators import chaikin_money_flow
 from freqtrade.persistence import Trade
-from scipy.signal import argrelmin, argrelmax, gaussian, convolve
+from scipy.signal import argrelmin, argrelmax
 import numpy as np
 import pandas_ta as pta

@@ -23,7 +23,7 @@ MAXIMA_THRESHOLD_COLUMN = "&s-maxima_threshold"

 class QuickAdapterV3(IStrategy):
     """
-    The following freqaimodel is released to sponsors of the non-profit FreqAI open-source project.
+    The following freqtrade strategy is released to sponsors of the non-profit FreqAI open-source project.
     If you find the FreqAI project useful, please consider supporting it by becoming a sponsor.
     We use sponsor money to help stimulate new features and to pay for running these public
     experiments, with a an objective of helping the community make smarter choices in their
@@ -286,6 +286,7 @@ class QuickAdapterV3(IStrategy):

         dataframe["minima_threshold"] = dataframe[MINIMA_THRESHOLD_COLUMN]
         dataframe["maxima_threshold"] = dataframe[MAXIMA_THRESHOLD_COLUMN]
+
         return dataframe

     def populate_entry_trend(self, df: DataFrame, metadata: dict) -> DataFrame:
@@ -418,9 +419,9 @@ class QuickAdapterV3(IStrategy):
                     std=std
                 )
             ),
-            "zero_phase_gaussian": zero_phase_gaussian(
-                series=series, window=window, std=std
-            ),
+            # "zero_phase_gaussian": zero_phase_gaussian(
+            #     series=series, window=window, std=std
+            # ),
             "boxcar": series.rolling(
                 window=window, win_type="boxcar", center=center
             ).mean(),
@@ -509,17 +510,5 @@ def zlewma(series: Series, timeperiod: int) -> Series:
     return 2 * ewma - ewma.ewm(span=timeperiod).mean()


-def zero_phase_gaussian(series: Series, window: int, std: float) -> Series:
-    # Gaussian kernel
-    kernel = gaussian(window, std=std)
-    kernel /= kernel.sum()
-
-    # Forward-backward convolution for zero phase lag
-    padded_series = np.pad(series, (window // 2, window // 2), mode="edge")
-    smoothed = convolve(padded_series, kernel, mode="valid")
-    smoothed = convolve(smoothed[::-1], kernel, mode="valid")[::-1]
-    return Series(smoothed, index=series.index)
-
-
 def get_distance(p1: Series | float, p2: Series | float) -> Series | float:
     return abs((p1) - (p2))
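
Note on the removed helper (not part of the commit itself): the commit message does not spell out the bug, but two likely culprits are visible in the deleted code. First, recent SciPy releases no longer export `gaussian` from the `scipy.signal` namespace (the window functions live in `scipy.signal.windows`), so the import line removed above fails on current environments. Second, the backward pass calls `convolve(..., mode="valid")` on data that is no longer edge-padded, which shortens the output by roughly `window - 1` samples; the final `Series(smoothed, index=series.index)` then fails with a length mismatch. The sketch below is only an illustration of one possible length-preserving replacement, assuming `scipy.ndimage.gaussian_filter1d` (whose symmetric kernel introduces no phase lag) is an acceptable substitute for the hand-rolled forward-backward convolution:

# Illustrative sketch only, not part of this commit.
from pandas import Series
from scipy.ndimage import gaussian_filter1d


def zero_phase_gaussian(series: Series, window: int, std: float) -> Series:
    # gaussian_filter1d applies a symmetric Gaussian kernel, so the output has
    # no phase lag and the same length as the input; mode="nearest" replicates
    # edge values much like the previous np.pad(..., mode="edge") call did.
    smoothed = gaussian_filter1d(
        series.to_numpy(dtype=float),
        sigma=std,
        mode="nearest",
        radius=window // 2,  # kernel support comparable to the old `window` (SciPy >= 1.10)
    )
    return Series(smoothed, index=series.index)

An equally valid alternative would be `scipy.signal.filtfilt` with a normalized Gaussian FIR kernel from `scipy.signal.windows.gaussian`, if the squared frequency response of a true forward-backward pass is what the original author intended.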