Initial commit - create a different file for signals

jainanuj94 2024-08-02 15:54:03 +05:30
parent 2ad921f99e
commit 8f8859a5f5
6 changed files with 76 additions and 13 deletions
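In outline, the commit threads exit-signal candles through the same pipeline that already handles entry-signal and rejected-signal data: the backtest builds them per strategy, the storage layer persists them under a new "exited" suffix, and the entry/exit analysis loads them back for a separate report. A rough sketch of that flow, reusing names from the hunks below; all arguments are placeholders, and `_load_exit_signal_candles` (added in the first file of this diff) reads the stored data back on the analysis side:

    from freqtrade.optimize.optimize_reports import (
        generate_trade_exit_signal_candles,
        store_backtest_analysis_results,
    )

    def _sketch_export_exit_signals(export_path, strategy_name, preprocessed_tmp, results,
                                    processed_dfs, rejected_df, exited_dfs, dt_appendix):
        # Backtest side: build exit-signal candles next to the existing entry/rejected data.
        exited_dfs[strategy_name] = generate_trade_exit_signal_candles(preprocessed_tmp, results)
        # Storage side: the extra dict is persisted under the new "exited" suffix,
        # from where _load_exit_signal_candles reads it back for the analysis report.
        store_backtest_analysis_results(export_path, processed_dfs, rejected_df,
                                        exited_dfs, dt_appendix)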

View File

@@ -47,9 +47,12 @@ def _load_signal_candles(backtest_dir: Path):
    return _load_backtest_analysis_data(backtest_dir, "signals")

+def _load_exit_signal_candles(backtest_dir: Path):
+    return _load_backtest_analysis_data(backtest_dir, "exited")
+

def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_candles):
-    analysed_trades_dict = {}
-    analysed_trades_dict[strategy_name] = {}
+    analysed_trades_dict = {strategy_name: {}}

    try:
        logger.info(f"Processing {strategy_name} : {len(pairlist)} pairs")
@@ -333,6 +336,7 @@ def process_entry_exit_reasons(config: Config):
            if trades is not None and not trades.empty:
                signal_candles = _load_signal_candles(config["exportfilename"])
+                exit_signal_candles = _load_exit_signal_candles(config["exportfilename"])

                rej_df = None
                if do_rejected:
@@ -349,6 +353,10 @@ def process_entry_exit_reasons(config: Config):
                    config["exchange"]["pair_whitelist"], strategy_name, trades, signal_candles
                )
+                exited_trades_dict = _process_candles_and_indicators(
+                    config["exchange"]["pair_whitelist"], strategy_name, trades, exit_signal_candles
+                )
+

                res_df = prepare_results(
                    analysed_trades_dict,
                    strategy_name,
@@ -357,6 +365,23 @@ def process_entry_exit_reasons(config: Config):
                    timerange=timerange,
                )
+                exited_df = prepare_results(
+                    exited_trades_dict,
+                    strategy_name,
+                    enter_reason_list,
+                    exit_reason_list,
+                    timerange=timerange,
+                )
+
+                print_results(
+                    exited_df,
+                    analysis_groups,
+                    indicator_list,
+                    to_csv=False,
+                    rejected_signals=None,
+                    csv_path=csv_path,
+                )
+

                print_results(
                    res_df,
                    analysis_groups,
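
Both `analysed_trades_dict` and the new `exited_trades_dict` share the nested strategy -> pair -> DataFrame shape that `prepare_results` expects, the same nesting the test fixtures further below use for candle data. A sketch with illustrative names:

    from pandas import DataFrame

    # Placeholder frame; the real ones hold trades joined with indicator values
    # taken from the exit-signal candles.
    exited_trades_dict = {
        "MyStrategy": {               # strategy_name
            "BTC/USDT": DataFrame(),  # one DataFrame per analysed pair
        }
    }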

View File

@@ -42,7 +42,8 @@ from freqtrade.optimize.bt_progress import BTProgress
from freqtrade.optimize.optimize_reports import (
    generate_backtest_stats,
    generate_rejected_signals,
-    generate_trade_signal_candles,
+    generate_trade_entry_signal_candles,
+    generate_trade_exit_signal_candles,
    show_backtest_results,
    store_backtest_analysis_results,
    store_backtest_stats,
@@ -122,6 +123,7 @@ class Backtesting:
        self.processed_dfs: Dict[str, Dict] = {}
        self.rejected_dict: Dict[str, List] = {}
        self.rejected_df: Dict[str, Dict] = {}
+        self.exited_dfs: Dict[str, Dict] = {}

        self._exchange_name = self.config["exchange"]["name"]
        if not exchange:
@@ -1558,12 +1560,15 @@
            self.config.get("export", "none") == "signals"
            and self.dataprovider.runmode == RunMode.BACKTEST
        ):
-            self.processed_dfs[strategy_name] = generate_trade_signal_candles(
+            self.processed_dfs[strategy_name] = generate_trade_entry_signal_candles(
                preprocessed_tmp, results
            )
            self.rejected_df[strategy_name] = generate_rejected_signals(
                preprocessed_tmp, self.rejected_dict
            )
+            self.exited_dfs[strategy_name] = generate_trade_exit_signal_candles(
+                preprocessed_tmp, results
+            )

        return min_date, max_date
@@ -1639,7 +1644,11 @@
            and self.dataprovider.runmode == RunMode.BACKTEST
        ):
            store_backtest_analysis_results(
-                self.config["exportfilename"], self.processed_dfs, self.rejected_df, dt_appendix
+                self.config["exportfilename"],
+                self.processed_dfs,
+                self.rejected_df,
+                self.exited_dfs,
+                dt_appendix,
            )

        # Results may be mixed up now. Sort them so they follow --strategy-list order.

View File

@@ -25,6 +25,7 @@ from freqtrade.optimize.optimize_reports.optimize_reports import (
    generate_strategy_comparison,
    generate_strategy_stats,
    generate_tag_metrics,
-    generate_trade_signal_candles,
+    generate_trade_entry_signal_candles,
+    generate_trade_exit_signal_candles,
    generate_trading_stats,
)

View File

@@ -90,7 +90,12 @@ def _store_backtest_analysis_data(
def store_backtest_analysis_results(
-    recordfilename: Path, candles: Dict[str, Dict], trades: Dict[str, Dict], dtappendix: str
+    recordfilename: Path,
+    candles: Dict[str, Dict],
+    trades: Dict[str, Dict],
+    exited: Dict[str, Dict],
+    dtappendix: str,
) -> None:
    _store_backtest_analysis_data(recordfilename, candles, dtappendix, "signals")
    _store_backtest_analysis_data(recordfilename, trades, dtappendix, "rejected")
+    _store_backtest_analysis_data(recordfilename, exited, dtappendix, "exited")
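
For reference, a minimal call against the widened signature; the export path, strategy name and timestamp below are placeholders, and each dictionary is written out under the suffix noted next to it:

    from pathlib import Path
    from freqtrade.optimize.optimize_reports import store_backtest_analysis_results

    store_backtest_analysis_results(
        Path("user_data/backtest_results/backtest-result"),  # placeholder export location
        {"MyStrategy": {}},  # entry-signal candles -> stored with the "signals" suffix
        {"MyStrategy": {}},  # rejected signals     -> stored with the "rejected" suffix
        {"MyStrategy": {}},  # exit-signal candles  -> stored with the "exited" suffix
        "2024_08_02_15_54_03",  # placeholder dtappendix
    )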

View File

@@ -24,9 +24,9 @@ from freqtrade.util import decimals_per_coin, fmt_coin
logger = logging.getLogger(__name__)

-def generate_trade_signal_candles(
+def generate_trade_entry_signal_candles(
    preprocessed_df: Dict[str, DataFrame], bt_results: Dict[str, Any]
-) -> DataFrame:
+) -> Dict[str, DataFrame]:
    signal_candles_only = {}
    for pair in preprocessed_df.keys():
        signal_candles_only_df = DataFrame()
@@ -47,6 +47,29 @@ def generate_trade_signal_candles(
    return signal_candles_only

+def generate_trade_exit_signal_candles(
+    preprocessed_df: Dict[str, DataFrame], bt_results: Dict[str, Any]
+) -> Dict[str, DataFrame]:
+    signal_candles_only = {}
+    for pair in preprocessed_df.keys():
+        signal_candles_only_df = DataFrame()
+
+        pairdf = preprocessed_df[pair]
+        resdf = bt_results["results"]
+        pairresults = resdf.loc[(resdf["pair"] == pair)]
+
+        if pairdf.shape[0] > 0:
+            for t, v in pairresults.close_date.items():
+                allinds = pairdf.loc[(pairdf["date"] < v)]
+                signal_inds = allinds.iloc[[-1]]
+                signal_candles_only_df = concat(
+                    [signal_candles_only_df.infer_objects(), signal_inds.infer_objects()]
+                )
+
+            signal_candles_only[pair] = signal_candles_only_df
+    return signal_candles_only
+
+
def generate_rejected_signals(
    preprocessed_df: Dict[str, DataFrame], rejected_dict: Dict[str, DataFrame]
) -> Dict[str, DataFrame]:
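
The selection rule in the new function mirrors the entry variant but keys off `close_date`: for each trade it keeps the last candle dated strictly before the trade's close. A standalone toy illustration of that rule (the data and the rsi column are made up):

    from pandas import DataFrame, Timestamp

    pairdf = DataFrame({
        "date": [Timestamp("2024-08-02 00:00"), Timestamp("2024-08-02 01:00"),
                 Timestamp("2024-08-02 02:00")],
        "rsi": [30, 45, 60],  # illustrative indicator column
    })
    close_date = Timestamp("2024-08-02 01:30")  # a trade's close_date

    allinds = pairdf.loc[pairdf["date"] < close_date]  # candles strictly before the close
    exit_signal_candle = allinds.iloc[[-1]]            # the 01:00 candle (rsi == 45)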

View File

@@ -293,7 +293,7 @@ def test_store_backtest_candles(testdatadir, mocker):
    candle_dict = {"DefStrat": {"UNITTEST/BTC": pd.DataFrame()}}

    # mock directory exporting
-    store_backtest_analysis_results(testdatadir, candle_dict, {}, "2022_01_01_15_05_13")
+    store_backtest_analysis_results(testdatadir, candle_dict, {}, {}, "2022_01_01_15_05_13")

    assert dump_mock.call_count == 2
    assert isinstance(dump_mock.call_args_list[0][0][0], Path)
@@ -302,7 +302,7 @@
    dump_mock.reset_mock()
    # mock file exporting
    filename = Path(testdatadir / "testresult")
-    store_backtest_analysis_results(filename, candle_dict, {}, "2022_01_01_15_05_13")
+    store_backtest_analysis_results(filename, candle_dict, {}, {}, "2022_01_01_15_05_13")

    assert dump_mock.call_count == 2
    assert isinstance(dump_mock.call_args_list[0][0][0], Path)
    # result will be testdatadir / testresult-<timestamp>_signals.pkl
@@ -315,7 +315,7 @@ def test_write_read_backtest_candles(tmp_path):
    # test directory exporting
    sample_date = "2022_01_01_15_05_13"
-    store_backtest_analysis_results(tmp_path, candle_dict, {}, sample_date)
+    store_backtest_analysis_results(tmp_path, candle_dict, {}, {}, sample_date)
    stored_file = tmp_path / f"backtest-result-{sample_date}_signals.pkl"
    with stored_file.open("rb") as scp:
        pickled_signal_candles = joblib.load(scp)
@@ -330,7 +330,7 @@
    # test file exporting
    filename = tmp_path / "testresult"
-    store_backtest_analysis_results(filename, candle_dict, {}, sample_date)
+    store_backtest_analysis_results(filename, candle_dict, {}, {}, sample_date)
    stored_file = tmp_path / f"testresult-{sample_date}_signals.pkl"
    with stored_file.open("rb") as scp:
        pickled_signal_candles = joblib.load(scp)
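
If `_store_backtest_analysis_data` uses the same filename pattern for the new suffix as it does for "signals" (an assumption; the storage helper's naming is not shown in this diff), the exited data could be round-tripped inside these tests along the same lines:

    # Assumed naming pattern, by analogy with the _signals.pkl files above.
    exited_file = tmp_path / f"backtest-result-{sample_date}_exited.pkl"
    if exited_file.is_file():
        with exited_file.open("rb") as fp:
            pickled_exit_candles = joblib.load(fp)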