import logging
from pathlib import Path
from typing import Dict, List

import joblib
import pandas as pd

from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
from freqtrade.data.btanalysis import (
    BT_DATA_COLUMNS,
    get_latest_backtest_filename,
    load_backtest_data,
    load_backtest_stats,
)
from freqtrade.exceptions import OperationalException
from freqtrade.util import print_df_rich_table


logger = logging.getLogger(__name__)


def _load_backtest_analysis_data(backtest_dir: Path, name: str):
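    """Load a pickled analysis artifact ("<backtest>_<name>.pkl") for a backtest run.

    :return: The unpickled object, or None if the file cannot be read.
    """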
    if backtest_dir.is_dir():
        scpf = Path(
            backtest_dir,
            Path(get_latest_backtest_filename(backtest_dir)).stem + "_" + name + ".pkl",
        )
    else:
        scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_{name}.pkl")

    try:
        with scpf.open("rb") as scp:
            loaded_data = joblib.load(scp)
            logger.info(f"Loaded {name} candles: {str(scpf)}")
    except Exception as e:
        logger.error(f"Cannot load {name} data from pickled results: {e}")
        return None

    return loaded_data


def _load_rejected_signals(backtest_dir: Path):
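    """Load the "rejected" signal candles pickle for a backtest run."""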
    return _load_backtest_analysis_data(backtest_dir, "rejected")


def _load_signal_candles(backtest_dir: Path):
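    """Load the entry "signals" candles pickle for a backtest run."""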
    return _load_backtest_analysis_data(backtest_dir, "signals")


def _load_exit_signal_candles(backtest_dir: Path) -> Dict[str, Dict[str, pd.DataFrame]]:
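    """Load the "exited" (exit-signal) candles pickle for a backtest run."""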
    return _load_backtest_analysis_data(backtest_dir, "exited")


def _process_candles_and_indicators(
    pairlist, strategy_name, trades, signal_candles, date_col: str = "open_date"
):
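    """Match each pair's trades against its signal candles for one strategy.

    :return: Dict of the shape {strategy_name: {pair: merged DataFrame}}.
    """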
    analysed_trades_dict: Dict[str, Dict] = {strategy_name: {}}

    try:
        logger.info(f"Processing {strategy_name} : {len(pairlist)} pairs")
        for pair in pairlist:
            if pair in signal_candles[strategy_name]:
                analysed_trades_dict[strategy_name][pair] = _analyze_candles_and_indicators(
                    pair, trades, signal_candles[strategy_name][pair], date_col
                )
    except Exception as e:
        logger.warning(f"Cannot process entry/exit reasons for {strategy_name}: {e}")

    return analysed_trades_dict


def _analyze_candles_and_indicators(
    pair: str, trades: pd.DataFrame, signal_candles: pd.DataFrame, date_col: str = "open_date"
) -> pd.DataFrame:
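    """Join each trade on `pair` to the last signal candle before the trade's `date_col`.

    :return: Trades enriched with the matching candle's indicator columns,
        or an empty DataFrame if there are no signal candles.
    """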
    buyf = signal_candles

    if len(buyf) > 0:
        buyf = buyf.set_index("date", drop=False)
        trades_red = trades.loc[trades["pair"] == pair].copy()

        trades_inds = pd.DataFrame()

        if trades_red.shape[0] > 0 and buyf.shape[0] > 0:
            for t, v in trades_red.iterrows():
                # Use the latest candle strictly before the trade's reference date.
                allinds = buyf.loc[(buyf["date"] < v[date_col])]
                if allinds.shape[0] > 0:
                    tmp_inds = allinds.iloc[[-1]]

                    trades_red.loc[t, "signal_date"] = tmp_inds["date"].values[0]
                    trades_red.loc[t, "enter_reason"] = trades_red.loc[t, "enter_tag"]
                    tmp_inds.index.rename("signal_date", inplace=True)
                    trades_inds = pd.concat([trades_inds, tmp_inds])

            if "signal_date" in trades_red:
                trades_red["signal_date"] = pd.to_datetime(trades_red["signal_date"], utc=True)
                trades_red.set_index("signal_date", inplace=True)

                trades_red = pd.merge(trades_red, trades_inds, on="signal_date", how="outer")
        return trades_red
    else:
        return pd.DataFrame()


def _do_group_table_output(
    bigdf,
    glist,
    csv_path: Path,
    to_csv=False,
):
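    """Print or export one summary table per analysis group requested in `glist`."""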
    for g in glist:
        # 0: summary wins/losses grouped by enter tag
        if g == "0":
            group_mask = ["enter_reason"]
            wins = (
                bigdf.loc[bigdf["profit_abs"] >= 0].groupby(group_mask).agg({"profit_abs": ["sum"]})
            )

            wins.columns = ["profit_abs_wins"]
            loss = (
                bigdf.loc[bigdf["profit_abs"] < 0].groupby(group_mask).agg({"profit_abs": ["sum"]})
            )
            loss.columns = ["profit_abs_loss"]

            new = bigdf.groupby(group_mask).agg(
                {"profit_abs": ["count", lambda x: sum(x > 0), lambda x: sum(x <= 0)]}
            )
            new = pd.concat([new, wins, loss], axis=1).fillna(0)

            new["profit_tot"] = new["profit_abs_wins"] - abs(new["profit_abs_loss"])
            new["wl_ratio_pct"] = (new.iloc[:, 1] / new.iloc[:, 0] * 100).fillna(0)
            new["avg_win"] = (new["profit_abs_wins"] / new.iloc[:, 1]).fillna(0)
            new["avg_loss"] = (new["profit_abs_loss"] / new.iloc[:, 2]).fillna(0)

            # Expectancy ratio: (1 + avg_win / |avg_loss|) * win_rate - 1
            new["exp_ratio"] = (
                ((1 + (new["avg_win"] / abs(new["avg_loss"]))) * (new["wl_ratio_pct"] / 100)) - 1
            ).fillna(0)

            new.columns = [
                "total_num_buys",
                "wins",
                "losses",
                "profit_abs_wins",
                "profit_abs_loss",
                "profit_tot",
                "wl_ratio_pct",
                "avg_win",
                "avg_loss",
                "exp_ratio",
            ]

            sortcols = ["total_num_buys"]

            _print_table(
                new, sortcols, show_index=True, name="Group 0:", to_csv=to_csv, csv_path=csv_path
            )

        else:
            agg_mask = {
                "profit_abs": ["count", "sum", "median", "mean"],
                "profit_ratio": ["median", "mean", "sum"],
            }
            agg_cols = [
                "num_buys",
                "profit_abs_sum",
                "profit_abs_median",
                "profit_abs_mean",
                "median_profit_pct",
                "mean_profit_pct",
                "total_profit_pct",
            ]
            sortcols = ["profit_abs_sum", "enter_reason"]

            # Initialize so an unknown group id falls through to the warning below
            # instead of raising NameError (or reusing a previous iteration's mask).
            group_mask = []

            # 1: profit summaries grouped by enter_tag
            if g == "1":
                group_mask = ["enter_reason"]

            # 2: profit summaries grouped by enter_tag and exit_tag
            if g == "2":
                group_mask = ["enter_reason", "exit_reason"]

            # 3: profit summaries grouped by pair and enter_tag
            if g == "3":
                group_mask = ["pair", "enter_reason"]

            # 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
            if g == "4":
                group_mask = ["pair", "enter_reason", "exit_reason"]

            # 5: profit summaries grouped by exit_tag
            if g == "5":
                group_mask = ["exit_reason"]
                sortcols = ["exit_reason"]

            if group_mask:
                new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
                new.columns = group_mask + agg_cols
                new["median_profit_pct"] = new["median_profit_pct"] * 100
                new["mean_profit_pct"] = new["mean_profit_pct"] * 100
                new["total_profit_pct"] = new["total_profit_pct"] * 100

                _print_table(new, sortcols, name=f"Group {g}:", to_csv=to_csv, csv_path=csv_path)
            else:
                logger.warning("Invalid group mask specified.")


def _do_rejected_signals_output(
    rejected_signals_df: pd.DataFrame, to_csv: bool = False, csv_path=None
) -> None:
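    """Print or export the table of signals rejected during backtesting."""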
    cols = ["pair", "date", "enter_tag"]
    sortcols = ["date", "pair", "enter_tag"]
    _print_table(
        rejected_signals_df[cols],
        sortcols,
        show_index=False,
        name="Rejected Signals:",
        to_csv=to_csv,
        csv_path=csv_path,
    )


def _select_rows_within_dates(df, timerange=None, df_date_col: str = "date"):
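    """Filter `df` to rows whose `df_date_col` lies within the given timerange."""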
    if timerange:
        if timerange.starttype == "date":
            df = df.loc[(df[df_date_col] >= timerange.startdt)]
        if timerange.stoptype == "date":
            df = df.loc[(df[df_date_col] < timerange.stopdt)]
    return df


def _select_rows_by_tags(df, enter_reason_list, exit_reason_list):
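    """Filter `df` by entry/exit reason; "all" in a list disables that filter."""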
    if enter_reason_list and "all" not in enter_reason_list:
        df = df.loc[(df["enter_reason"].isin(enter_reason_list))]

    if exit_reason_list and "all" not in exit_reason_list:
        df = df.loc[(df["exit_reason"].isin(exit_reason_list))]
    return df


def prepare_results(
    analysed_trades, stratname, enter_reason_list, exit_reason_list, timerange=None
) -> pd.DataFrame:
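    """Concatenate the analysed trades of all pairs for `stratname`, then apply
    timerange and entry/exit reason filters.
    """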
    res_df = pd.DataFrame()
    for pair, trades in analysed_trades[stratname].items():
        if trades.shape[0] > 0:
            trades.dropna(subset=["close_date"], inplace=True)
            res_df = pd.concat([res_df, trades], ignore_index=True)

    res_df = _select_rows_within_dates(res_df, timerange)

    if res_df is not None and res_df.shape[0] > 0 and ("enter_reason" in res_df.columns):
        res_df = _select_rows_by_tags(res_df, enter_reason_list, exit_reason_list)

    return res_df


def print_results(
    res_df: pd.DataFrame,
    exit_df: pd.DataFrame,
    analysis_groups: List[str],
    indicator_list: List[str],
    csv_path: Path,
    rejected_signals=None,
    to_csv=False,
):
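    """Print the group summaries, rejected signals and indicator tables for one strategy."""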
    if res_df.shape[0] > 0:
        if analysis_groups:
            _do_group_table_output(res_df, analysis_groups, to_csv=to_csv, csv_path=csv_path)

        if rejected_signals is not None:
            if rejected_signals.empty:
                print("There were no rejected signals.")
            else:
                _do_rejected_signals_output(rejected_signals, to_csv=to_csv, csv_path=csv_path)

        # NB this can be large for big dataframes!
        if "all" in indicator_list:
            _print_table(
                res_df, show_index=False, name="Indicators:", to_csv=to_csv, csv_path=csv_path
            )
        elif indicator_list:
            available_inds = []
            for ind in indicator_list:
                if ind in res_df:
                    available_inds.append(ind)

            merged_df = _merge_dfs(res_df, exit_df, available_inds)

            _print_table(
                merged_df,
                sortcols=["exit_reason"],
                show_index=False,
                name="Indicators:",
                to_csv=to_csv,
                csv_path=csv_path,
            )
    else:
        print("\nNo trades to show")


def _merge_dfs(entry_df, exit_df, available_inds):
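    """Merge entry- and exit-signal indicators on pair/open_date; shared indicator
    columns get " (entry)" / " (exit)" suffixes.
    """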
    merge_on = ["pair", "open_date"]
    signal_wide_indicators = list(set(available_inds) - set(BT_DATA_COLUMNS))
    columns_to_keep = merge_on + ["enter_reason", "exit_reason"] + available_inds

    if exit_df is None or exit_df.empty:
        return entry_df[columns_to_keep]

    return pd.merge(
        entry_df[columns_to_keep],
        exit_df[merge_on + signal_wide_indicators],
        on=merge_on,
        suffixes=(" (entry)", " (exit)"),
    )


def _print_table(
    df: pd.DataFrame, sortcols=None, *, show_index=False, name=None, to_csv=False, csv_path: Path
):
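    """Sort `df` and print it as a rich table, or save it as CSV below `csv_path`."""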
    if sortcols is not None:
        data = df.sort_values(sortcols)
    else:
        data = df

    if to_csv:
        safe_name = Path(csv_path, name.lower().replace(" ", "_").replace(":", "") + ".csv")
        data.to_csv(safe_name)
        print(f"Saved {name} to {safe_name}")
    else:
        if name is not None:
            print(name)

        print_df_rich_table(data, data.keys(), show_index=show_index)


def process_entry_exit_reasons(config: Config):
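    """Entry point for the entry/exit reason analysis.

    Loads the backtest results and the pickled signal/rejected/exit candles from
    config["exportfilename"], then prints or exports the tables selected by the
    "analysis_groups", "indicator_list", "analysis_rejected", "analysis_to_csv"
    and "analysis_csv_path" options.
    """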
    try:
        analysis_groups = config.get("analysis_groups", [])
        enter_reason_list = config.get("enter_reason_list", ["all"])
        exit_reason_list = config.get("exit_reason_list", ["all"])
        indicator_list = config.get("indicator_list", [])
        do_rejected = config.get("analysis_rejected", False)
        to_csv = config.get("analysis_to_csv", False)
        csv_path = Path(config.get("analysis_csv_path", config["exportfilename"]))
        if to_csv and not csv_path.is_dir():
            raise OperationalException(f"Specified directory {csv_path} does not exist.")

        timerange = TimeRange.parse_timerange(
            None if config.get("timerange") is None else str(config.get("timerange"))
        )

        backtest_stats = load_backtest_stats(config["exportfilename"])

        for strategy_name, results in backtest_stats["strategy"].items():
            trades = load_backtest_data(config["exportfilename"], strategy_name)

            if trades is not None and not trades.empty:
                signal_candles = _load_signal_candles(config["exportfilename"])
                exit_signals = _load_exit_signal_candles(config["exportfilename"])

                rej_df = None
                if do_rejected:
                    rejected_signals_dict = _load_rejected_signals(config["exportfilename"])
                    rej_df = prepare_results(
                        rejected_signals_dict,
                        strategy_name,
                        enter_reason_list,
                        exit_reason_list,
                        timerange=timerange,
                    )

                entry_df = _generate_dfs(
                    config["exchange"]["pair_whitelist"],
                    enter_reason_list,
                    exit_reason_list,
                    signal_candles,
                    strategy_name,
                    timerange,
                    trades,
                    "open_date",
                )

                exit_df = _generate_dfs(
                    config["exchange"]["pair_whitelist"],
                    enter_reason_list,
                    exit_reason_list,
                    exit_signals,
                    strategy_name,
                    timerange,
                    trades,
                    "close_date",
                )

                print_results(
                    entry_df,
                    exit_df,
                    analysis_groups,
                    indicator_list,
                    rejected_signals=rej_df,
                    to_csv=to_csv,
                    csv_path=csv_path,
                )

    except ValueError as e:
        raise OperationalException(e) from e


def _generate_dfs(
    pairlist: list,
    enter_reason_list: list,
    exit_reason_list: list,
    signal_candles: Dict,
    strategy_name: str,
    timerange: TimeRange,
    trades: pd.DataFrame,
    date_col: str,
) -> pd.DataFrame:
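    """Run candle/indicator analysis for one strategy and return the filtered results."""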
    analysed_trades_dict = _process_candles_and_indicators(
        pairlist,
        strategy_name,
        trades,
        signal_candles,
        date_col,
    )
    res_df = prepare_results(
        analysed_trades_dict,
        strategy_name,
        enter_reason_list,
        exit_reason_list,
        timerange=timerange,
    )
    return res_df