#000 | Anuj | Merge Dfs for entry and exit in one table

This commit is contained in:
jainanuj94 2024-08-18 18:41:04 +05:30
parent 8085e24dcd
commit 19a2e06c0b
2 changed files with 76 additions and 258 deletions

freqtrade/data/entryexitanalysis.py

@@ -257,16 +257,14 @@ def prepare_results(
def print_results(
res_df: pd.DataFrame,
exit_df: pd.DataFrame,
analysis_groups: List[str],
indicator_list: List[str],
csv_path: Path,
rejected_signals=None,
to_csv=False,
exited_signals=False,
):
if res_df.shape[0] > 0:
if exited_signals is True:
print("Analysing on exit signals.")
if analysis_groups:
_do_group_table_output(res_df, analysis_groups, to_csv=to_csv, csv_path=csv_path)
@@ -286,9 +284,11 @@ def print_results(
for ind in indicator_list:
if ind in res_df:
available_inds.append(ind)
ilist = ["pair", "enter_reason", "exit_reason"] + available_inds
merged_df = _merge_dfs(res_df, exit_df, available_inds)
_print_table(
res_df[ilist],
merged_df,
sortcols=["exit_reason"],
show_index=False,
name="Indicators:",
@@ -299,6 +299,21 @@ def print_results(
print("\\No trades to show")
def _merge_dfs(entry_df, exit_df, available_inds):
merge_on = ["pair", "open_date"]
columns_to_keep = merge_on + ["enter_reason", "exit_reason"] + available_inds
if exit_df is not None and not exit_df.empty:
merged_df = pd.merge(
entry_df[columns_to_keep],
exit_df[merge_on + available_inds],
on=merge_on,
suffixes=(" (entry)", " (exit)"),
)
else:
merged_df = entry_df[columns_to_keep]
return merged_df
def _print_table(
df: pd.DataFrame, sortcols=None, *, show_index=False, name=None, to_csv=False, csv_path: Path
):
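For orientation, a minimal standalone sketch of what the _merge_dfs join above produces. The frames and indicator values below are invented toy data; only the key columns and the " (entry)"/" (exit)" suffixes follow the code above. When exit_df is missing or empty, _merge_dfs falls back to the unsuffixed entry columns, so the output degrades to the previous entry-only table.

import pandas as pd

# Toy entry-side and exit-side result frames; "close" and "rsi" stand in for
# whatever names appear in indicator_list.
entry_df = pd.DataFrame(
    {
        "pair": ["ETH/BTC"],
        "open_date": pd.to_datetime(["2018-01-29 18:40:00"], utc=True),
        "enter_reason": ["enter_tag_long_a"],
        "exit_reason": ["roi"],
        "close": [0.104],
        "rsi": [54.3],
    }
)
exit_df = pd.DataFrame(
    {
        "pair": ["ETH/BTC"],
        "open_date": pd.to_datetime(["2018-01-29 18:40:00"], utc=True),
        "close": [0.105],
        "rsi": [52.8],
    }
)

merged = pd.merge(
    entry_df,
    exit_df,
    on=["pair", "open_date"],
    suffixes=(" (entry)", " (exit)"),
)
print(list(merged.columns))
# ['pair', 'open_date', 'enter_reason', 'exit_reason',
#  'close (entry)', 'rsi (entry)', 'close (exit)', 'rsi (exit)']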
@@ -324,7 +339,6 @@ def process_entry_exit_reasons(config: Config):
enter_reason_list = config.get("enter_reason_list", ["all"])
exit_reason_list = config.get("exit_reason_list", ["all"])
indicator_list = config.get("indicator_list", [])
do_exited = config.get("analysis_exited", False)
do_rejected = config.get("analysis_rejected", False)
to_csv = config.get("analysis_to_csv", False)
csv_path = Path(config.get("analysis_csv_path", config["exportfilename"]))
@@ -341,12 +355,8 @@ def process_entry_exit_reasons(config: Config):
trades = load_backtest_data(config["exportfilename"], strategy_name)
if trades is not None and not trades.empty:
date_col = "open_date"
if do_exited is True:
signal_candles = _load_exit_signal_candles(config["exportfilename"])
date_col = "close_date"
else:
signal_candles = _load_signal_candles(config["exportfilename"])
signal_candles = _load_signal_candles(config["exportfilename"])
exit_signals = _load_exit_signal_candles(config["exportfilename"])
rej_df = None
if do_rejected:
@@ -359,31 +369,64 @@
timerange=timerange,
)
analysed_trades_dict = _process_candles_and_indicators(
entry_df = _generate_dfs(
config["exchange"]["pair_whitelist"],
strategy_name,
trades,
signal_candles,
date_col,
)
res_df = prepare_results(
analysed_trades_dict,
strategy_name,
enter_reason_list,
exit_reason_list,
timerange=timerange,
signal_candles,
strategy_name,
timerange,
trades,
"open_date",
)
exit_df = _generate_dfs(
config["exchange"]["pair_whitelist"],
enter_reason_list,
exit_reason_list,
exit_signals,
strategy_name,
timerange,
trades,
"close_date",
)
print_results(
res_df,
entry_df,
exit_df,
analysis_groups,
indicator_list,
rejected_signals=rej_df,
to_csv=to_csv,
csv_path=csv_path,
exited_signals=do_exited,
)
except ValueError as e:
raise OperationalException(e) from e
def _generate_dfs(
pairlist: list,
enter_reason_list: list,
exit_reason_list: list,
signal_candles: Dict,
strategy_name: str,
timerange: TimeRange,
trades: pd.DataFrame,
date_col: str,
) -> pd.DataFrame:
analysed_trades_dict = _process_candles_and_indicators(
pairlist,
strategy_name,
trades,
signal_candles,
date_col,
)
res_df = prepare_results(
analysed_trades_dict,
strategy_name,
enter_reason_list,
exit_reason_list,
timerange=timerange,
)
return res_df
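The process_entry_exit_reasons changes above boil down to calling one helper twice: once against the entry signal candles keyed on "open_date" and once against the exit signal candles keyed on "close_date", with both results handed to print_results. A self-contained toy sketch of that two-pass shape follows; build_side is a hypothetical stand-in for _generate_dfs and the frames are invented, so it only mirrors the structure, not freqtrade's actual candle and indicator matching.

import pandas as pd


def build_side(trades: pd.DataFrame, signals: pd.DataFrame, date_col: str) -> pd.DataFrame:
    # Hypothetical stand-in for _generate_dfs: attach the signal candle whose
    # candle date matches the trade's entry or exit timestamp.
    candles = signals.rename(columns={"date": date_col})
    return trades.merge(candles, on=["pair", date_col])


trades = pd.DataFrame(
    {
        "pair": ["ETH/BTC"],
        "open_date": pd.to_datetime(["2018-01-29 18:40:00"], utc=True),
        "close_date": pd.to_datetime(["2018-01-30 20:45:00"], utc=True),
        "enter_reason": ["enter_tag_long_a"],
        "exit_reason": ["roi"],
    }
)
entry_signals = pd.DataFrame(
    {
        "pair": ["ETH/BTC"],
        "date": pd.to_datetime(["2018-01-29 18:40:00"], utc=True),
        "rsi": [54.3],
    }
)
exit_signals = pd.DataFrame(
    {
        "pair": ["ETH/BTC"],
        "date": pd.to_datetime(["2018-01-30 20:45:00"], utc=True),
        "rsi": [52.8],
    }
)

entry_df = build_side(trades, entry_signals, "open_date")   # indicators at entry time
exit_df = build_side(trades, exit_signals, "close_date")    # indicators at exit time
# Both frames keep "pair" and "open_date", which is what the _merge_dfs join
# in print_results relies on to line the two sides up per trade.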

tests/data/test_entryexitanalysis.py

@@ -160,6 +160,14 @@ def test_backtest_analysis_on_entry_and_rejected_signals_nomock(
assert "34.049" in captured.out
assert "0.104" in captured.out
assert "52.829" in captured.out
# assert indicator list
assert "close (entry)" in captured.out
assert "0.016" in captured.out
assert "rsi (entry)" in captured.out
assert "54.320" in captured.out
assert "close (exit)" in captured.out
assert "rsi (exit)" in captured.out
assert "52.829" in captured.out
# test group 1
args = get_args(base_args + ["--analysis-groups", "1"])
@@ -247,236 +255,3 @@ def test_backtest_analysis_on_entry_and_rejected_signals_nomock(
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert "no rejected signals" in captured.out
def test_backtest_analysis_on_exit_signals_nomock(
default_conf, mocker, caplog, testdatadir, user_dir, capsys
):
caplog.set_level(logging.INFO)
(user_dir / "backtest_results").mkdir(parents=True, exist_ok=True)
default_conf.update(
{
"use_exit_signal": True,
"exit_profit_only": False,
"exit_profit_offset": 0.0,
"ignore_roi_if_entry_signal": False,
}
)
patch_exchange(mocker)
result1 = pd.DataFrame(
{
"pair": ["ETH/BTC", "LTC/BTC", "ETH/BTC", "LTC/BTC"],
"profit_ratio": [0.025, 0.05, -0.1, -0.05],
"profit_abs": [0.5, 2.0, -4.0, -2.0],
"open_date": pd.to_datetime(
[
"2018-01-29 18:40:00",
"2018-01-30 03:30:00",
"2018-01-30 08:10:00",
"2018-01-31 13:30:00",
],
utc=True,
),
"close_date": pd.to_datetime(
[
"2018-01-30 20:45:00",
"2018-01-30 05:35:00",
"2018-01-30 09:10:00",
"2018-01-31 15:00:00",
],
utc=True,
),
"trade_duration": [235, 40, 60, 90],
"is_open": [False, False, False, False],
"stake_amount": [0.01, 0.01, 0.01, 0.01],
"open_rate": [0.104445, 0.10302485, 0.10302485, 0.10302485],
"close_rate": [0.104969, 0.103541, 0.102041, 0.102541],
"is_short": [False, False, False, False],
"enter_tag": [
"enter_tag_long_a",
"enter_tag_long_b",
"enter_tag_long_a",
"enter_tag_long_b",
],
"exit_reason": [
ExitType.ROI.value,
ExitType.EXIT_SIGNAL.value,
ExitType.STOP_LOSS.value,
ExitType.TRAILING_STOP_LOSS.value,
],
}
)
backtestmock = MagicMock(
side_effect=[
{
"results": result1,
"config": default_conf,
"locks": [],
"rejected_signals": 20,
"timedout_entry_orders": 0,
"timedout_exit_orders": 0,
"canceled_trade_entries": 0,
"canceled_entry_orders": 0,
"replaced_entry_orders": 0,
"final_balance": 1000,
}
]
)
mocker.patch(
"freqtrade.plugins.pairlistmanager.PairListManager.whitelist",
PropertyMock(return_value=["ETH/BTC", "LTC/BTC", "DASH/BTC"]),
)
mocker.patch("freqtrade.optimize.backtesting.Backtesting.backtest", backtestmock)
patched_configuration_load_config_file(mocker, default_conf)
args = [
"backtesting",
"--config",
"config.json",
"--datadir",
str(testdatadir),
"--user-data-dir",
str(user_dir),
"--timeframe",
"5m",
"--timerange",
"1515560100-1517287800",
"--export",
"signals",
"--cache",
"none",
]
args = get_args(args)
start_backtesting(args)
captured = capsys.readouterr()
assert "BACKTESTING REPORT" in captured.out
assert "EXIT REASON STATS" in captured.out
assert "LEFT OPEN TRADES REPORT" in captured.out
base_args = [
"backtesting-analysis",
"--config",
"config.json",
"--datadir",
str(testdatadir),
"--user-data-dir",
str(user_dir),
]
# test group 0 and indicator list
args = get_args(
base_args
+ [
"--analysis-groups",
"0",
"--exit-signals",
"--indicator-list",
"close",
"rsi",
"profit_abs",
]
)
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert "LTC/BTC" in captured.out
assert "ETH/BTC" in captured.out
assert "enter_tag_long_a" in captured.out
assert "enter_tag_long_b" in captured.out
assert "exit_signal" in captured.out
assert "roi" in captured.out
assert "stop_loss" in captured.out
assert "trailing_stop_loss" in captured.out
assert "0.5" in captured.out
assert "-4" in captured.out
assert "-2" in captured.out
assert "57.654" in captured.out
assert "-8" in captured.out
assert "0" in captured.out
assert "0.104" in captured.out
assert "0.016" in captured.out
assert "52.829" in captured.out
# test group 1
args = get_args(base_args + ["--analysis-groups", "1"])
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert "enter_tag_long_a" in captured.out
assert "enter_tag_long_b" in captured.out
assert "total_profit_pct" in captured.out
assert "-3.5" in captured.out
assert "-1.75" in captured.out
assert "-7.5" in captured.out
assert "-3.75" in captured.out
assert "0" in captured.out
# test group 2
args = get_args(base_args + ["--analysis-groups", "2"])
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert "enter_tag_long_a" in captured.out
assert "enter_tag_long_b" in captured.out
assert "exit_signal" in captured.out
assert "roi" in captured.out
assert "stop_loss" in captured.out
assert "trailing_stop_loss" in captured.out
assert "total_profit_pct" in captured.out
assert "-10" in captured.out
assert "-5" in captured.out
assert "2.5" in captured.out
# test group 3
args = get_args(base_args + ["--analysis-groups", "3"])
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert "LTC/BTC" in captured.out
assert "ETH/BTC" in captured.out
assert "enter_tag_long_a" in captured.out
assert "enter_tag_long_b" in captured.out
assert "total_profit_pct" in captured.out
assert "-7.5" in captured.out
assert "-3.75" in captured.out
assert "-1.75" in captured.out
assert "0" in captured.out
assert "2" in captured.out
# test group 4
args = get_args(base_args + ["--analysis-groups", "4"])
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert "LTC/BTC" in captured.out
assert "ETH/BTC" in captured.out
assert "enter_tag_long_a" in captured.out
assert "enter_tag_long_b" in captured.out
assert "exit_signal" in captured.out
assert "roi" in captured.out
assert "stop_loss" in captured.out
assert "trailing_stop_loss" in captured.out
assert "total_profit_pct" in captured.out
assert "-10" in captured.out
assert "-5" in captured.out
assert "-4" in captured.out
assert "0.5" in captured.out
assert "1" in captured.out
assert "2.5" in captured.out
# test group 5
args = get_args(base_args + ["--analysis-groups", "5"])
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert "exit_signal" in captured.out
assert "roi" in captured.out
assert "stop_loss" in captured.out
assert "trailing_stop_loss" in captured.out
# test date filtering
args = get_args(
base_args + ["--analysis-groups", "0", "1", "2", "--timerange", "20180129-20180130"]
)
start_analysis_entries_exits(args)
captured = capsys.readouterr()
assert "enter_tag_long_a" in captured.out
assert "enter_tag_long_b" not in captured.out