commit ec40e79362
docs/advanced-backtesting.md
@@ -22,50 +22,79 @@ DataFrame of the candles that resulted in buy signals. Depending on how many buy
makes, this file may get quite large, so periodically check your `user_data/backtest_results`
folder to delete old exports.

To analyze the buy tags, we need to use the `buy_reasons.py` script from
[froggleston's repo](https://github.com/froggleston/freqtrade-buyreasons). Follow the instructions
in their README to copy the script into your `freqtrade/scripts/` folder.

Before running your next backtest, make sure you either delete your old backtest results or run
backtesting with the `--cache none` option to make sure no cached results are used.
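For example, a backtest run that produces the signals export could look like the following sketch; the strategy name and timerange are placeholders, and the exact flags you combine will depend on your setup:

```bash
freqtrade backtesting -c <config.json> --strategy <strategy_name> --timerange <timerange> --export signals --cache none
```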

If all goes well, you should now see a `backtest-result-{timestamp}_signals.pkl` file in the
`user_data/backtest_results` folder.

Now run the `buy_reasons.py` script, supplying a few options:
To analyze the entry/exit tags, we now need to use the `freqtrade backtesting-analysis` command
with the `--analysis-groups` option, which takes space-separated arguments (default: `0 1 2`):

``` bash
python3 scripts/buy_reasons.py -c <config.json> -s <strategy_name> -t <timerange> -g0,1,2,3,4
freqtrade backtesting-analysis -c <config.json> --analysis-groups 0 1 2 3 4
```

The `-g` option is used to specify the various tabular outputs, ranging from the simplest (0)
to the most detailed per pair, per buy and per sell tag (4). More options are available by
running with the `-h` option.
This command will read from the last backtesting results. The `--analysis-groups` option is
used to specify the various tabular outputs showing the profit for each group or trade,
ranging from the simplest (0) to the most detailed per pair, per buy and per sell tag (4):

* 1: profit summaries grouped by enter_tag
* 2: profit summaries grouped by enter_tag and exit_tag
* 3: profit summaries grouped by pair and enter_tag
* 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)

More options are available by running with the `-h` option.
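For instance, to print the full list of supported options and their descriptions (the same reference that appears in the command documentation further below):

```bash
freqtrade backtesting-analysis -h
```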

### Using export-filename

Normally, `backtesting-analysis` uses the latest backtest results, but if you want to go
back to a previous backtest output, you need to supply the `--export-filename` option.
You can supply the same parameter to `backtesting-analysis` with the name of the final backtest
output file. This allows you to keep historical versions of backtest results and re-analyse
them at a later date:

``` bash
freqtrade backtesting -c <config.json> --timeframe <tf> --strategy <strategy_name> --timerange=<timerange> --export=signals --export-filename=/tmp/mystrat_backtest.json
```

You should see output in the logs similar to the below, containing the name of the timestamped
filename that was exported:

```
2022-06-14 16:28:32,698 - freqtrade.misc - INFO - dumping json to "/tmp/mystrat_backtest-2022-06-14_16-28-32.json"
```

You can then use that filename in `backtesting-analysis`:

```
freqtrade backtesting-analysis -c <config.json> --export-filename=/tmp/mystrat_backtest-2022-06-14_16-28-32.json
```
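The same call can also carry any of the analysis options described above; for example, reusing the timestamped file from the log output (the grouping choice here is purely illustrative):

```bash
freqtrade backtesting-analysis -c <config.json> --export-filename=/tmp/mystrat_backtest-2022-06-14_16-28-32.json --analysis-groups 0 2
```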

### Tuning the buy tags and sell tags to display

To show only certain buy and sell tags in the displayed output, use the following two options:

```
--enter_reason_list : Comma separated list of enter signals to analyse. Default: "all"
--exit_reason_list : Comma separated list of exit signals to analyse. Default: "stop_loss,trailing_stop_loss"
--enter-reason-list : Space-separated list of enter signals to analyse. Default: "all"
--exit-reason-list : Space-separated list of exit signals to analyse. Default: "all"
```

For example:

```bash
python3 scripts/buy_reasons.py -c <config.json> -s <strategy_name> -t <timerange> -g0,1,2,3,4 --enter_reason_list "enter_tag_a,enter_tag_b" --exit_reason_list "roi,custom_exit_tag_a,stop_loss"
freqtrade backtesting-analysis -c <config.json> --analysis-groups 0 2 --enter-reason-list enter_tag_a enter_tag_b --exit-reason-list roi custom_exit_tag_a stop_loss
```

### Outputting signal candle indicators

The real power of the buy_reasons.py script comes from the ability to print out the indicator
The real power of `freqtrade backtesting-analysis` comes from the ability to print out the indicator
values present on signal candles to allow fine-grained investigation and tuning of buy signal
indicators. To print out a column for a given set of indicators, use the `--indicator-list`
option:

```bash
python3 scripts/buy_reasons.py -c <config.json> -s <strategy_name> -t <timerange> -g0,1,2,3,4 --enter_reason_list "enter_tag_a,enter_tag_b" --exit_reason_list "roi,custom_exit_tag_a,stop_loss" --indicator_list "rsi,rsi_1h,bb_lowerband,ema_9,macd,macdsignal"
freqtrade backtesting-analysis -c <config.json> --analysis-groups 0 2 --enter-reason-list enter_tag_a enter_tag_b --exit-reason-list roi custom_exit_tag_a stop_loss --indicator-list rsi rsi_1h bb_lowerband ema_9 macd macdsignal
```

The indicators have to be present in your strategy's main DataFrame (either for your main
docs/utils.md
@@ -651,6 +651,61 @@ Common arguments:

```

## Detailed backtest analysis

Advanced backtest result analysis.

More details in the [Backtesting analysis](advanced-backtesting.md#analyze-the-buyentry-and-sellexit-tags) Section.

```
usage: freqtrade backtesting-analysis [-h] [-v] [--logfile FILE] [-V]
                                      [-c PATH] [-d PATH] [--userdir PATH]
                                      [--export-filename PATH]
                                      [--analysis-groups {0,1,2,3,4} [{0,1,2,3,4} ...]]
                                      [--enter-reason-list ENTER_REASON_LIST [ENTER_REASON_LIST ...]]
                                      [--exit-reason-list EXIT_REASON_LIST [EXIT_REASON_LIST ...]]
                                      [--indicator-list INDICATOR_LIST [INDICATOR_LIST ...]]

optional arguments:
  -h, --help            show this help message and exit
  --export-filename PATH, --backtest-filename PATH
                        Use this filename for backtest results.Requires
                        `--export` to be set as well. Example: `--export-filen
                        ame=user_data/backtest_results/backtest_today.json`
  --analysis-groups {0,1,2,3,4} [{0,1,2,3,4} ...]
                        grouping output - 0: simple wins/losses by enter tag,
                        1: by enter_tag, 2: by enter_tag and exit_tag, 3: by
                        pair and enter_tag, 4: by pair, enter_ and exit_tag
                        (this can get quite large)
  --enter-reason-list ENTER_REASON_LIST [ENTER_REASON_LIST ...]
                        Comma separated list of entry signals to analyse.
                        Default: all. e.g. 'entry_tag_a,entry_tag_b'
  --exit-reason-list EXIT_REASON_LIST [EXIT_REASON_LIST ...]
                        Comma separated list of exit signals to analyse.
                        Default: all. e.g.
                        'exit_tag_a,roi,stop_loss,trailing_stop_loss'
  --indicator-list INDICATOR_LIST [INDICATOR_LIST ...]
                        Comma separated list of indicators to analyse. e.g.
                        'close,rsi,bb_lowerband,profit_abs'

Common arguments:
  -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
  --logfile FILE        Log to the file specified. Special values are:
                        'syslog', 'journald'. See the documentation for more
                        details.
  -V, --version         show program's version number and exit
  -c PATH, --config PATH
                        Specify configuration file (default:
                        `userdir/config.json` or `config.json` whichever
                        exists). Multiple --config options may be used. Can be
                        set to `-` to read config from stdin.
  -d PATH, --datadir PATH
                        Path to directory with historical backtesting data.
  --userdir PATH, --user-data-dir PATH
                        Path to userdata directory.

```

## List Hyperopt results

You can list the hyperoptimization epochs the Hyperopt module evaluated previously with the `hyperopt-list` sub-command.
freqtrade/commands/__init__.py
@@ -6,6 +6,7 @@ Contains all start-commands, subcommands and CLI Interface creation.
Note: Be careful with file-scoped imports in these subfiles.
    as they are parsed on startup, nothing containing optional modules should be loaded.
"""
from freqtrade.commands.analyze_commands import start_analysis_entries_exits
from freqtrade.commands.arguments import Arguments
from freqtrade.commands.build_config_commands import start_new_config
from freqtrade.commands.data_commands import (start_convert_data, start_convert_trades,
freqtrade/commands/analyze_commands.py (new executable file, 69 lines)
@@ -0,0 +1,69 @@
import logging
from pathlib import Path
from typing import Any, Dict

from freqtrade.configuration import setup_utils_configuration
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException


logger = logging.getLogger(__name__)


def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str, Any]:
    """
    Prepare the configuration for the entry/exit reason analysis module
    :param args: Cli args from Arguments()
    :param method: Bot running mode
    :return: Configuration
    """
    config = setup_utils_configuration(args, method)

    no_unlimited_runmodes = {
        RunMode.BACKTEST: 'backtesting',
    }
    if method in no_unlimited_runmodes.keys():
        from freqtrade.data.btanalysis import get_latest_backtest_filename

        if 'exportfilename' in config:
            if config['exportfilename'].is_dir():
                btfile = Path(get_latest_backtest_filename(config['exportfilename']))
                signals_file = f"{config['exportfilename']}/{btfile.stem}_signals.pkl"
            else:
                if config['exportfilename'].exists():
                    btfile = Path(config['exportfilename'])
                    signals_file = f"{btfile.parent}/{btfile.stem}_signals.pkl"
                else:
                    raise OperationalException(f"{config['exportfilename']} does not exist.")
        else:
            raise OperationalException('exportfilename not in config.')

        if (not Path(signals_file).exists()):
            raise OperationalException(
                (f"Cannot find latest backtest signals file: {signals_file}."
                 "Run backtesting with `--export signals`.")
            )

    return config


def start_analysis_entries_exits(args: Dict[str, Any]) -> None:
    """
    Start analysis script
    :param args: Cli args from Arguments()
    :return: None
    """
    from freqtrade.data.entryexitanalysis import process_entry_exit_reasons

    # Initialize configuration
    config = setup_analyze_configuration(args, RunMode.BACKTEST)

    logger.info('Starting freqtrade in analysis mode')

    process_entry_exit_reasons(config['exportfilename'],
                               config['exchange']['pair_whitelist'],
                               config['analysis_groups'],
                               config['enter_reason_list'],
                               config['exit_reason_list'],
                               config['indicator_list']
                               )
freqtrade/commands/arguments.py
@@ -101,6 +101,9 @@ ARGS_HYPEROPT_SHOW = ["hyperopt_list_best", "hyperopt_list_profitable", "hyperop
                      "print_json", "hyperoptexportfilename", "hyperopt_show_no_header",
                      "disableparamexport", "backtest_breakdown"]

ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason_list",
                              "exit_reason_list", "indicator_list"]

NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
                    "list-markets", "list-pairs", "list-strategies", "list-data",
                    "hyperopt-list", "hyperopt-show", "backtest-filter",
@@ -182,8 +185,9 @@ class Arguments:
        self.parser = argparse.ArgumentParser(description='Free, open source crypto trading bot')
        self._build_args(optionlist=['version'], parser=self.parser)

        from freqtrade.commands import (start_backtesting, start_backtesting_show,
                                        start_convert_data, start_convert_db, start_convert_trades,
        from freqtrade.commands import (start_analysis_entries_exits, start_backtesting,
                                        start_backtesting_show, start_convert_data,
                                        start_convert_db, start_convert_trades,
                                        start_create_userdir, start_download_data, start_edge,
                                        start_hyperopt, start_hyperopt_list, start_hyperopt_show,
                                        start_install_ui, start_list_data, start_list_exchanges,
@@ -283,6 +287,13 @@ class Arguments:
        backtesting_show_cmd.set_defaults(func=start_backtesting_show)
        self._build_args(optionlist=ARGS_BACKTEST_SHOW, parser=backtesting_show_cmd)

        # Add backtesting analysis subcommand
        analysis_cmd = subparsers.add_parser('backtesting-analysis',
                                             help='Backtest Analysis module.',
                                             parents=[_common_parser])
        analysis_cmd.set_defaults(func=start_analysis_entries_exits)
        self._build_args(optionlist=ARGS_ANALYZE_ENTRIES_EXITS, parser=analysis_cmd)

        # Add edge subcommand
        edge_cmd = subparsers.add_parser('edge', help='Edge module.',
                                         parents=[_common_parser, _strategy_parser])
freqtrade/commands/cli_options.py
@@ -614,4 +614,37 @@ AVAILABLE_CLI_OPTIONS = {
              "that do not contain any parameters."),
        action="store_true",
    ),
    "analysis_groups": Arg(
        "--analysis-groups",
        help=("grouping output - "
              "0: simple wins/losses by enter tag, "
              "1: by enter_tag, "
              "2: by enter_tag and exit_tag, "
              "3: by pair and enter_tag, "
              "4: by pair, enter_ and exit_tag (this can get quite large)"),
        nargs='+',
        default=['0', '1', '2'],
        choices=['0', '1', '2', '3', '4'],
    ),
    "enter_reason_list": Arg(
        "--enter-reason-list",
        help=("Comma separated list of entry signals to analyse. Default: all. "
              "e.g. 'entry_tag_a,entry_tag_b'"),
        nargs='+',
        default=['all'],
    ),
    "exit_reason_list": Arg(
        "--exit-reason-list",
        help=("Comma separated list of exit signals to analyse. Default: all. "
              "e.g. 'exit_tag_a,roi,stop_loss,trailing_stop_loss'"),
        nargs='+',
        default=['all'],
    ),
    "indicator_list": Arg(
        "--indicator-list",
        help=("Comma separated list of indicators to analyse. "
              "e.g. 'close,rsi,bb_lowerband,profit_abs'"),
        nargs='+',
        default=[],
    ),
}
freqtrade/configuration/configuration.py
@@ -95,6 +95,8 @@ class Configuration:

        self._process_data_options(config)

        self._process_analyze_options(config)

        # Check if the exchange set by the user is supported
        check_exchange(config, config.get('experimental', {}).get('block_bad_exchanges', True))
@@ -433,6 +435,19 @@ class Configuration:
        self._args_to_config(config, argname='candle_types',
                             logstring='Detected --candle-types: {}')

    def _process_analyze_options(self, config: Dict[str, Any]) -> None:
        self._args_to_config(config, argname='analysis_groups',
                             logstring='Analysis reason groups: {}')

        self._args_to_config(config, argname='enter_reason_list',
                             logstring='Analysis enter tag list: {}')

        self._args_to_config(config, argname='exit_reason_list',
                             logstring='Analysis exit tag list: {}')

        self._args_to_config(config, argname='indicator_list',
                             logstring='Analysis indicator list: {}')

    def _process_runmode(self, config: Dict[str, Any]) -> None:

        self._args_to_config(config, argname='dry_run',
freqtrade/data/entryexitanalysis.py (new executable file, 227 lines)
@@ -0,0 +1,227 @@
import logging
from pathlib import Path
from typing import List, Optional

import joblib
import pandas as pd
from tabulate import tabulate

from freqtrade.data.btanalysis import (get_latest_backtest_filename, load_backtest_data,
                                       load_backtest_stats)
from freqtrade.exceptions import OperationalException


logger = logging.getLogger(__name__)


def _load_signal_candles(backtest_dir: Path):
    if backtest_dir.is_dir():
        scpf = Path(backtest_dir,
                    Path(get_latest_backtest_filename(backtest_dir)).stem + "_signals.pkl"
                    )
    else:
        scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_signals.pkl")

    try:
        scp = open(scpf, "rb")
        signal_candles = joblib.load(scp)
        logger.info(f"Loaded signal candles: {str(scpf)}")
    except Exception as e:
        logger.error("Cannot load signal candles from pickled results: ", e)

    return signal_candles


def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_candles):
    analysed_trades_dict = {}
    analysed_trades_dict[strategy_name] = {}

    try:
        logger.info(f"Processing {strategy_name} : {len(pairlist)} pairs")

        for pair in pairlist:
            if pair in signal_candles[strategy_name]:
                analysed_trades_dict[strategy_name][pair] = _analyze_candles_and_indicators(
                    pair,
                    trades,
                    signal_candles[strategy_name][pair])
    except Exception as e:
        print(f"Cannot process entry/exit reasons for {strategy_name}: ", e)

    return analysed_trades_dict


def _analyze_candles_and_indicators(pair, trades, signal_candles):
    buyf = signal_candles

    if len(buyf) > 0:
        buyf = buyf.set_index('date', drop=False)
        trades_red = trades.loc[trades['pair'] == pair].copy()

        trades_inds = pd.DataFrame()

        if trades_red.shape[0] > 0 and buyf.shape[0] > 0:
            for t, v in trades_red.open_date.items():
                allinds = buyf.loc[(buyf['date'] < v)]
                if allinds.shape[0] > 0:
                    tmp_inds = allinds.iloc[[-1]]

                    trades_red.loc[t, 'signal_date'] = tmp_inds['date'].values[0]
                    trades_red.loc[t, 'enter_reason'] = trades_red.loc[t, 'enter_tag']
                    tmp_inds.index.rename('signal_date', inplace=True)
                    trades_inds = pd.concat([trades_inds, tmp_inds])

            if 'signal_date' in trades_red:
                trades_red['signal_date'] = pd.to_datetime(trades_red['signal_date'], utc=True)
                trades_red.set_index('signal_date', inplace=True)

                try:
                    trades_red = pd.merge(trades_red, trades_inds, on='signal_date', how='outer')
                except Exception as e:
                    raise e
        return trades_red
    else:
        return pd.DataFrame()


def _do_group_table_output(bigdf, glist):
    for g in glist:
        # 0: summary wins/losses grouped by enter tag
        if g == "0":
            group_mask = ['enter_reason']
            wins = bigdf.loc[bigdf['profit_abs'] >= 0] \
                .groupby(group_mask) \
                .agg({'profit_abs': ['sum']})

            wins.columns = ['profit_abs_wins']
            loss = bigdf.loc[bigdf['profit_abs'] < 0] \
                .groupby(group_mask) \
                .agg({'profit_abs': ['sum']})
            loss.columns = ['profit_abs_loss']

            new = bigdf.groupby(group_mask).agg({'profit_abs': [
                'count',
                lambda x: sum(x > 0),
                lambda x: sum(x <= 0)]})
            new = pd.concat([new, wins, loss], axis=1).fillna(0)

            new['profit_tot'] = new['profit_abs_wins'] - abs(new['profit_abs_loss'])
            new['wl_ratio_pct'] = (new.iloc[:, 1] / new.iloc[:, 0] * 100).fillna(0)
            new['avg_win'] = (new['profit_abs_wins'] / new.iloc[:, 1]).fillna(0)
            new['avg_loss'] = (new['profit_abs_loss'] / new.iloc[:, 2]).fillna(0)

            new.columns = ['total_num_buys', 'wins', 'losses', 'profit_abs_wins', 'profit_abs_loss',
                           'profit_tot', 'wl_ratio_pct', 'avg_win', 'avg_loss']

            sortcols = ['total_num_buys']

            _print_table(new, sortcols, show_index=True)

        else:
            agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
                        'profit_ratio': ['sum', 'median', 'mean']}
            agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
                        'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
                        'total_profit_pct']
            sortcols = ['profit_abs_sum', 'enter_reason']

            # 1: profit summaries grouped by enter_tag
            if g == "1":
                group_mask = ['enter_reason']

            # 2: profit summaries grouped by enter_tag and exit_tag
            if g == "2":
                group_mask = ['enter_reason', 'exit_reason']

            # 3: profit summaries grouped by pair and enter_tag
            if g == "3":
                group_mask = ['pair', 'enter_reason']

            # 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
            if g == "4":
                group_mask = ['pair', 'enter_reason', 'exit_reason']
            if group_mask:
                new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
                new.columns = group_mask + agg_cols
                new['median_profit_pct'] = new['median_profit_pct'] * 100
                new['mean_profit_pct'] = new['mean_profit_pct'] * 100
                new['total_profit_pct'] = new['total_profit_pct'] * 100

                _print_table(new, sortcols)
            else:
                logger.warning("Invalid group mask specified.")


def _print_results(analysed_trades, stratname, analysis_groups,
                   enter_reason_list, exit_reason_list,
                   indicator_list, columns=None):
    if columns is None:
        columns = ['pair', 'open_date', 'close_date', 'profit_abs', 'enter_reason', 'exit_reason']

    bigdf = pd.DataFrame()
    for pair, trades in analysed_trades[stratname].items():
        bigdf = pd.concat([bigdf, trades], ignore_index=True)

    if bigdf.shape[0] > 0 and ('enter_reason' in bigdf.columns):
        if analysis_groups:
            _do_group_table_output(bigdf, analysis_groups)

        if enter_reason_list and "all" not in enter_reason_list:
            bigdf = bigdf.loc[(bigdf['enter_reason'].isin(enter_reason_list))]

        if exit_reason_list and "all" not in exit_reason_list:
            bigdf = bigdf.loc[(bigdf['exit_reason'].isin(exit_reason_list))]

        if "all" in indicator_list:
            print(bigdf)
        elif indicator_list is not None:
            available_inds = []
            for ind in indicator_list:
                if ind in bigdf:
                    available_inds.append(ind)
            ilist = ["pair", "enter_reason", "exit_reason"] + available_inds
            _print_table(bigdf[ilist], sortcols=['exit_reason'], show_index=False)
    else:
        print("\\_ No trades to show")


def _print_table(df, sortcols=None, show_index=False):
    if (sortcols is not None):
        data = df.sort_values(sortcols)
    else:
        data = df

    print(
        tabulate(
            data,
            headers='keys',
            tablefmt='psql',
            showindex=show_index
        )
    )


def process_entry_exit_reasons(backtest_dir: Path,
                               pairlist: List[str],
                               analysis_groups: Optional[List[str]] = ["0", "1", "2"],
                               enter_reason_list: Optional[List[str]] = ["all"],
                               exit_reason_list: Optional[List[str]] = ["all"],
                               indicator_list: Optional[List[str]] = []):
    try:
        backtest_stats = load_backtest_stats(backtest_dir)
        for strategy_name, results in backtest_stats['strategy'].items():
            trades = load_backtest_data(backtest_dir, strategy_name)

            if not trades.empty:
                signal_candles = _load_signal_candles(backtest_dir)
                analysed_trades_dict = _process_candles_and_indicators(pairlist, strategy_name,
                                                                       trades, signal_candles)
                _print_results(analysed_trades_dict,
                               strategy_name,
                               analysis_groups,
                               enter_reason_list,
                               exit_reason_list,
                               indicator_list)

    except ValueError as e:
        raise OperationalException(e) from e
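As a rough, hypothetical illustration (not part of this commit), the module's public entry point could also be driven directly from Python once a backtest has been run with `--export signals`; the directory, pair list and group/indicator choices below are placeholders:

```python
from pathlib import Path

from freqtrade.data.entryexitanalysis import process_entry_exit_reasons

# Assumes user_data/backtest_results holds a backtest run exported with `--export signals`.
process_entry_exit_reasons(
    Path("user_data/backtest_results"),      # backtest_dir: a results directory or a specific result file
    ["ETH/BTC", "LTC/BTC"],                  # pairlist, e.g. the pair_whitelist from the config
    analysis_groups=["0", "2"],              # same group codes as --analysis-groups
    enter_reason_list=["all"],
    exit_reason_list=["all"],
    indicator_list=["rsi", "bb_lowerband"],  # indicator columns present in the strategy dataframe
)
```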
tests/data/test_entryexitanalysis.py (new executable file, 191 lines)
@@ -0,0 +1,191 @@
import logging
from unittest.mock import MagicMock, PropertyMock

import pandas as pd
import pytest

from freqtrade.commands.analyze_commands import start_analysis_entries_exits
from freqtrade.commands.optimize_commands import start_backtesting
from freqtrade.enums import ExitType
from freqtrade.optimize.backtesting import Backtesting
from tests.conftest import get_args, patch_exchange, patched_configuration_load_config_file


@pytest.fixture(autouse=True)
def entryexitanalysis_cleanup() -> None:
    yield None

    Backtesting.cleanup()


def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, tmpdir, capsys):
    caplog.set_level(logging.INFO)

    default_conf.update({
        "use_exit_signal": True,
        "exit_profit_only": False,
        "exit_profit_offset": 0.0,
        "ignore_roi_if_entry_signal": False,
    })
    patch_exchange(mocker)
    result1 = pd.DataFrame({'pair': ['ETH/BTC', 'LTC/BTC', 'ETH/BTC', 'LTC/BTC'],
                            'profit_ratio': [0.025, 0.05, -0.1, -0.05],
                            'profit_abs': [0.5, 2.0, -4.0, -2.0],
                            'open_date': pd.to_datetime(['2018-01-29 18:40:00',
                                                         '2018-01-30 03:30:00',
                                                         '2018-01-30 08:10:00',
                                                         '2018-01-31 13:30:00', ], utc=True
                                                        ),
                            'close_date': pd.to_datetime(['2018-01-29 20:45:00',
                                                          '2018-01-30 05:35:00',
                                                          '2018-01-30 09:10:00',
                                                          '2018-01-31 15:00:00', ], utc=True),
                            'trade_duration': [235, 40, 60, 90],
                            'is_open': [False, False, False, False],
                            'stake_amount': [0.01, 0.01, 0.01, 0.01],
                            'open_rate': [0.104445, 0.10302485, 0.10302485, 0.10302485],
                            'close_rate': [0.104969, 0.103541, 0.102041, 0.102541],
                            "is_short": [False, False, False, False],
                            'enter_tag': ["enter_tag_long_a",
                                          "enter_tag_long_b",
                                          "enter_tag_long_a",
                                          "enter_tag_long_b"],
                            'exit_reason': [ExitType.ROI,
                                            ExitType.EXIT_SIGNAL,
                                            ExitType.STOP_LOSS,
                                            ExitType.TRAILING_STOP_LOSS]
                            })

    backtestmock = MagicMock(side_effect=[
        {
            'results': result1,
            'config': default_conf,
            'locks': [],
            'rejected_signals': 20,
            'timedout_entry_orders': 0,
            'timedout_exit_orders': 0,
            'canceled_trade_entries': 0,
            'canceled_entry_orders': 0,
            'replaced_entry_orders': 0,
            'final_balance': 1000,
        }
    ])
    mocker.patch('freqtrade.plugins.pairlistmanager.PairListManager.whitelist',
                 PropertyMock(return_value=['ETH/BTC', 'LTC/BTC', 'DASH/BTC']))
    mocker.patch('freqtrade.optimize.backtesting.Backtesting.backtest', backtestmock)

    patched_configuration_load_config_file(mocker, default_conf)

    args = [
        'backtesting',
        '--config', 'config.json',
        '--datadir', str(testdatadir),
        '--user-data-dir', str(tmpdir),
        '--timeframe', '5m',
        '--timerange', '1515560100-1517287800',
        '--export', 'signals',
        '--cache', 'none',
    ]
    args = get_args(args)
    start_backtesting(args)

    captured = capsys.readouterr()
    assert 'BACKTESTING REPORT' in captured.out
    assert 'EXIT REASON STATS' in captured.out
    assert 'LEFT OPEN TRADES REPORT' in captured.out

    base_args = [
        'backtesting-analysis',
        '--config', 'config.json',
        '--datadir', str(testdatadir),
        '--user-data-dir', str(tmpdir),
    ]

    # test group 0 and indicator list
    args = get_args(base_args +
                    ['--analysis-groups', "0",
                     '--indicator-list', "close", "rsi", "profit_abs"]
                    )
    start_analysis_entries_exits(args)
    captured = capsys.readouterr()
    assert 'LTC/BTC' in captured.out
    assert 'ETH/BTC' in captured.out
    assert 'enter_tag_long_a' in captured.out
    assert 'enter_tag_long_b' in captured.out
    assert 'exit_signal' in captured.out
    assert 'roi' in captured.out
    assert 'stop_loss' in captured.out
    assert 'trailing_stop_loss' in captured.out
    assert '0.5' in captured.out
    assert '-4' in captured.out
    assert '-2' in captured.out
    assert '-3.5' in captured.out
    assert '50' in captured.out
    assert '0' in captured.out
    assert '0.01616' in captured.out
    assert '34.049' in captured.out
    assert '0.104104' in captured.out
    assert '47.0996' in captured.out

    # test group 1
    args = get_args(base_args + ['--analysis-groups', "1"])
    start_analysis_entries_exits(args)
    captured = capsys.readouterr()
    assert 'enter_tag_long_a' in captured.out
    assert 'enter_tag_long_b' in captured.out
    assert 'total_profit_pct' in captured.out
    assert '-3.5' in captured.out
    assert '-1.75' in captured.out
    assert '-7.5' in captured.out
    assert '-3.75' in captured.out
    assert '0' in captured.out

    # test group 2
    args = get_args(base_args + ['--analysis-groups', "2"])
    start_analysis_entries_exits(args)
    captured = capsys.readouterr()
    assert 'enter_tag_long_a' in captured.out
    assert 'enter_tag_long_b' in captured.out
    assert 'exit_signal' in captured.out
    assert 'roi' in captured.out
    assert 'stop_loss' in captured.out
    assert 'trailing_stop_loss' in captured.out
    assert 'total_profit_pct' in captured.out
    assert '-10' in captured.out
    assert '-5' in captured.out
    assert '2.5' in captured.out

    # test group 3
    args = get_args(base_args + ['--analysis-groups', "3"])
    start_analysis_entries_exits(args)
    captured = capsys.readouterr()
    assert 'LTC/BTC' in captured.out
    assert 'ETH/BTC' in captured.out
    assert 'enter_tag_long_a' in captured.out
    assert 'enter_tag_long_b' in captured.out
    assert 'total_profit_pct' in captured.out
    assert '-7.5' in captured.out
    assert '-3.75' in captured.out
    assert '-1.75' in captured.out
    assert '0' in captured.out
    assert '2' in captured.out

    # test group 4
    args = get_args(base_args + ['--analysis-groups', "4"])
    start_analysis_entries_exits(args)
    captured = capsys.readouterr()
    assert 'LTC/BTC' in captured.out
    assert 'ETH/BTC' in captured.out
    assert 'enter_tag_long_a' in captured.out
    assert 'enter_tag_long_b' in captured.out
    assert 'exit_signal' in captured.out
    assert 'roi' in captured.out
    assert 'stop_loss' in captured.out
    assert 'trailing_stop_loss' in captured.out
    assert 'total_profit_pct' in captured.out
    assert '-10' in captured.out
    assert '-5' in captured.out
    assert '-4' in captured.out
    assert '0.5' in captured.out
    assert '1' in captured.out
    assert '2.5' in captured.out
tests/rpc/test_rpc_apiserver.py
@@ -1384,12 +1384,14 @@ def test_api_strategies(botclient):
    rc = client_get(client, f"{BASE_URI}/strategies")

    assert_response(rc)

    assert rc.json() == {'strategies': [
        'HyperoptableStrategy',
        'InformativeDecoratorTest',
        'StrategyTestV2',
        'StrategyTestV3',
        'StrategyTestV3Futures',
        'StrategyTestV3Analysis',
        'StrategyTestV3Futures'
    ]}
tests/strategy/strats/strategy_test_v3_analysis.py (new file, 175 lines)
@@ -0,0 +1,175 @@
# pragma pylint: disable=missing-docstring, invalid-name, pointless-string-statement

import talib.abstract as ta
from pandas import DataFrame

import freqtrade.vendor.qtpylib.indicators as qtpylib
from freqtrade.strategy import (BooleanParameter, DecimalParameter, IntParameter, IStrategy,
                                RealParameter)


class StrategyTestV3Analysis(IStrategy):
    """
    Strategy used by tests freqtrade bot.
    Please do not modify this strategy, it's intended for internal use only.
    Please look at the SampleStrategy in the user_data/strategy directory
    or strategy repository https://github.com/freqtrade/freqtrade-strategies
    for samples and inspiration.
    """
    INTERFACE_VERSION = 3

    # Minimal ROI designed for the strategy
    minimal_roi = {
        "40": 0.0,
        "30": 0.01,
        "20": 0.02,
        "0": 0.04
    }

    # Optimal stoploss designed for the strategy
    stoploss = -0.10

    # Optimal timeframe for the strategy
    timeframe = '5m'

    # Optional order type mapping
    order_types = {
        'entry': 'limit',
        'exit': 'limit',
        'stoploss': 'limit',
        'stoploss_on_exchange': False
    }

    # Number of candles the strategy requires before producing valid signals
    startup_candle_count: int = 20

    # Optional time in force for orders
    order_time_in_force = {
        'entry': 'gtc',
        'exit': 'gtc',
    }

    buy_params = {
        'buy_rsi': 35,
        # Intentionally not specified, so "default" is tested
        # 'buy_plusdi': 0.4
    }

    sell_params = {
        'sell_rsi': 74,
        'sell_minusdi': 0.4
    }

    buy_rsi = IntParameter([0, 50], default=30, space='buy')
    buy_plusdi = RealParameter(low=0, high=1, default=0.5, space='buy')
    sell_rsi = IntParameter(low=50, high=100, default=70, space='sell')
    sell_minusdi = DecimalParameter(low=0, high=1, default=0.5001, decimals=3, space='sell',
                                    load=False)
    protection_enabled = BooleanParameter(default=True)
    protection_cooldown_lookback = IntParameter([0, 50], default=30)

    # TODO: Can this work with protection tests? (replace HyperoptableStrategy implicitly ... )
    # @property
    # def protections(self):
    #     prot = []
    #     if self.protection_enabled.value:
    #         prot.append({
    #             "method": "CooldownPeriod",
    #             "stop_duration_candles": self.protection_cooldown_lookback.value
    #         })
    #     return prot

    bot_started = False

    def bot_start(self):
        self.bot_started = True

    def informative_pairs(self):

        return []

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:

        # Momentum Indicator
        # ------------------------------------

        # ADX
        dataframe['adx'] = ta.ADX(dataframe)

        # MACD
        macd = ta.MACD(dataframe)
        dataframe['macd'] = macd['macd']
        dataframe['macdsignal'] = macd['macdsignal']
        dataframe['macdhist'] = macd['macdhist']

        # Minus Directional Indicator / Movement
        dataframe['minus_di'] = ta.MINUS_DI(dataframe)

        # Plus Directional Indicator / Movement
        dataframe['plus_di'] = ta.PLUS_DI(dataframe)

        # RSI
        dataframe['rsi'] = ta.RSI(dataframe)

        # Stoch fast
        stoch_fast = ta.STOCHF(dataframe)
        dataframe['fastd'] = stoch_fast['fastd']
        dataframe['fastk'] = stoch_fast['fastk']

        # Bollinger bands
        bollinger = qtpylib.bollinger_bands(qtpylib.typical_price(dataframe), window=20, stds=2)
        dataframe['bb_lowerband'] = bollinger['lower']
        dataframe['bb_middleband'] = bollinger['mid']
        dataframe['bb_upperband'] = bollinger['upper']

        # EMA - Exponential Moving Average
        dataframe['ema10'] = ta.EMA(dataframe, timeperiod=10)

        return dataframe

    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:

        dataframe.loc[
            (
                (dataframe['rsi'] < self.buy_rsi.value) &
                (dataframe['fastd'] < 35) &
                (dataframe['adx'] > 30) &
                (dataframe['plus_di'] > self.buy_plusdi.value)
            ) |
            (
                (dataframe['adx'] > 65) &
                (dataframe['plus_di'] > self.buy_plusdi.value)
            ),
            ['enter_long', 'enter_tag']] = 1, 'enter_tag_long'

        dataframe.loc[
            (
                qtpylib.crossed_below(dataframe['rsi'], self.sell_rsi.value)
            ),
            ['enter_short', 'enter_tag']] = 1, 'enter_tag_short'

        return dataframe

    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe.loc[
            (
                (
                    (qtpylib.crossed_above(dataframe['rsi'], self.sell_rsi.value)) |
                    (qtpylib.crossed_above(dataframe['fastd'], 70))
                ) &
                (dataframe['adx'] > 10) &
                (dataframe['minus_di'] > 0)
            ) |
            (
                (dataframe['adx'] > 70) &
                (dataframe['minus_di'] > self.sell_minusdi.value)
            ),
            ['exit_long', 'exit_tag']] = 1, 'exit_tag_long'

        dataframe.loc[
            (
                qtpylib.crossed_above(dataframe['rsi'], self.buy_rsi.value)
            ),
            ['exit_long', 'exit_tag']] = 1, 'exit_tag_short'

        return dataframe
tests/strategy/test_strategy.py
@@ -34,7 +34,7 @@ def test_search_all_strategies_no_failed():
    directory = Path(__file__).parent / "strats"
    strategies = StrategyResolver.search_all_objects(directory, enum_failed=False)
    assert isinstance(strategies, list)
    assert len(strategies) == 5
    assert len(strategies) == 6
    assert isinstance(strategies[0], dict)
@@ -42,10 +42,10 @@ def test_search_all_strategies_with_failed():
    directory = Path(__file__).parent / "strats"
    strategies = StrategyResolver.search_all_objects(directory, enum_failed=True)
    assert isinstance(strategies, list)
    assert len(strategies) == 6
    assert len(strategies) == 7
    # with enum_failed=True search_all_objects() shall find 2 good strategies
    # and 1 which fails to load
    assert len([x for x in strategies if x['class'] is not None]) == 5
    assert len([x for x in strategies if x['class'] is not None]) == 6
    assert len([x for x in strategies if x['class'] is None]) == 1