# pragma pylint: disable=missing-docstring,W0212

import logging
from typing import Tuple, Dict

import arrow
from pandas import DataFrame, Series
from tabulate import tabulate

from freqtrade import exchange
from freqtrade.analyze import populate_buy_trend, populate_sell_trend
from freqtrade.exchange import Bittrex
from freqtrade.main import min_roi_reached
from freqtrade.misc import load_config
from freqtrade.optimize import load_data, preprocess
|
2017-09-28 21:26:28 +00:00
|
|
|
from freqtrade.persistence import Trade
|
2017-09-24 14:23:29 +00:00
|
|
|
|
2017-11-25 00:04:11 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
2017-11-15 18:06:37 +00:00
|
|
|
|
|
|
|
|
2018-01-02 19:32:11 +00:00
|
|
|
def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
    """
    Get the maximum timeframe (earliest and latest date) covered by the given backtest data
    :param data: dictionary with preprocessed backtesting data
    :return: tuple containing min_date, max_date
    """
    all_dates = Series([])
    for pair_data in data.values():
        all_dates = all_dates.append(pair_data['date'])
    all_dates.sort_values(inplace=True)
    return arrow.get(all_dates.iloc[0]), arrow.get(all_dates.iloc[-1])
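
# Illustrative sketch (not part of the original module): get_timeframe() only needs a mapping
# of pair -> DataFrame with a 'date' column, so it can be checked in isolation. The pair name
# and dates below are placeholders.
#
#     frames = {'BTC_ETH': DataFrame({'date': ['2018-01-01T00:00:00', '2018-01-01T00:05:00']})}
#     min_date, max_date = get_timeframe(frames)
#     print(min_date.isoformat(), max_date.isoformat())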


def generate_text_table(
        data: Dict[str, Dict], results: DataFrame,
        stake_currency: str, ticker_interval: int) -> str:
    """
    Generates and returns a text table for the given backtest data and the results dataframe
    :return: pretty printed table with tabulate as str
    """
    floatfmt = ('s', 'd', '.2f', '.8f', '.1f')
    tabular_data = []
    headers = ['pair', 'buy count', 'avg profit %',
               'total profit ' + stake_currency, 'avg duration', 'profit', 'loss']
    for pair in data:
        result = results[results.currency == pair]
        tabular_data.append([
            pair,
            len(result.index),
            result.profit_percent.mean() * 100.0,
            result.profit_BTC.sum(),
            # duration is counted in candles, so convert it to minutes
            result.duration.mean() * ticker_interval,
            # 'profit' and 'loss' hold the number of winning and losing trades
            result.profit.sum(),
            result.loss.sum()
        ])

    # Append Total
    tabular_data.append([
        'TOTAL',
        len(results.index),
        results.profit_percent.mean() * 100.0,
        results.profit_BTC.sum(),
        results.duration.mean() * ticker_interval,
        results.profit.sum(),
        results.loss.sum()
    ])
    return tabulate(tabular_data, headers=headers, floatfmt=floatfmt)
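
# Illustrative sketch (assumption, not from the original source): 'results' is expected to carry
# the columns produced by backtest() below, so a hand-built frame renders the same report:
#
#     sample = DataFrame([('BTC_ETH', 0.02, 0.0005, 10, True, False)],
#                        columns=['currency', 'profit_percent', 'profit_BTC',
#                                 'duration', 'profit', 'loss'])
#     print(generate_text_table({'BTC_ETH': {}}, sample, 'BTC', 5))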


def backtest(stake_amount: float, processed: Dict[str, DataFrame],
             max_open_trades: int = 0, realistic: bool = True, sell_profit_only: bool = False,
             stoploss: float = -1.00, use_sell_signal: bool = False) -> DataFrame:
|
2017-11-22 23:25:06 +00:00
|
|
|
"""
|
|
|
|
Implements backtesting functionality
|
2017-12-25 11:07:50 +00:00
|
|
|
:param stake_amount: btc amount to use for each trade
|
2017-11-22 23:25:06 +00:00
|
|
|
:param processed: a processed dictionary with format {pair, data}
|
|
|
|
:param max_open_trades: maximum number of concurrent trades (default: 0, disabled)
|
2017-11-24 20:09:44 +00:00
|
|
|
:param realistic: do we try to simulate realistic trades? (default: True)
|
2017-11-22 23:25:06 +00:00
|
|
|
:return: DataFrame
|
|
|
|
"""
    trades = []
    trade_count_lock: dict = {}
    exchange._API = Bittrex({'key': '', 'secret': ''})
    for pair, pair_data in processed.items():
        pair_data['buy'], pair_data['sell'] = 0, 0
        ticker = populate_sell_trend(populate_buy_trend(pair_data))
        # for each buy point
        lock_pair_until = None
        buy_subset = ticker[ticker.buy == 1][['buy', 'open', 'close', 'date', 'sell']]
        for row in buy_subset.itertuples(index=True):
            if realistic:
                if lock_pair_until is not None and row.Index <= lock_pair_until:
                    continue
            if max_open_trades > 0:
                # Check if max_open_trades has already been reached for the given date
                if not trade_count_lock.get(row.date, 0) < max_open_trades:
                    continue

            if max_open_trades > 0:
                # Increase lock
                trade_count_lock[row.date] = trade_count_lock.get(row.date, 0) + 1

            trade = Trade(
                open_rate=row.close,
                open_date=row.date,
                stake_amount=stake_amount,
                amount=stake_amount / row.open,
                fee=exchange.get_fee()
            )

            # calculate win/lose forwards from buy point
            sell_subset = ticker[row.Index + 1:][['close', 'date', 'sell']]
            for row2 in sell_subset.itertuples(index=True):
                if max_open_trades > 0:
                    # Increase trade_count_lock for every iteration
                    trade_count_lock[row2.date] = trade_count_lock.get(row2.date, 0) + 1

                current_profit_percent = trade.calc_profit_percent(rate=row2.close)
                if sell_profit_only and current_profit_percent < 0:
                    continue
                # Sell when the ROI target is reached, when the sell signal fires (if enabled)
                # or when the stoploss is hit
                if min_roi_reached(trade, row2.close, row2.date) or \
                        (row2.sell == 1 and use_sell_signal) or \
                        current_profit_percent <= stoploss:
                    current_profit_btc = trade.calc_profit(rate=row2.close)
                    lock_pair_until = row2.Index

                    trades.append(
                        (
                            pair,
                            current_profit_percent,
                            current_profit_btc,
                            # duration is measured in candles of ticker_interval minutes
                            row2.Index - row.Index,
                            current_profit_btc > 0,
                            current_profit_btc < 0
                        )
                    )
                    break
    labels = ['currency', 'profit_percent', 'profit_BTC', 'duration', 'profit', 'loss']
    return DataFrame.from_records(trades, columns=labels)
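
# Minimal usage sketch (assumption - the pair, interval, stake amount and config path are
# placeholders): load candle data, run the strategy over it and print the report defined above.
# The config monkey-patch mirrors what start() does below, so min_roi_reached() can read the
# loaded configuration.
#
#     from freqtrade import main
#     main._CONF = load_config('config.json')
#     raw = load_data(pairs=['BTC_ETH'], ticker_interval=5)
#     results = backtest(stake_amount=0.01, processed=preprocess(raw),
#                        max_open_trades=2, realistic=True)
#     print(generate_text_table(raw, results, 'BTC', 5))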


def start(args):
    # Initialize logger
    logging.basicConfig(
        level=args.loglevel,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    )

    exchange._API = Bittrex({'key': '', 'secret': ''})

    logger.info('Using config: %s ...', args.config)
    config = load_config(args.config)

    logger.info('Using ticker_interval: %s ...', args.ticker_interval)

    data = {}
    pairs = config['exchange']['pair_whitelist']
    if args.live:
        logger.info('Downloading data for all pairs in whitelist ...')
        for pair in pairs:
            data[pair] = exchange.get_ticker_history(pair, args.ticker_interval)
    else:
        logger.info('Using local backtesting data (using whitelist in given config) ...')
        data = load_data(pairs=pairs, ticker_interval=args.ticker_interval,
                         refresh_pairs=args.refresh_pairs)

    logger.info('Using stake_currency: %s ...', config['stake_currency'])
    logger.info('Using stake_amount: %s ...', config['stake_amount'])

    max_open_trades = 0
    if args.realistic_simulation:
        logger.info('Using max_open_trades: %s ...', config['max_open_trades'])
        max_open_trades = config['max_open_trades']

    # Monkey patch config
    from freqtrade import main
    main._CONF = config

    preprocessed = preprocess(data)
    # Print timeframe
    min_date, max_date = get_timeframe(preprocessed)
    logger.info('Measuring data from %s up to %s ...', min_date.isoformat(), max_date.isoformat())

    # Execute backtest and print results
    results = backtest(
        config['stake_amount'], preprocessed, max_open_trades, args.realistic_simulation,
        config.get('experimental', {}).get('sell_profit_only', False), config.get('stoploss'),
        config.get('experimental', {}).get('use_sell_signal', False)
    )
    logger.info(
        '\n====================== BACKTESTING REPORT ================================\n%s',
        generate_text_table(data, results, config['stake_currency'], args.ticker_interval)
    )
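
# For reference (sketch, not part of the original file - values are placeholders): start()
# expects the loaded configuration to provide at least the keys read above, e.g.
#
#     {
#         "stake_currency": "BTC",
#         "stake_amount": 0.01,
#         "max_open_trades": 3,
#         "stoploss": -0.10,
#         "exchange": {"pair_whitelist": ["BTC_ETH"]},
#         "experimental": {"sell_profit_only": false, "use_sell_signal": false}
#     }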