# pragma pylint: disable=missing-docstring,W0212

import logging
from typing import Dict, Tuple

import arrow
from pandas import DataFrame, Series
from tabulate import tabulate

import freqtrade.misc as misc
import freqtrade.optimize as optimize
from freqtrade import exchange
from freqtrade.analyze import populate_buy_trend, populate_sell_trend
from freqtrade.exchange import Bittrex
from freqtrade.main import should_sell
from freqtrade.persistence import Trade
from freqtrade.strategy.strategy import Strategy

logger = logging.getLogger(__name__)


def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
    """
    Get the maximum timeframe for the given backtest data
    :param data: dictionary with preprocessed backtesting data
    :return: tuple containing min_date, max_date
    """
    all_dates = Series([])
    for pair_data in data.values():
        all_dates = all_dates.append(pair_data['date'])
    all_dates.sort_values(inplace=True)
    return arrow.get(all_dates.iloc[0]), arrow.get(all_dates.iloc[-1])
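

# Illustrative use of get_timeframe() (a sketch, mirroring what start() below
# does with the preprocessed backtesting data):
#
#   min_date, max_date = get_timeframe(preprocessed)
#   logger.info('Backtesting %s days of data', (max_date - min_date).days)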


def generate_text_table(
        data: Dict[str, Dict], results: DataFrame, stake_currency) -> str:
    """
    Generates and returns a text table for the given backtest data and the results dataframe
    :return: pretty printed table with tabulate as str
    """
    floatfmt = ('s', 'd', '.2f', '.8f', '.1f')
    tabular_data = []
    headers = ['pair', 'buy count', 'avg profit %',
               'total profit ' + stake_currency, 'avg duration', 'profit', 'loss']
    for pair in data:
        result = results[results.currency == pair]
        tabular_data.append([
            pair,
            len(result.index),
            result.profit_percent.mean() * 100.0,
            result.profit_BTC.sum(),
            result.duration.mean(),
            len(result[result.profit_BTC > 0]),
            len(result[result.profit_BTC < 0])
        ])

    # Append Total
    tabular_data.append([
        'TOTAL',
        len(results.index),
        results.profit_percent.mean() * 100.0,
        results.profit_BTC.sum(),
        results.duration.mean(),
        len(results[results.profit_BTC > 0]),
        len(results[results.profit_BTC < 0])
    ])
    return tabulate(tabular_data, headers=headers, floatfmt=floatfmt)
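

# Illustrative call of generate_text_table() (a sketch; in start() below the
# stake currency comes from the configuration rather than being hard-coded):
#
#   print(generate_text_table(data, results, 'BTC'))
#
# The rendered table contains one row per pair plus a final 'TOTAL' row, with
# the columns listed in `headers` above.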


def get_sell_trade_entry(pair, row, buy_subset, ticker, trade_count_lock, args):
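    """
    Walk the candles that follow the buy signal in `row` and return the first
    sell event for `pair`.

    :param row: candle (itertuples row) that triggered the buy signal
    :param buy_subset: dataframe of candles with an active buy signal
    :param ticker: full dataframe for the pair, indexed by date
    :param trade_count_lock: dict mapping dates to the number of trades counted for them
    :param args: backtest args dict (uses stake_amount and max_open_trades)
    :return: (sell_row, (pair, profit_percent, profit_BTC, duration_in_minutes), sell_date)
             or None if no sell event was found
    """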
    stake_amount = args['stake_amount']
    max_open_trades = args.get('max_open_trades', 0)
    trade = Trade(open_rate=row.close,
                  open_date=row.Index,
                  stake_amount=stake_amount,
                  amount=stake_amount / row.open,
                  fee=exchange.get_fee()
                  )

    # calculate win/lose forwards from buy point
    sell_subset = ticker[ticker.index > row.Index][['close', 'sell']]
    for row2 in sell_subset.itertuples(index=True):
        if max_open_trades > 0:
            # Increase trade_count_lock for every iteration
            trade_count_lock[row2.Index] = trade_count_lock.get(row2.Index, 0) + 1

        # The buy signal is on if buy_subset contains a row whose date matches
        # the date of this sell-side candle
        buy_signal = (buy_subset.index == row2.Index).any()
        if should_sell(trade, row2.close, row2.Index, buy_signal, row2.sell):
            return row2, (pair,
                          trade.calc_profit_percent(rate=row2.close),
                          trade.calc_profit(rate=row2.close),
                          (row2.Index - row.Index).seconds // 60
                          ), row2.Index
    return None


def backtest(args) -> DataFrame:
    """
    Implements backtesting functionality
    :param args: a dict containing:
        stake_amount: btc amount to use for each trade
        processed: a processed dictionary with format {pair, data}
        max_open_trades: maximum number of concurrent trades (default: 0, disabled)
        realistic: do we try to simulate realistic trades? (default: True)
        sell_profit_only: only sell if the trade is in profit
        use_sell_signal: act on the sell signal
        stoploss: stoploss value taken from the strategy
    :return: DataFrame
    """
    processed = args['processed']
    max_open_trades = args.get('max_open_trades', 0)
    realistic = args.get('realistic', True)
    record = args.get('record', None)
    records = []
    trades = []
    trade_count_lock: dict = {}
    exchange._API = Bittrex({'key': '', 'secret': ''})
    for pair, pair_data in processed.items():
        pair_data['buy'], pair_data['sell'] = 0, 0
        ticker = populate_sell_trend(populate_buy_trend(pair_data))
        if 'date' in ticker:
            ticker.set_index('date', inplace=True)
        # for each buy point
        lock_pair_until = None
        headers = ['buy', 'open', 'close', 'sell']
        buy_subset = ticker[(ticker.buy == 1) & (ticker.sell == 0)][headers]
        for row in buy_subset.itertuples(index=True):
            if realistic:
                if lock_pair_until is not None and row.Index <= lock_pair_until:
                    continue
            if max_open_trades > 0:
                # Check if max_open_trades has already been reached for the given date
                if not trade_count_lock.get(row.Index, 0) < max_open_trades:
                    continue

            if max_open_trades > 0:
                # Increase lock
                trade_count_lock[row.Index] = trade_count_lock.get(row.Index, 0) + 1

            ret = get_sell_trade_entry(pair, row, buy_subset, ticker,
                                       trade_count_lock, args)
            if ret:
                row2, trade_entry, next_date = ret
                lock_pair_until = next_date
                trades.append(trade_entry)
                if record:
                    # Note, needs to be json.dump friendly
                    # record a tuple of pair, current_profit_percent,
                    # entry-date and exit-date (as unix timestamp strings),
                    # the raw entry timestamp and the trade duration
                    records.append((pair, trade_entry[1],
                                    row.Index.strftime('%s'),
                                    row2.Index.strftime('%s'),
                                    row.Index, trade_entry[3]))
    # For now export inside backtest(), maybe change so that backtest()
    # returns a tuple like: (dataframe, records, logs, etc)
    if record and record.find('trades') >= 0:
        logger.info('Dumping backtest results')
        misc.file_dump_json('backtest-result.json', records)
    labels = ['currency', 'profit_percent', 'profit_BTC', 'duration']
    return DataFrame.from_records(trades, columns=labels)


def start(args):
    # Initialize logger
    logging.basicConfig(
        level=args.loglevel,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    )

    exchange._API = Bittrex({'key': '', 'secret': ''})

    logger.info('Using config: %s ...', args.config)
    config = misc.load_config(args.config)

    # If -i/--ticker-interval is used, we override the configuration parameter
    # (that will override the strategy configuration)
    if args.ticker_interval:
        config.update({'ticker_interval': args.ticker_interval})

    # init the strategy to use
    config.update({'strategy': args.strategy})
    strategy = Strategy()
    strategy.init(config)

    logger.info('Using ticker_interval: %d ...', strategy.ticker_interval)

    data = {}
    pairs = config['exchange']['pair_whitelist']
    if args.live:
        logger.info('Downloading data for all pairs in whitelist ...')
        for pair in pairs:
            data[pair] = exchange.get_ticker_history(pair, strategy.ticker_interval)
    else:
        logger.info('Using local backtesting data (using whitelist in given config) ...')
        logger.info('Using stake_currency: %s ...', config['stake_currency'])
        logger.info('Using stake_amount: %s ...', config['stake_amount'])

        timerange = misc.parse_timerange(args.timerange)
        data = optimize.load_data(args.datadir,
                                  pairs=pairs,
                                  ticker_interval=strategy.ticker_interval,
                                  refresh_pairs=args.refresh_pairs,
                                  timerange=timerange)
    max_open_trades = 0
    if args.realistic_simulation:
        logger.info('Using max_open_trades: %s ...', config['max_open_trades'])
        max_open_trades = config['max_open_trades']

    # Monkey patch config
    from freqtrade import main
    main._CONF = config
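    # (presumably so that should_sell(), imported from freqtrade.main above,
    # sees this backtest configuration via main._CONF)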

    preprocessed = optimize.tickerdata_to_dataframe(data)
    # Print timeframe
    min_date, max_date = get_timeframe(preprocessed)
    logger.info('Measuring data from %s up to %s (%s days)..',
                min_date.isoformat(),
                max_date.isoformat(),
                (max_date-min_date).days)
    # Execute backtest and print results
    sell_profit_only = config.get('experimental', {}).get('sell_profit_only', False)
    use_sell_signal = config.get('experimental', {}).get('use_sell_signal', False)
    results = backtest({'stake_amount': config['stake_amount'],
                        'processed': preprocessed,
                        'max_open_trades': max_open_trades,
                        'realistic': args.realistic_simulation,
                        'sell_profit_only': sell_profit_only,
                        'use_sell_signal': use_sell_signal,
                        'stoploss': strategy.stoploss,
                        'record': args.export
                        })
    logger.info(
        '\n==================================== BACKTESTING REPORT ====================================\n%s',  # noqa
        generate_text_table(data, results, config['stake_currency'])
    )