freqtrade_origin/freqtrade/optimize/backtesting.py


# pragma pylint: disable=missing-docstring,W0212
import logging
from typing import Dict, Tuple

import arrow
from pandas import DataFrame, Series
from tabulate import tabulate

import freqtrade.misc as misc
import freqtrade.optimize as optimize
from freqtrade import exchange
from freqtrade.analyze import populate_buy_trend, populate_sell_trend
from freqtrade.exchange import Bittrex
from freqtrade.main import should_sell
from freqtrade.persistence import Trade
from freqtrade.strategy.strategy import Strategy

logger = logging.getLogger(__name__)


def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
    """
    Get the maximum timeframe for the given backtest data
    :param data: dictionary with preprocessed backtesting data
    :return: tuple containing min_date, max_date
    """
    all_dates = Series([])
    for pair_data in data.values():
        all_dates = all_dates.append(pair_data['date'])
    all_dates.sort_values(inplace=True)
    return arrow.get(all_dates.iloc[0]), arrow.get(all_dates.iloc[-1])
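
# Illustrative example (hypothetical data, not part of the original module):
# if `data` holds two pairs whose 'date' columns span 2018-01-01..2018-01-10 and
# 2018-01-05..2018-01-20, get_timeframe(data) returns the union of both ranges,
# i.e. roughly (Arrow<2018-01-01>, Arrow<2018-01-20>).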


def generate_text_table(
        data: Dict[str, Dict], results: DataFrame, stake_currency, ticker_interval) -> str:
    """
    Generates and returns a text table for the given backtest data and the results dataframe
    :return: pretty printed table with tabulate as str
    """
    floatfmt = ('s', 'd', '.2f', '.8f', '.1f')
    tabular_data = []
    headers = ['pair', 'buy count', 'avg profit %',
               'total profit ' + stake_currency, 'avg duration', 'profit', 'loss']
    for pair in data:
        result = results[results.currency == pair]
        tabular_data.append([
            pair,
            len(result.index),
            result.profit_percent.mean() * 100.0,
            result.profit_BTC.sum(),
            result.duration.mean() * ticker_interval,
            len(result[result.profit_BTC > 0]),
            len(result[result.profit_BTC < 0])
        ])

    # Append Total
    tabular_data.append([
        'TOTAL',
        len(results.index),
        results.profit_percent.mean() * 100.0,
        results.profit_BTC.sum(),
        results.duration.mean() * ticker_interval,
        len(results[results.profit_BTC > 0]),
        len(results[results.profit_BTC < 0])
    ])
    return tabulate(tabular_data, headers=headers, floatfmt=floatfmt)
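
# Rough shape of the rendered table (values below are illustrative only; 'avg
# duration' is the mean candle count multiplied by ticker_interval, i.e. minutes):
#   pair      buy count    avg profit %    total profit BTC    avg duration    profit    loss
#   BTC_ETH          12            1.23          0.00012345            45.0         8       4
#   TOTAL            12            1.23          0.00012345            45.0         8       4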


def get_sell_trade_entry(pair, row, buy_subset, ticker, trade_count_lock, args):
    """
    Simulate a single trade opened at the given buy row and walk the ticker
    forward until should_sell() triggers.
    :return: tuple of (sell row, trade entry, sell date), or None if no sell
             condition was hit before the end of the data
    """
    stake_amount = args['stake_amount']
    max_open_trades = args.get('max_open_trades', 0)
    trade = Trade(open_rate=row.close,
                  open_date=row.date,
                  stake_amount=stake_amount,
                  amount=stake_amount / row.open,
                  fee=exchange.get_fee()
                  )

    # calculate win/lose forwards from buy point
    sell_subset = ticker[ticker.date > row.date][['close', 'date', 'sell']]
    for row2 in sell_subset.itertuples(index=True):
        if max_open_trades > 0:
            # Increase trade_count_lock for every iteration
            trade_count_lock[row2.date] = trade_count_lock.get(row2.date, 0) + 1

        # The buy signal is considered active if buy_subset contains a row
        # matching the date of this sell candle
        buy_signal = not buy_subset[buy_subset.date == row2.date].empty
        if should_sell(trade, row2.close, row2.date, buy_signal, row2.sell):
            return row2, (pair,
                          trade.calc_profit_percent(rate=row2.close),
                          trade.calc_profit(rate=row2.close),
                          row2.Index - row.Index
                          ), row2.date
    return None
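
# Illustrative shape of a successful return value (hypothetical numbers):
#   (row2, ('BTC_ETH', 0.023, 0.00012, 14), row2.date)
# i.e. the sell candle, a (pair, profit_percent, profit_BTC, duration-in-candles)
# trade entry, and the sell date later used to lock the pair.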


def backtest(args) -> DataFrame:
    """
    Implements backtesting functionality
    :param args: a dict containing:
        stake_amount: btc amount to use for each trade
        processed: a processed dictionary with format {pair, data}
        max_open_trades: maximum number of concurrent trades (default: 0, disabled)
        realistic: do we try to simulate realistic trades? (default: True)
        sell_profit_only: only sell when in profit
        use_sell_signal: act on the sell signal
        stoploss: use stoploss
        record: optional export mode, e.g. 'trades'
    :return: DataFrame
    """
    processed = args['processed']
    max_open_trades = args.get('max_open_trades', 0)
    realistic = args.get('realistic', True)
    record = args.get('record', None)
    records = []
    trades = []
    trade_count_lock: dict = {}
    exchange._API = Bittrex({'key': '', 'secret': ''})
    for pair, pair_data in processed.items():
        pair_data['buy'], pair_data['sell'] = 0, 0
        ticker = populate_sell_trend(populate_buy_trend(pair_data))

        # for each buy point
        lock_pair_until = None
        headers = ['buy', 'open', 'close', 'date', 'sell']
        buy_subset = ticker[(ticker.buy == 1) & (ticker.sell == 0)][headers]
        for row in buy_subset.itertuples(index=True):
            if realistic:
                if lock_pair_until is not None and row.date <= lock_pair_until:
                    continue
            if max_open_trades > 0:
                # Check if max_open_trades has already been reached for the given date
                if not trade_count_lock.get(row.date, 0) < max_open_trades:
                    continue

            if max_open_trades > 0:
                # Increase lock
                trade_count_lock[row.date] = trade_count_lock.get(row.date, 0) + 1

            ret = get_sell_trade_entry(pair, row, buy_subset, ticker,
                                       trade_count_lock, args)

            if ret:
                row2, trade_entry, next_date = ret
                lock_pair_until = next_date
                trades.append(trade_entry)
                if record:
                    # Note: records need to be json.dump friendly.
                    # Record a tuple of pair, profit_percent, entry timestamp,
                    # exit timestamp, entry index and duration
                    records.append((pair, trade_entry[1],
                                    row.date.strftime('%s'),
                                    row2.date.strftime('%s'),
                                    row.Index, trade_entry[3]))
    # For now export inside backtest(), maybe change so that backtest()
    # returns a tuple like: (dataframe, records, logs, etc)
    if record and record.find('trades') >= 0:
        logger.info('Dumping backtest results')
        misc.file_dump_json('backtest-result.json', records)
    labels = ['currency', 'profit_percent', 'profit_BTC', 'duration']
    return DataFrame.from_records(trades, columns=labels)


def start(args):
    # Initialize logger
    logging.basicConfig(
        level=args.loglevel,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    )

    exchange._API = Bittrex({'key': '', 'secret': ''})

    logger.info('Using config: %s ...', args.config)
    config = misc.load_config(args.config)

    # If -i/--ticker-interval is used, we override the configuration parameter
    # (that will override the strategy configuration)
    if args.ticker_interval:
        config.update({'ticker_interval': args.ticker_interval})

    # init the strategy to use
    config.update({'strategy': args.strategy})
    strategy = Strategy()
    strategy.init(config)

    logger.info('Using ticker_interval: %d ...', strategy.ticker_interval)

    data = {}
    pairs = config['exchange']['pair_whitelist']
    if args.live:
        logger.info('Downloading data for all pairs in whitelist ...')
        for pair in pairs:
            data[pair] = exchange.get_ticker_history(pair, strategy.ticker_interval)
    else:
        logger.info('Using local backtesting data (using whitelist in given config) ...')
        logger.info('Using stake_currency: %s ...', config['stake_currency'])
        logger.info('Using stake_amount: %s ...', config['stake_amount'])

        timerange = misc.parse_timerange(args.timerange)
        data = optimize.load_data(args.datadir,
                                  pairs=pairs,
                                  ticker_interval=strategy.ticker_interval,
                                  refresh_pairs=args.refresh_pairs,
                                  timerange=timerange)

    max_open_trades = 0
    if args.realistic_simulation:
        logger.info('Using max_open_trades: %s ...', config['max_open_trades'])
        max_open_trades = config['max_open_trades']

    # Monkey patch config
    from freqtrade import main
    main._CONF = config

    preprocessed = optimize.tickerdata_to_dataframe(data)

    # Print timeframe
    min_date, max_date = get_timeframe(preprocessed)
    logger.info('Measuring data from %s up to %s (%s days) ...',
                min_date.isoformat(),
                max_date.isoformat(),
                (max_date - min_date).days)

    # Execute backtest and print results
    sell_profit_only = config.get('experimental', {}).get('sell_profit_only', False)
    use_sell_signal = config.get('experimental', {}).get('use_sell_signal', False)
    results = backtest({'stake_amount': config['stake_amount'],
                        'processed': preprocessed,
                        'max_open_trades': max_open_trades,
                        'realistic': args.realistic_simulation,
                        'sell_profit_only': sell_profit_only,
                        'use_sell_signal': use_sell_signal,
                        'stoploss': strategy.stoploss,
                        'record': args.export
                        })
    logger.info(
        '\n==================================== BACKTESTING REPORT ====================================\n%s',  # noqa
        generate_text_table(data, results, config['stake_currency'], strategy.ticker_interval)
    )
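
# Illustrative invocation sketch (not part of the original module): start() expects
# an argparse-style namespace carrying the attributes referenced above; the values
# below are assumptions for demonstration only.
#
#   from argparse import Namespace
#   start(Namespace(loglevel=logging.INFO, config='config.json', strategy='default_strategy',
#                   ticker_interval=5, live=False, datadir='freqtrade/tests/testdata',
#                   refresh_pairs=False, timerange=None, realistic_simulation=True,
#                   export=None))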