import logging
import sys
from collections import defaultdict
from typing import Any, Dict

from freqtrade.configuration import TimeRange, setup_utils_configuration
from freqtrade.constants import DATETIME_PRINT_FORMAT, DL_DATA_TIMEFRAMES, Config
from freqtrade.data.converter import convert_ohlcv_format, convert_trades_format
from freqtrade.data.history import convert_trades_to_ohlcv, download_data_main
from freqtrade.enums import RunMode, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
from freqtrade.resolvers import ExchangeResolver
from freqtrade.util.binance_mig import migrate_binance_futures_data


logger = logging.getLogger(__name__)


def _check_data_config_download_sanity(config: Config) -> None:
    if 'days' in config and 'timerange' in config:
        raise OperationalException("--days and --timerange are mutually exclusive. "
                                   "You can only specify one or the other.")

    if 'pairs' not in config:
        raise OperationalException(
            "Downloading data requires a list of pairs. "
            "Please check the documentation on how to configure this.")


def start_download_data(args: Dict[str, Any]) -> None:
    """
    Download data (former download_backtest_data.py script)
    """
    config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)

    _check_data_config_download_sanity(config)

    try:
        download_data_main(config)

    except KeyboardInterrupt:
        sys.exit("SIGINT received, aborting ...")
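
# Usage sketch (illustrative only, values hypothetical): `start_download_data`
# backs the `freqtrade download-data` sub-command, so `args` mirrors the parsed
# CLI options, roughly:
#
#     start_download_data({
#         'config': ['config.json'],
#         'pairs': ['BTC/USDT', 'ETH/USDT'],
#         'timeframes': ['5m', '1h'],
#         'days': 30,
#     })
#
# `--days` and `--timerange` are mutually exclusive, as enforced by
# `_check_data_config_download_sanity` above.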


def start_convert_trades(args: Dict[str, Any]) -> None:
    """
    Convert downloaded trades data to OHLCV data
    """
    config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)

    timerange = TimeRange()

    # Remove stake-currency to skip checks which are not relevant for data download
    config['stake_currency'] = ''

    if 'pairs' not in config:
        raise OperationalException(
            "Downloading data requires a list of pairs. "
            "Please check the documentation on how to configure this.")

    if 'timeframes' not in config:
        config['timeframes'] = DL_DATA_TIMEFRAMES

    # Init exchange
    exchange = ExchangeResolver.load_exchange(config, validate=False)
    # Manual validations of relevant settings
    if not config['exchange'].get('skip_pair_validation', False):
        exchange.validate_pairs(config['pairs'])
    expanded_pairs = expand_pairlist(config['pairs'], list(exchange.markets))

    logger.info(f"About to convert pairs: '{', '.join(expanded_pairs)}', "
                f"intervals: '{', '.join(config['timeframes'])}' to {config['datadir']}")

    for timeframe in config['timeframes']:
        exchange.validate_timeframes(timeframe)
    # Convert downloaded trade data to different timeframes
    convert_trades_to_ohlcv(
        pairs=expanded_pairs, timeframes=config['timeframes'],
        datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
        data_format_ohlcv=config['dataformat_ohlcv'],
        data_format_trades=config['dataformat_trades'],
    )
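
# Usage sketch (illustrative only, values hypothetical): this handler backs the
# `freqtrade trades-to-ohlcv` sub-command and expects trades data to have been
# downloaded already, e.g.:
#
#     start_convert_trades({
#         'config': ['config.json'],
#         'pairs': ['BTC/USDT'],
#         'timeframes': ['1m', '5m'],
#     })
#
# If no timeframes are provided, DL_DATA_TIMEFRAMES is used as the default.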


def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
    """
    Convert data from one format to another
    """
    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
    if ohlcv:
        migrate_binance_futures_data(config)
        convert_ohlcv_format(config,
                             convert_from=args['format_from'],
                             convert_to=args['format_to'],
                             erase=args['erase'])
    else:
        convert_trades_format(config,
                              convert_from=args['format_from'], convert_to=args['format_to'],
                              erase=args['erase'])
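
# Usage sketch (illustrative only, values hypothetical): `start_convert_data`
# backs both `freqtrade convert-data` (ohlcv=True) and
# `freqtrade convert-trade-data` (ohlcv=False), e.g.:
#
#     start_convert_data({
#         'config': ['config.json'],
#         'format_from': 'json',
#         'format_to': 'feather',
#         'erase': False,
#     }, ohlcv=True)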


def start_list_data(args: Dict[str, Any]) -> None:
    """
    List available backtest data
    """

    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

    from tabulate import tabulate

    from freqtrade.data.history.idatahandler import get_datahandler
    dhc = get_datahandler(config['datadir'], config['dataformat_ohlcv'])

    paircombs = dhc.ohlcv_get_available_data(
        config['datadir'],
        config.get('trading_mode', TradingMode.SPOT)
    )

    if args['pairs']:
        paircombs = [comb for comb in paircombs if comb[0] in args['pairs']]

    print(f"Found {len(paircombs)} pair / timeframe combinations.")
    if not config.get('show_timerange'):
        groupedpair = defaultdict(list)
        for pair, timeframe, candle_type in sorted(
            paircombs,
            key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
        ):
            groupedpair[(pair, candle_type)].append(timeframe)

        if groupedpair:
            print(tabulate([
                (pair, ', '.join(timeframes), candle_type)
                for (pair, candle_type), timeframes in groupedpair.items()
            ],
                headers=("Pair", "Timeframe", "Type"),
                tablefmt='psql', stralign='right'))
    else:
        paircombs1 = [(
            pair, timeframe, candle_type,
            *dhc.ohlcv_data_min_max(pair, timeframe, candle_type)
        ) for pair, timeframe, candle_type in paircombs]

        print(tabulate([
            (pair, timeframe, candle_type,
             start.strftime(DATETIME_PRINT_FORMAT),
             end.strftime(DATETIME_PRINT_FORMAT))
            for pair, timeframe, candle_type, start, end in sorted(
                paircombs1,
                key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2]))
        ],
            headers=("Pair", "Timeframe", "Type", 'From', 'To'),
            tablefmt='psql', stralign='right'))
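
# Usage sketch (illustrative only, values hypothetical): `start_list_data` backs
# the `freqtrade list-data` sub-command and prints a tabulated overview of the
# locally available OHLCV data, e.g.:
#
#     start_list_data({
#         'config': ['config.json'],
#         'pairs': [],                 # empty list -> list all pairs
#         'show_timerange': True,      # include the From/To columns
#     })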