2018-02-04 06:42:03 +00:00
|
|
|
"""
|
|
|
|
This module contains the configuration class
|
|
|
|
"""
|
2018-03-25 19:37:14 +00:00
|
|
|
import ast
import logging
import warnings
from argparse import Namespace
from copy import deepcopy
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional

from freqtrade import OperationalException, constants
from freqtrade.configuration.check_exchange import check_exchange
from freqtrade.configuration.config_validation import (
    validate_config_consistency, validate_config_schema)
from freqtrade.configuration.directory_operations import (create_datadir,
                                                          create_userdata_dir)
from freqtrade.configuration.load_config import load_config_file
from freqtrade.loggers import setup_logging
from freqtrade.misc import deep_merge_dicts, json_load
from freqtrade.state import RunMode
|
2019-02-19 12:14:47 +00:00
|
|
|
|
2019-07-06 21:31:48 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
2018-07-19 19:12:27 +00:00
|
|
|
|
|
|
|
|
2018-02-04 06:42:03 +00:00
|
|
|
class Configuration(object):
    """
    Class to read and init the bot configuration
    Reuse this class for the bot, backtesting, hyperopt and every script that required configuration
    """

    def __init__(self, args: Namespace, runmode: Optional[RunMode] = None) -> None:
        # Parsed command-line arguments; individual options are copied into
        # the config dict lazily by the _process_* helpers.
        self.args = args
        # Cached configuration - populated on first get_config() call.
        self.config: Optional[Dict[str, Any]] = None
        # Explicit runmode override; when None, dry-run vs. live is inferred
        # from the 'dry_run' config setting in _process_runmode().
        self.runmode = runmode
|
2018-02-04 06:42:03 +00:00
|
|
|
|
2019-07-11 18:23:23 +00:00
|
|
|
def get_config(self) -> Dict[str, Any]:
|
|
|
|
"""
|
|
|
|
Return the config. Use this method to get the bot config
|
|
|
|
:return: Dict: Bot config
|
|
|
|
"""
|
|
|
|
if self.config is None:
|
|
|
|
self.config = self.load_config()
|
|
|
|
|
|
|
|
return self.config
|
|
|
|
|
2019-08-10 17:57:49 +00:00
|
|
|
@staticmethod
|
|
|
|
def from_files(files: List[str]) -> Dict[str, Any]:
|
2018-02-04 06:42:03 +00:00
|
|
|
"""
|
2019-08-10 17:57:49 +00:00
|
|
|
Iterate through the config files passed in, loading all of them
|
|
|
|
and merging their contents.
|
|
|
|
Files are loaded in sequence, parameters in later configuration files
|
|
|
|
override the same parameter from an earlier file (last definition wins).
|
|
|
|
:param files: List of file paths
|
|
|
|
:return: configuration dictionary
|
2018-02-04 06:42:03 +00:00
|
|
|
"""
|
2019-08-10 17:57:49 +00:00
|
|
|
# Keep this method as staticmethod, so it can be used from interactive environments
|
2019-02-19 12:14:47 +00:00
|
|
|
config: Dict[str, Any] = {}
|
2019-07-15 19:17:57 +00:00
|
|
|
|
2019-08-16 12:56:38 +00:00
|
|
|
if not files:
|
2019-08-29 22:54:16 +00:00
|
|
|
return deepcopy(constants.MINIMAL_CONFIG)
|
2019-08-16 12:56:38 +00:00
|
|
|
|
2019-07-15 19:17:57 +00:00
|
|
|
# We expect here a list of config filenames
|
2019-08-10 17:57:49 +00:00
|
|
|
for path in files:
|
|
|
|
logger.info(f'Using config: {path} ...')
|
2019-06-19 21:04:11 +00:00
|
|
|
|
2019-02-19 12:14:47 +00:00
|
|
|
# Merge config options, overwriting old values
|
2019-08-10 11:24:14 +00:00
|
|
|
config = deep_merge_dicts(load_config_file(path), config)
|
2018-02-04 06:42:03 +00:00
|
|
|
|
2019-08-10 17:58:04 +00:00
|
|
|
# Normalize config
|
2019-07-15 19:17:57 +00:00
|
|
|
if 'internals' not in config:
|
|
|
|
config['internals'] = {}
|
|
|
|
|
2019-08-10 17:58:04 +00:00
|
|
|
# validate configuration before returning
|
|
|
|
logger.info('Validating configuration ...')
|
|
|
|
validate_config_schema(config)
|
|
|
|
|
|
|
|
return config
|
|
|
|
|
2019-07-15 19:17:57 +00:00
|
|
|
def load_config(self) -> Dict[str, Any]:
|
|
|
|
"""
|
|
|
|
Extract information for sys.argv and load the bot configuration
|
|
|
|
:return: Configuration dictionary
|
|
|
|
"""
|
|
|
|
# Load all configs
|
2019-08-10 17:57:49 +00:00
|
|
|
config: Dict[str, Any] = Configuration.from_files(self.args.config)
|
2019-07-16 20:00:19 +00:00
|
|
|
|
|
|
|
self._process_common_options(config)
|
2019-07-15 19:17:57 +00:00
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
self._process_optimize_options(config)
|
2019-07-15 19:17:57 +00:00
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
self._process_plot_options(config)
|
2019-07-15 19:17:57 +00:00
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
self._process_runmode(config)
|
2019-07-15 19:17:57 +00:00
|
|
|
|
2019-08-16 12:56:57 +00:00
|
|
|
# Check if the exchange set by the user is supported
|
|
|
|
check_exchange(config, config.get('experimental', {}).get('block_bad_exchanges', True))
|
|
|
|
|
|
|
|
self._resolve_pairs_list(config)
|
|
|
|
|
2019-08-18 14:10:10 +00:00
|
|
|
validate_config_consistency(config)
|
|
|
|
|
2019-07-15 19:17:57 +00:00
|
|
|
return config
|
2018-02-04 06:42:03 +00:00
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
def _process_logging_options(self, config: Dict[str, Any]) -> None:
|
2018-03-03 21:39:39 +00:00
|
|
|
"""
|
2019-05-29 18:57:14 +00:00
|
|
|
Extract information for sys.argv and load logging configuration:
|
2019-07-06 23:53:13 +00:00
|
|
|
the -v/--verbose, --logfile options
|
2018-03-03 21:39:39 +00:00
|
|
|
"""
|
|
|
|
# Log level
|
2019-07-06 23:53:13 +00:00
|
|
|
if 'verbosity' in self.args and self.args.verbosity:
|
|
|
|
config.update({'verbosity': self.args.verbosity})
|
2018-07-19 19:12:27 +00:00
|
|
|
else:
|
|
|
|
config.update({'verbosity': 0})
|
2019-03-29 19:12:44 +00:00
|
|
|
|
|
|
|
if 'logfile' in self.args and self.args.logfile:
|
|
|
|
config.update({'logfile': self.args.logfile})
|
|
|
|
|
2019-07-06 21:31:48 +00:00
|
|
|
setup_logging(config)
|
2018-03-03 21:39:39 +00:00
|
|
|
|
2019-07-21 14:19:31 +00:00
|
|
|
    def _process_common_options(self, config: Dict[str, Any]) -> None:
        """
        Apply command-line overrides shared by all sub-commands:
        logging, strategy, database URL, forcebuy and sd_notify handling.
        """

        self._process_logging_options(config)

        # Set strategy if not specified in config or if it's non default
        if self.args.strategy != constants.DEFAULT_STRATEGY or not config.get('strategy'):
            config.update({'strategy': self.args.strategy})

        self._args_to_config(config, argname='strategy_path',
                             logstring='Using additional Strategy lookup path: {}')

        # Only honor --db-url when it differs from the production default,
        # so dry-run can still substitute its own default below.
        if ('db_url' in self.args and self.args.db_url and
                self.args.db_url != constants.DEFAULT_DB_PROD_URL):
            config.update({'db_url': self.args.db_url})
            logger.info('Parameter --db-url detected ...')

        if config.get('dry_run', False):
            logger.info('Dry run is enabled')
            if config.get('db_url') in [None, constants.DEFAULT_DB_PROD_URL]:
                # Default to in-memory db for dry_run if not specified
                config['db_url'] = constants.DEFAULT_DB_DRYRUN_URL
        else:
            if not config.get('db_url', None):
                config['db_url'] = constants.DEFAULT_DB_PROD_URL
            logger.info('Dry run is disabled')

        logger.info(f'Using DB: "{config["db_url"]}"')

        if config.get('forcebuy_enable', False):
            logger.warning('`forcebuy` RPC message enabled.')

        # Setting max_open_trades to infinite if -1
        if config.get('max_open_trades') == -1:
            config['max_open_trades'] = float('inf')

        # Support for sd_notify (systemd service readiness notification)
        if 'sd_notify' in self.args and self.args.sd_notify:
            config['internals'].update({'sd_notify': True})
|
2018-03-03 21:39:39 +00:00
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
def _process_datadir_options(self, config: Dict[str, Any]) -> None:
|
2019-05-29 18:57:14 +00:00
|
|
|
"""
|
2019-07-21 14:19:31 +00:00
|
|
|
Extract information for sys.argv and load directory configurations
|
|
|
|
--user-data, --datadir
|
2019-05-29 18:57:14 +00:00
|
|
|
"""
|
2019-08-29 22:54:16 +00:00
|
|
|
# Check exchange parameter here - otherwise `datadir` might be wrong.
|
|
|
|
if "exchange" in self.args and self.args.exchange:
|
|
|
|
config['exchange']['name'] = self.args.exchange
|
|
|
|
logger.info(f"Using exchange {config['exchange']['name']}")
|
|
|
|
|
2019-07-21 12:13:38 +00:00
|
|
|
if 'user_data_dir' in self.args and self.args.user_data_dir:
|
|
|
|
config.update({'user_data_dir': self.args.user_data_dir})
|
|
|
|
elif 'user_data_dir' not in config:
|
2019-07-21 12:32:29 +00:00
|
|
|
# Default to cwd/user_data (legacy option ...)
|
2019-07-21 12:13:38 +00:00
|
|
|
config.update({'user_data_dir': str(Path.cwd() / "user_data")})
|
2019-07-28 13:11:41 +00:00
|
|
|
|
2019-07-21 12:32:29 +00:00
|
|
|
# reset to user_data_dir so this contains the absolute path.
|
2019-07-31 17:39:54 +00:00
|
|
|
config['user_data_dir'] = create_userdata_dir(config['user_data_dir'], create_dir=False)
|
2019-07-21 12:32:29 +00:00
|
|
|
logger.info('Using user-data directory: %s ...', config['user_data_dir'])
|
2019-07-21 12:13:38 +00:00
|
|
|
|
2019-05-29 18:57:14 +00:00
|
|
|
if 'datadir' in self.args and self.args.datadir:
|
2019-07-12 00:26:27 +00:00
|
|
|
config.update({'datadir': create_datadir(config, self.args.datadir)})
|
2019-05-29 18:57:14 +00:00
|
|
|
else:
|
2019-07-12 00:26:27 +00:00
|
|
|
config.update({'datadir': create_datadir(config, None)})
|
2019-07-04 17:53:50 +00:00
|
|
|
logger.info('Using data directory: %s ...', config.get('datadir'))
|
2019-05-29 18:57:14 +00:00
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
def _process_optimize_options(self, config: Dict[str, Any]) -> None:
|
|
|
|
|
2019-04-23 18:58:27 +00:00
|
|
|
# This will override the strategy configuration
|
2019-04-24 19:24:00 +00:00
|
|
|
self._args_to_config(config, argname='ticker_interval',
|
|
|
|
logstring='Parameter -i/--ticker-interval detected ... '
|
|
|
|
'Using ticker_interval: {} ...')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='position_stacking',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter --enable-position-stacking detected ...')
|
2018-07-17 18:26:59 +00:00
|
|
|
|
2018-07-17 19:05:03 +00:00
|
|
|
if 'use_max_market_positions' in self.args and not self.args.use_max_market_positions:
|
|
|
|
config.update({'use_max_market_positions': False})
|
|
|
|
logger.info('Parameter --disable-max-market-positions detected ...')
|
|
|
|
logger.info('max_open_trades set to unlimited ...')
|
2019-04-05 13:48:14 +00:00
|
|
|
elif 'max_open_trades' in self.args and self.args.max_open_trades:
|
|
|
|
config.update({'max_open_trades': self.args.max_open_trades})
|
2019-04-14 08:17:06 +00:00
|
|
|
logger.info('Parameter --max_open_trades detected, '
|
|
|
|
'overriding max_open_trades to: %s ...', config.get('max_open_trades'))
|
2018-07-17 19:05:03 +00:00
|
|
|
else:
|
|
|
|
logger.info('Using max_open_trades: %s ...', config.get('max_open_trades'))
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='stake_amount',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter --stake_amount detected, '
|
|
|
|
'overriding stake_amount to: {} ...')
|
2019-04-05 13:48:14 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='timerange',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter --timerange detected: {} ...')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
self._process_datadir_options(config)
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='refresh_pairs',
|
2019-08-18 05:18:21 +00:00
|
|
|
logstring='Parameter -r/--refresh-pairs-cached detected ...',
|
|
|
|
deprecated_msg='-r/--refresh-pairs-cached will be removed soon.')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='strategy_list',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Using strategy list of {} Strategies', logfun=len)
|
2018-07-27 21:00:50 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='ticker_interval',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Overriding ticker interval with Command line argument')
|
2018-07-27 21:00:50 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='export',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Parameter --export detected: {} ...')
|
2018-02-09 07:35:38 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='exportfilename',
|
2019-04-24 19:02:05 +00:00
|
|
|
logstring='Storing backtest results to {} ...')
|
2018-06-03 12:52:03 +00:00
|
|
|
|
2019-04-24 19:27:32 +00:00
|
|
|
# Edge section:
|
2018-11-14 15:31:23 +00:00
|
|
|
if 'stoploss_range' in self.args and self.args.stoploss_range:
|
|
|
|
txt_range = eval(self.args.stoploss_range)
|
|
|
|
config['edge'].update({'stoploss_range_min': txt_range[0]})
|
|
|
|
config['edge'].update({'stoploss_range_max': txt_range[1]})
|
|
|
|
config['edge'].update({'stoploss_range_step': txt_range[2]})
|
|
|
|
logger.info('Parameter --stoplosses detected: %s ...', self.args.stoploss_range)
|
|
|
|
|
2019-04-24 19:27:32 +00:00
|
|
|
# Hyperopt section
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Using Hyperopt file {}')
|
2019-03-04 06:24:05 +00:00
|
|
|
|
2019-07-22 16:37:34 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt_path',
|
|
|
|
logstring='Using additional Hyperopt lookup path: {}')
|
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='epochs',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Parameter --epochs detected ... '
|
|
|
|
'Will run Hyperopt with for {} epochs ...'
|
|
|
|
)
|
2018-03-02 13:46:32 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='spaces',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Parameter -s/--spaces detected: {}')
|
2018-03-04 08:51:22 +00:00
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='print_all',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Parameter --print-all detected ...')
|
2019-04-21 22:10:01 +00:00
|
|
|
|
2019-08-12 18:07:29 +00:00
|
|
|
if 'print_colorized' in self.args and not self.args.print_colorized:
|
|
|
|
logger.info('Parameter --no-color detected ...')
|
2019-08-12 20:13:04 +00:00
|
|
|
config.update({'print_colorized': False})
|
|
|
|
else:
|
|
|
|
config.update({'print_colorized': True})
|
2019-08-03 16:09:42 +00:00
|
|
|
|
2019-08-15 18:39:04 +00:00
|
|
|
self._args_to_config(config, argname='print_json',
|
|
|
|
logstring='Parameter --print-json detected ...')
|
|
|
|
|
2019-04-24 19:13:57 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt_jobs',
|
2019-04-24 19:12:08 +00:00
|
|
|
logstring='Parameter -j/--job-workers detected: {}')
|
2019-04-22 21:30:09 +00:00
|
|
|
|
2019-04-24 19:12:08 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt_random_state',
|
|
|
|
logstring='Parameter --random-state detected: {}')
|
2019-05-01 12:27:58 +00:00
|
|
|
|
|
|
|
self._args_to_config(config, argname='hyperopt_min_trades',
|
|
|
|
logstring='Parameter --min-trades detected: {}')
|
|
|
|
|
2019-07-16 03:50:27 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt_continue',
|
|
|
|
logstring='Hyperopt continue: {}')
|
2019-05-01 12:27:58 +00:00
|
|
|
|
2019-07-16 04:45:13 +00:00
|
|
|
self._args_to_config(config, argname='hyperopt_loss',
|
2019-07-15 19:35:42 +00:00
|
|
|
logstring='Using loss function: {}')
|
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
def _process_plot_options(self, config: Dict[str, Any]) -> None:
|
|
|
|
|
2019-06-16 11:31:24 +00:00
|
|
|
self._args_to_config(config, argname='pairs',
|
|
|
|
logstring='Using pairs {}')
|
|
|
|
|
|
|
|
self._args_to_config(config, argname='indicators1',
|
|
|
|
logstring='Using indicators1: {}')
|
|
|
|
|
|
|
|
self._args_to_config(config, argname='indicators2',
|
|
|
|
logstring='Using indicators2: {}')
|
|
|
|
|
|
|
|
self._args_to_config(config, argname='plot_limit',
|
|
|
|
logstring='Limiting plot to: {}')
|
2019-06-22 14:18:49 +00:00
|
|
|
self._args_to_config(config, argname='trade_source',
|
|
|
|
logstring='Using trades from: {}')
|
2019-07-15 19:17:57 +00:00
|
|
|
|
2019-08-16 13:27:33 +00:00
|
|
|
self._args_to_config(config, argname='erase',
|
|
|
|
logstring='Erase detected. Deleting existing data.')
|
|
|
|
|
2019-08-16 12:56:38 +00:00
|
|
|
self._args_to_config(config, argname='timeframes',
|
|
|
|
logstring='timeframes --timeframes: {}')
|
|
|
|
|
|
|
|
self._args_to_config(config, argname='days',
|
|
|
|
logstring='Detected --days: {}')
|
|
|
|
|
2019-07-16 20:00:19 +00:00
|
|
|
def _process_runmode(self, config: Dict[str, Any]) -> None:
|
|
|
|
|
2019-07-15 19:17:57 +00:00
|
|
|
if not self.runmode:
|
|
|
|
# Handle real mode, infer dry/live from config
|
|
|
|
self.runmode = RunMode.DRY_RUN if config.get('dry_run', True) else RunMode.LIVE
|
2019-08-15 16:41:02 +00:00
|
|
|
logger.info(f"Runmode set to {self.runmode}.")
|
2019-07-15 19:17:57 +00:00
|
|
|
|
|
|
|
config.update({'runmode': self.runmode})
|
2019-06-16 11:31:24 +00:00
|
|
|
|
2019-07-15 19:17:57 +00:00
|
|
|
def _args_to_config(self, config: Dict[str, Any], argname: str,
|
2019-07-25 18:35:20 +00:00
|
|
|
logstring: str, logfun: Optional[Callable] = None,
|
|
|
|
deprecated_msg: Optional[str] = None) -> None:
|
2019-07-15 19:17:57 +00:00
|
|
|
"""
|
|
|
|
:param config: Configuration dictionary
|
|
|
|
:param argname: Argumentname in self.args - will be copied to config dict.
|
|
|
|
:param logstring: Logging String
|
|
|
|
:param logfun: logfun is applied to the configuration entry before passing
|
|
|
|
that entry to the log string using .format().
|
|
|
|
sample: logfun=len (prints the length of the found
|
|
|
|
configuration instead of the content)
|
|
|
|
"""
|
|
|
|
if argname in self.args and getattr(self.args, argname):
|
|
|
|
|
|
|
|
config.update({argname: getattr(self.args, argname)})
|
|
|
|
if logfun:
|
|
|
|
logger.info(logstring.format(logfun(config[argname])))
|
|
|
|
else:
|
|
|
|
logger.info(logstring.format(config[argname]))
|
2019-07-25 18:35:20 +00:00
|
|
|
if deprecated_msg:
|
|
|
|
warnings.warn(f"DEPRECATED: {deprecated_msg}", DeprecationWarning)
|
2019-08-16 12:56:57 +00:00
|
|
|
|
|
|
|
def _resolve_pairs_list(self, config: Dict[str, Any]) -> None:
|
|
|
|
"""
|
|
|
|
Helper for download script.
|
|
|
|
Takes first found:
|
|
|
|
* -p (pairs argument)
|
|
|
|
* --pairs-file
|
|
|
|
* whitelist from config
|
|
|
|
"""
|
|
|
|
|
2019-08-16 13:52:59 +00:00
|
|
|
if "pairs" in config:
|
2019-08-16 12:56:57 +00:00
|
|
|
return
|
|
|
|
|
|
|
|
if "pairs_file" in self.args and self.args.pairs_file:
|
2019-08-16 13:52:59 +00:00
|
|
|
pairs_file = Path(self.args.pairs_file)
|
2019-08-16 12:56:57 +00:00
|
|
|
logger.info(f'Reading pairs file "{pairs_file}".')
|
|
|
|
# Download pairs from the pairs file if no config is specified
|
|
|
|
# or if pairs file is specified explicitely
|
|
|
|
if not pairs_file.exists():
|
2019-08-16 13:52:59 +00:00
|
|
|
raise OperationalException(f'No pairs file found with path "{pairs_file}".')
|
2019-08-21 04:58:56 +00:00
|
|
|
with pairs_file.open('r') as f:
|
|
|
|
config['pairs'] = json_load(f)
|
|
|
|
config['pairs'].sort()
|
2019-08-16 13:52:59 +00:00
|
|
|
return
|
2019-08-16 12:56:57 +00:00
|
|
|
|
2019-08-17 05:05:42 +00:00
|
|
|
if "config" in self.args and self.args.config:
|
2019-08-16 12:56:57 +00:00
|
|
|
logger.info("Using pairlist from configuration.")
|
|
|
|
config['pairs'] = config.get('exchange', {}).get('pair_whitelist')
|
2019-08-16 13:52:59 +00:00
|
|
|
else:
|
|
|
|
# Fall back to /dl_path/pairs.json
|
2019-08-29 22:54:16 +00:00
|
|
|
pairs_file = Path(config['datadir']) / "pairs.json"
|
2019-08-16 13:52:59 +00:00
|
|
|
if pairs_file.exists():
|
2019-08-21 04:58:56 +00:00
|
|
|
with pairs_file.open('r') as f:
|
|
|
|
config['pairs'] = json_load(f)
|
|
|
|
if 'pairs' in config:
|
|
|
|
config['pairs'].sort()
|