Merge branch 'develop' into align_userdata
This commit is contained in:
commit aaeeb9c0c6
@@ -3,9 +3,43 @@
This page explains how to validate your strategy performance by using
Backtesting.

## Getting data for backtesting and hyperopt

To download data (candles / OHLCV) needed for backtesting and hyperoptimization, use the `freqtrade download-data` command.

If no additional parameter is specified, freqtrade will download data for the `"1m"` and `"5m"` timeframes.
Exchange and pairs will come from `config.json` (if specified using `-c/--config`). Otherwise `--exchange` becomes mandatory.

Alternatively, a `pairs.json` file can be used.

If you are using Binance for example:

- create a directory `user_data/data/binance` and copy `pairs.json` in that directory.
- update the `pairs.json` to contain the currency pairs you are interested in.

```bash
mkdir -p user_data/data/binance
cp freqtrade/tests/testdata/pairs.json user_data/data/binance
```

Then run:

```bash
freqtrade download-data --exchange binance
```

This will download ticker data for all the currency pairs you defined in `pairs.json`.

- To use a different directory than the exchange-specific default, use `--datadir user_data/data/some_directory`.
- To change the exchange used to download the tickers, please use a different configuration file (you'll probably need to adjust rate limits etc.).
- To use `pairs.json` from some other directory, use `--pairs-file some_other_dir/pairs.json`.
- To download ticker data for only 10 days, use `--days 10` (defaults to 30 days).
- Use `--timeframes` to specify which tickers to download. Default is `--timeframes 1m 5m`, which will download 1-minute and 5-minute tickers.
- To use the exchange, timeframe and list of pairs as defined in your configuration file, use the `-c/--config` option. With this, the command uses the whitelist defined in the config as the list of currency pairs to download data for and does not require the `pairs.json` file. You can combine `-c/--config` with most other options (see the combined example below).

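The options above can be combined in a single call. A minimal sketch (the pair list, day count and timeframes are only illustrative values):

```bash
freqtrade download-data --exchange binance --pairs ETH/BTC XRP/BTC --days 10 --timeframes 1m 5m
```
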
## Test your strategy with Backtesting

Now you have good Buy and Sell strategies, you want to test it against
Now that you have good Buy and Sell strategies and some historic data, you can test them against
real data. This is what we call
[backtesting](https://en.wikipedia.org/wiki/Backtesting).

@@ -109,37 +143,6 @@ The full timerange specification:
- Use timeframes between POSIX timestamps 1527595200 1527618600 (see the example below):
  `--timerange=1527595200-1527618600`

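For example, to backtest only that window (assuming your strategy and data are already in place):

```bash
freqtrade backtesting --timerange=1527595200-1527618600
```
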
#### Downloading new set of ticker data

To download a new set of backtesting ticker data, you can use a download script.

If you are using Binance for example:

- create a directory `user_data/data/binance` and copy `pairs.json` in that directory.
- update the `pairs.json` to contain the currency pairs you are interested in.

```bash
mkdir -p user_data/data/binance
cp freqtrade/tests/testdata/pairs.json user_data/data/binance
```

Then run:

```bash
python scripts/download_backtest_data.py --exchange binance
```

This will download ticker data for all the currency pairs you defined in `pairs.json`.

- To use a different directory than the exchange-specific default, use `--datadir user_data/data/some_directory`.
- To change the exchange used to download the tickers, use `--exchange`. Default is `bittrex`.
- To use `pairs.json` from some other directory, use `--pairs-file some_other_dir/pairs.json`.
- To download ticker data for only 10 days, use `--days 10`.
- Use `--timeframes` to specify which tickers to download. Default is `--timeframes 1m 5m`, which will download 1-minute and 5-minute tickers.
- To use the exchange, timeframe and list of pairs as defined in your configuration file, use the `-c/--config` option. With this, the script uses the whitelist defined in the config as the list of currency pairs to download data for and does not require the `pairs.json` file. You can combine `-c/--config` with other options.

For help about backtesting usage, please refer to [Backtesting commands](#backtesting-commands).

## Understand the backtesting result

The most important thing in backtesting is to understand the result.

@@ -212,19 +212,11 @@ optional arguments:
                        result.json)
```

### How to use **--refresh-pairs-cached** parameter?
### Getting historic data for backtesting

The first time you run Backtesting, it will take the pairs you have
set in your config file and download data from the Exchange.

If for any reason you want to update your data set, you use
`--refresh-pairs-cached` to force Backtesting to update the data it has.

!!! Note
    Use it only if you want to update your data set. You will not be able to come back to the previous version.

To test your strategy with the latest data, we recommend continuing using
the parameter `-l` or `--live`.
The first time you run Backtesting, you will need to download some historic data first.
This can be accomplished by using `freqtrade download-data`.
Check the corresponding [help page section](backtesting.md#Getting-data-for-backtesting-and-hyperopt) for more details.

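A minimal sketch of this, assuming a valid `config.json` (the pair whitelist from the config is then used for the download; the 60-day window is only an illustration):

```bash
freqtrade --config config.json download-data --days 60
```
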
## Hyperopt commands
@@ -274,27 +274,24 @@ Please always check the mode of operation to select the correct method to get data

#### Possible options for DataProvider

- `available_pairs` - Property with tuples listing cached pairs with their intervals. (pair, interval)
- `ohlcv(pair, ticker_interval)` - Currently cached ticker data for all pairs in the whitelist, returns DataFrame or empty DataFrame
- `historic_ohlcv(pair, ticker_interval)` - Data stored on disk
- `available_pairs` - Property with tuples listing cached pairs with their intervals (pair, interval).
- `ohlcv(pair, ticker_interval)` - Currently cached ticker data for the pair, returns DataFrame or empty DataFrame.
- `historic_ohlcv(pair, ticker_interval)` - Returns historical data stored on disk.
- `get_pair_dataframe(pair, ticker_interval)` - This is a universal method, which returns either historical data (for backtesting) or cached live data (for the Dry-Run and Live-Run modes).
- `runmode` - Property containing the current runmode.

#### ohlcv / historic_ohlcv
#### Example: fetch live ohlcv / historic data for the first informative pair

``` python
if self.dp:
    if self.dp.runmode in ('live', 'dry_run'):
        if (f'{self.stake_currency}/BTC', self.ticker_interval) in self.dp.available_pairs:
            data_eth = self.dp.ohlcv(pair='{self.stake_currency}/BTC',
                                     ticker_interval=self.ticker_interval)
    else:
        # Get historic ohlcv data (cached on disk).
        history_eth = self.dp.historic_ohlcv(pair='{self.stake_currency}/BTC',
                                             ticker_interval='1h')
    inf_pair, inf_timeframe = self.informative_pairs()[0]
    informative = self.dp.get_pair_dataframe(pair=inf_pair,
                                             ticker_interval=inf_timeframe)
```

!!! Warning "Warning about backtesting"
    Be careful when using the dataprovider in backtesting. `historic_ohlcv()` provides the full time-range in one go,
    Be careful when using the dataprovider in backtesting. `historic_ohlcv()` (and `get_pair_dataframe()`
    for the backtesting runmode) provides the full time-range in one go,
    so please be aware of it and make sure to not "look into the future" to avoid surprises when running in dry/live mode.

!!! Warning "Warning in hyperopt"

@ -1,3 +1,4 @@
|
|||
from freqtrade.configuration.arguments import Arguments # noqa: F401
|
||||
from freqtrade.configuration.timerange import TimeRange # noqa: F401
|
||||
from freqtrade.configuration.configuration import Configuration # noqa: F401
|
||||
from freqtrade.configuration.config_validation import validate_config_consistency # noqa: F401
|
||||
|
|
|
@ -32,7 +32,7 @@ ARGS_LIST_EXCHANGES = ["print_one_column"]
|
|||
|
||||
ARGS_CREATE_USERDIR = ["user_data_dir"]
|
||||
|
||||
ARGS_DOWNLOADER = ARGS_COMMON + ["pairs", "pairs_file", "days", "exchange", "timeframes", "erase"]
|
||||
ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "exchange", "timeframes", "erase"]
|
||||
|
||||
ARGS_PLOT_DATAFRAME = (ARGS_COMMON + ARGS_STRATEGY +
|
||||
["pairs", "indicators1", "indicators2", "plot_limit", "db_url",
|
||||
|
@ -42,6 +42,8 @@ ARGS_PLOT_DATAFRAME = (ARGS_COMMON + ARGS_STRATEGY +
|
|||
ARGS_PLOT_PROFIT = (ARGS_COMMON + ARGS_STRATEGY +
|
||||
["pairs", "timerange", "export", "exportfilename", "db_url", "trade_source"])
|
||||
|
||||
NO_CONF_REQURIED = ["start_download_data"]
|
||||
|
||||
|
||||
class Arguments(object):
|
||||
"""
|
||||
|
@ -77,7 +79,10 @@ class Arguments(object):
|
|||
|
||||
# Workaround issue in argparse with action='append' and default value
|
||||
# (see https://bugs.python.org/issue16399)
|
||||
if not self._no_default_config and parsed_arg.config is None:
|
||||
# Allow no-config for certain commands (like downloading / plotting)
|
||||
if (not self._no_default_config and parsed_arg.config is None
|
||||
and not (hasattr(parsed_arg, 'func')
|
||||
and parsed_arg.func.__name__ in NO_CONF_REQURIED)):
|
||||
parsed_arg.config = [constants.DEFAULT_CONFIG]
|
||||
|
||||
return parsed_arg
|
||||
|
@ -95,7 +100,8 @@ class Arguments(object):
|
|||
:return: None
|
||||
"""
|
||||
from freqtrade.optimize import start_backtesting, start_hyperopt, start_edge
|
||||
from freqtrade.utils import start_create_userdir, start_list_exchanges
|
||||
from freqtrade.utils import start_create_userdir, start_download_data, start_list_exchanges
|
||||
|
||||
subparsers = self.parser.add_subparsers(dest='subparser')
|
||||
|
||||
# Add backtesting subcommand
|
||||
|
@ -125,3 +131,11 @@ class Arguments(object):
|
|||
)
|
||||
list_exchanges_cmd.set_defaults(func=start_list_exchanges)
|
||||
self._build_args(optionlist=ARGS_LIST_EXCHANGES, parser=list_exchanges_cmd)
|
||||
|
||||
# Add download-data subcommand
|
||||
download_data_cmd = subparsers.add_parser(
|
||||
'download-data',
|
||||
help='Download backtesting data.'
|
||||
)
|
||||
download_data_cmd.set_defaults(func=start_download_data)
|
||||
self._build_args(optionlist=ARGS_DOWNLOAD_DATA, parser=download_data_cmd)
|
||||
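Because `start_download_data` is listed in `NO_CONF_REQURIED`, the default `config.json` is not forced for this subcommand, so it can be invoked without a configuration file. A minimal sketch (exchange and pairs are illustrative):

```bash
freqtrade download-data --exchange binance --pairs ETH/BTC XRP/BTC
```
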
|
|
|
@ -259,7 +259,8 @@ AVAILABLE_CLI_OPTIONS = {
|
|||
# Script options
|
||||
"pairs": Arg(
|
||||
'-p', '--pairs',
|
||||
help='Show profits for only these pairs. Pairs are comma-separated.',
|
||||
help='Show profits for only these pairs. Pairs are space-separated.',
|
||||
nargs='+',
|
||||
),
|
||||
# Download data
|
||||
"pairs_file": Arg(
|
||||
|
@ -281,9 +282,10 @@ AVAILABLE_CLI_OPTIONS = {
|
|||
"timeframes": Arg(
|
||||
'-t', '--timeframes',
|
||||
help=f'Specify which tickers to download. Space-separated list. '
|
||||
f'Default: `{constants.DEFAULT_DOWNLOAD_TICKER_INTERVALS}`.',
|
||||
f'Default: `1m 5m`.',
|
||||
choices=['1m', '3m', '5m', '15m', '30m', '1h', '2h', '4h',
|
||||
'6h', '8h', '12h', '1d', '3d', '1w'],
|
||||
default=['1m', '5m'],
|
||||
nargs='+',
|
||||
),
|
||||
"erase": Arg(
|
||||
|
|
freqtrade/configuration/config_validation.py (new file, +102)
@ -0,0 +1,102 @@
|
|||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from jsonschema import Draft4Validator, validators
|
||||
from jsonschema.exceptions import ValidationError, best_match
|
||||
|
||||
from freqtrade import constants, OperationalException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _extend_validator(validator_class):
|
||||
"""
|
||||
Extended validator for the Freqtrade configuration JSON Schema.
|
||||
Currently it only handles defaults for subschemas.
|
||||
"""
|
||||
validate_properties = validator_class.VALIDATORS['properties']
|
||||
|
||||
def set_defaults(validator, properties, instance, schema):
|
||||
for prop, subschema in properties.items():
|
||||
if 'default' in subschema:
|
||||
instance.setdefault(prop, subschema['default'])
|
||||
|
||||
for error in validate_properties(
|
||||
validator, properties, instance, schema,
|
||||
):
|
||||
yield error
|
||||
|
||||
return validators.extend(
|
||||
validator_class, {'properties': set_defaults}
|
||||
)
|
||||
|
||||
|
||||
FreqtradeValidator = _extend_validator(Draft4Validator)
|
||||
|
||||
|
||||
def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Validate that the configuration follows the Config Schema
|
||||
:param conf: Config in JSON format
|
||||
:return: Returns the config if valid, otherwise throw an exception
|
||||
"""
|
||||
try:
|
||||
FreqtradeValidator(constants.CONF_SCHEMA).validate(conf)
|
||||
return conf
|
||||
except ValidationError as e:
|
||||
logger.critical(
|
||||
f"Invalid configuration. See config.json.example. Reason: {e}"
|
||||
)
|
||||
raise ValidationError(
|
||||
best_match(Draft4Validator(constants.CONF_SCHEMA).iter_errors(conf)).message
|
||||
)
|
||||
|
||||
|
||||
def validate_config_consistency(conf: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Validate the configuration consistency.
|
||||
Should be run after loading both configuration and strategy,
|
||||
since strategies can set certain configuration settings too.
|
||||
:param conf: Config in JSON format
|
||||
:return: Returns None if everything is ok, otherwise throw an OperationalException
|
||||
"""
|
||||
# validating trailing stoploss
|
||||
_validate_trailing_stoploss(conf)
|
||||
_validate_edge(conf)
|
||||
|
||||
|
||||
def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
|
||||
|
||||
# Skip if trailing stoploss is not activated
|
||||
if not conf.get('trailing_stop', False):
|
||||
return
|
||||
|
||||
tsl_positive = float(conf.get('trailing_stop_positive', 0))
|
||||
tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
|
||||
tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)
|
||||
|
||||
if tsl_only_offset:
|
||||
if tsl_positive == 0.0:
|
||||
raise OperationalException(
|
||||
f'The config trailing_only_offset_is_reached needs '
|
||||
'trailing_stop_positive_offset to be more than 0 in your config.')
|
||||
if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
|
||||
raise OperationalException(
|
||||
f'The config trailing_stop_positive_offset needs '
|
||||
'to be greater than trailing_stop_positive_offset in your config.')
|
||||
|
||||
|
||||
def _validate_edge(conf: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Edge and Dynamic whitelist should not both be enabled, since edge overrides dynamic whitelists.
|
||||
"""
|
||||
|
||||
if not conf.get('edge', {}).get('enabled'):
|
||||
return
|
||||
|
||||
if conf.get('pairlist', {}).get('method') == 'VolumePairList':
|
||||
raise OperationalException(
|
||||
"Edge and VolumePairList are incompatible, "
|
||||
"Edge will override whatever pairs VolumePairlist selects."
|
||||
)
|
|
@ -9,12 +9,13 @@ from typing import Any, Callable, Dict, List, Optional
|
|||
|
||||
from freqtrade import OperationalException, constants
|
||||
from freqtrade.configuration.check_exchange import check_exchange
|
||||
from freqtrade.configuration.config_validation import (
|
||||
validate_config_consistency, validate_config_schema)
|
||||
from freqtrade.configuration.directory_operations import (create_datadir,
|
||||
create_userdata_dir)
|
||||
from freqtrade.configuration.json_schema import validate_config_schema
|
||||
from freqtrade.configuration.load_config import load_config_file
|
||||
from freqtrade.loggers import setup_logging
|
||||
from freqtrade.misc import deep_merge_dicts
|
||||
from freqtrade.misc import deep_merge_dicts, json_load
|
||||
from freqtrade.state import RunMode
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -54,6 +55,9 @@ class Configuration(object):
|
|||
# Keep this method as staticmethod, so it can be used from interactive environments
|
||||
config: Dict[str, Any] = {}
|
||||
|
||||
if not files:
|
||||
return constants.MINIMAL_CONFIG.copy()
|
||||
|
||||
# We expect here a list of config filenames
|
||||
for path in files:
|
||||
logger.info(f'Using config: {path} ...')
|
||||
|
@ -79,8 +83,6 @@ class Configuration(object):
|
|||
# Load all configs
|
||||
config: Dict[str, Any] = Configuration.from_files(self.args.config)
|
||||
|
||||
self._validate_config_consistency(config)
|
||||
|
||||
self._process_common_options(config)
|
||||
|
||||
self._process_optimize_options(config)
|
||||
|
@ -89,6 +91,13 @@ class Configuration(object):
|
|||
|
||||
self._process_runmode(config)
|
||||
|
||||
# Check if the exchange set by the user is supported
|
||||
check_exchange(config, config.get('experimental', {}).get('block_bad_exchanges', True))
|
||||
|
||||
self._resolve_pairs_list(config)
|
||||
|
||||
validate_config_consistency(config)
|
||||
|
||||
return config
|
||||
|
||||
def _process_logging_options(self, config: Dict[str, Any]) -> None:
|
||||
|
@ -146,9 +155,6 @@ class Configuration(object):
|
|||
if 'sd_notify' in self.args and self.args.sd_notify:
|
||||
config['internals'].update({'sd_notify': True})
|
||||
|
||||
# Check if the exchange set by the user is supported
|
||||
check_exchange(config, config.get('experimental', {}).get('block_bad_exchanges', True))
|
||||
|
||||
def _process_datadir_options(self, config: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Extract information for sys.argv and load directory configurations
|
||||
|
@ -285,6 +291,19 @@ class Configuration(object):
|
|||
self._args_to_config(config, argname='trade_source',
|
||||
logstring='Using trades from: {}')
|
||||
|
||||
self._args_to_config(config, argname='erase',
|
||||
logstring='Erase detected. Deleting existing data.')
|
||||
|
||||
self._args_to_config(config, argname='timeframes',
|
||||
logstring='timeframes --timeframes: {}')
|
||||
|
||||
self._args_to_config(config, argname='days',
|
||||
logstring='Detected --days: {}')
|
||||
|
||||
if "exchange" in self.args and self.args.exchange:
|
||||
config['exchange']['name'] = self.args.exchange
|
||||
logger.info(f"Using exchange {config['exchange']['name']}")
|
||||
|
||||
def _process_runmode(self, config: Dict[str, Any]) -> None:
|
||||
|
||||
if not self.runmode:
|
||||
|
@ -294,35 +313,6 @@ class Configuration(object):
|
|||
|
||||
config.update({'runmode': self.runmode})
|
||||
|
||||
def _validate_config_consistency(self, conf: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Validate the configuration consistency
|
||||
:param conf: Config in JSON format
|
||||
:return: Returns None if everything is ok, otherwise throw an OperationalException
|
||||
"""
|
||||
# validating trailing stoploss
|
||||
self._validate_trailing_stoploss(conf)
|
||||
|
||||
def _validate_trailing_stoploss(self, conf: Dict[str, Any]) -> None:
|
||||
|
||||
# Skip if trailing stoploss is not activated
|
||||
if not conf.get('trailing_stop', False):
|
||||
return
|
||||
|
||||
tsl_positive = float(conf.get('trailing_stop_positive', 0))
|
||||
tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
|
||||
tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)
|
||||
|
||||
if tsl_only_offset:
|
||||
if tsl_positive == 0.0:
|
||||
raise OperationalException(
|
||||
f'The config trailing_only_offset_is_reached needs '
|
||||
'trailing_stop_positive_offset to be more than 0 in your config.')
|
||||
if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
|
||||
raise OperationalException(
|
||||
f'The config trailing_stop_positive_offset needs '
|
||||
'to be greater than trailing_stop_positive_offset in your config.')
|
||||
|
||||
def _args_to_config(self, config: Dict[str, Any], argname: str,
|
||||
logstring: str, logfun: Optional[Callable] = None,
|
||||
deprecated_msg: Optional[str] = None) -> None:
|
||||
|
@ -344,3 +334,39 @@ class Configuration(object):
|
|||
logger.info(logstring.format(config[argname]))
|
||||
if deprecated_msg:
|
||||
warnings.warn(f"DEPRECATED: {deprecated_msg}", DeprecationWarning)
|
||||
|
||||
def _resolve_pairs_list(self, config: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Helper for download script.
|
||||
Takes first found:
|
||||
* -p (pairs argument)
|
||||
* --pairs-file
|
||||
* whitelist from config
|
||||
"""
|
||||
|
||||
if "pairs" in config:
|
||||
return
|
||||
|
||||
if "pairs_file" in self.args and self.args.pairs_file:
|
||||
pairs_file = Path(self.args.pairs_file)
|
||||
logger.info(f'Reading pairs file "{pairs_file}".')
|
||||
# Download pairs from the pairs file if no config is specified
|
||||
# or if pairs file is specified explicitly
|
||||
if not pairs_file.exists():
|
||||
raise OperationalException(f'No pairs file found with path "{pairs_file}".')
|
||||
with pairs_file.open('r') as f:
|
||||
config['pairs'] = json_load(f)
|
||||
config['pairs'].sort()
|
||||
return
|
||||
|
||||
if "config" in self.args and self.args.config:
|
||||
logger.info("Using pairlist from configuration.")
|
||||
config['pairs'] = config.get('exchange', {}).get('pair_whitelist')
|
||||
else:
|
||||
# Fall back to /dl_path/pairs.json
|
||||
pairs_file = Path(config['datadir']) / config['exchange']['name'].lower() / "pairs.json"
|
||||
if pairs_file.exists():
|
||||
with pairs_file.open('r') as f:
|
||||
config['pairs'] = json_load(f)
|
||||
if 'pairs' in config:
|
||||
config['pairs'].sort()
|
||||
|
|
|
@ -1,53 +0,0 @@
|
|||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from jsonschema import Draft4Validator, validators
|
||||
from jsonschema.exceptions import ValidationError, best_match
|
||||
|
||||
from freqtrade import constants
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _extend_validator(validator_class):
|
||||
"""
|
||||
Extended validator for the Freqtrade configuration JSON Schema.
|
||||
Currently it only handles defaults for subschemas.
|
||||
"""
|
||||
validate_properties = validator_class.VALIDATORS['properties']
|
||||
|
||||
def set_defaults(validator, properties, instance, schema):
|
||||
for prop, subschema in properties.items():
|
||||
if 'default' in subschema:
|
||||
instance.setdefault(prop, subschema['default'])
|
||||
|
||||
for error in validate_properties(
|
||||
validator, properties, instance, schema,
|
||||
):
|
||||
yield error
|
||||
|
||||
return validators.extend(
|
||||
validator_class, {'properties': set_defaults}
|
||||
)
|
||||
|
||||
|
||||
FreqtradeValidator = _extend_validator(Draft4Validator)
|
||||
|
||||
|
||||
def validate_config_schema(conf: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Validate the configuration follow the Config Schema
|
||||
:param conf: Config in JSON format
|
||||
:return: Returns the config if valid, otherwise throw an exception
|
||||
"""
|
||||
try:
|
||||
FreqtradeValidator(constants.CONF_SCHEMA).validate(conf)
|
||||
return conf
|
||||
except ValidationError as e:
|
||||
logger.critical(
|
||||
f"Invalid configuration. See config.json.example. Reason: {e}"
|
||||
)
|
||||
raise ValidationError(
|
||||
best_match(Draft4Validator(constants.CONF_SCHEMA).iter_errors(conf)).message
|
||||
)
|
|
@ -5,7 +5,6 @@ bot constants
|
|||
"""
|
||||
DEFAULT_CONFIG = 'config.json'
|
||||
DEFAULT_EXCHANGE = 'bittrex'
|
||||
DYNAMIC_WHITELIST = 20 # pairs
|
||||
PROCESS_THROTTLE_SECS = 5 # sec
|
||||
DEFAULT_TICKER_INTERVAL = 5 # min
|
||||
HYPEROPT_EPOCH = 100 # epochs
|
||||
|
@ -23,7 +22,6 @@ ORDERTYPE_POSSIBILITIES = ['limit', 'market']
|
|||
ORDERTIF_POSSIBILITIES = ['gtc', 'fok', 'ioc']
|
||||
AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList']
|
||||
DRY_RUN_WALLET = 999.9
|
||||
DEFAULT_DOWNLOAD_TICKER_INTERVALS = '1m 5m'
|
||||
|
||||
TICKER_INTERVALS = [
|
||||
'1m', '3m', '5m', '15m', '30m',
|
||||
|
@ -39,6 +37,20 @@ SUPPORTED_FIAT = [
|
|||
"BTC", "XBT", "ETH", "XRP", "LTC", "BCH", "USDT"
|
||||
]
|
||||
|
||||
MINIMAL_CONFIG = {
|
||||
'stake_currency': '',
|
||||
'dry_run': True,
|
||||
'exchange': {
|
||||
'name': '',
|
||||
'key': '',
|
||||
'secret': '',
|
||||
'pair_whitelist': [],
|
||||
'ccxt_async_config': {
|
||||
'enableRateLimit': True,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Required json-schema for user specified config
|
||||
CONF_SCHEMA = {
|
||||
'type': 'object',
|
||||
|
|
|
@ -43,7 +43,7 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
|
|||
start_index += 1
|
||||
|
||||
if timerange.stoptype == 'line':
|
||||
start_index = len(tickerlist) + timerange.stopts
|
||||
start_index = max(len(tickerlist) + timerange.stopts, 0)
|
||||
if timerange.stoptype == 'index':
|
||||
stop_index = timerange.stopts
|
||||
elif timerange.stoptype == 'date':
|
||||
|
@ -57,10 +57,8 @@ def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
|
|||
return tickerlist[start_index:stop_index]
|
||||
|
||||
|
||||
def load_tickerdata_file(
|
||||
datadir: Optional[Path], pair: str,
|
||||
ticker_interval: str,
|
||||
timerange: Optional[TimeRange] = None) -> Optional[list]:
|
||||
def load_tickerdata_file(datadir: Optional[Path], pair: str, ticker_interval: str,
|
||||
timerange: Optional[TimeRange] = None) -> Optional[list]:
|
||||
"""
|
||||
Load a pair from file, either .json.gz or .json
|
||||
:return: tickerlist or None if unsuccessful
|
||||
|
@ -68,13 +66,22 @@ def load_tickerdata_file(
|
|||
filename = pair_data_filename(datadir, pair, ticker_interval)
|
||||
pairdata = misc.file_load_json(filename)
|
||||
if not pairdata:
|
||||
return None
|
||||
return []
|
||||
|
||||
if timerange:
|
||||
pairdata = trim_tickerlist(pairdata, timerange)
|
||||
return pairdata
|
||||
|
||||
|
||||
def store_tickerdata_file(datadir: Optional[Path], pair: str,
|
||||
ticker_interval: str, data: list, is_zip: bool = False):
|
||||
"""
|
||||
Stores tickerdata to file
|
||||
"""
|
||||
filename = pair_data_filename(datadir, pair, ticker_interval)
|
||||
misc.file_dump_json(filename, data, is_zip=is_zip)
|
||||
|
||||
|
||||
def load_pair_history(pair: str,
|
||||
ticker_interval: str,
|
||||
datadir: Optional[Path],
|
||||
|
@ -122,7 +129,7 @@ def load_pair_history(pair: str,
|
|||
else:
|
||||
logger.warning(
|
||||
f'No history data for pair: "{pair}", interval: {ticker_interval}. '
|
||||
'Use --refresh-pairs-cached option or download_backtest_data.py '
|
||||
'Use --refresh-pairs-cached option or `freqtrade download-data` '
|
||||
'script to download the data'
|
||||
)
|
||||
return None
|
||||
|
@ -177,11 +184,14 @@ def pair_data_filename(datadir: Optional[Path], pair: str, ticker_interval: str)
|
|||
return filename
|
||||
|
||||
|
||||
def load_cached_data_for_updating(filename: Path, ticker_interval: str,
|
||||
def load_cached_data_for_updating(datadir: Optional[Path], pair: str, ticker_interval: str,
|
||||
timerange: Optional[TimeRange]) -> Tuple[List[Any],
|
||||
Optional[int]]:
|
||||
"""
|
||||
Load cached data and choose what part of the data should be updated
|
||||
Load cached data to download more data.
|
||||
If timerange is passed in, checks whether data from before the stored data will be downloaded.
|
||||
If that's the case, then what's available should be completely overwritten.
|
||||
Only used by download_pair_history().
|
||||
"""
|
||||
|
||||
since_ms = None
|
||||
|
@ -195,12 +205,11 @@ def load_cached_data_for_updating(filename: Path, ticker_interval: str,
|
|||
since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000
|
||||
|
||||
# read the cached file
|
||||
if filename.is_file():
|
||||
with open(filename, "rt") as file:
|
||||
data = misc.json_load(file)
|
||||
# remove the last item, could be incomplete candle
|
||||
if data:
|
||||
data.pop()
|
||||
# Intentionally don't pass timerange in - since we need to load the full dataset.
|
||||
data = load_tickerdata_file(datadir, pair, ticker_interval)
|
||||
# remove the last item, could be incomplete candle
|
||||
if data:
|
||||
data.pop()
|
||||
else:
|
||||
data = []
|
||||
|
||||
|
@ -239,14 +248,12 @@ def download_pair_history(datadir: Optional[Path],
|
|||
)
|
||||
|
||||
try:
|
||||
filename = pair_data_filename(datadir, pair, ticker_interval)
|
||||
|
||||
logger.info(
|
||||
f'Download history data for pair: "{pair}", interval: {ticker_interval} '
|
||||
f'and store in {datadir}.'
|
||||
)
|
||||
|
||||
data, since_ms = load_cached_data_for_updating(filename, ticker_interval, timerange)
|
||||
data, since_ms = load_cached_data_for_updating(datadir, pair, ticker_interval, timerange)
|
||||
|
||||
logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
|
||||
logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')
|
||||
|
@ -262,7 +269,7 @@ def download_pair_history(datadir: Optional[Path],
|
|||
logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
|
||||
logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))
|
||||
|
||||
misc.file_dump_json(filename, data)
|
||||
store_tickerdata_file(datadir, pair, ticker_interval, data=data)
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
|
|
|
@ -408,12 +408,12 @@ class Exchange(object):
|
|||
except ccxt.InsufficientFunds as e:
|
||||
raise DependencyException(
|
||||
f'Insufficient funds to create {ordertype} {side} order on market {pair}.'
|
||||
f'Tried to {side} amount {amount} at rate {rate} (total {rate * amount}).'
|
||||
f'Tried to {side} amount {amount} at rate {rate}.'
|
||||
f'Message: {e}') from e
|
||||
except ccxt.InvalidOrder as e:
|
||||
raise DependencyException(
|
||||
f'Could not create {ordertype} {side} order on market {pair}.'
|
||||
f'Tried to {side} amount {amount} at rate {rate} (total {rate * amount}).'
|
||||
f'Tried to {side} amount {amount} at rate {rate}.'
|
||||
f'Message: {e}') from e
|
||||
except (ccxt.NetworkError, ccxt.ExchangeError) as e:
|
||||
raise TemporaryError(
|
||||
|
@ -472,7 +472,7 @@ class Exchange(object):
|
|||
|
||||
order = self.create_order(pair, ordertype, 'sell', amount, rate, params)
|
||||
logger.info('stoploss limit order added for %s. '
|
||||
'stop price: %s. limit: %s' % (pair, stop_price, rate))
|
||||
'stop price: %s. limit: %s', pair, stop_price, rate)
|
||||
return order
|
||||
|
||||
@retrier
|
||||
|
@ -696,8 +696,13 @@ class Exchange(object):
|
|||
@retrier
|
||||
def get_order(self, order_id: str, pair: str) -> Dict:
|
||||
if self._config['dry_run']:
|
||||
order = self._dry_run_open_orders[order_id]
|
||||
return order
|
||||
try:
|
||||
order = self._dry_run_open_orders[order_id]
|
||||
return order
|
||||
except KeyError as e:
|
||||
# Gracefully handle errors with dry-run orders.
|
||||
raise InvalidOrderException(
|
||||
f'Tried to get an invalid dry-run-order (id: {order_id}). Message: {e}') from e
|
||||
try:
|
||||
return self._api.fetch_order(order_id, pair)
|
||||
except ccxt.InvalidOrder as e:
|
||||
|
|
|
@ -16,6 +16,7 @@ from freqtrade import (DependencyException, OperationalException, InvalidOrderEx
|
|||
from freqtrade.data.converter import order_book_to_dataframe
|
||||
from freqtrade.data.dataprovider import DataProvider
|
||||
from freqtrade.edge import Edge
|
||||
from freqtrade.configuration import validate_config_consistency
|
||||
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date
|
||||
from freqtrade.persistence import Trade
|
||||
from freqtrade.rpc import RPCManager, RPCMessageType
|
||||
|
@ -51,6 +52,9 @@ class FreqtradeBot(object):
|
|||
|
||||
self.strategy: IStrategy = StrategyResolver(self.config).strategy
|
||||
|
||||
# Check config consistency here since strategies can set certain options
|
||||
validate_config_consistency(config)
|
||||
|
||||
self.rpc: RPCManager = RPCManager(self)
|
||||
|
||||
self.exchange = ExchangeResolver(self.config['exchange']['name'], self.config).exchange
|
||||
|
|
|
@ -5,11 +5,12 @@ import gzip
|
|||
import logging
|
||||
import re
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing.io import IO
|
||||
|
||||
import numpy as np
|
||||
import rapidjson
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -39,7 +40,7 @@ def datesarray_to_datetimearray(dates: np.ndarray) -> np.ndarray:
|
|||
return dates.dt.to_pydatetime()
|
||||
|
||||
|
||||
def file_dump_json(filename, data, is_zip=False) -> None:
|
||||
def file_dump_json(filename: Path, data, is_zip=False) -> None:
|
||||
"""
|
||||
Dump JSON data into a file
|
||||
:param filename: file to create
|
||||
|
@ -49,8 +50,8 @@ def file_dump_json(filename, data, is_zip=False) -> None:
|
|||
logger.info(f'dumping json to "{filename}"')
|
||||
|
||||
if is_zip:
|
||||
if not filename.endswith('.gz'):
|
||||
filename = filename + '.gz'
|
||||
if filename.suffix != '.gz':
|
||||
filename = filename.with_suffix('.gz')
|
||||
with gzip.open(filename, 'w') as fp:
|
||||
rapidjson.dump(data, fp, default=str, number_mode=rapidjson.NM_NATIVE)
|
||||
else:
|
||||
|
@ -60,7 +61,7 @@ def file_dump_json(filename, data, is_zip=False) -> None:
|
|||
logger.debug(f'done json to "{filename}"')
|
||||
|
||||
|
||||
def json_load(datafile):
|
||||
def json_load(datafile: IO):
|
||||
"""
|
||||
load data with rapidjson
|
||||
Use this to have a consistent experience,
|
||||
|
|
|
@ -190,7 +190,7 @@ class Backtesting(object):
|
|||
return tabulate(tabular_data, headers=headers, # type: ignore
|
||||
floatfmt=floatfmt, tablefmt="pipe")
|
||||
|
||||
def _store_backtest_result(self, recordfilename: str, results: DataFrame,
|
||||
def _store_backtest_result(self, recordfilename: Path, results: DataFrame,
|
||||
strategyname: Optional[str] = None) -> None:
|
||||
|
||||
records = [(t.pair, t.profit_percent, t.open_time.timestamp(),
|
||||
|
@ -201,10 +201,10 @@ class Backtesting(object):
|
|||
if records:
|
||||
if strategyname:
|
||||
# Inject strategyname to filename
|
||||
recname = Path(recordfilename)
|
||||
recordfilename = str(Path.joinpath(
|
||||
recname.parent, f'{recname.stem}-{strategyname}').with_suffix(recname.suffix))
|
||||
logger.info('Dumping backtest results to %s', recordfilename)
|
||||
recordfilename = Path.joinpath(
|
||||
recordfilename.parent,
|
||||
f'{recordfilename.stem}-{strategyname}').with_suffix(recordfilename.suffix)
|
||||
logger.info(f'Dumping backtest results to {recordfilename}')
|
||||
file_dump_json(recordfilename, records)
|
||||
|
||||
def _get_ticker_list(self, processed) -> Dict[str, DataFrame]:
|
||||
|
@ -458,7 +458,7 @@ class Backtesting(object):
|
|||
for strategy, results in all_results.items():
|
||||
|
||||
if self.config.get('export', False):
|
||||
self._store_backtest_result(self.config['exportfilename'], results,
|
||||
self._store_backtest_result(Path(self.config['exportfilename']), results,
|
||||
strategy if len(self.strategylist) > 1 else None)
|
||||
|
||||
print(f"Result for strategy {strategy}")
|
||||
|
|
|
@ -55,7 +55,6 @@ class VolumePairList(IPairList):
|
|||
# Generate dynamic whitelist
|
||||
self._whitelist = self._gen_pair_whitelist(
|
||||
self._config['stake_currency'], self._sort_key)[:self._number_pairs]
|
||||
logger.info(f"Searching pairs: {self._whitelist}")
|
||||
|
||||
@cached(TTLCache(maxsize=1, ttl=1800))
|
||||
def _gen_pair_whitelist(self, base_currency: str, key: str) -> List[str]:
|
||||
|
@ -92,4 +91,6 @@ class VolumePairList(IPairList):
|
|||
valid_tickers.remove(t)
|
||||
|
||||
pairs = [s['symbol'] for s in valid_tickers]
|
||||
logger.info(f"Searching pairs: {self._whitelist}")
|
||||
|
||||
return pairs
|
||||
|
|
|
@ -37,7 +37,7 @@ def init_plotscript(config):
|
|||
|
||||
strategy = StrategyResolver(config).strategy
|
||||
if "pairs" in config:
|
||||
pairs = config["pairs"].split(',')
|
||||
pairs = config["pairs"]
|
||||
else:
|
||||
pairs = config["exchange"]["pair_whitelist"]
|
||||
|
||||
|
|
|
@ -29,7 +29,8 @@ class IResolver(object):
|
|||
"""
|
||||
|
||||
# Generate spec based on absolute path
|
||||
spec = importlib.util.spec_from_file_location('unknown', str(module_path))
|
||||
# Pass object_name as first argument to have logging print a reasonable name.
|
||||
spec = importlib.util.spec_from_file_location(object_name, str(module_path))
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
try:
|
||||
spec.loader.exec_module(module) # type: ignore # importlib does not use typehints
|
||||
|
|
|
@ -74,7 +74,7 @@ def test_load_data_7min_ticker(mocker, caplog, default_conf) -> None:
|
|||
assert ld is None
|
||||
assert log_has(
|
||||
'No history data for pair: "UNITTEST/BTC", interval: 7m. '
|
||||
'Use --refresh-pairs-cached option or download_backtest_data.py '
|
||||
'Use --refresh-pairs-cached option or `freqtrade download-data` '
|
||||
'script to download the data', caplog
|
||||
)
|
||||
|
||||
|
@ -109,7 +109,7 @@ def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog, defau
|
|||
assert os.path.isfile(file) is False
|
||||
assert log_has(
|
||||
'No history data for pair: "MEME/BTC", interval: 1m. '
|
||||
'Use --refresh-pairs-cached option or download_backtest_data.py '
|
||||
'Use --refresh-pairs-cached option or `freqtrade download-data` '
|
||||
'script to download the data', caplog
|
||||
)
|
||||
|
||||
|
@ -178,16 +178,13 @@ def test_load_cached_data_for_updating(mocker) -> None:
|
|||
# timeframe starts earlier than the cached data
|
||||
# should fully update data
|
||||
timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
|
||||
assert data == []
|
||||
assert start_ts == test_data[0][0] - 1000
|
||||
|
||||
# same with 'line' timeframe
|
||||
num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 120
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m',
|
||||
TimeRange(None, 'line', 0, -num_lines))
|
||||
assert data == []
|
||||
assert start_ts < test_data[0][0] - 1
|
||||
|
@ -195,36 +192,29 @@ def test_load_cached_data_for_updating(mocker) -> None:
|
|||
# timeframe starts in the center of the cached data
|
||||
# should return the cached data w/o the last item
|
||||
timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# same with 'line' timeframe
|
||||
num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 30
|
||||
timerange = TimeRange(None, 'line', 0, -num_lines)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# timeframe starts after the cached data
|
||||
# should return the cached data w/o the last item
|
||||
timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 1, 0)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# same with 'line' timeframe
|
||||
# Try loading last 30 lines.
|
||||
# Not supported by load_cached_data_for_updating, we always need to get the full data.
|
||||
num_lines = 30
|
||||
timerange = TimeRange(None, 'line', 0, -num_lines)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
|
@ -232,35 +222,27 @@ def test_load_cached_data_for_updating(mocker) -> None:
|
|||
# should return the cached data w/o the last item
|
||||
num_lines = 30
|
||||
timerange = TimeRange(None, 'line', 0, -num_lines)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename,
|
||||
'1m',
|
||||
timerange)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'UNITTEST/BTC', '1m', timerange)
|
||||
assert data == test_data[:-1]
|
||||
assert test_data[-2][0] < start_ts < test_data[-1][0]
|
||||
|
||||
# no datafile exist
|
||||
# should return timestamp start time
|
||||
timerange = TimeRange('date', None, now_ts - 10000, 0)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
|
||||
'1m',
|
||||
timerange)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange)
|
||||
assert data == []
|
||||
assert start_ts == (now_ts - 10000) * 1000
|
||||
|
||||
# same with 'line' timeframe
|
||||
num_lines = 30
|
||||
timerange = TimeRange(None, 'line', 0, -num_lines)
|
||||
data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
|
||||
'1m',
|
||||
timerange)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', timerange)
|
||||
assert data == []
|
||||
assert start_ts == (now_ts - num_lines * 60) * 1000
|
||||
|
||||
# no datafile exist, no timeframe is set
|
||||
# should return an empty array and None
|
||||
data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
|
||||
'1m',
|
||||
None)
|
||||
data, start_ts = load_cached_data_for_updating(datadir, 'NONEXIST/BTC', '1m', None)
|
||||
assert data == []
|
||||
assert start_ts is None
|
||||
|
||||
|
|
|
@ -656,7 +656,13 @@ def test_buy_prod(default_conf, mocker, exchange_name):
|
|||
with pytest.raises(DependencyException):
|
||||
api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
|
||||
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
|
||||
exchange.buy(pair='ETH/BTC', ordertype=order_type,
|
||||
exchange.buy(pair='ETH/BTC', ordertype='limit',
|
||||
amount=1, rate=200, time_in_force=time_in_force)
|
||||
|
||||
with pytest.raises(DependencyException):
|
||||
api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
|
||||
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
|
||||
exchange.buy(pair='ETH/BTC', ordertype='market',
|
||||
amount=1, rate=200, time_in_force=time_in_force)
|
||||
|
||||
with pytest.raises(TemporaryError):
|
||||
|
@ -779,7 +785,13 @@ def test_sell_prod(default_conf, mocker, exchange_name):
|
|||
with pytest.raises(DependencyException):
|
||||
api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
|
||||
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
|
||||
exchange.sell(pair='ETH/BTC', ordertype=order_type, amount=1, rate=200)
|
||||
exchange.sell(pair='ETH/BTC', ordertype='limit', amount=1, rate=200)
|
||||
|
||||
# Market orders don't require price, so the behaviour is slightly different
|
||||
with pytest.raises(DependencyException):
|
||||
api_mock.create_order = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
|
||||
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
|
||||
exchange.sell(pair='ETH/BTC', ordertype='market', amount=1, rate=200)
|
||||
|
||||
with pytest.raises(TemporaryError):
|
||||
api_mock.create_order = MagicMock(side_effect=ccxt.NetworkError("No Connection"))
|
||||
|
@ -1328,6 +1340,9 @@ def test_get_order(default_conf, mocker, exchange_name):
|
|||
print(exchange.get_order('X', 'TKN/BTC'))
|
||||
assert exchange.get_order('X', 'TKN/BTC').myid == 123
|
||||
|
||||
with pytest.raises(InvalidOrderException, match=r'Tried to get an invalid dry-run-order.*'):
|
||||
exchange.get_order('Y', 'TKN/BTC')
|
||||
|
||||
default_conf['dry_run'] = False
|
||||
api_mock = MagicMock()
|
||||
api_mock.fetch_order = MagicMock(return_value=456)
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
import math
|
||||
import random
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import numpy as np
|
||||
|
@ -785,10 +786,10 @@ def test_backtest_record(default_conf, fee, mocker):
|
|||
# reset test to test with strategy name
|
||||
names = []
|
||||
records = []
|
||||
backtesting._store_backtest_result("backtest-result.json", results, "DefStrat")
|
||||
backtesting._store_backtest_result(Path("backtest-result.json"), results, "DefStrat")
|
||||
assert len(results) == 4
|
||||
# Assert file_dump_json was only called once
|
||||
assert names == ['backtest-result-DefStrat.json']
|
||||
assert names == [Path('backtest-result-DefStrat.json')]
|
||||
records = records[0]
|
||||
# Ensure records are of correct type
|
||||
assert len(records) == 4
|
||||
|
|
|
@ -4,7 +4,7 @@ import argparse
|
|||
import pytest
|
||||
|
||||
from freqtrade.configuration import Arguments
|
||||
from freqtrade.configuration.arguments import ARGS_DOWNLOADER, ARGS_PLOT_DATAFRAME
|
||||
from freqtrade.configuration.arguments import ARGS_PLOT_DATAFRAME
|
||||
from freqtrade.configuration.cli_options import check_int_positive
|
||||
|
||||
|
||||
|
@ -50,10 +50,10 @@ def test_parse_args_verbose() -> None:
|
|||
|
||||
|
||||
def test_common_scripts_options() -> None:
|
||||
arguments = Arguments(['-p', 'ETH/BTC'], '')
|
||||
arguments._build_args(ARGS_DOWNLOADER)
|
||||
args = arguments._parse_args()
|
||||
assert args.pairs == 'ETH/BTC'
|
||||
args = Arguments(['download-data', '-p', 'ETH/BTC', 'XRP/BTC'], '').get_parsed_arg()
|
||||
|
||||
assert args.pairs == ['ETH/BTC', 'XRP/BTC']
|
||||
assert hasattr(args, "func")
|
||||
|
||||
|
||||
def test_parse_args_version() -> None:
|
||||
|
@ -135,14 +135,14 @@ def test_parse_args_hyperopt_custom() -> None:
|
|||
|
||||
def test_download_data_options() -> None:
|
||||
args = [
|
||||
'--pairs-file', 'file_with_pairs',
|
||||
'--datadir', 'datadir/directory',
|
||||
'download-data',
|
||||
'--pairs-file', 'file_with_pairs',
|
||||
'--days', '30',
|
||||
'--exchange', 'binance'
|
||||
]
|
||||
arguments = Arguments(args, '')
|
||||
arguments._build_args(ARGS_DOWNLOADER)
|
||||
args = arguments._parse_args()
|
||||
args = Arguments(args, '').get_parsed_arg()
|
||||
|
||||
assert args.pairs_file == 'file_with_pairs'
|
||||
assert args.datadir == 'datadir/directory'
|
||||
assert args.days == 30
|
||||
|
@ -162,7 +162,7 @@ def test_plot_dataframe_options() -> None:
|
|||
assert pargs.indicators1 == "sma10,sma100"
|
||||
assert pargs.indicators2 == "macd,fastd,fastk"
|
||||
assert pargs.plot_limit == 30
|
||||
assert pargs.pairs == "UNITTEST/BTC"
|
||||
assert pargs.pairs == ["UNITTEST/BTC"]
|
||||
|
||||
|
||||
def test_check_int_positive() -> None:
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
import json
|
||||
import logging
|
||||
import warnings
|
||||
from argparse import Namespace
|
||||
from copy import deepcopy
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
@ -11,10 +10,11 @@ import pytest
|
|||
from jsonschema import Draft4Validator, ValidationError, validate
|
||||
|
||||
from freqtrade import OperationalException, constants
|
||||
from freqtrade.configuration import Arguments, Configuration
|
||||
from freqtrade.configuration import Arguments, Configuration, validate_config_consistency
|
||||
from freqtrade.configuration.check_exchange import check_exchange
|
||||
from freqtrade.configuration.directory_operations import create_datadir, create_userdata_dir
|
||||
from freqtrade.configuration.json_schema import validate_config_schema
|
||||
from freqtrade.configuration.config_validation import validate_config_schema
|
||||
from freqtrade.configuration.directory_operations import (create_datadir,
|
||||
create_userdata_dir)
|
||||
from freqtrade.configuration.load_config import load_config_file
|
||||
from freqtrade.constants import DEFAULT_DB_DRYRUN_URL, DEFAULT_DB_PROD_URL
|
||||
from freqtrade.loggers import _set_loggers
|
||||
|
@ -663,21 +663,34 @@ def test_validate_tsl(default_conf):
|
|||
with pytest.raises(OperationalException,
|
||||
match=r'The config trailing_only_offset_is_reached needs '
|
||||
'trailing_stop_positive_offset to be more than 0 in your config.'):
|
||||
configuration = Configuration(Namespace())
|
||||
configuration._validate_config_consistency(default_conf)
|
||||
validate_config_consistency(default_conf)
|
||||
|
||||
default_conf['trailing_stop_positive_offset'] = 0.01
|
||||
default_conf['trailing_stop_positive'] = 0.015
|
||||
with pytest.raises(OperationalException,
|
||||
match=r'The config trailing_stop_positive_offset needs '
|
||||
'to be greater than trailing_stop_positive_offset in your config.'):
|
||||
configuration = Configuration(Namespace())
|
||||
configuration._validate_config_consistency(default_conf)
|
||||
validate_config_consistency(default_conf)
|
||||
|
||||
default_conf['trailing_stop_positive'] = 0.01
|
||||
default_conf['trailing_stop_positive_offset'] = 0.015
|
||||
Configuration(Namespace())
|
||||
configuration._validate_config_consistency(default_conf)
|
||||
validate_config_consistency(default_conf)
|
||||
|
||||
|
||||
def test_validate_edge(edge_conf):
|
||||
edge_conf.update({"pairlist": {
|
||||
"method": "VolumePairList",
|
||||
}})
|
||||
|
||||
with pytest.raises(OperationalException,
|
||||
match="Edge and VolumePairList are incompatible, "
|
||||
"Edge will override whatever pairs VolumePairlist selects."):
|
||||
validate_config_consistency(edge_conf)
|
||||
|
||||
edge_conf.update({"pairlist": {
|
||||
"method": "StaticPairList",
|
||||
}})
|
||||
validate_config_consistency(edge_conf)
|
||||
|
||||
|
||||
def test_load_config_test_comments() -> None:
|
||||
|
@ -742,3 +755,111 @@ def test_load_config_default_subkeys(all_conf, keys) -> None:
|
|||
validate_config_schema(all_conf)
|
||||
assert subkey in all_conf[key]
|
||||
assert all_conf[key][subkey] == keys[2]
|
||||
|
||||
|
||||
def test_pairlist_resolving():
|
||||
arglist = [
|
||||
'download-data',
|
||||
'--pairs', 'ETH/BTC', 'XRP/BTC',
|
||||
'--exchange', 'binance'
|
||||
]
|
||||
|
||||
args = Arguments(arglist, '').get_parsed_arg()
|
||||
|
||||
configuration = Configuration(args)
|
||||
config = configuration.get_config()
|
||||
|
||||
assert config['pairs'] == ['ETH/BTC', 'XRP/BTC']
|
||||
assert config['exchange']['name'] == 'binance'
|
||||
|
||||
|
||||
def test_pairlist_resolving_with_config(mocker, default_conf):
|
||||
patched_configuration_load_config_file(mocker, default_conf)
|
||||
arglist = [
|
||||
'--config', 'config.json',
|
||||
'download-data',
|
||||
]
|
||||
|
||||
args = Arguments(arglist, '').get_parsed_arg()
|
||||
|
||||
configuration = Configuration(args)
|
||||
config = configuration.get_config()
|
||||
|
||||
assert config['pairs'] == default_conf['exchange']['pair_whitelist']
|
||||
assert config['exchange']['name'] == default_conf['exchange']['name']
|
||||
|
||||
# Override pairs
|
||||
arglist = [
|
||||
'--config', 'config.json',
|
||||
'download-data',
|
||||
'--pairs', 'ETH/BTC', 'XRP/BTC',
|
||||
]
|
||||
|
||||
args = Arguments(arglist, '').get_parsed_arg()
|
||||
|
||||
configuration = Configuration(args)
|
||||
config = configuration.get_config()
|
||||
|
||||
assert config['pairs'] == ['ETH/BTC', 'XRP/BTC']
|
||||
assert config['exchange']['name'] == default_conf['exchange']['name']
|
||||
|
||||
|
||||
def test_pairlist_resolving_with_config_pl(mocker, default_conf):
|
||||
patched_configuration_load_config_file(mocker, default_conf)
|
||||
load_mock = mocker.patch("freqtrade.configuration.configuration.json_load",
|
||||
MagicMock(return_value=['XRP/BTC', 'ETH/BTC']))
|
||||
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
|
||||
mocker.patch.object(Path, "open", MagicMock(return_value=MagicMock()))
|
||||
|
||||
arglist = [
|
||||
'--config', 'config.json',
|
||||
'download-data',
|
||||
'--pairs-file', 'pairs.json',
|
||||
]
|
||||
|
||||
args = Arguments(arglist, '').get_parsed_arg()
|
||||
|
||||
configuration = Configuration(args)
|
||||
config = configuration.get_config()
|
||||
|
||||
assert load_mock.call_count == 1
|
||||
assert config['pairs'] == ['ETH/BTC', 'XRP/BTC']
|
||||
assert config['exchange']['name'] == default_conf['exchange']['name']
|
||||
|
||||
|
||||
def test_pairlist_resolving_with_config_pl_not_exists(mocker, default_conf):
|
||||
patched_configuration_load_config_file(mocker, default_conf)
|
||||
mocker.patch("freqtrade.configuration.configuration.json_load",
|
||||
MagicMock(return_value=['XRP/BTC', 'ETH/BTC']))
|
||||
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
|
||||
|
||||
arglist = [
|
||||
'--config', 'config.json',
|
||||
'download-data',
|
||||
'--pairs-file', 'pairs.json',
|
||||
]
|
||||
|
||||
args = Arguments(arglist, '').get_parsed_arg()
|
||||
|
||||
with pytest.raises(OperationalException, match=r"No pairs file found with path.*"):
|
||||
configuration = Configuration(args)
|
||||
configuration.get_config()
|
||||
|
||||
|
||||
def test_pairlist_resolving_fallback(mocker):
|
||||
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
|
||||
mocker.patch.object(Path, "open", MagicMock(return_value=MagicMock()))
|
||||
mocker.patch("freqtrade.configuration.configuration.json_load",
|
||||
MagicMock(return_value=['XRP/BTC', 'ETH/BTC']))
|
||||
arglist = [
|
||||
'download-data',
|
||||
'--exchange', 'binance'
|
||||
]
|
||||
|
||||
args = Arguments(arglist, '').get_parsed_arg()
|
||||
|
||||
configuration = Configuration(args)
|
||||
config = configuration.get_config()
|
||||
|
||||
assert config['pairs'] == ['ETH/BTC', 'XRP/BTC']
|
||||
assert config['exchange']['name'] == 'binance'

@@ -1,7 +1,7 @@
# pragma pylint: disable=missing-docstring

from copy import deepcopy
from unittest.mock import MagicMock
from unittest.mock import MagicMock, PropertyMock

import pytest


@@ -21,6 +21,7 @@ def test_parse_args_backtesting(mocker) -> None:
    further argument parsing is done in test_arguments.py
    """
    backtesting_mock = mocker.patch('freqtrade.optimize.start_backtesting', MagicMock())
    backtesting_mock.__name__ = PropertyMock("start_backtesting")
    # it's sys.exit(0) at the end of backtesting
    with pytest.raises(SystemExit):
        main(['backtesting'])

@@ -36,6 +37,7 @@ def test_parse_args_backtesting(mocker) -> None:

def test_main_start_hyperopt(mocker) -> None:
    hyperopt_mock = mocker.patch('freqtrade.optimize.start_hyperopt', MagicMock())
    hyperopt_mock.__name__ = PropertyMock("start_hyperopt")
    # it's sys.exit(0) at the end of hyperopt
    with pytest.raises(SystemExit):
        main(['hyperopt'])


@@ -1,6 +1,7 @@
# pragma pylint: disable=missing-docstring,C0103

import datetime
from pathlib import Path
from unittest.mock import MagicMock

from freqtrade.data.converter import parse_ticker_dataframe

@@ -34,12 +35,12 @@ def test_datesarray_to_datetimearray(ticker_history_list):
def test_file_dump_json(mocker) -> None:
    file_open = mocker.patch('freqtrade.misc.open', MagicMock())
    json_dump = mocker.patch('rapidjson.dump', MagicMock())
    file_dump_json('somefile', [1, 2, 3])
    file_dump_json(Path('somefile'), [1, 2, 3])
    assert file_open.call_count == 1
    assert json_dump.call_count == 1
    file_open = mocker.patch('freqtrade.misc.gzip.open', MagicMock())
    json_dump = mocker.patch('rapidjson.dump', MagicMock())
    file_dump_json('somefile', [1, 2, 3], True)
    file_dump_json(Path('somefile'), [1, 2, 3], True)
    assert file_open.call_count == 1
    assert json_dump.call_count == 1


@@ -51,7 +51,7 @@ def test_init_plotscript(default_conf, mocker):
    assert "pairs" in ret
    assert "strategy" in ret

    default_conf['pairs'] = "POWR/BTC,XLM/BTC"
    default_conf['pairs'] = ["POWR/BTC", "XLM/BTC"]
    ret = init_plotscript(default_conf)
    assert "tickers" in ret
    assert "POWR/BTC" in ret["tickers"]


@@ -1,12 +1,13 @@
import re
from unittest.mock import MagicMock
from pathlib import Path
from unittest.mock import MagicMock, PropertyMock

import pytest

from freqtrade.state import RunMode
from freqtrade.tests.conftest import get_args, log_has
from freqtrade.tests.conftest import get_args, log_has, patch_exchange
from freqtrade.utils import (setup_utils_configuration, start_create_userdir,
                             start_list_exchanges)
                             start_download_data, start_list_exchanges)


def test_setup_utils_configuration():

@@ -67,3 +68,87 @@ def test_create_datadir(caplog, mocker):

    assert cud.call_count == 1
    assert len(caplog.record_tuples) == 0


def test_download_data(mocker, markets, caplog):
    dl_mock = mocker.patch('freqtrade.utils.download_pair_history', MagicMock())
    patch_exchange(mocker)
    mocker.patch(
        'freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets)
    )
    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
    mocker.patch.object(Path, "unlink", MagicMock())

    args = [
        "download-data",
        "--exchange", "binance",
        "--pairs", "ETH/BTC", "XRP/BTC",
        "--erase",
    ]
    start_download_data(get_args(args))

    assert dl_mock.call_count == 4
    assert dl_mock.call_args[1]['timerange'].starttype is None
    assert dl_mock.call_args[1]['timerange'].stoptype is None
    assert log_has("Deleting existing data for pair ETH/BTC, interval 1m.", caplog)
    assert log_has("Downloading pair ETH/BTC, interval 1m.", caplog)

def test_download_data_days(mocker, markets, caplog):
    dl_mock = mocker.patch('freqtrade.utils.download_pair_history', MagicMock())
    patch_exchange(mocker)
    mocker.patch(
        'freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets)
    )
    mocker.patch.object(Path, "exists", MagicMock(return_value=True))
    mocker.patch.object(Path, "unlink", MagicMock())

    args = [
        "download-data",
        "--exchange", "binance",
        "--pairs", "ETH/BTC", "XRP/BTC",
        "--days", "20",
    ]

    start_download_data(get_args(args))

    assert dl_mock.call_count == 4
    assert dl_mock.call_args[1]['timerange'].starttype == 'date'

    assert log_has("Downloading pair ETH/BTC, interval 1m.", caplog)

def test_download_data_no_markets(mocker, caplog):
    dl_mock = mocker.patch('freqtrade.utils.download_pair_history', MagicMock())
    patch_exchange(mocker)
    mocker.patch(
        'freqtrade.exchange.Exchange.markets', PropertyMock(return_value={})
    )
    args = [
        "download-data",
        "--exchange", "binance",
        "--pairs", "ETH/BTC", "XRP/BTC",
    ]
    start_download_data(get_args(args))

    assert dl_mock.call_count == 0
    assert log_has("Skipping pair ETH/BTC...", caplog)
    assert log_has("Pairs [ETH/BTC,XRP/BTC] not available on exchange binance.", caplog)


def test_download_data_keyboardInterrupt(mocker, caplog, markets):
    dl_mock = mocker.patch('freqtrade.utils.download_pair_history',
                           MagicMock(side_effect=KeyboardInterrupt))
    patch_exchange(mocker)
    mocker.patch(
        'freqtrade.exchange.Exchange.markets', PropertyMock(return_value=markets)
    )
    args = [
        "download-data",
        "--exchange", "binance",
        "--pairs", "ETH/BTC", "XRP/BTC",
    ]
    with pytest.raises(SystemExit):
        start_download_data(get_args(args))

    assert dl_mock.call_count == 1


@@ -1,11 +1,16 @@
import logging
import sys
from argparse import Namespace
from pathlib import Path
from typing import Any, Dict

from freqtrade.configuration import Configuration
import arrow

from freqtrade.configuration import Configuration, TimeRange
from freqtrade.configuration.directory_operations import create_userdata_dir
from freqtrade.data.history import download_pair_history
from freqtrade.exchange import available_exchanges
from freqtrade.resolvers import ExchangeResolver
from freqtrade.state import RunMode

logger = logging.getLogger(__name__)

@@ -18,7 +23,7 @@ def setup_utils_configuration(args: Namespace, method: RunMode) -> Dict[str, Any
    :return: Configuration
    """
    configuration = Configuration(args, method)
    config = configuration.load_config()
    config = configuration.get_config()

    config['exchange']['dry_run'] = True
    # Ensure we do not use Exchange credentials

@@ -53,3 +58,56 @@ def start_create_userdir(args: Namespace) -> None:
    else:
        logger.warning("`create-userdir` requires --userdir to be set.")
        sys.exit(1)


def start_download_data(args: Namespace) -> None:
    """
    Download data (former download_backtest_data.py script)
    """
    config = setup_utils_configuration(args, RunMode.OTHER)

    timerange = TimeRange()
    if 'days' in config:
        time_since = arrow.utcnow().shift(days=-config['days']).strftime("%Y%m%d")
        timerange = TimeRange.parse_timerange(f'{time_since}-')

    dl_path = Path(config['datadir'])
    logger.info(f'About to download pairs: {config["pairs"]}, '
                f'intervals: {config["timeframes"]} to {dl_path}')

    pairs_not_available = []

    try:
        # Init exchange
        exchange = ExchangeResolver(config['exchange']['name'], config).exchange

        for pair in config["pairs"]:
            if pair not in exchange.markets:
                pairs_not_available.append(pair)
                logger.info(f"Skipping pair {pair}...")
                continue
            for ticker_interval in config["timeframes"]:
                pair_print = pair.replace('/', '_')
                filename = f'{pair_print}-{ticker_interval}.json'
                dl_file = dl_path.joinpath(filename)
                if config.get("erase") and dl_file.exists():
                    logger.info(
                        f'Deleting existing data for pair {pair}, interval {ticker_interval}.')
                    dl_file.unlink()

                logger.info(f'Downloading pair {pair}, interval {ticker_interval}.')
                download_pair_history(datadir=dl_path, exchange=exchange,
                                      pair=pair, ticker_interval=str(ticker_interval),
                                      timerange=timerange)

    except KeyboardInterrupt:
        sys.exit("SIGINT received, aborting ...")

    finally:
        if pairs_not_available:
            logger.info(
                f"Pairs [{','.join(pairs_not_available)}] not available "
                f"on exchange {config['exchange']['name']}.")

    # configuration.resolve_pairs_list()
    print(config)
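
Editor's note: a hedged usage sketch, not part of this commit, of driving the new entry point the way the tests above do, but building the parsed arguments directly rather than via the conftest `get_args` helper. It assumes `Arguments(...).get_parsed_arg()` returns the `Namespace` that `start_download_data()` expects, and that `--exchange` is sufficient when no configuration file is given:

```python
# Programmatic equivalent of `freqtrade download-data` with the options
# exercised in the tests above (exchange, pairs, days are example values).
from freqtrade.configuration import Arguments
from freqtrade.utils import start_download_data

arglist = [
    'download-data',
    '--exchange', 'binance',
    '--pairs', 'ETH/BTC', 'XRP/BTC',
    '--days', '20',
]
start_download_data(Arguments(arglist, '').get_parsed_arg())
```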

@@ -1,6 +1,6 @@
# requirements without requirements installable via conda
# mainly used for Raspberry pi installs
ccxt==1.18.1063
ccxt==1.18.1068
SQLAlchemy==1.3.7
python-telegram-bot==11.1.0
arrow==0.14.5


@@ -1,142 +1,11 @@
#!/usr/bin/env python3
"""
This script generates json files with pairs history data
"""
import arrow
import json

import sys
from pathlib import Path
from typing import Any, Dict, List

from freqtrade.configuration import Arguments, TimeRange
from freqtrade.configuration import Configuration
from freqtrade.configuration.arguments import ARGS_DOWNLOADER
from freqtrade.configuration.check_exchange import check_exchange
from freqtrade.configuration.load_config import load_config_file
from freqtrade.data.history import download_pair_history
from freqtrade.exchange import Exchange
from freqtrade.misc import deep_merge_dicts

import logging
print("This script has been integrated into freqtrade "
      "and its functionality is available by calling `freqtrade download-data`.")
print("Please check the documentation on https://www.freqtrade.io/en/latest/backtesting/ "
      "for details.")

logger = logging.getLogger('download_backtest_data')

# Do not read the default config if config is not specified
# in the command line options explicitely
arguments = Arguments(sys.argv[1:], 'Download backtest data',
                      no_default_config=True)
arguments._build_args(optionlist=ARGS_DOWNLOADER)
args = arguments._parse_args()

# Use bittrex as default exchange
exchange_name = args.exchange or 'bittrex'

pairs: List = []

configuration = Configuration(args)
config: Dict[str, Any] = {}

if args.config:
    # Now expecting a list of config filenames here, not a string
    for path in args.config:
        logger.info(f"Using config: {path}...")
        # Merge config options, overwriting old values
        config = deep_merge_dicts(load_config_file(path), config)

    config['stake_currency'] = ''
    # Ensure we do not use Exchange credentials
    config['exchange']['dry_run'] = True
    config['exchange']['key'] = ''
    config['exchange']['secret'] = ''

    pairs = config['exchange']['pair_whitelist']

    if config.get('ticker_interval'):
        timeframes = args.timeframes or [config.get('ticker_interval')]
    else:
        timeframes = args.timeframes or ['1m', '5m']

else:
    config = {
        'stake_currency': '',
        'dry_run': True,
        'exchange': {
            'name': exchange_name,
            'key': '',
            'secret': '',
            'pair_whitelist': [],
            'ccxt_async_config': {
                'enableRateLimit': True,
                'rateLimit': 200
            }
        }
    }
    timeframes = args.timeframes or ['1m', '5m']

configuration._process_logging_options(config)

if args.config and args.exchange:
    logger.warning("The --exchange option is ignored, "
                   "using exchange settings from the configuration file.")

# Check if the exchange set by the user is supported
check_exchange(config)

configuration._process_datadir_options(config)

dl_path = Path(config['datadir'])

pairs_file = Path(args.pairs_file) if args.pairs_file else dl_path.joinpath('pairs.json')

if not pairs or args.pairs_file:
    logger.info(f'Reading pairs file "{pairs_file}".')
    # Download pairs from the pairs file if no config is specified
    # or if pairs file is specified explicitely
    if not pairs_file.exists():
        sys.exit(f'No pairs file found with path "{pairs_file}".')

    with pairs_file.open() as file:
        pairs = list(set(json.load(file)))

    pairs.sort()

timerange = TimeRange()
if args.days:
    time_since = arrow.utcnow().shift(days=-args.days).strftime("%Y%m%d")
    timerange = TimeRange.parse_timerange(f'{time_since}-')

logger.info(f'About to download pairs: {pairs}, intervals: {timeframes} to {dl_path}')

pairs_not_available = []

try:
    # Init exchange
    exchange = Exchange(config)

    for pair in pairs:
        if pair not in exchange._api.markets:
            pairs_not_available.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue
        for ticker_interval in timeframes:
            pair_print = pair.replace('/', '_')
            filename = f'{pair_print}-{ticker_interval}.json'
            dl_file = dl_path.joinpath(filename)
            if args.erase and dl_file.exists():
                logger.info(
                    f'Deleting existing data for pair {pair}, interval {ticker_interval}.')
                dl_file.unlink()

            logger.info(f'Downloading pair {pair}, interval {ticker_interval}.')
            download_pair_history(datadir=dl_path, exchange=exchange,
                                  pair=pair, ticker_interval=str(ticker_interval),
                                  timerange=timerange)

except KeyboardInterrupt:
    sys.exit("SIGINT received, aborting ...")

finally:
    if pairs_not_available:
        logger.info(
            f"Pairs [{','.join(pairs_not_available)}] not available "
            f"on exchange {config['exchange']['name']}.")
sys.exit(1)