Mirror of https://github.com/freqtrade/freqtrade.git (synced 2024-11-11 10:43:56 +00:00)

Merge pull request #7806 from freqtrade/new_release

New release 2022.11

Commit: 77826ebf78
@@ -11,12 +11,14 @@
     "mounts": [
         "source=freqtrade-bashhistory,target=/home/ftuser/commandhistory,type=volume"
     ],
+    "workspaceMount": "source=${localWorkspaceFolder},target=/workspaces/freqtrade,type=bind,consistency=cached",
     // Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
     "remoteUser": "ftuser",

+    "onCreateCommand": "pip install --user -e .",
     "postCreateCommand": "freqtrade create-userdir --userdir user_data/",

-    "workspaceFolder": "/freqtrade/",
+    "workspaceFolder": "/workspaces/freqtrade",

     "settings": {
         "terminal.integrated.shell.linux": "/bin/bash",
.github/workflows/ci.yml (vendored, 8 changed lines)

@@ -258,7 +258,7 @@ jobs:
         webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}

   mypy_version_check:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:
     - uses: actions/checkout@v3

@@ -283,7 +283,7 @@ jobs:
     - uses: pre-commit/action@v3.0.0

   docs_check:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     steps:
     - uses: actions/checkout@v3

@@ -313,7 +313,7 @@ jobs:
   # Notify only once - when CI completes (and after deploy) in case it's successfull
   notify-complete:
     needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check, pre-commit ]
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
     # Discord notification can't handle schedule events
     if: (github.event_name != 'schedule')
     permissions:

@@ -338,7 +338,7 @@ jobs:

   deploy:
     needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check, pre-commit ]
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04

     if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'
.gitignore (vendored, 1 changed line)

@@ -109,7 +109,6 @@ target/
 !*.gitkeep
 !config_examples/config_binance.example.json
 !config_examples/config_bittrex.example.json
-!config_examples/config_ftx.example.json
 !config_examples/config_full.example.json
 !config_examples/config_kraken.example.json
 !config_examples/config_freqai.example.json
@@ -15,9 +15,9 @@ repos:
     additional_dependencies:
     - types-cachetools==5.2.1
     - types-filelock==3.2.7
-    - types-requests==2.28.11.2
+    - types-requests==2.28.11.5
     - types-tabulate==0.9.0.0
-    - types-python-dateutil==2.8.19.2
+    - types-python-dateutil==2.8.19.4
     # stages: [push]

 - repo: https://github.com/pycqa/isort
@@ -28,7 +28,6 @@ Please read the [exchange specific notes](docs/exchanges.md) to learn about even

 - [X] [Binance](https://www.binance.com/)
 - [X] [Bittrex](https://bittrex.com/)
-- [X] [FTX](https://ftx.com/#a=2258149)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
 - [X] [Huobi](http://huobi.com/)
 - [X] [Kraken](https://kraken.com/)

@@ -39,7 +38,7 @@ Please read the [exchange specific notes](docs/exchanges.md) to learn about even

 - [X] [Binance](https://www.binance.com/)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
-- [X] [OKX](https://okx.com/).
+- [X] [OKX](https://okx.com/)

 Please make sure to read the [exchange specific notes](docs/exchanges.md), as well as the [trading with leverage](docs/leverage.md) documentation before diving in.
Binary file not shown.
@@ -1,96 +0,0 @@
-{
-    "max_open_trades": 3,
-    "stake_currency": "USD",
-    "stake_amount": 50,
-    "tradable_balance_ratio": 0.99,
-    "fiat_display_currency": "USD",
-    "timeframe": "5m",
-    "dry_run": true,
-    "cancel_open_orders_on_exit": false,
-    "unfilledtimeout": {
-        "entry": 10,
-        "exit": 10,
-        "exit_timeout_count": 0,
-        "unit": "minutes"
-    },
-    "entry_pricing": {
-        "price_side": "same",
-        "use_order_book": true,
-        "order_book_top": 1,
-        "price_last_balance": 0.0,
-        "check_depth_of_market": {
-            "enabled": false,
-            "bids_to_ask_delta": 1
-        }
-    },
-    "exit_pricing": {
-        "price_side": "same",
-        "use_order_book": true,
-        "order_book_top": 1
-    },
-    "exchange": {
-        "name": "ftx",
-        "key": "your_exchange_key",
-        "secret": "your_exchange_secret",
-        "ccxt_config": {},
-        "ccxt_async_config": {},
-        "pair_whitelist": [
-            "BTC/USD",
-            "ETH/USD",
-            "BNB/USD",
-            "USDT/USD",
-            "LTC/USD",
-            "SRM/USD",
-            "SXP/USD",
-            "XRP/USD",
-            "DOGE/USD",
-            "1INCH/USD",
-            "CHZ/USD",
-            "MATIC/USD",
-            "LINK/USD",
-            "OXY/USD",
-            "SUSHI/USD"
-        ],
-        "pair_blacklist": [
-            "FTT/USD"
-        ]
-    },
-    "pairlists": [
-        {"method": "StaticPairList"}
-    ],
-    "edge": {
-        "enabled": false,
-        "process_throttle_secs": 3600,
-        "calculate_since_number_of_days": 7,
-        "allowed_risk": 0.01,
-        "stoploss_range_min": -0.01,
-        "stoploss_range_max": -0.1,
-        "stoploss_range_step": -0.01,
-        "minimum_winrate": 0.60,
-        "minimum_expectancy": 0.20,
-        "min_trade_number": 10,
-        "max_trade_duration_minute": 1440,
-        "remove_pumps": false
-    },
-    "telegram": {
-        "enabled": false,
-        "token": "your_telegram_token",
-        "chat_id": "your_telegram_chat_id"
-    },
-    "api_server": {
-        "enabled": false,
-        "listen_ip_address": "127.0.0.1",
-        "listen_port": 8080,
-        "verbosity": "error",
-        "jwt_secret_key": "somethingrandom",
-        "CORS_origins": [],
-        "username": "freqtrader",
-        "password": "SuperSecurePassword"
-    },
-    "bot_name": "freqtrade",
-    "initial_state": "running",
-    "force_entry_enable": false,
-    "internals": {
-        "process_throttle_secs": 5
-    }
-}
@@ -204,6 +204,7 @@
     "strategy_path": "user_data/strategies/",
     "recursive_strategy_search": false,
     "add_config_files": [],
+    "reduce_df_footprint": false,
    "dataformat_ohlcv": "json",
    "dataformat_trades": "jsongz"
 }
@@ -546,8 +546,8 @@ In addition to the above assumptions, strategy authors should carefully read the

 ### Trading limits in backtesting

-Exchanges have certain trading limits, like minimum base currency, or minimum stake (quote) currency.
-These limits are usually listed in the exchange documentation as "trading rules" or similar.
+Exchanges have certain trading limits, like minimum (and maximum) base currency, or minimum/maximum stake (quote) currency.
+These limits are usually listed in the exchange documentation as "trading rules" or similar and can be quite different between different pairs.

 Backtesting (as well as live and dry-run) does honor these limits, and will ensure that a stoploss can be placed below this value - so the value will be slightly higher than what the exchange specifies.
 Freqtrade has however no information about historic limits.
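A minimal sketch of the arithmetic behind the "slightly higher" value mentioned above, with hypothetical numbers (not freqtrade's exact implementation):

```python
# Sketch: a stake must stay above the exchange minimum even after the
# stoploss distance, so the effective minimum is slightly higher.
min_stake = 10.0   # hypothetical exchange minimum in quote currency
stoploss = -0.10   # 10% stoploss

# If price falls to the stoploss, the position is worth stake * (1 + stoploss).
# Requiring that to remain >= min_stake gives:
effective_min_stake = min_stake / (1 + stoploss)
print(f"{effective_min_stake:.2f}")  # 11.11 - above the raw 10.0 minimum
```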
@@ -253,6 +253,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `add_config_files` | Additional config files. These files will be loaded and merged with the current config file. The files are resolved relative to the initial file.<br> *Defaults to `[]`*. <br> **Datatype:** List of strings
 | `dataformat_ohlcv` | Data format to use to store historical candle (OHLCV) data. <br> *Defaults to `json`*. <br> **Datatype:** String
 | `dataformat_trades` | Data format to use to store historical trades data. <br> *Defaults to `jsongz`*. <br> **Datatype:** String
+| `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage (and decreasing train/inference timing in FreqAI). (Currently only affects FreqAI use-cases) <br> **Datatype:** Boolean. <br> Default: `False`.

 ### Parameters in the strategy
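The recast that `reduce_df_footprint` describes can be sketched in plain pandas; this is illustrative only, not freqtrade's actual code path:

```python
import numpy as np
import pandas as pd

def reduce_footprint(df: pd.DataFrame) -> pd.DataFrame:
    # Downcast 64-bit numeric columns to their 32-bit counterparts.
    float_cols = df.select_dtypes(include=[np.float64]).columns
    int_cols = df.select_dtypes(include=[np.int64]).columns
    df[float_cols] = df[float_cols].astype(np.float32)
    df[int_cols] = df[int_cols].astype(np.int32)
    return df

df = pd.DataFrame({"close": [1.0, 2.0], "volume": [10, 20]})
print(reduce_footprint(df).dtypes)  # float32 / int32: roughly half the memory
```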
@@ -552,7 +553,7 @@ The possible values are: `GTC` (default), `FOK` or `IOC`.
 ```

 !!! Warning
-    This is ongoing work. For now, it is supported only for binance, gate, ftx and kucoin.
+    This is ongoing work. For now, it is supported only for binance, gate and kucoin.
     Please don't change the default value unless you know what you are doing and have researched the impact of using different values for your particular exchange.

 ### What values can be used for fiat_display_currency?
@@ -664,6 +665,7 @@ You should also make sure to read the [Exchanges](exchanges.md) section of the d
 ### Using proxy with Freqtrade

 To use a proxy with freqtrade, export your proxy settings using the variables `"HTTP_PROXY"` and `"HTTPS_PROXY"` set to the appropriate values.
+This will have the proxy settings applied to everything (telegram, coingecko, ...) except exchange requests.

 ``` bash
 export HTTP_PROXY="http://addr:port"
@@ -671,17 +673,20 @@ export HTTPS_PROXY="http://addr:port"
 freqtrade
 ```

-#### Proxy just exchange requests
+#### Proxy exchange requests

-To use a proxy just for exchange connections (skips/ignores telegram and coingecko) - you can also define the proxies as part of the ccxt configuration.
+To use a proxy for exchange connections - you will have to define the proxies as part of the ccxt configuration.

 ``` json
-"ccxt_config": {
+{
+    "exchange": {
+        "ccxt_config": {
             "aiohttp_proxy": "http://addr:port",
             "proxies": {
                 "http": "http://addr:port",
                 "https": "http://addr:port"
             },
+        }
     }
 }
 ```
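For reference, a standalone ccxt client consumes these same keys; a minimal sketch, assuming a reachable proxy at `addr:port`:

```python
import ccxt

# The "proxies" mapping is handed to the underlying requests session;
# async clients use "aiohttp_proxy" instead.
exchange = ccxt.binance({
    "proxies": {
        "http": "http://addr:port",
        "https": "http://addr:port",
    },
})
print(exchange.fetch_ticker("BTC/USDT")["last"])
```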
@@ -177,13 +177,13 @@ freqtrade download-data --exchange binance --pairs ETH/USDT XRP/USDT BTC/USDT --

 ### Data format

-Freqtrade currently supports 3 data-formats for both OHLCV and trades data:
+Freqtrade currently supports the following data-formats:

 * `json` - plain "text" json files
 * `jsongz` - a gzip-zipped version of json files
 * `hdf5` - a high performance datastore
-* `feather` - a dataformat based on Apache Arrow
-* `parquet` - columnar datastore
+* `feather` - a dataformat based on Apache Arrow (OHLCV only)
+* `parquet` - columnar datastore (OHLCV only)

 By default, OHLCV data is stored as `json` data, while trades data is stored as `jsongz` data.
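All of these formats load cleanly into pandas; a minimal sketch for the `feather` case (the path layout shown is an assumption based on the default `user_data` structure):

```python
import pandas as pd

# Hypothetical path: downloaded OHLCV data lands under user_data/data/<exchange>/
df = pd.read_feather("user_data/data/binance/ETH_USDT-5m.feather")
print(df.columns.tolist())  # date, open, high, low, close, volume
```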
@@ -434,6 +434,11 @@ To keep the release-log short, best wrap the full git changelog into a collapsib
 </details>
 ```

+### FreqUI release
+
+If FreqUI has been updated substantially, make sure to create a release before merging the release branch.
+Make sure that freqUI CI on the release is finished and passed before merging the release.
+
 ### Create github release / tag

 Once the PR against stable is merged (best right after merging):
@@ -173,26 +173,6 @@ res = [p for p, x in lm.items() if 'US' in x['info']['prohibitedIn']]
 print(res)
 ```

-## FTX
-
-!!! Tip "Stoploss on Exchange"
-    FTX supports `stoploss_on_exchange` and can use both stop-loss-market and stop-loss-limit orders. It provides great advantages, so we recommend to benefit from it.
-    You can use either `"limit"` or `"market"` in the `order_types.stoploss` configuration setting to decide which type of stoploss shall be used.
-
-### Using subaccounts
-
-To use subaccounts with FTX, you need to edit the configuration and add the following:
-
-``` json
-"exchange": {
-    "ccxt_config": {
-        "headers": {
-            "FTX-SUBACCOUNT": "name"
-        }
-    },
-}
-```
-
 ## Kucoin

 Kucoin requires a passphrase for each api key, you will therefore need to add this key into the configuration so your exchange section looks as follows:
@@ -61,7 +61,7 @@ The FreqAI strategy requires including the following lines of code in the standa
         """
         Function designed to automatically generate, name and merge features
         from user indicated timeframes in the configuration file. User controls the indicators
-        passed to the training/prediction by prepending indicators with `'%-' + coin `
+        passed to the training/prediction by prepending indicators with `'%-' + pair `
         (see convention below). I.e. user should not prepend any supporting metrics
         (e.g. bb_lowerband below) with % unless they explicitly want to pass that metric to the
         model.
@@ -69,20 +69,17 @@ The FreqAI strategy requires including the following lines of code in the standa
         :param df: strategy dataframe which will receive merges from informatives
         :param tf: timeframe of the dataframe which will modify the feature names
         :param informative: the dataframe associated with the informative pair
-        :param coin: the name of the coin which will modify the feature names.
         """

-        coin = pair.split('/')[0]
-
         if informative is None:
             informative = self.dp.get_pair_dataframe(pair, tf)

         # first loop is automatically duplicating indicators for time periods
         for t in self.freqai_info["feature_parameters"]["indicator_periods_candles"]:
             t = int(t)
-            informative[f"%-{coin}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t)
-            informative[f"%-{coin}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t)
-            informative[f"%-{coin}adx-period_{t}"] = ta.ADX(informative, window=t)
+            informative[f"%-{pair}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t)
+            informative[f"%-{pair}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t)
+            informative[f"%-{pair}adx-period_{t}"] = ta.ADX(informative, window=t)

         indicators = [col for col in informative if col.startswith("%")]
         # This loop duplicates and shifts all indicators to add a sense of recency to data
@@ -134,7 +131,7 @@ Notice also the location of the labels under `if set_generalized_indicators:` at
 (as exemplified in `freqtrade/templates/FreqaiExampleStrategy.py`):

 ```python
-    def populate_any_indicators(self, metadata, pair, df, tf, informative=None, coin="", set_generalized_indicators=False):
+    def populate_any_indicators(self, pair, df, tf, informative=None, set_generalized_indicators=False):

         ...
@@ -2,7 +2,10 @@

 ## Defining the features

-Low level feature engineering is performed in the user strategy within a function called `populate_any_indicators()`. That function sets the `base features` such as, `RSI`, `MFI`, `EMA`, `SMA`, time of day, volume, etc. The `base features` can be custom indicators or they can be imported from any technical-analysis library that you can find. One important syntax rule is that all `base features` string names are prepended with `%`, while labels/targets are prepended with `&`.
+Low level feature engineering is performed in the user strategy within a function called `populate_any_indicators()`. That function sets the `base features` such as, `RSI`, `MFI`, `EMA`, `SMA`, time of day, volume, etc. The `base features` can be custom indicators or they can be imported from any technical-analysis library that you can find. One important syntax rule is that all `base features` string names are prepended with `%-{pair}`, while labels/targets are prepended with `&`.
+
+!!! Note
+    Adding the full pair string, e.g. XYZ/USD, in the feature name enables improved performance for dataframe caching on the backend. If you decide *not* to add the full pair string in the feature string, FreqAI will operate in a reduced performance mode.

 Meanwhile, high level feature engineering is handled within `"feature_parameters":{}` in the FreqAI config. Within this file, it is possible to decide large scale feature expansions on top of the `base_features` such as "including correlated pairs" or "including informative timeframes" or even "including recent candles."
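The practical effect of the naming change can be seen in a two-line sketch (illustrative values):

```python
pair = "XYZ/USD"
coin = pair.split('/')[0]              # old convention used only the base coin

old_name = f"%-{coin}rsi-period_10"    # "%-XYZrsi-period_10"
new_name = f"%-{pair}rsi-period_10"    # "%-XYZ/USDrsi-period_10" - full pair string
print(old_name, new_name)
```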
@@ -15,7 +18,7 @@ It is advisable to start from the template `populate_any_indicators()` in the so
         """
         Function designed to automatically generate, name, and merge features
         from user-indicated timeframes in the configuration file. The user controls the indicators
-        passed to the training/prediction by prepending indicators with `'%-' + coin `
+        passed to the training/prediction by prepending indicators with `'%-' + pair `
         (see convention below). I.e., the user should not prepend any supporting metrics
         (e.g., bb_lowerband below) with % unless they explicitly want to pass that metric to the
         model.
@@ -23,37 +26,34 @@ It is advisable to start from the template `populate_any_indicators()` in the so
         :param df: strategy dataframe which will receive merges from informatives
         :param tf: timeframe of the dataframe which will modify the feature names
         :param informative: the dataframe associated with the informative pair
-        :param coin: the name of the coin which will modify the feature names.
         """

-        coin = pair.split('/')[0]
-
         if informative is None:
             informative = self.dp.get_pair_dataframe(pair, tf)

         # first loop is automatically duplicating indicators for time periods
         for t in self.freqai_info["feature_parameters"]["indicator_periods_candles"]:
             t = int(t)
-            informative[f"%-{coin}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t)
-            informative[f"%-{coin}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t)
-            informative[f"%-{coin}adx-period_{t}"] = ta.ADX(informative, window=t)
+            informative[f"%-{pair}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t)
+            informative[f"%-{pair}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t)
+            informative[f"%-{pair}adx-period_{t}"] = ta.ADX(informative, window=t)

             bollinger = qtpylib.bollinger_bands(
                 qtpylib.typical_price(informative), window=t, stds=2.2
             )
-            informative[f"{coin}bb_lowerband-period_{t}"] = bollinger["lower"]
-            informative[f"{coin}bb_middleband-period_{t}"] = bollinger["mid"]
-            informative[f"{coin}bb_upperband-period_{t}"] = bollinger["upper"]
+            informative[f"{pair}bb_lowerband-period_{t}"] = bollinger["lower"]
+            informative[f"{pair}bb_middleband-period_{t}"] = bollinger["mid"]
+            informative[f"{pair}bb_upperband-period_{t}"] = bollinger["upper"]

-            informative[f"%-{coin}bb_width-period_{t}"] = (
-                informative[f"{coin}bb_upperband-period_{t}"]
-                - informative[f"{coin}bb_lowerband-period_{t}"]
-            ) / informative[f"{coin}bb_middleband-period_{t}"]
-            informative[f"%-{coin}close-bb_lower-period_{t}"] = (
-                informative["close"] / informative[f"{coin}bb_lowerband-period_{t}"]
+            informative[f"%-{pair}bb_width-period_{t}"] = (
+                informative[f"{pair}bb_upperband-period_{t}"]
+                - informative[f"{pair}bb_lowerband-period_{t}"]
+            ) / informative[f"{pair}bb_middleband-period_{t}"]
+            informative[f"%-{pair}close-bb_lower-period_{t}"] = (
+                informative["close"] / informative[f"{pair}bb_lowerband-period_{t}"]
             )

-            informative[f"%-{coin}relative_volume-period_{t}"] = (
+            informative[f"%-{pair}relative_volume-period_{t}"] = (
                 informative["volume"] / informative["volume"].rolling(t).mean()
             )
@@ -18,6 +18,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | `fit_live_predictions_candles` | Number of historical candles to use for computing target (label) statistics from prediction data, instead of from the training dataset (more information can be found [here](freqai-configuration.md#creating-a-dynamic-target-threshold)). <br> **Datatype:** Positive integer.
 | `follow_mode` | Use a `follower` that will look for models associated with a specific `identifier` and load those for inferencing. A `follower` will **not** train new models. <br> **Datatype:** Boolean. <br> Default: `False`.
 | `continual_learning` | Use the final state of the most recently trained model as starting point for the new model, allowing for incremental learning (more information can be found [here](freqai-running.md#continual-learning)). <br> **Datatype:** Boolean. <br> Default: `False`.
+| `write_metrics_to_disk` | Collect train timings, inference timings and cpu usage in json file. <br> **Datatype:** Boolean. <br> Default: `False`
 | | **Feature parameters**
 | `feature_parameters` | A dictionary containing the parameters used to engineer the feature set. Details and examples are shown [here](freqai-feature-engineering.md). <br> **Datatype:** Dictionary.
 | `include_timeframes` | A list of timeframes that all indicators in `populate_any_indicators` will be created for. The list is added as features to the base indicators dataset. <br> **Datatype:** List of timeframes (strings).
@@ -37,7 +38,6 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | `noise_standard_deviation` | If set, FreqAI adds noise to the training features with the aim of preventing overfitting. FreqAI generates random deviates from a gaussian distribution with a standard deviation of `noise_standard_deviation` and adds them to all data points. `noise_standard_deviation` should be kept relative to the normalized space, i.e., between -1 and 1. In other words, since data in FreqAI is always normalized to be between -1 and 1, `noise_standard_deviation: 0.05` would result in 32% of the data being randomly increased/decreased by more than 2.5% (i.e., the percent of data falling within the first standard deviation). <br> **Datatype:** Integer. <br> Default: `0`.
 | `outlier_protection_percentage` | Enable to prevent outlier detection methods from discarding too much data. If more than `outlier_protection_percentage` % of points are detected as outliers by the SVM or DBSCAN, FreqAI will log a warning message and ignore outlier detection, i.e., the original dataset will be kept intact. If the outlier protection is triggered, no predictions will be made based on the training dataset. <br> **Datatype:** Float. <br> Default: `30`.
 | `reverse_train_test_order` | Split the feature dataset (see below) and use the latest data split for training and test on historical split of the data. This allows the model to be trained up to the most recent data point, while avoiding overfitting. However, you should be careful to understand the unorthodox nature of this parameter before employing it. <br> **Datatype:** Boolean. <br> Default: `False` (no reversal).
-| `write_metrics_to_disk` | Collect train timings, inference timings and cpu usage in json file. <br> **Datatype:** Boolean. <br> Default: `False`
 | | **Data split parameters**
 | `data_split_parameters` | Include any additional parameters available from Scikit-learn `test_train_split()`, which are shown [here](https://scikit-learn.org/stable/modules/generated/sklearn.model_selection.train_test_split.html) (external website). <br> **Datatype:** Dictionary.
 | `test_size` | The fraction of data that should be used for testing instead of training. <br> **Datatype:** Positive float < 1.
@@ -50,3 +50,4 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | | **Extraneous parameters**
 | `keras` | If the selected model makes use of Keras (typical for Tensorflow-based prediction models), this flag needs to be activated so that the model save/loading follows Keras standards. <br> **Datatype:** Boolean. <br> Default: `False`.
 | `conv_width` | The width of a convolutional neural network input tensor. This replaces the need for shifting candles (`include_shifted_candles`) by feeding in historical data points as the second dimension of the tensor. Technically, this parameter can also be used for regressors, but it only adds computational overhead and does not change the model training/prediction. <br> **Datatype:** Integer. <br> Default: `2`.
+| `reduce_df_footprint` | Recast all numeric columns to float32/int32, with the objective of reducing ram/disk usage and decreasing train/inference timing. This parameter is set in the main level of the Freqtrade configuration file (not inside FreqAI). <br> **Datatype:** Boolean. <br> Default: `False`.
@@ -73,12 +73,24 @@ Backtesting mode requires [downloading the necessary data](#downloading-data-to-

 To allow for tweaking your strategy (**not** the features!), FreqAI will automatically save the predictions during backtesting so that they can be reused for future backtests and live runs using the same `identifier` model. This provides a performance enhancement geared towards enabling **high-level hyperopting** of entry/exit criteria.

-An additional directory called `predictions`, which contains all the predictions stored in `hdf` format, will be created in the `unique-id` folder.
+An additional directory called `backtesting_predictions`, which contains all the predictions stored in `hdf` format, will be created in the `unique-id` folder.

 To change your **features**, you **must** set a new `identifier` in the config to signal to FreqAI to train new models.

 To save the models generated during a particular backtest so that you can start a live deployment from one of them instead of training a new model, you must set `save_backtest_models` to `True` in the config.

+### Backtest live models
+
+FreqAI allow you to reuse ready models through the backtest parameter `--freqai-backtest-live-models`. This can be useful when you want to reuse models generated in dry/run for comparison or other study. For that, you must set `"purge_old_models"` to `True` in the config.
+
+The `--timerange` parameter must not be informed, as it will be automatically calculated through the training end dates of the models.
+
+Each model has an identifier derived from the training end date. If you have only 1 model trained, FreqAI will backtest from the training end date until the current date. If you have more than 1 model, each model will perform the backtesting according to the training end date until the training end date of the next model and so on. For the last model, the period of the previous model will be used for the execution.
+
+!!! Note
+    Currently, there is no checking for expired models, even if the `expired_hours` parameter is set.
+
 ### Downloading data to cover the full backtest period

 For live/dry deployments, FreqAI will download the necessary data automatically. However, to use backtesting functionality, you need to download the necessary data using `download-data` (details [here](data-download.md#data-downloading)). You need to pay careful attention to understanding how much *additional* data needs to be downloaded to ensure that there is a sufficient amount of training data *before* the start of the backtesting time range. The amount of additional data can be roughly estimated by moving the start date of the time range backwards by `train_period_days` and the `startup_candle_count` (see the [parameter table](freqai-parameter-table.md) for detailed descriptions of these parameters) from the beginning of the desired backtesting time range.
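The per-model backtest windows described above can be sketched as follows (hypothetical dates; not the actual freqtrade routine):

```python
from datetime import datetime

# Hypothetical training end dates of three saved models.
model_end_dates = sorted([
    datetime(2022, 9, 1),
    datetime(2022, 10, 1),
    datetime(2022, 11, 1),
])

windows = []
for i, start in enumerate(model_end_dates):
    if i + 1 < len(model_end_dates):
        # Backtest until the next model's training end date.
        stop = model_end_dates[i + 1]
    else:
        # The last model reuses the previous model's span.
        stop = start + (model_end_dates[-1] - model_end_dates[-2])
    windows.append((start, stop))
print(windows)
```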
@@ -4,7 +4,7 @@

 ## Introduction

-FreqAI is a software designed to automate a variety of tasks associated with training a predictive machine learning model to generate market forecasts given a set of input features.
+FreqAI is a software designed to automate a variety of tasks associated with training a predictive machine learning model to generate market forecasts given a set of input signals. In general, the FreqAI aims to be a sand-box for easily deploying robust machine-learning libraries on real-time data ([details])(#freqai-position-in-open-source-machine-learning-landscape).

 Features include:
@@ -72,6 +72,11 @@ pip install -r requirements-freqai.txt

 If you are using docker, a dedicated tag with FreqAI dependencies is available as `:freqai`. As such - you can replace the image line in your docker-compose file with `image: freqtradeorg/freqtrade:develop_freqai`. This image contains the regular FreqAI dependencies. Similar to native installs, Catboost will not be available on ARM based devices.

+
+### FreqAI position in open-source machine learning landscape
+
+Forecasting chaotic time-series based systems, such as equity/cryptocurrency markets, requires a broad set of tools geared toward testing a wide range of hypotheses. Fortunately, a recent maturation of robust machine learning libraries (e.g. `scikit-learn`) has opened up a wide range of research possibilities. Scientists from a diverse range of fields can now easily prototype their studies on an abundance of established machine learning algorithms. Similarly, these user-friendly libraries enable "citzen scientists" to use their basic Python skills for data-exploration. However, leveraging these machine learning libraries on historical and live chaotic data sources can be logistically difficult and expensive. Additionally, robust data-collection, storage, and handling presents a disparate challenge. [`FreqAI`](#freqai) aims to provide a generalized and extensible open-sourced framework geared toward live deployments of adaptive modeling for market forecasting. The `FreqAI` framework is effectively a sandbox for the rich world of open-source machine learning libraries. Inside the `FreqAI` sandbox, users find they can combine a wide variety of third-party libraries to test creative hypotheses on a free live 24/7 chaotic data source - cryptocurrency exchange data.
+
 ## Common pitfalls

 FreqAI cannot be combined with dynamic `VolumePairlists` (or any pairlist filter that adds and removes pairs dynamically).
@@ -268,7 +268,7 @@ This option is disabled by default, and will only apply if set to > 0.
 The `max_value` setting removes pairs where the minimum value change is above a specified value.
 This is useful when an exchange has unbalanced limits. For example, if step-size = 1 (so you can only buy 1, or 2, or 3, but not 1.1 Coins) - and the price is pretty high (like 20\$) as the coin has risen sharply since the last limit adaption.
 As a result of the above, you can only buy for 20\$, or 40\$ - but not for 25\$.
-On exchanges that deduct fees from the receiving currency (e.g. FTX) - this can result in high value coins / amounts that are unsellable as the amount is slightly below the limit.
+On exchanges that deduct fees from the receiving currency (e.g. binance) - this can result in high value coins / amounts that are unsellable as the amount is slightly below the limit.

 The `low_price_ratio` setting removes pairs where a raise of 1 price unit (pip) is above the `low_price_ratio` ratio.
 This option is disabled by default, and will only apply if set to > 0.
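A small sketch of the arithmetic behind the 20$/40$ example above (illustrative numbers):

```python
step_size = 1      # amount precision: whole coins only
price = 20.0       # current price per coin
stake = 25.0       # what you would like to spend

# The amount is truncated to the nearest step, so only 1 coin fits.
amount = (stake // (step_size * price)) * step_size
print(amount * price)  # 20.0 - the 25$ stake is not reachable
```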
@@ -286,6 +286,18 @@ Min price precision for SHITCOIN/BTC is 8 decimals. If its price is 0.00000011 -

 Shuffles (randomizes) pairs in the pairlist. It can be used for preventing the bot from trading some of the pairs more frequently then others when you want all pairs be treated with the same priority.

+By default, ShuffleFilter will shuffle pairs once per candle.
+To shuffle on every iteration, set `"shuffle_frequency"` to `"iteration"` instead of the default of `"candle"`.
+
+``` json
+{
+    "method": "ShuffleFilter",
+    "shuffle_frequency": "candle",
+    "seed": 42
+}
+
+```
+
 !!! Tip
     You may set the `seed` value for this Pairlist to obtain reproducible results, which can be useful for repeated backtesting sessions. If `seed` is not set, the pairs are shuffled in the non-repeatable random order. ShuffleFilter will automatically detect runmodes and apply the `seed` only for backtesting modes - if a `seed` value is set.
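Conceptually, a fixed `seed` makes the shuffle reproducible; a minimal illustration (not the actual filter code):

```python
import random

pairs = ["BTC/USDT", "ETH/USDT", "XRP/USDT", "ADA/USDT"]

# With a fixed seed, every run produces the same order - useful for
# comparable backtests. Without one, each shuffle is non-repeatable.
rng = random.Random(42)
shuffled = pairs[:]
rng.shuffle(shuffled)
print(shuffled)
```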
@@ -32,7 +32,7 @@ Freqtrade is a free and open source crypto trading bot written in Python. It is
 - Run: Test your strategy with simulated money (Dry-Run mode) or deploy it with real money (Live-Trade mode).
 - Run using Edge (optional module): The concept is to find the best historical [trade expectancy](edge.md#expectancy) by markets based on variation of the stop-loss and then allow/reject markets to trade. The sizing of the trade is based on a risk of a percentage of your capital.
 - Control/Monitor: Use Telegram or a WebUI (start/stop the bot, show profit/loss, daily summary, current open trades results, etc.).
-- Analyse: Further analysis can be performed on either Backtesting data or Freqtrade trading history (SQL database), including automated standard plots, and methods to load the data into [interactive environments](data-analysis.md).
+- Analyze: Further analysis can be performed on either Backtesting data or Freqtrade trading history (SQL database), including automated standard plots, and methods to load the data into [interactive environments](data-analysis.md).

 ## Supported exchange marketplaces
@@ -40,7 +40,6 @@ Please read the [exchange specific notes](exchanges.md) to learn about eventual,

 - [X] [Binance](https://www.binance.com/)
 - [X] [Bittrex](https://bittrex.com/)
-- [X] [FTX](https://ftx.com/#a=2258149)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
 - [X] [Huobi](http://huobi.com/)
 - [X] [Kraken](https://kraken.com/)
@@ -51,7 +50,7 @@ Please read the [exchange specific notes](exchanges.md) to learn about eventual,

 - [X] [Binance](https://www.binance.com/)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
-- [X] [OKX](https://okx.com/).
+- [X] [OKX](https://okx.com/)

 Please make sure to read the [exchange specific notes](exchanges.md), as well as the [trading with leverage](leverage.md) documentation before diving in.
@@ -21,6 +21,7 @@ Enable subscribing to an instance by adding the `external_message_consumer` sect
             "name": "default", // This can be any name you'd like, default is "default"
             "host": "127.0.0.1", // The host from your producer's api_server config
             "port": 8080, // The port from your producer's api_server config
+            "secure": false, // Use a secure websockets connection, default false
             "ws_token": "sercet_Ws_t0ken" // The ws_token from your producer's api_server config
         }
     ],
@@ -42,6 +43,7 @@ Enable subscribing to an instance by adding the `external_message_consumer` sect
 | `producers.name` | **Required.** Name of this producer. This name must be used in calls to `get_producer_pairs()` and `get_producer_df()` if more than one producer is used.<br> **Datatype:** string
 | `producers.host` | **Required.** The hostname or IP address from your producer.<br> **Datatype:** string
 | `producers.port` | **Required.** The port matching the above host.<br> **Datatype:** string
+| `producers.secure` | **Optional.** Use ssl in websockets connection. Default False.<br> **Datatype:** string
 | `producers.ws_token` | **Required.** `ws_token` as configured on the producer.<br> **Datatype:** string
 | | **Optional settings**
 | `wait_timeout` | Timeout until we ping again if no message is received. <br>*Defaults to `300`.*<br> **Datatype:** Integer - in seconds.
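Pieced together, these settings describe the websocket the consumer connects to; a sketch of the resulting URL (the endpoint path shown is an assumption, check the producer's API server documentation):

```python
# Hypothetical illustration of how the producer settings map to a websocket URL.
producer = {"host": "127.0.0.1", "port": 8080, "secure": False, "ws_token": "sercet_Ws_t0ken"}

scheme = "wss" if producer["secure"] else "ws"
url = f"{scheme}://{producer['host']}:{producer['port']}/api/v1/message/ws?token={producer['ws_token']}"
print(url)
```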
@@ -1,6 +1,6 @@
 markdown==3.3.7
-mkdocs==1.4.1
-mkdocs-material==8.5.7
+mkdocs==1.4.2
+mkdocs-material==8.5.10
 mdx_truly_sane_lists==1.3
-pymdown-extensions==9.7
+pymdown-extensions==9.8
 jinja2==3.1.2
@@ -389,6 +389,44 @@ Now anytime those types of RPC messages are sent in the bot, you will receive th
 }
 ```

+#### Reverse Proxy setup
+
+When using [Nginx](https://nginx.org/en/docs/), the following configuration is required for WebSockets to work (Note this configuration is incomplete, it's missing some information and can not be used as is):
+
+Please make sure to replace `<freqtrade_listen_ip>` (and the subsequent port) with the IP and Port matching your configuration/setup.
+
+```
+http {
+    map $http_upgrade $connection_upgrade {
+        default upgrade;
+        '' close;
+    }
+
+    #...
+
+    server {
+        #...
+
+        location / {
+            proxy_http_version 1.1;
+            proxy_pass http://<freqtrade_listen_ip>:8080;
+            proxy_set_header Upgrade $http_upgrade;
+            proxy_set_header Connection $connection_upgrade;
+            proxy_set_header Host $host;
+        }
+    }
+}
+```
+
+To properly configure your reverse proxy (securely), please consult it's documentation for proxying websockets.
+
+- **Traefik**: Traefik supports websockets out of the box, see the [documentation](https://doc.traefik.io/traefik/)
+- **Caddy**: Caddy v2 supports websockets out of the box, see the [documentation](https://caddyserver.com/docs/v2-upgrade#proxy)
+
+!!! Tip "SSL certificates"
+    You can use tools like certbot to setup ssl certificates to access your bot's UI through encrypted connection by using any fo the above reverse proxies.
+    While this will protect your data in transit, we do not recommend to run the freqtrade API outside of your private network (VPN, SSH tunnel).
+
 ### OpenAPI interface

 To enable the builtin openAPI interface (Swagger UI), specify `"enable_openapi": true` in the api_server configuration.
@@ -24,7 +24,7 @@ These modes can be configured with these values:
 ```

 !!! Note
-    Stoploss on exchange is only supported for Binance (stop-loss-limit), Huobi (stop-limit), Kraken (stop-loss-market, stop-loss-limit), FTX (stop limit and stop-market) Gateio (stop-limit), and Kucoin (stop-limit and stop-market) as of now.
+    Stoploss on exchange is only supported for Binance (stop-loss-limit), Huobi (stop-limit), Kraken (stop-loss-market, stop-loss-limit), Gateio (stop-limit), and Kucoin (stop-limit and stop-market) as of now.
     <ins>Do not set too low/tight stoploss value if using stop loss on exchange!</ins>
     If set to low/tight then you have greater risk of missing fill on the order and stoploss will not work.
@@ -446,15 +446,17 @@ A full sample can be found [in the DataProvider section](#complete-data-provider-sample)

 ??? Note "Alternative candle types"
     Informative_pairs can also provide a 3rd tuple element defining the candle type explicitly.
-    Availability of alternative candle-types will depend on the trading-mode and the exchange. Details about this can be found in the exchange documentation.
+    Availability of alternative candle-types will depend on the trading-mode and the exchange.
+    In general, spot pairs cannot be used in futures markets, and futures candles can't be used as informative pairs for spot bots.
+    Details about this may vary; if they do, it can be found in the exchange documentation.

     ``` python
     def informative_pairs(self):
         return [
-            ("ETH/USDT", "5m", ""),   # Uses default candletype, depends on trading_mode
-            ("ETH/USDT", "5m", "spot"),   # Forces usage of spot candles
-            ("BTC/TUSD", "15m", "futures"),  # Uses futures candles
-            ("BTC/TUSD", "15m", "mark"),  # Uses mark candles
+            ("ETH/USDT", "5m", ""),   # Uses default candletype, depends on trading_mode (recommended)
+            ("ETH/USDT", "5m", "spot"),   # Forces usage of spot candles (only valid for bots running on spot markets).
+            ("BTC/TUSD", "15m", "futures"),  # Uses futures candles (only bots with `trading_mode=futures`)
+            ("BTC/TUSD", "15m", "mark"),  # Uses mark candles (only bots with `trading_mode=futures`)
         ]
     ```
 ***
@@ -723,7 +725,7 @@ if self.dp.runmode.value in ('live', 'dry_run'):

 !!! Warning
     Although the ticker data structure is a part of the ccxt Unified Interface, the values returned by this method can
-    vary for different exchanges. For instance, many exchanges do not return `vwap` values, the FTX exchange
-    does not always fills in the `last` field (so it can be None), etc. So you need to carefully verify the ticker
+    vary for different exchanges. For instance, many exchanges do not return `vwap` values, and some exchanges
+    do not always fill in the `last` field (so it can be None), etc. So you need to carefully verify the ticker
     data returned from the exchange and add appropriate error handling / defaults.
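A defensive-access sketch along the lines of this warning (the `dp.ticker()` call follows the DataProvider usage above; the fallback to `close` is an illustrative choice, not a recommendation):

```python
# Inside a strategy callback - defensively handle missing ticker fields.
if self.dp.runmode.value in ('live', 'dry_run'):
    ticker = self.dp.ticker(metadata['pair'])
    # 'last' may be None on some exchanges; 'vwap' is often absent entirely.
    last_price = ticker.get('last') or ticker.get('close')  # illustrative fallback
    vwap = ticker.get('vwap')  # may be None - guard before using it
    if last_price is None:
        return dataframe  # no usable price for this candle
```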
@@ -263,7 +263,6 @@ equos True missing opt: fetchTicker, fetchTickers
 eterbase True
 fcoin True missing opt: fetchMyTrades, fetchTickers
 fcoinjp True missing opt: fetchMyTrades, fetchTickers
-ftx True
 gateio True
 gemini True
 gopax True
@@ -369,7 +368,6 @@ fcoin True missing opt: fetchMyTrades, fetchTickers
 fcoinjp True missing opt: fetchMyTrades, fetchTickers
 flowbtc False missing: fetchOrder, fetchOHLCV
 foxbit False missing: fetchOrder, fetchOHLCV
-ftx True
 gateio True
 gemini True
 gopax True
@@ -3,15 +3,16 @@

 We **strongly** recommend that Windows users use [Docker](docker_quickstart.md) as this will work much easier and smoother (also more secure).

 If that is not possible, try using the Windows Linux subsystem (WSL) - for which the Ubuntu instructions should work.
-Otherwise, try the instructions below.
+Otherwise, please follow the instructions below.

 ## Install freqtrade manually

-!!! Note
-    Make sure to use 64bit Windows and 64bit Python to avoid problems with backtesting or hyperopt due to the memory constraints 32bit applications have under Windows.
+!!! Note "64bit Python version"
+    Please make sure to use 64bit Windows and 64bit Python to avoid problems with backtesting or hyperopt due to the memory constraints 32bit applications have under Windows.
+    32bit python versions are no longer supported under Windows.

 !!! Hint
-    Using the [Anaconda Distribution](https://www.anaconda.com/distribution/) under Windows can greatly help with installation problems. Check out the [Anaconda installation section](installation.md#Anaconda) in this document for more information.
+    Using the [Anaconda Distribution](https://www.anaconda.com/distribution/) under Windows can greatly help with installation problems. Check out the [Anaconda installation section](installation.md#installation-with-conda) in the documentation for more information.

 ### 1. Clone the git repository
@@ -1,5 +1,5 @@
 """ Freqtrade bot """
-__version__ = '2022.10'
+__version__ = '2022.11'

 if 'dev' in __version__:
     try:
@@ -25,7 +25,8 @@ ARGS_COMMON_OPTIMIZE = ["timeframe", "timerange", "dataformat_ohlcv",
 ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + ["position_stacking", "use_max_market_positions",
                                         "enable_protections", "dry_run_wallet", "timeframe_detail",
                                         "strategy_list", "export", "exportfilename",
-                                        "backtest_breakdown", "backtest_cache"]
+                                        "backtest_breakdown", "backtest_cache",
+                                        "freqai_backtest_live_models"]

 ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + ["hyperopt", "hyperopt_path",
                                         "position_stacking", "use_max_market_positions",
@@ -108,7 +108,6 @@ def ask_user_config() -> Dict[str, Any]:
             "binance",
             "binanceus",
             "bittrex",
-            "ftx",
             "gateio",
             "huobi",
             "kraken",
@@ -49,7 +49,7 @@ AVAILABLE_CLI_OPTIONS = {
         default=0,
     ),
     "logfile": Arg(
-        '--logfile',
+        '--logfile', '--log-file',
        help="Log to the file specified. Special values are: 'syslog', 'journald'. "
        "See the documentation for more details.",
        metavar='FILE',
@@ -668,4 +668,9 @@ AVAILABLE_CLI_OPTIONS = {
         help='Specify additional lookup path for freqaimodels.',
         metavar='PATH',
     ),
+    "freqai_backtest_live_models": Arg(
+        '--freqai-backtest-live-models',
+        help='Run backtest with ready models.',
+        action='store_true'
+    ),
 }
@@ -86,6 +86,7 @@ def validate_config_consistency(conf: Dict[str, Any], preliminary: bool = False)
     _validate_unlimited_amount(conf)
     _validate_ask_orderbook(conf)
     _validate_freqai_hyperopt(conf)
+    _validate_freqai_backtest(conf)
     _validate_freqai_include_timeframes(conf)
     _validate_consumers(conf)
     validate_migrated_strategy_settings(conf)
@@ -355,6 +356,26 @@ def _validate_freqai_include_timeframes(conf: Dict[str, Any]) -> None:
             f"`include_timeframes`. Offending include-timeframes: {', '.join(offending_lines)}")


+def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
+    if conf.get('runmode', RunMode.OTHER) == RunMode.BACKTEST:
+        freqai_enabled = conf.get('freqai', {}).get('enabled', False)
+        timerange = conf.get('timerange')
+        freqai_backtest_live_models = conf.get('freqai_backtest_live_models', False)
+        if freqai_backtest_live_models and freqai_enabled and timerange:
+            raise OperationalException(
+                'Using timerange parameter is not supported with '
+                '--freqai-backtest-live-models parameter.')
+
+        if freqai_backtest_live_models and not freqai_enabled:
+            raise OperationalException(
+                'Using --freqai-backtest-live-models parameter is only '
+                'supported with a FreqAI strategy.')
+
+        if freqai_enabled and not freqai_backtest_live_models and not timerange:
+            raise OperationalException(
+                'Please pass --timerange if you intend to use FreqAI for backtesting.')
+
+
 def _validate_consumers(conf: Dict[str, Any]) -> None:
     emc_conf = conf.get('external_message_consumer', {})
     if emc_conf.get('enabled', False):
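A quick sketch of how `_validate_freqai_backtest` behaves (hypothetical config values; `RunMode` and `OperationalException` as imported in this module):

```python
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException

conf = {
    'runmode': RunMode.BACKTEST,
    'freqai': {'enabled': True},
    'timerange': '20220101-20220601',
    'freqai_backtest_live_models': True,
}
try:
    _validate_freqai_backtest(conf)   # timerange + live models -> rejected
except OperationalException as e:
    print(e)

conf.pop('timerange')
_validate_freqai_backtest(conf)       # live models without timerange -> ok
```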
@@ -279,6 +279,9 @@ class Configuration:
         self._args_to_config(config, argname='disableparamexport',
                              logstring='Parameter --disableparamexport detected: {} ...')

+        self._args_to_config(config, argname='freqai_backtest_live_models',
+                             logstring='Parameter --freqai-backtest-live-models detected ...')
+
         # Edge section:
         if 'stoploss_range' in self.args and self.args["stoploss_range"]:
             txt_range = eval(self.args["stoploss_range"])
@@ -3,11 +3,12 @@ This module contains the argument manager class
 """
 import logging
 import re
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Optional

 import arrow

+from freqtrade.constants import DATETIME_PRINT_FORMAT
 from freqtrade.exceptions import OperationalException
@@ -29,6 +30,52 @@ class TimeRange:
         self.startts: int = startts
         self.stopts: int = stopts

+    @property
+    def startdt(self) -> Optional[datetime]:
+        if self.startts:
+            return datetime.fromtimestamp(self.startts, tz=timezone.utc)
+        return None
+
+    @property
+    def stopdt(self) -> Optional[datetime]:
+        if self.stopts:
+            return datetime.fromtimestamp(self.stopts, tz=timezone.utc)
+        return None
+
+    @property
+    def timerange_str(self) -> str:
+        """
+        Returns a string representation of the timerange as used by parse_timerange.
+        Follows the format yyyymmdd-yyyymmdd - leaving out the parts that are not set.
+        """
+        start = ''
+        stop = ''
+        if startdt := self.startdt:
+            start = startdt.strftime('%Y%m%d')
+        if stopdt := self.stopdt:
+            stop = stopdt.strftime('%Y%m%d')
+        return f"{start}-{stop}"
+
+    @property
+    def start_fmt(self) -> str:
+        """
+        Returns a string representation of the start date
+        """
+        val = 'unbounded'
+        if (startdt := self.startdt) is not None:
+            val = startdt.strftime(DATETIME_PRINT_FORMAT)
+        return val
+
+    @property
+    def stop_fmt(self) -> str:
+        """
+        Returns a string representation of the stop date
+        """
+        val = 'unbounded'
+        if (stopdt := self.stopdt) is not None:
+            val = stopdt.strftime(DATETIME_PRINT_FORMAT)
+        return val
+
     def __eq__(self, other):
         """Override the default Equals behavior"""
         return (self.starttype == other.starttype and self.stoptype == other.stoptype
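A short usage sketch for the new properties (assuming `TimeRange.parse_timerange` and the usual `freqtrade.configuration` import path; values are illustrative):

```python
from freqtrade.configuration import TimeRange

tr = TimeRange.parse_timerange('20220101-20220601')
print(tr.startdt)        # 2022-01-01 00:00:00+00:00 (tz-aware, UTC)
print(tr.stopdt)         # 2022-06-01 00:00:00+00:00
print(tr.timerange_str)  # '20220101-20220601' - round-trips the input

open_ended = TimeRange.parse_timerange('20220101-')
print(open_ended.timerange_str)  # '20220101-' - unset parts are left out
print(open_ended.stop_fmt)       # 'unbounded'
```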
@@ -159,6 +159,7 @@ CONF_SCHEMA = {
         'ignore_buying_expired_candle_after': {'type': 'number'},
         'trading_mode': {'type': 'string', 'enum': TRADING_MODES},
         'margin_mode': {'type': 'string', 'enum': MARGIN_MODES},
+        'reduce_df_footprint': {'type': 'boolean', 'default': False},
         'liquidation_buffer': {'type': 'number', 'minimum': 0.0, 'maximum': 0.99},
         'backtest_breakdown': {
             'type': 'array',
@@ -511,6 +512,7 @@ CONF_SCHEMA = {
                         'minimum': 0,
                         'maximum': 65535
                     },
+                    'secure': {'type': 'boolean', 'default': False},
                     'ws_token': {'type': 'string'},
                 },
                 'required': ['name', 'host', 'ws_token']
@@ -542,7 +544,7 @@ CONF_SCHEMA = {
             "keras": {"type": "boolean", "default": False},
             "write_metrics_to_disk": {"type": "boolean", "default": False},
             "purge_old_models": {"type": "boolean", "default": True},
-            "conv_width": {"type": "integer", "default": 2},
+            "conv_width": {"type": "integer", "default": 1},
             "train_period_days": {"type": "integer", "default": 0},
             "backtest_period_days": {"type": "number", "default": 7},
             "identifier": {"type": "string", "default": "example"},
@@ -26,7 +26,7 @@ BT_DATA_COLUMNS = ['pair', 'stake_amount', 'amount', 'open_date', 'close_date',
                    'profit_ratio', 'profit_abs', 'exit_reason',
                    'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs',
                    'stop_loss_ratio', 'min_rate', 'max_rate', 'is_open', 'enter_tag',
-                   'is_short', 'open_timestamp', 'close_timestamp', 'orders'
+                   'leverage', 'is_short', 'open_timestamp', 'close_timestamp', 'orders'
                    ]
@@ -280,6 +280,8 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = None
             # Compatibility support for pre short Columns
             if 'is_short' not in df.columns:
                 df['is_short'] = 0
+            if 'leverage' not in df.columns:
+                df['leverage'] = 1.0
             if 'enter_tag' not in df.columns:
                 df['enter_tag'] = df['buy_tag']
                 df = df.drop(['buy_tag'], axis=1)
@@ -3,10 +3,10 @@ Functions to convert data from one format to another
 """
 import itertools
 import logging
-from datetime import datetime, timezone
 from operator import itemgetter
 from typing import Dict, List

+import numpy as np
 import pandas as pd
 from pandas import DataFrame, to_datetime
@@ -137,11 +137,9 @@ def trim_dataframe(df: DataFrame, timerange, df_date_col: str = 'date',
         df = df.iloc[startup_candles:, :]
     else:
         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-            df = df.loc[df[df_date_col] >= start, :]
+            df = df.loc[df[df_date_col] >= timerange.startdt, :]
         if timerange.stoptype == 'date':
-            stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-            df = df.loc[df[df_date_col] <= stop, :]
+            df = df.loc[df[df_date_col] <= timerange.stopdt, :]
     return df
@@ -313,3 +311,29 @@ def convert_ohlcv_format(
     if erase and convert_from != convert_to:
         logger.info(f"Deleting source data for {pair} / {timeframe}")
         src.ohlcv_purge(pair=pair, timeframe=timeframe, candle_type=candle_type)
+
+
+def reduce_dataframe_footprint(df: DataFrame) -> DataFrame:
+    """
+    Ensure all values are float32 in the incoming dataframe.
+    :param df: Dataframe to be converted to float/int 32s
+    :return: Dataframe converted to float/int 32s
+    """
+
+    logger.debug(f"Memory usage of dataframe is "
+                 f"{df.memory_usage().sum() / 1024**2:.2f} MB")
+
+    df_dtypes = df.dtypes
+    for column, dtype in df_dtypes.items():
+        if column in ['open', 'high', 'low', 'close', 'volume']:
+            continue
+        if dtype == np.float64:
+            df_dtypes[column] = np.float32
+        elif dtype == np.int64:
+            df_dtypes[column] = np.int32
+    df = df.astype(df_dtypes)
+
+    logger.debug(f"Memory usage after optimization is: "
+                 f"{df.memory_usage().sum() / 1024**2:.2f} MB")
+
+    return df
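A self-contained sketch of the downcasting behaviour (the non-OHLCV column names are invented):

```python
import numpy as np
import pandas as pd

from freqtrade.data.converter import reduce_dataframe_footprint  # per this diff

df = pd.DataFrame({
    'close': np.random.rand(1000),    # OHLCV columns are left untouched
    'rsi': np.random.rand(1000),      # float64 -> float32
    'signal_count': np.arange(1000),  # int64 -> int32 (on most platforms)
})
df = reduce_dataframe_footprint(df)
print(df.dtypes)  # close: float64, rsi: float32, signal_count: int32
```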
@@ -1,6 +1,6 @@
 import logging
 import operator
-from datetime import datetime, timezone
+from datetime import datetime
 from pathlib import Path
 from typing import Dict, List, Optional, Tuple
@@ -160,9 +160,9 @@ def _load_cached_data_for_updating(
     end = None
     if timerange:
         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
+            start = timerange.startdt
         if timerange.stoptype == 'date':
-            end = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
+            end = timerange.stopdt

     # Intentionally don't pass timerange in - since we need to load the full dataset.
     data = data_handler.ohlcv_load(pair, timeframe=timeframe,
@@ -102,6 +102,11 @@ class IDataHandler(ABC):
         :return: (min, max)
         """
         data = self._ohlcv_load(pair, timeframe, None, candle_type)
+        if data.empty:
+            return (
+                datetime.fromtimestamp(0, tz=timezone.utc),
+                datetime.fromtimestamp(0, tz=timezone.utc)
+            )
         return data.iloc[0]['date'].to_pydatetime(), data.iloc[-1]['date'].to_pydatetime()

     @abstractmethod
@@ -361,13 +366,11 @@ class IDataHandler(ABC):
         """

         if timerange.starttype == 'date':
-            start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
-            if pairdata.iloc[0]['date'] > start:
+            if pairdata.iloc[0]['date'] > timerange.startdt:
                 logger.warning(f"{pair}, {candle_type}, {timeframe}, "
                                f"data starts at {pairdata.iloc[0]['date']:%Y-%m-%d %H:%M:%S}")
         if timerange.stoptype == 'date':
-            stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
-            if pairdata.iloc[-1]['date'] < stop:
+            if pairdata.iloc[-1]['date'] < timerange.stopdt:
                 logger.warning(f"{pair}, {candle_type}, {timeframe}, "
                                f"data ends at {pairdata.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}")
@@ -392,7 +392,7 @@ class Edge:
         # Returning a list of pairs in order of "expectancy"
         return final

-    def _find_trades_for_stoploss_range(self, df, pair, stoploss_range):
+    def _find_trades_for_stoploss_range(self, df, pair: str, stoploss_range) -> list:
         buy_column = df['enter_long'].values
         sell_column = df['exit_long'].values
         date_column = df['date'].values
@@ -407,7 +407,7 @@ class Edge:
         return result

     def _detect_next_stop_or_sell_point(self, buy_column, sell_column, date_column,
-                                        ohlc_columns, stoploss, pair):
+                                        ohlc_columns, stoploss, pair: str):
         """
         Iterate through ohlc_columns in order to find the next trade
         Next trade opens from the first buy signal noticed to
@@ -9,15 +9,15 @@ from freqtrade.exchange.bitpanda import Bitpanda
 from freqtrade.exchange.bittrex import Bittrex
 from freqtrade.exchange.bybit import Bybit
 from freqtrade.exchange.coinbasepro import Coinbasepro
-from freqtrade.exchange.exchange import (amount_to_contract_precision, amount_to_contracts,
-                                         amount_to_precision, available_exchanges, ccxt_exchanges,
-                                         contracts_to_amount, date_minus_candles,
-                                         is_exchange_known_ccxt, market_is_active,
-                                         price_to_precision, timeframe_to_minutes,
-                                         timeframe_to_msecs, timeframe_to_next_date,
-                                         timeframe_to_prev_date, timeframe_to_seconds,
-                                         validate_exchange, validate_exchanges)
-from freqtrade.exchange.ftx import Ftx
+from freqtrade.exchange.exchange_utils import (amount_to_contract_precision, amount_to_contracts,
+                                               amount_to_precision, available_exchanges,
+                                               ccxt_exchanges, contracts_to_amount,
+                                               date_minus_candles, is_exchange_known_ccxt,
+                                               market_is_active, price_to_precision,
+                                               timeframe_to_minutes, timeframe_to_msecs,
+                                               timeframe_to_next_date, timeframe_to_prev_date,
+                                               timeframe_to_seconds, validate_exchange,
+                                               validate_exchanges)
 from freqtrade.exchange.gateio import Gateio
 from freqtrade.exchange.hitbtc import Hitbtc
 from freqtrade.exchange.huobi import Huobi
@@ -42,24 +42,6 @@ class Binance(Exchange):
         (TradingMode.FUTURES, MarginMode.ISOLATED)
     ]

-    def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
-        """
-        Verify stop_loss against stoploss-order value (limit or price)
-        Returns True if adjustment is necessary.
-        :param side: "buy" or "sell"
-        """
-        order_types = ('stop_loss_limit', 'stop', 'stop_market')
-
-        return (
-            order.get('stopPrice', None) is None
-            or (
-                order['type'] in order_types
-                and (
-                    (side == "sell" and stop_loss > float(order['stopPrice'])) or
-                    (side == "buy" and stop_loss < float(order['stopPrice']))
-                )
-            ))
-
     def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
         tickers = super().get_tickers(symbols=symbols, cached=cached)
         if self.trading_mode == TradingMode.FUTURES:
File diff suppressed because it is too large.
@@ -20,8 +20,12 @@ class Bybit(Exchange):
     """

     _ft_has: Dict = {
-        "ohlcv_candle_limit": 200,
-        "ccxt_futures_name": "linear"
+        "ohlcv_candle_limit": 1000,
+        "ccxt_futures_name": "linear",
+        "ohlcv_has_history": False,
+    }
+    _ft_has_futures: Dict = {
+        "ohlcv_has_history": True,
     }

     _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
@@ -52,7 +52,6 @@ MAP_EXCHANGE_CHILDCLASS = {
 SUPPORTED_EXCHANGES = [
     'binance',
     'bittrex',
-    'ftx',
     'gateio',
     'huobi',
     'kraken',
@@ -8,7 +8,6 @@ import inspect
 import logging
 from copy import deepcopy
 from datetime import datetime, timedelta, timezone
-from math import ceil
 from threading import Lock
 from typing import Any, Coroutine, Dict, List, Literal, Optional, Tuple, Union
@@ -16,7 +15,7 @@ import arrow
 import ccxt
 import ccxt.async_support as ccxt_async
 from cachetools import TTLCache
-from ccxt import ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE, decimal_to_precision
+from ccxt import TICK_SIZE
 from dateutil import parser
 from pandas import DataFrame, concat
@@ -28,17 +27,19 @@ from freqtrade.enums import OPTIMIZE_MODES, CandleType, MarginMode, TradingMode
 from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFundsError,
                                   InvalidOrderException, OperationalException, PricingError,
                                   RetryableOrderError, TemporaryError)
-from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, BAD_EXCHANGES,
-                                       EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED,
-                                       remove_credentials, retrier, retrier_async)
+from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, remove_credentials, retrier,
+                                       retrier_async)
+from freqtrade.exchange.exchange_utils import (CcxtModuleType, amount_to_contract_precision,
+                                               amount_to_contracts, amount_to_precision,
+                                               contracts_to_amount, date_minus_candles,
+                                               is_exchange_known_ccxt, market_is_active,
+                                               price_to_precision, timeframe_to_minutes,
+                                               timeframe_to_msecs, timeframe_to_next_date,
+                                               timeframe_to_prev_date, timeframe_to_seconds)
 from freqtrade.exchange.types import Ticker, Tickers
 from freqtrade.misc import (chunks, deep_merge_dicts, file_dump_json, file_load_json,
                             safe_value_fallback2)
 from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
-from freqtrade.util import FtPrecise
-
-
-CcxtModuleType = Any

 logger = logging.getLogger(__name__)
@@ -1076,7 +1077,14 @@ class Exchange:
         Verify stop_loss against stoploss-order value (limit or price)
         Returns True if adjustment is necessary.
         """
-        raise OperationalException(f"stoploss is not implemented for {self.name}.")
+        if not self._ft_has.get('stoploss_on_exchange'):
+            raise OperationalException(f"stoploss is not implemented for {self.name}.")
+
+        return (
+            order.get('stopPrice', None) is None
+            or ((side == "sell" and stop_loss > float(order['stopPrice'])) or
+                (side == "buy" and stop_loss < float(order['stopPrice'])))
+        )

     def _get_stop_order_type(self, user_order_type) -> Tuple[str, str]:
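In plain terms: for a long position (sell-side stop) the order needs replacing once the desired stop rises above the existing `stopPrice`; for a short it is the mirror image. A tiny illustration of the predicate above (values invented):

```python
def needs_adjust(stop_loss: float, order: dict, side: str) -> bool:
    # Mirrors the default Exchange.stoploss_adjust() logic from this diff.
    return (
        order.get('stopPrice') is None
        or ((side == "sell" and stop_loss > float(order['stopPrice'])) or
            (side == "buy" and stop_loss < float(order['stopPrice'])))
    )

order = {'stopPrice': '100.0'}
print(needs_adjust(105.0, order, "sell"))  # True  - trailing stop moved up
print(needs_adjust(95.0, order, "sell"))   # False - existing order still valid
print(needs_adjust(95.0, order, "buy"))    # True  - short-side stop moved down
```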
@@ -1106,7 +1114,7 @@ class Exchange:
                 'In stoploss limit order, stop price should be more than limit price')
         return limit_rate

-    def _get_stop_params(self, ordertype: str, stop_price: float) -> Dict:
+    def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
         params = self._params.copy()
         # Verify if stopPrice works for your exchange!
         params.update({'stopPrice': stop_price})
@@ -1155,7 +1163,8 @@ class Exchange:
             return dry_order

         try:
-            params = self._get_stop_params(ordertype=ordertype, stop_price=stop_price_norm)
+            params = self._get_stop_params(side=side, ordertype=ordertype,
+                                           stop_price=stop_price_norm)
             if self.trading_mode == TradingMode.FUTURES:
                 params['reduceOnly'] = True
|
||||||
@retrier
|
@retrier
|
||||||
def get_fee(self, symbol: str, type: str = '', side: str = '', amount: float = 1,
|
def get_fee(self, symbol: str, type: str = '', side: str = '', amount: float = 1,
|
||||||
price: float = 1, taker_or_maker: MakerTaker = 'maker') -> float:
|
price: float = 1, taker_or_maker: MakerTaker = 'maker') -> float:
|
||||||
|
"""
|
||||||
|
Retrieve fee from exchange
|
||||||
|
:param symbol: Pair
|
||||||
|
:param type: Type of order (market, limit, ...)
|
||||||
|
:param side: Side of order (buy, sell)
|
||||||
|
:param amount: Amount of order
|
||||||
|
:param price: Price of order
|
||||||
|
:param taker_or_maker: 'maker' or 'taker' (ignored if "type" is provided)
|
||||||
|
"""
|
||||||
|
if type and type == 'market':
|
||||||
|
taker_or_maker = 'taker'
|
||||||
try:
|
try:
|
||||||
if self._config['dry_run'] and self._config.get('fee', None) is not None:
|
if self._config['dry_run'] and self._config.get('fee', None) is not None:
|
||||||
return self._config['fee']
|
return self._config['fee']
|
||||||
|
@@ -1995,11 +2015,8 @@ class Exchange:
     def _now_is_time_to_refresh(self, pair: str, timeframe: str, candle_type: CandleType) -> bool:
         # Timeframe in seconds
         interval_in_sec = timeframe_to_seconds(timeframe)
-        return (
-            (self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0)
-             + interval_in_sec) < arrow.utcnow().int_timestamp
-        )
+        plr = self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0) + interval_in_sec
+        return plr < arrow.utcnow().int_timestamp

     @retrier_async
     async def _async_get_candle_history(
|
@ -2802,240 +2819,3 @@ class Exchange:
|
||||||
# describes the min amt for a tier, and the lowest tier will always go down to 0
|
# describes the min amt for a tier, and the lowest tier will always go down to 0
|
||||||
else:
|
else:
|
||||||
raise OperationalException(f"Cannot get maintenance ratio using {self.name}")
|
raise OperationalException(f"Cannot get maintenance ratio using {self.name}")
|
||||||
|
|
||||||
|
|
||||||
def is_exchange_known_ccxt(exchange_name: str, ccxt_module: CcxtModuleType = None) -> bool:
|
|
||||||
return exchange_name in ccxt_exchanges(ccxt_module)
|
|
||||||
|
|
||||||
|
|
||||||
def ccxt_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
|
|
||||||
"""
|
|
||||||
Return the list of all exchanges known to ccxt
|
|
||||||
"""
|
|
||||||
return ccxt_module.exchanges if ccxt_module is not None else ccxt.exchanges
|
|
||||||
|
|
||||||
|
|
||||||
def available_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
|
|
||||||
"""
|
|
||||||
Return exchanges available to the bot, i.e. non-bad exchanges in the ccxt list
|
|
||||||
"""
|
|
||||||
exchanges = ccxt_exchanges(ccxt_module)
|
|
||||||
return [x for x in exchanges if validate_exchange(x)[0]]
|
|
||||||
|
|
||||||
|
|
||||||
def validate_exchange(exchange: str) -> Tuple[bool, str]:
|
|
||||||
ex_mod = getattr(ccxt, exchange.lower())()
|
|
||||||
if not ex_mod or not ex_mod.has:
|
|
||||||
return False, ''
|
|
||||||
missing = [k for k in EXCHANGE_HAS_REQUIRED if ex_mod.has.get(k) is not True]
|
|
||||||
if missing:
|
|
||||||
return False, f"missing: {', '.join(missing)}"
|
|
||||||
|
|
||||||
missing_opt = [k for k in EXCHANGE_HAS_OPTIONAL if not ex_mod.has.get(k)]
|
|
||||||
|
|
||||||
if exchange.lower() in BAD_EXCHANGES:
|
|
||||||
return False, BAD_EXCHANGES.get(exchange.lower(), '')
|
|
||||||
if missing_opt:
|
|
||||||
return True, f"missing opt: {', '.join(missing_opt)}"
|
|
||||||
|
|
||||||
return True, ''
|
|
||||||
|
|
||||||
|
|
||||||
def validate_exchanges(all_exchanges: bool) -> List[Tuple[str, bool, str]]:
|
|
||||||
"""
|
|
||||||
:return: List of tuples with exchangename, valid, reason.
|
|
||||||
"""
|
|
||||||
exchanges = ccxt_exchanges() if all_exchanges else available_exchanges()
|
|
||||||
exchanges_valid = [
|
|
||||||
(e, *validate_exchange(e)) for e in exchanges
|
|
||||||
]
|
|
||||||
return exchanges_valid
|
|
||||||
|
|
||||||
|
|
||||||
def timeframe_to_seconds(timeframe: str) -> int:
|
|
||||||
"""
|
|
||||||
Translates the timeframe interval value written in the human readable
|
|
||||||
form ('1m', '5m', '1h', '1d', '1w', etc.) to the number
|
|
||||||
of seconds for one timeframe interval.
|
|
||||||
"""
|
|
||||||
return ccxt.Exchange.parse_timeframe(timeframe)
|
|
||||||
|
|
||||||
|
|
||||||
def timeframe_to_minutes(timeframe: str) -> int:
|
|
||||||
"""
|
|
||||||
Same as timeframe_to_seconds, but returns minutes.
|
|
||||||
"""
|
|
||||||
return ccxt.Exchange.parse_timeframe(timeframe) // 60
|
|
||||||
|
|
||||||
|
|
||||||
def timeframe_to_msecs(timeframe: str) -> int:
|
|
||||||
"""
|
|
||||||
Same as timeframe_to_seconds, but returns milliseconds.
|
|
||||||
"""
|
|
||||||
return ccxt.Exchange.parse_timeframe(timeframe) * 1000
|
|
||||||
|
|
||||||
|
|
||||||
def timeframe_to_prev_date(timeframe: str, date: datetime = None) -> datetime:
|
|
||||||
"""
|
|
||||||
Use Timeframe and determine the candle start date for this date.
|
|
||||||
Does not round when given a candle start date.
|
|
||||||
:param timeframe: timeframe in string format (e.g. "5m")
|
|
||||||
:param date: date to use. Defaults to now(utc)
|
|
||||||
:returns: date of previous candle (with utc timezone)
|
|
||||||
"""
|
|
||||||
if not date:
|
|
||||||
date = datetime.now(timezone.utc)
|
|
||||||
|
|
||||||
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, date.timestamp() * 1000,
|
|
||||||
ROUND_DOWN) // 1000
|
|
||||||
return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
|
||||||
|
|
||||||
|
|
||||||
def timeframe_to_next_date(timeframe: str, date: datetime = None) -> datetime:
|
|
||||||
"""
|
|
||||||
Use Timeframe and determine next candle.
|
|
||||||
:param timeframe: timeframe in string format (e.g. "5m")
|
|
||||||
:param date: date to use. Defaults to now(utc)
|
|
||||||
:returns: date of next candle (with utc timezone)
|
|
||||||
"""
|
|
||||||
if not date:
|
|
||||||
date = datetime.now(timezone.utc)
|
|
||||||
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, date.timestamp() * 1000,
|
|
||||||
ROUND_UP) // 1000
|
|
||||||
return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
|
||||||
|
|
||||||
|
|
||||||
def date_minus_candles(
|
|
||||||
timeframe: str, candle_count: int, date: Optional[datetime] = None) -> datetime:
|
|
||||||
"""
|
|
||||||
subtract X candles from a date.
|
|
||||||
:param timeframe: timeframe in string format (e.g. "5m")
|
|
||||||
:param candle_count: Amount of candles to subtract.
|
|
||||||
:param date: date to use. Defaults to now(utc)
|
|
||||||
|
|
||||||
"""
|
|
||||||
if not date:
|
|
||||||
date = datetime.now(timezone.utc)
|
|
||||||
|
|
||||||
tf_min = timeframe_to_minutes(timeframe)
|
|
||||||
new_date = timeframe_to_prev_date(timeframe, date) - timedelta(minutes=tf_min * candle_count)
|
|
||||||
return new_date
|
|
||||||
|
|
||||||
|
|
||||||
def market_is_active(market: Dict) -> bool:
|
|
||||||
"""
|
|
||||||
Return True if the market is active.
|
|
||||||
"""
|
|
||||||
# "It's active, if the active flag isn't explicitly set to false. If it's missing or
|
|
||||||
# true then it's true. If it's undefined, then it's most likely true, but not 100% )"
|
|
||||||
# See https://github.com/ccxt/ccxt/issues/4874,
|
|
||||||
# https://github.com/ccxt/ccxt/issues/4075#issuecomment-434760520
|
|
||||||
return market.get('active', True) is not False
|
|
||||||
|
|
||||||
|
|
||||||
def amount_to_contracts(amount: float, contract_size: Optional[float]) -> float:
|
|
||||||
"""
|
|
||||||
Convert amount to contracts.
|
|
||||||
:param amount: amount to convert
|
|
||||||
:param contract_size: contract size - taken from exchange.get_contract_size(pair)
|
|
||||||
:return: num-contracts
|
|
||||||
"""
|
|
||||||
if contract_size and contract_size != 1:
|
|
||||||
return float(FtPrecise(amount) / FtPrecise(contract_size))
|
|
||||||
else:
|
|
||||||
return amount
|
|
||||||
|
|
||||||
|
|
||||||
def contracts_to_amount(num_contracts: float, contract_size: Optional[float]) -> float:
|
|
||||||
"""
|
|
||||||
Takes num-contracts and converts it to contract size
|
|
||||||
:param num_contracts: number of contracts
|
|
||||||
:param contract_size: contract size - taken from exchange.get_contract_size(pair)
|
|
||||||
:return: Amount
|
|
||||||
"""
|
|
||||||
|
|
||||||
if contract_size and contract_size != 1:
|
|
||||||
return float(FtPrecise(num_contracts) * FtPrecise(contract_size))
|
|
||||||
else:
|
|
||||||
return num_contracts
|
|
||||||
|
|
||||||
|
|
||||||
def amount_to_precision(amount: float, amount_precision: Optional[float],
|
|
||||||
precisionMode: Optional[int]) -> float:
|
|
||||||
"""
|
|
||||||
Returns the amount to buy or sell to a precision the Exchange accepts
|
|
||||||
Re-implementation of ccxt internal methods - ensuring we can test the result is correct
|
|
||||||
based on our definitions.
|
|
||||||
:param amount: amount to truncate
|
|
||||||
:param amount_precision: amount precision to use.
|
|
||||||
should be retrieved from markets[pair]['precision']['amount']
|
|
||||||
:param precisionMode: precision mode to use. Should be used from precisionMode
|
|
||||||
one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
|
|
||||||
:return: truncated amount
|
|
||||||
"""
|
|
||||||
if amount_precision is not None and precisionMode is not None:
|
|
||||||
precision = int(amount_precision) if precisionMode != TICK_SIZE else amount_precision
|
|
||||||
# precision must be an int for non-ticksize inputs.
|
|
||||||
amount = float(decimal_to_precision(amount, rounding_mode=TRUNCATE,
|
|
||||||
precision=precision,
|
|
||||||
counting_mode=precisionMode,
|
|
||||||
))
|
|
||||||
|
|
||||||
return amount
|
|
||||||
|
|
||||||
|
|
||||||
def amount_to_contract_precision(
|
|
||||||
amount, amount_precision: Optional[float], precisionMode: Optional[int],
|
|
||||||
contract_size: Optional[float]) -> float:
|
|
||||||
"""
|
|
||||||
Returns the amount to buy or sell to a precision the Exchange accepts
|
|
||||||
including calculation to and from contracts.
|
|
||||||
Re-implementation of ccxt internal methods - ensuring we can test the result is correct
|
|
||||||
based on our definitions.
|
|
||||||
:param amount: amount to truncate
|
|
||||||
:param amount_precision: amount precision to use.
|
|
||||||
should be retrieved from markets[pair]['precision']['amount']
|
|
||||||
:param precisionMode: precision mode to use. Should be used from precisionMode
|
|
||||||
one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
|
|
||||||
:param contract_size: contract size - taken from exchange.get_contract_size(pair)
|
|
||||||
:return: truncated amount
|
|
||||||
"""
|
|
||||||
if amount_precision is not None and precisionMode is not None:
|
|
||||||
contracts = amount_to_contracts(amount, contract_size)
|
|
||||||
amount_p = amount_to_precision(contracts, amount_precision, precisionMode)
|
|
||||||
return contracts_to_amount(amount_p, contract_size)
|
|
||||||
return amount
|
|
||||||
|
|
||||||
|
|
||||||
def price_to_precision(price: float, price_precision: Optional[float],
|
|
||||||
precisionMode: Optional[int]) -> float:
|
|
||||||
"""
|
|
||||||
Returns the price rounded up to the precision the Exchange accepts.
|
|
||||||
Partial Re-implementation of ccxt internal method decimal_to_precision(),
|
|
||||||
which does not support rounding up
|
|
||||||
TODO: If ccxt supports ROUND_UP for decimal_to_precision(), we could remove this and
|
|
||||||
align with amount_to_precision().
|
|
||||||
!!! Rounds up
|
|
||||||
:param price: price to convert
|
|
||||||
:param price_precision: price precision to use. Used from markets[pair]['precision']['price']
|
|
||||||
:param precisionMode: precision mode to use. Should be used from precisionMode
|
|
||||||
one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
|
|
||||||
:return: price rounded up to the precision the Exchange accepts
|
|
||||||
|
|
||||||
"""
|
|
||||||
if price_precision is not None and precisionMode is not None:
|
|
||||||
# price = float(decimal_to_precision(price, rounding_mode=ROUND,
|
|
||||||
# precision=price_precision,
|
|
||||||
# counting_mode=self.precisionMode,
|
|
||||||
# ))
|
|
||||||
if precisionMode == TICK_SIZE:
|
|
||||||
precision = FtPrecise(price_precision)
|
|
||||||
price_str = FtPrecise(price)
|
|
||||||
missing = price_str % precision
|
|
||||||
if not missing == FtPrecise("0"):
|
|
||||||
price = round(float(str(price_str - missing + precision)), 14)
|
|
||||||
else:
|
|
||||||
symbol_prec = price_precision
|
|
||||||
big_price = price * pow(10, symbol_prec)
|
|
||||||
price = ceil(big_price) / pow(10, symbol_prec)
|
|
||||||
return price
|
|
||||||
|
|
freqtrade/exchange/exchange_utils.py (new file, 252 lines)
@@ -0,0 +1,252 @@
|
"""
|
||||||
|
Exchange support utils
|
||||||
|
"""
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from math import ceil
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
import ccxt
|
||||||
|
from ccxt import ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE, decimal_to_precision
|
||||||
|
|
||||||
|
from freqtrade.exchange.common import BAD_EXCHANGES, EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED
|
||||||
|
from freqtrade.util import FtPrecise
|
||||||
|
|
||||||
|
|
||||||
|
CcxtModuleType = Any
|
||||||
|
|
||||||
|
|
||||||
|
def is_exchange_known_ccxt(exchange_name: str, ccxt_module: CcxtModuleType = None) -> bool:
|
||||||
|
return exchange_name in ccxt_exchanges(ccxt_module)
|
||||||
|
|
||||||
|
|
||||||
|
def ccxt_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
|
||||||
|
"""
|
||||||
|
Return the list of all exchanges known to ccxt
|
||||||
|
"""
|
||||||
|
return ccxt_module.exchanges if ccxt_module is not None else ccxt.exchanges
|
||||||
|
|
||||||
|
|
||||||
|
def available_exchanges(ccxt_module: CcxtModuleType = None) -> List[str]:
|
||||||
|
"""
|
||||||
|
Return exchanges available to the bot, i.e. non-bad exchanges in the ccxt list
|
||||||
|
"""
|
||||||
|
exchanges = ccxt_exchanges(ccxt_module)
|
||||||
|
return [x for x in exchanges if validate_exchange(x)[0]]
|
||||||
|
|
||||||
|
|
||||||
|
def validate_exchange(exchange: str) -> Tuple[bool, str]:
|
||||||
|
ex_mod = getattr(ccxt, exchange.lower())()
|
||||||
|
if not ex_mod or not ex_mod.has:
|
||||||
|
return False, ''
|
||||||
|
missing = [k for k in EXCHANGE_HAS_REQUIRED if ex_mod.has.get(k) is not True]
|
||||||
|
if missing:
|
||||||
|
return False, f"missing: {', '.join(missing)}"
|
||||||
|
|
||||||
|
missing_opt = [k for k in EXCHANGE_HAS_OPTIONAL if not ex_mod.has.get(k)]
|
||||||
|
|
||||||
|
if exchange.lower() in BAD_EXCHANGES:
|
||||||
|
return False, BAD_EXCHANGES.get(exchange.lower(), '')
|
||||||
|
if missing_opt:
|
||||||
|
return True, f"missing opt: {', '.join(missing_opt)}"
|
||||||
|
|
||||||
|
return True, ''
|
||||||
|
|
||||||
|
|
||||||
|
def validate_exchanges(all_exchanges: bool) -> List[Tuple[str, bool, str]]:
|
||||||
|
"""
|
||||||
|
:return: List of tuples with exchangename, valid, reason.
|
||||||
|
"""
|
||||||
|
exchanges = ccxt_exchanges() if all_exchanges else available_exchanges()
|
||||||
|
exchanges_valid = [
|
||||||
|
(e, *validate_exchange(e)) for e in exchanges
|
||||||
|
]
|
||||||
|
return exchanges_valid
|
||||||
|
|
||||||
|
|
||||||
|
def timeframe_to_seconds(timeframe: str) -> int:
|
||||||
|
"""
|
||||||
|
Translates the timeframe interval value written in the human readable
|
||||||
|
form ('1m', '5m', '1h', '1d', '1w', etc.) to the number
|
||||||
|
of seconds for one timeframe interval.
|
||||||
|
"""
|
||||||
|
return ccxt.Exchange.parse_timeframe(timeframe)
|
||||||
|
|
||||||
|
|
||||||
|
def timeframe_to_minutes(timeframe: str) -> int:
|
||||||
|
"""
|
||||||
|
Same as timeframe_to_seconds, but returns minutes.
|
||||||
|
"""
|
||||||
|
return ccxt.Exchange.parse_timeframe(timeframe) // 60
|
||||||
|
|
||||||
|
|
||||||
|
def timeframe_to_msecs(timeframe: str) -> int:
|
||||||
|
"""
|
||||||
|
Same as timeframe_to_seconds, but returns milliseconds.
|
||||||
|
"""
|
||||||
|
return ccxt.Exchange.parse_timeframe(timeframe) * 1000
|
||||||
|
|
||||||
|
|
||||||
|
def timeframe_to_prev_date(timeframe: str, date: datetime = None) -> datetime:
|
||||||
|
"""
|
||||||
|
Use Timeframe and determine the candle start date for this date.
|
||||||
|
Does not round when given a candle start date.
|
||||||
|
:param timeframe: timeframe in string format (e.g. "5m")
|
||||||
|
:param date: date to use. Defaults to now(utc)
|
||||||
|
:returns: date of previous candle (with utc timezone)
|
||||||
|
"""
|
||||||
|
if not date:
|
||||||
|
date = datetime.now(timezone.utc)
|
||||||
|
|
||||||
|
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, date.timestamp() * 1000,
|
||||||
|
ROUND_DOWN) // 1000
|
||||||
|
return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
||||||
|
|
||||||
|
|
||||||
|
def timeframe_to_next_date(timeframe: str, date: datetime = None) -> datetime:
|
||||||
|
"""
|
||||||
|
Use Timeframe and determine next candle.
|
||||||
|
:param timeframe: timeframe in string format (e.g. "5m")
|
||||||
|
:param date: date to use. Defaults to now(utc)
|
||||||
|
:returns: date of next candle (with utc timezone)
|
||||||
|
"""
|
||||||
|
if not date:
|
||||||
|
date = datetime.now(timezone.utc)
|
||||||
|
new_timestamp = ccxt.Exchange.round_timeframe(timeframe, date.timestamp() * 1000,
|
||||||
|
ROUND_UP) // 1000
|
||||||
|
return datetime.fromtimestamp(new_timestamp, tz=timezone.utc)
|
||||||
|
|
||||||
|
|
||||||
|
def date_minus_candles(
|
||||||
|
timeframe: str, candle_count: int, date: Optional[datetime] = None) -> datetime:
|
||||||
|
"""
|
||||||
|
subtract X candles from a date.
|
||||||
|
:param timeframe: timeframe in string format (e.g. "5m")
|
||||||
|
:param candle_count: Amount of candles to subtract.
|
||||||
|
:param date: date to use. Defaults to now(utc)
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not date:
|
||||||
|
date = datetime.now(timezone.utc)
|
||||||
|
|
||||||
|
tf_min = timeframe_to_minutes(timeframe)
|
||||||
|
new_date = timeframe_to_prev_date(timeframe, date) - timedelta(minutes=tf_min * candle_count)
|
||||||
|
return new_date
|
||||||
|
|
||||||
|
|
||||||
|
def market_is_active(market: Dict) -> bool:
|
||||||
|
"""
|
||||||
|
Return True if the market is active.
|
||||||
|
"""
|
||||||
|
# "It's active, if the active flag isn't explicitly set to false. If it's missing or
|
||||||
|
# true then it's true. If it's undefined, then it's most likely true, but not 100% )"
|
||||||
|
# See https://github.com/ccxt/ccxt/issues/4874,
|
||||||
|
# https://github.com/ccxt/ccxt/issues/4075#issuecomment-434760520
|
||||||
|
return market.get('active', True) is not False
|
||||||
|
|
||||||
|
|
||||||
|
def amount_to_contracts(amount: float, contract_size: Optional[float]) -> float:
|
||||||
|
"""
|
||||||
|
Convert amount to contracts.
|
||||||
|
:param amount: amount to convert
|
||||||
|
:param contract_size: contract size - taken from exchange.get_contract_size(pair)
|
||||||
|
:return: num-contracts
|
||||||
|
"""
|
||||||
|
if contract_size and contract_size != 1:
|
||||||
|
return float(FtPrecise(amount) / FtPrecise(contract_size))
|
||||||
|
else:
|
||||||
|
return amount
|
||||||
|
|
||||||
|
|
||||||
|
def contracts_to_amount(num_contracts: float, contract_size: Optional[float]) -> float:
|
||||||
|
"""
|
||||||
|
Takes num-contracts and converts it to contract size
|
||||||
|
:param num_contracts: number of contracts
|
||||||
|
:param contract_size: contract size - taken from exchange.get_contract_size(pair)
|
||||||
|
:return: Amount
|
||||||
|
"""
|
||||||
|
|
||||||
|
if contract_size and contract_size != 1:
|
||||||
|
return float(FtPrecise(num_contracts) * FtPrecise(contract_size))
|
||||||
|
else:
|
||||||
|
return num_contracts
|
||||||
|
|
||||||
|
|
||||||
|
def amount_to_precision(amount: float, amount_precision: Optional[float],
|
||||||
|
precisionMode: Optional[int]) -> float:
|
||||||
|
"""
|
||||||
|
Returns the amount to buy or sell to a precision the Exchange accepts
|
||||||
|
Re-implementation of ccxt internal methods - ensuring we can test the result is correct
|
||||||
|
based on our definitions.
|
||||||
|
:param amount: amount to truncate
|
||||||
|
:param amount_precision: amount precision to use.
|
||||||
|
should be retrieved from markets[pair]['precision']['amount']
|
||||||
|
:param precisionMode: precision mode to use. Should be used from precisionMode
|
||||||
|
one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
|
||||||
|
:return: truncated amount
|
||||||
|
"""
|
||||||
|
if amount_precision is not None and precisionMode is not None:
|
||||||
|
precision = int(amount_precision) if precisionMode != TICK_SIZE else amount_precision
|
||||||
|
# precision must be an int for non-ticksize inputs.
|
||||||
|
amount = float(decimal_to_precision(amount, rounding_mode=TRUNCATE,
|
||||||
|
precision=precision,
|
||||||
|
counting_mode=precisionMode,
|
||||||
|
))
|
||||||
|
|
||||||
|
return amount
|
||||||
|
|
||||||
|
|
||||||
|
def amount_to_contract_precision(
|
||||||
|
amount, amount_precision: Optional[float], precisionMode: Optional[int],
|
||||||
|
contract_size: Optional[float]) -> float:
|
||||||
|
"""
|
||||||
|
Returns the amount to buy or sell to a precision the Exchange accepts
|
||||||
|
including calculation to and from contracts.
|
||||||
|
Re-implementation of ccxt internal methods - ensuring we can test the result is correct
|
||||||
|
based on our definitions.
|
||||||
|
:param amount: amount to truncate
|
||||||
|
:param amount_precision: amount precision to use.
|
||||||
|
should be retrieved from markets[pair]['precision']['amount']
|
||||||
|
:param precisionMode: precision mode to use. Should be used from precisionMode
|
||||||
|
one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
|
||||||
|
:param contract_size: contract size - taken from exchange.get_contract_size(pair)
|
||||||
|
:return: truncated amount
|
||||||
|
"""
|
||||||
|
if amount_precision is not None and precisionMode is not None:
|
||||||
|
contracts = amount_to_contracts(amount, contract_size)
|
||||||
|
amount_p = amount_to_precision(contracts, amount_precision, precisionMode)
|
||||||
|
return contracts_to_amount(amount_p, contract_size)
|
||||||
|
return amount
|
||||||
|
|
||||||
|
|
||||||
|
def price_to_precision(price: float, price_precision: Optional[float],
                       precisionMode: Optional[int]) -> float:
    """
    Returns the price rounded up to the precision the Exchange accepts.
    Partial re-implementation of ccxt internal method decimal_to_precision(),
    which does not support rounding up.
    TODO: If ccxt supports ROUND_UP for decimal_to_precision(), we could remove this and
    align with amount_to_precision().
    !!! Rounds up
    :param price: price to convert
    :param price_precision: price precision to use. Used from markets[pair]['precision']['price']
    :param precisionMode: precision mode to use. Should be the exchange's precisionMode -
                          one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
    :return: price rounded up to the precision the Exchange accepts
    """
    if price_precision is not None and precisionMode is not None:
        # price = float(decimal_to_precision(price, rounding_mode=ROUND,
        #                                    precision=price_precision,
        #                                    counting_mode=self.precisionMode,
        #                                    ))
        if precisionMode == TICK_SIZE:
            precision = FtPrecise(price_precision)
            price_str = FtPrecise(price)
            missing = price_str % precision
            if not missing == FtPrecise("0"):
                price = round(float(str(price_str - missing + precision)), 14)
        else:
            symbol_prec = price_precision
            big_price = price * pow(10, symbol_prec)
            price = ceil(big_price) / pow(10, symbol_prec)
    return price

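A worked example (editorial, not from the diff) of the two rounding-up branches above, with Decimal standing in for FtPrecise:

    from decimal import Decimal
    from math import ceil

    def round_up_to_tick(price: float, tick: float) -> float:
        # TICK_SIZE branch: lift the price to the next multiple of the tick.
        p, t = Decimal(str(price)), Decimal(str(tick))
        missing = p % t
        return float(p - missing + t) if missing else price

    # 100.003 on a 0.005 grid is quoted UP at 100.005 (never down at 100.0),
    # keeping the rounded price on the conservative side for the order.
    assert round_up_to_tick(100.003, 0.005) == 100.005

    # DECIMAL_PLACES branch: ceil() at the configured number of decimals.
    assert ceil(1.23401 * 10 ** 3) / 10 ** 3 == 1.235
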
@ -1,178 +0,0 @@
""" FTX exchange subclass """
import logging
from typing import Any, Dict, List, Optional, Tuple

import ccxt

from freqtrade.constants import BuySell
from freqtrade.enums import MarginMode, TradingMode
from freqtrade.exceptions import (DDosProtection, InsufficientFundsError, InvalidOrderException,
                                  OperationalException, TemporaryError)
from freqtrade.exchange import Exchange
from freqtrade.exchange.common import API_FETCH_ORDER_RETRY_COUNT, retrier
from freqtrade.misc import safe_value_fallback2


logger = logging.getLogger(__name__)


class Ftx(Exchange):

    _ft_has: Dict = {
        "order_time_in_force": ['GTC', 'IOC', 'PO'],
        "stoploss_on_exchange": True,
        "ohlcv_candle_limit": 1500,
        "ohlcv_require_since": True,
        "ohlcv_volume_currency": "quote",
        "mark_ohlcv_price": "index",
        "mark_ohlcv_timeframe": "1h",
    }

    _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
        # TradingMode.SPOT always supported and not required in this list
        # (TradingMode.MARGIN, MarginMode.CROSS),
        # (TradingMode.FUTURES, MarginMode.CROSS)
    ]

    def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
        """
        Verify stop_loss against stoploss-order value (limit or price)
        Returns True if adjustment is necessary.
        """
        return order['type'] == 'stop' and (
            side == "sell" and stop_loss > float(order['price']) or
            side == "buy" and stop_loss < float(order['price'])
        )

    @retrier(retries=0)
    def stoploss(self, pair: str, amount: float, stop_price: float,
                 order_types: Dict, side: BuySell, leverage: float) -> Dict:
        """
        Creates a stoploss order.
        Depending on order_types.stoploss configuration, uses 'market' or limit order.

        Limit orders are defined by having orderPrice set, otherwise a market order is used.
        """
        limit_price_pct = order_types.get('stoploss_on_exchange_limit_ratio', 0.99)
        if side == "sell":
            limit_rate = stop_price * limit_price_pct
        else:
            limit_rate = stop_price * (2 - limit_price_pct)

        ordertype = "stop"

        stop_price = self.price_to_precision(pair, stop_price)

        if self._config['dry_run']:
            dry_order = self.create_dry_run_order(
                pair, ordertype, side, amount, stop_price, leverage, stop_loss=True)
            return dry_order

        try:
            params = self._params.copy()
            if order_types.get('stoploss', 'market') == 'limit':
                # set orderPrice to place limit order, otherwise it's a market order
                params['orderPrice'] = limit_rate
            if self.trading_mode == TradingMode.FUTURES:
                params.update({'reduceOnly': True})

            params['stopPrice'] = stop_price
            amount = self.amount_to_precision(pair, amount)

            self._lev_prep(pair, leverage, side)
            order = self._api.create_order(symbol=pair, type=ordertype, side=side,
                                           amount=amount, params=params)
            self._log_exchange_response('create_stoploss_order', order)
            logger.info('stoploss order added for %s. '
                        'stop price: %s.', pair, stop_price)
            return order
        except ccxt.InsufficientFunds as e:
            raise InsufficientFundsError(
                f'Insufficient funds to create {ordertype} {side} order on market {pair}. '
                f'Tried to create stoploss with amount {amount} at stoploss {stop_price}. '
                f'Message: {e}') from e
        except ccxt.InvalidOrder as e:
            raise InvalidOrderException(
                f'Could not create {ordertype} {side} order on market {pair}. '
                f'Tried to create stoploss with amount {amount} at stoploss {stop_price}. '
                f'Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not place {side} order due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    @retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
    def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
        if self._config['dry_run']:
            return self.fetch_dry_run_order(order_id)

        try:
            orders = self._api.fetch_orders(pair, None, params={'type': 'stop'})

            order = [order for order in orders if order['id'] == order_id]
            self._log_exchange_response('fetch_stoploss_order', order)
            if len(order) == 1:
                if order[0].get('status') == 'closed':
                    # Trigger order was triggered ...
                    real_order_id: Optional[str] = order[0].get('info', {}).get('orderId')
                    # OrderId may be None for stoploss-market orders,
                    # so we need to get it through the endpoint
                    # /conditional_orders/{conditional_order_id}/triggers
                    if not real_order_id:
                        res = self._api.privateGetConditionalOrdersConditionalOrderIdTriggers(
                            params={'conditional_order_id': order_id})
                        self._log_exchange_response('fetch_stoploss_order2', res)
                        real_order_id = res['result'][0]['orderId'] if res.get(
                            'result', []) else None

                    if real_order_id:
                        order1 = self._api.fetch_order(real_order_id, pair)
                        self._log_exchange_response('fetch_stoploss_order1', order1)
                        # Fake type to stop - as this was really a stop order.
                        order1['id_stop'] = order1['id']
                        order1['id'] = order_id
                        order1['type'] = 'stop'
                        order1['status_stop'] = 'triggered'
                        return order1

                return order[0]
            else:
                raise InvalidOrderException(f"Could not get stoploss order for id {order_id}")

        except ccxt.InvalidOrder as e:
            raise InvalidOrderException(
                f'Tried to get an invalid order (id: {order_id}). Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not get order due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    @retrier
    def cancel_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
        if self._config['dry_run']:
            return {}
        try:
            order = self._api.cancel_order(order_id, pair, params={'type': 'stop'})
            self._log_exchange_response('cancel_stoploss_order', order)
            return order
        except ccxt.InvalidOrder as e:
            raise InvalidOrderException(
                f'Could not cancel order. Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f'Could not cancel order due to {e.__class__.__name__}. Message: {e}') from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e

    def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
        if order['type'] == 'stop':
            return safe_value_fallback2(order, order, 'id_stop', 'id')
        return order['id']

@ -126,13 +126,3 @@ class Gateio(Exchange):
                pair=pair,
                params={'stop': True}
            )

    def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
        """
        Verify stop_loss against stoploss-order value (limit or price)
        Returns True if adjustment is necessary.
        """
        return (order.get('stopPrice', None) is None or (
            side == "sell" and stop_loss > float(order['stopPrice'])) or
            (side == "buy" and stop_loss < float(order['stopPrice']))
        )

@ -2,6 +2,7 @@
import logging
from typing import Dict

from freqtrade.constants import BuySell
from freqtrade.exchange import Exchange


@ -22,20 +23,7 @@ class Huobi(Exchange):
        "l2_limit_range_required": False,
    }

    def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
        """
        Verify stop_loss against stoploss-order value (limit or price)
        Returns True if adjustment is necessary.
        """
        return (
            order.get('stopPrice', None) is None
            or (
                order['type'] == 'stop'
                and stop_loss > float(order['stopPrice'])
            )
        )

    def _get_stop_params(self, ordertype: str, stop_price: float) -> Dict:
    def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:

        params = self._params.copy()
        params.update({

@ -218,3 +218,19 @@ class Kraken(Exchange):
        fees = sum(df['open_fund'] * df['open_mark'] * amount * time_in_ratio)

        return fees if is_short else -fees

    def _trades_contracts_to_amount(self, trades: List) -> List:
        """
        Fix "last" id issue for kraken data downloads
        This whole override can probably be removed once the following
        issue is closed in ccxt: https://github.com/ccxt/ccxt/issues/15827
        """
        super()._trades_contracts_to_amount(trades)
        if (
            len(trades) > 0
            and isinstance(trades[-1].get('info'), list)
            and len(trades[-1].get('info', [])) > 7
        ):
            trades[-1]['id'] = trades[-1].get('info', [])[-1]
        return trades

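A small illustration (editorial, not part of the diff) of what the override above does with the raw Kraken payload; the list layout of 'info' and the id value below are made up for the example, based on the ccxt issue referenced in the docstring:

    # Hypothetical payload: the trailing element of 'info' carries the id
    # that would otherwise be lost, which freqtrade needs for pagination.
    trades = [{'id': None,
               'info': ['30000.1', '0.01', 1668000000.0, 'b', 'l', '', 42, 'TID123']}]
    if (trades and isinstance(trades[-1].get('info'), list)
            and len(trades[-1].get('info', [])) > 7):
        trades[-1]['id'] = trades[-1].get('info', [])[-1]
    assert trades[-1]['id'] == 'TID123'
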
@ -2,6 +2,7 @@
import logging
from typing import Dict

from freqtrade.constants import BuySell
from freqtrade.exchange import Exchange


@ -27,17 +28,7 @@ class Kucoin(Exchange):
        "ohlcv_candle_limit": 1500,
    }

    def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
        """
        Verify stop_loss against stoploss-order value (limit or price)
        Returns True if adjustment is necessary.
        """
        return (
            order.get('stopPrice', None) is None
            or stop_loss > float(order['stopPrice'])
        )

    def _get_stop_params(self, ordertype: str, stop_price: float) -> Dict:
    def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:

        params = self._params.copy()
        params.update({

93 freqtrade/freqai/base_models/FreqaiMultiOutputClassifier.py Normal file

@ -0,0 +1,93 @@
import numpy as np
from joblib import Parallel
from sklearn.base import is_classifier
from sklearn.multioutput import MultiOutputClassifier, _fit_estimator
from sklearn.utils.fixes import delayed
from sklearn.utils.multiclass import check_classification_targets
from sklearn.utils.validation import has_fit_parameter

from freqtrade.exceptions import OperationalException


class FreqaiMultiOutputClassifier(MultiOutputClassifier):

    def fit(self, X, y, sample_weight=None, fit_params=None):
        """Fit the model to data, separately for each output variable.
        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input data.
        y : {array-like, sparse matrix} of shape (n_samples, n_outputs)
            Multi-output targets. An indicator matrix turns on multilabel
            estimation.
        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If `None`, then samples are equally weighted.
            Only supported if the underlying classifier supports sample
            weights.
        fit_params : A list of dicts for the fit_params
            Parameters passed to the ``estimator.fit`` method of each step.
            Each dict may contain the same or different values (e.g. different
            eval_sets or init_models)
        .. versionadded:: 0.23
        Returns
        -------
        self : object
            Returns a fitted instance.
        """

        if not hasattr(self.estimator, "fit"):
            raise ValueError("The base estimator should implement a fit method")

        y = self._validate_data(X="no_validation", y=y, multi_output=True)

        if is_classifier(self):
            check_classification_targets(y)

        if y.ndim == 1:
            raise ValueError(
                "y must have at least two dimensions for "
                "multi-output classification but has only one."
            )

        if sample_weight is not None and not has_fit_parameter(
            self.estimator, "sample_weight"
        ):
            raise ValueError("Underlying estimator does not support sample weights.")

        if not fit_params:
            # Empty mappings (rather than None) so **fit_params[i] below
            # unpacks safely when no fit_params were passed in.
            fit_params = [{}] * y.shape[1]

        self.estimators_ = Parallel(n_jobs=self.n_jobs)(
            delayed(_fit_estimator)(
                self.estimator, X, y[:, i], sample_weight, **fit_params[i]
            )
            for i in range(y.shape[1])
        )

        self.classes_ = []
        for estimator in self.estimators_:
            self.classes_.extend(estimator.classes_)
        if len(set(self.classes_)) != len(self.classes_):
            raise OperationalException(f"Class labels must be unique across targets: "
                                       f"{self.classes_}")

        if hasattr(self.estimators_[0], "n_features_in_"):
            self.n_features_in_ = self.estimators_[0].n_features_in_
        if hasattr(self.estimators_[0], "feature_names_in_"):
            self.feature_names_in_ = self.estimators_[0].feature_names_in_

        return self

    def predict_proba(self, X):
        """
        Get predict_proba and stack arrays horizontally
        """
        results = np.hstack(super().predict_proba(X))
        return np.squeeze(results)

    def predict(self, X):
        """
        Get predict and squeeze into 2D array
        """
        results = super().predict(X)
        return np.squeeze(results)

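A usage sketch for the new wrapper (editorial; the estimator choice and toy data are invented). Note the class-label uniqueness check in fit(): the two targets below deliberately use disjoint label sets:

    import numpy as np
    from sklearn.linear_model import LogisticRegression

    X = np.random.rand(100, 4)
    y = np.column_stack([
        np.where(X[:, 0] > 0.5, 'up', 'down'),      # labels for target 1
        np.where(X[:, 1] > 0.5, 'fast', 'slow'),    # labels for target 2
    ])

    clf = FreqaiMultiOutputClassifier(estimator=LogisticRegression())
    clf.fit(X, y, fit_params=[{}, {}])   # one fit_params mapping per target
    proba = clf.predict_proba(X)         # per-target probabilities, hstacked
    preds = clf.predict(X)               # (100, 2) array of string labels
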
@ -87,6 +87,7 @@ class FreqaiDataDrawer:
        self.create_follower_dict()
        self.load_drawer_from_disk()
        self.load_historic_predictions_from_disk()
        self.metric_tracker: Dict[str, Dict[str, Dict[str, list]]] = {}
        self.load_metric_tracker_from_disk()
        self.training_queue: Dict[str, int] = {}
        self.history_lock = threading.Lock()

@ -97,7 +98,6 @@ class FreqaiDataDrawer:
        self.empty_pair_dict: pair_info = {
            "model_filename": "", "trained_timestamp": 0,
            "data_path": "", "extras": {}}
        self.metric_tracker: Dict[str, Dict[str, Dict[str, list]]] = {}

    def update_metric_tracker(self, metric: str, value: float, pair: str) -> None:
        """

@ -153,6 +153,7 @@ class FreqaiDataDrawer:
        if exists:
            with open(self.metric_tracker_path, "r") as fp:
                self.metric_tracker = rapidjson.load(fp, number_mode=rapidjson.NM_NATIVE)
            logger.info("Loading existing metric tracker from disk.")
        else:
            logger.info("Could not find existing metric tracker, starting from scratch")

@ -636,6 +637,8 @@ class FreqaiDataDrawer:
            axis=0,
        )

        self.current_candle = history_data[dk.pair][self.config['timeframe']].iloc[-1]['date']

    def load_all_pair_histories(self, timerange: TimeRange, dk: FreqaiDataKitchen) -> None:
        """
        Load pair histories for all whitelist and corr_pairlist pairs.

@ -1,7 +1,7 @@
import copy
import logging
import shutil
from datetime import datetime, timezone
from datetime import datetime, timedelta, timezone
from math import cos, sin
from pathlib import Path
from typing import Any, Dict, List, Tuple

@ -19,6 +19,7 @@ from sklearn.neighbors import NearestNeighbors

from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
from freqtrade.data.converter import reduce_dataframe_footprint
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_seconds
from freqtrade.strategy.interface import IStrategy

@ -80,25 +81,32 @@ class FreqaiDataKitchen:
        self.svm_model: linear_model.SGDOneClassSVM = None
        self.keras: bool = self.freqai_config.get("keras", False)
        self.set_all_pairs()

        if not self.live:
            if not self.config["timerange"]:
                raise OperationalException(
                    'Please pass --timerange if you intend to use FreqAI for backtesting.')
            self.full_timerange = self.create_fulltimerange(
                self.config["timerange"], self.freqai_config.get("train_period_days", 0)
            )
            (self.training_timeranges, self.backtesting_timeranges) = self.split_timerange(
                self.full_timerange,
                config["freqai"]["train_period_days"],
                config["freqai"]["backtest_period_days"],
            )

        self.backtest_live_models = config.get("freqai_backtest_live_models", False)

        if not self.live:
            self.full_path = self.get_full_models_path(self.config)

            if self.backtest_live_models:
                if self.pair:
                    self.set_timerange_from_ready_models()
                    (self.training_timeranges,
                     self.backtesting_timeranges) = self.split_timerange_live_models()
            else:
                self.full_timerange = self.create_fulltimerange(
                    self.config["timerange"], self.freqai_config.get("train_period_days", 0)
                )
                (self.training_timeranges, self.backtesting_timeranges) = self.split_timerange(
                    self.full_timerange,
                    config["freqai"]["train_period_days"],
                    config["freqai"]["backtest_period_days"],
                )

        self.data['extra_returns_per_train'] = self.freqai_config.get('extra_returns_per_train', {})
        self.thread_count = self.freqai_config.get("data_kitchen_thread_count", -1)
        self.train_dates: DataFrame = pd.DataFrame()
        self.unique_classes: Dict[str, list] = {}
        self.unique_class_list: list = []
        self.backtest_live_models_data: Dict[str, Any] = {}

    def set_paths(
        self,
@ -110,10 +118,7 @@ class FreqaiDataKitchen:
        :param metadata: dict = strategy furnished pair metadata
        :param trained_timestamp: int = timestamp of most recent training
        """
        self.full_path = Path(
            self.config["user_data_dir"] / "models" / str(self.freqai_config.get("identifier"))
        )
        self.full_path = self.get_full_models_path(self.config)
        self.data_path = Path(
            self.full_path
            / f"sub-train-{pair.split('/')[0]}_{trained_timestamp}"

@ -244,7 +249,7 @@ class FreqaiDataKitchen:
            self.data["filter_drop_index_training"] = drop_index

        else:
            if len(self.data['constant_features_list']):
            if 'constant_features_list' in self.data and len(self.data['constant_features_list']):
                filtered_df = self.check_pred_labels(filtered_df)
            # we are backtesting so we need to preserve row number to send back to strategy,
            # so now we use do_predict to avoid any prediction based on a NaN

@ -354,13 +359,19 @@ class FreqaiDataKitchen:
        :param df: Dataframe to be standardized
        """

        for item in df.keys():
            df[item] = (
                2
                * (df[item] - self.data[f"{item}_min"])
                / (self.data[f"{item}_max"] - self.data[f"{item}_min"])
                - 1
            )

        train_max = [None] * len(df.keys())
        train_min = [None] * len(df.keys())

        for i, item in enumerate(df.keys()):
            train_max[i] = self.data[f"{item}_max"]
            train_min[i] = self.data[f"{item}_min"]

        train_max_series = pd.Series(train_max, index=df.keys())
        train_min_series = pd.Series(train_min, index=df.keys())

        df = (
            2 * (df - train_min_series) / (train_max_series - train_min_series) - 1
        )

        return df
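The rewrite above replaces a per-column Python loop with a single broadcast expression; a minimal sketch (editorial, with invented feature names) of the same [-1, 1] scaling:

    import pandas as pd

    df = pd.DataFrame({'a': [1.0, 2.0, 3.0], 'b': [10.0, 20.0, 30.0]})
    train_min = pd.Series({'a': 1.0, 'b': 10.0})   # per-feature training minima
    train_max = pd.Series({'a': 3.0, 'b': 30.0})   # per-feature training maxima

    # DataFrame - Series aligns on column names, so every feature is scaled
    # with its own training range in one vectorised operation.
    normed = 2 * (df - train_min) / (train_max - train_min) - 1
    assert normed['a'].tolist() == [-1.0, 0.0, 1.0]
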
@ -422,9 +433,7 @@ class FreqaiDataKitchen:
            timerange_train.stopts = timerange_train.startts + train_period_days

            first = False
            start = datetime.fromtimestamp(timerange_train.startts, tz=timezone.utc)
            stop = datetime.fromtimestamp(timerange_train.stopts, tz=timezone.utc)
            tr_training_list.append(start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d"))
            tr_training_list.append(timerange_train.timerange_str)
            tr_training_list_timerange.append(copy.deepcopy(timerange_train))

            # associated backtest period

@ -436,9 +445,7 @@ class FreqaiDataKitchen:
            if timerange_backtest.stopts > config_timerange.stopts:
                timerange_backtest.stopts = config_timerange.stopts

            start = datetime.fromtimestamp(timerange_backtest.startts, tz=timezone.utc)
            stop = datetime.fromtimestamp(timerange_backtest.stopts, tz=timezone.utc)
            tr_backtesting_list.append(start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d"))
            tr_backtesting_list.append(timerange_backtest.timerange_str)
            tr_backtesting_list_timerange.append(copy.deepcopy(timerange_backtest))

            # ensure we are predicting on exactly same amount of data as requested by user defined

@ -449,6 +456,29 @@ class FreqaiDataKitchen:
        # print(tr_training_list, tr_backtesting_list)
        return tr_training_list_timerange, tr_backtesting_list_timerange

    def split_timerange_live_models(
        self
    ) -> Tuple[list, list]:

        tr_backtesting_list_timerange = []
        asset = self.pair.split("/")[0]
        if asset not in self.backtest_live_models_data["assets_end_dates"]:
            raise OperationalException(
                f"Model not available for pair {self.pair}. "
                "Please, try again after removing this pair from the configuration file."
            )
        asset_data = self.backtest_live_models_data["assets_end_dates"][asset]
        backtesting_timerange = self.backtest_live_models_data["backtesting_timerange"]
        model_end_dates = [x for x in asset_data]
        model_end_dates.append(backtesting_timerange.stopts)
        model_end_dates.sort()
        for index, item in enumerate(model_end_dates):
            if len(model_end_dates) > (index + 1):
                tr_to_add = TimeRange("date", "date", item, model_end_dates[index + 1])
                tr_backtesting_list_timerange.append(tr_to_add)

        return tr_backtesting_list_timerange, tr_backtesting_list_timerange

    def slice_dataframe(self, timerange: TimeRange, df: DataFrame) -> DataFrame:
        """
        Given a full dataframe, extract the user desired window
@ -457,11 +487,9 @@ class FreqaiDataKitchen:
        it is sliced down to just the present training period.
        """

        start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
        stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
        df = df.loc[df["date"] >= start, :]
        df = df.loc[df["date"] >= timerange.startdt, :]
        if not self.live:
            df = df.loc[df["date"] < stop, :]
            df = df.loc[df["date"] < timerange.stopdt, :]

        return df
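The new slice_dataframe() relies on TimeRange exposing timezone-aware startdt/stopdt datetimes; an editorial sketch of the same windowing with plain pandas:

    from datetime import datetime, timezone
    import pandas as pd

    dates = pd.date_range('2022-11-01', periods=5, freq='D', tz='UTC')
    df = pd.DataFrame({'date': dates, 'close': range(5)})

    startdt = datetime(2022, 11, 2, tzinfo=timezone.utc)
    stopdt = datetime(2022, 11, 4, tzinfo=timezone.utc)

    # half-open window [startdt, stopdt), as in the backtesting branch above
    window = df.loc[(df['date'] >= startdt) & (df['date'] < stopdt), :]
    assert window['close'].tolist() == [1, 2]
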
@ -956,11 +984,13 @@ class FreqaiDataKitchen:
            append_df[label] = predictions[label]
            if append_df[label].dtype == object:
                continue
            append_df[f"{label}_mean"] = self.data["labels_mean"][label]
            append_df[f"{label}_std"] = self.data["labels_std"][label]
            if "labels_mean" in self.data:
                append_df[f"{label}_mean"] = self.data["labels_mean"][label]
            if "labels_std" in self.data:
                append_df[f"{label}_std"] = self.data["labels_std"][label]

        for extra_col in self.data["extra_returns_per_train"]:
            append_df["{extra_col}"] = self.data["extra_returns_per_train"][extra_col]
            append_df[f"{extra_col}"] = self.data["extra_returns_per_train"][extra_col]

        append_df["do_predict"] = do_predict
        if self.freqai_config["feature_parameters"].get("DI_threshold", 0) > 0:

@ -1022,14 +1052,7 @@ class FreqaiDataKitchen:
        backtest_timerange.startts = (
            backtest_timerange.startts - backtest_period_days * SECONDS_IN_DAY
        )
        start = datetime.fromtimestamp(backtest_timerange.startts, tz=timezone.utc)
        stop = datetime.fromtimestamp(backtest_timerange.stopts, tz=timezone.utc)
        full_timerange = start.strftime("%Y%m%d") + "-" + stop.strftime("%Y%m%d")

        self.full_path = Path(
            self.config["user_data_dir"] / "models" / f"{self.freqai_config['identifier']}"
        )
        full_timerange = backtest_timerange.timerange_str

        config_path = Path(self.config["config_files"][0])

        if not self.full_path.is_dir():

@ -1112,15 +1135,15 @@ class FreqaiDataKitchen:

        return retrain, trained_timerange, data_load_timerange

    def set_new_model_names(self, pair: str, trained_timerange: TimeRange):
    def set_new_model_names(self, pair: str, timestamp_id: int):

        coin, _ = pair.split("/")
        self.data_path = Path(
            self.full_path
            / f"sub-train-{pair.split('/')[0]}_{int(trained_timerange.stopts)}"
            / f"sub-train-{pair.split('/')[0]}_{timestamp_id}"
        )

        self.model_filename = f"cb_{coin.lower()}_{int(trained_timerange.stopts)}"
        self.model_filename = f"cb_{coin.lower()}_{timestamp_id}"

    def set_all_pairs(self) -> None:

@ -1131,6 +1154,54 @@ class FreqaiDataKitchen:
            if pair not in self.all_pairs:
                self.all_pairs.append(pair)

    def extract_corr_pair_columns_from_populated_indicators(
        self,
        dataframe: DataFrame
    ) -> Dict[str, DataFrame]:
        """
        Find the columns of the dataframe corresponding to the corr_pairlist, save them
        in a dictionary to be reused and attached to other pairs.

        :param dataframe: fully populated dataframe (current pair + corr_pairs)
        :return: corr_dataframes, dictionary of dataframes to be attached
                 to other pairs in same candle.
        """
        corr_dataframes: Dict[str, DataFrame] = {}
        pairs = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])

        for pair in pairs:
            pair = pair.replace(':', '')  # lightgbm does not like colons
            valid_strs = [f"%-{pair}", f"%{pair}", f"%_{pair}"]
            pair_cols = [col for col in dataframe.columns if
                         any(substr in col for substr in valid_strs)]
            if pair_cols:
                pair_cols.insert(0, 'date')
                corr_dataframes[pair] = dataframe.filter(pair_cols, axis=1)

        return corr_dataframes

    def attach_corr_pair_columns(self, dataframe: DataFrame,
                                 corr_dataframes: Dict[str, DataFrame],
                                 current_pair: str) -> DataFrame:
        """
        Attach the existing corr_pair dataframes to the current pair dataframe before training

        :param dataframe: current pair strategy dataframe, indicators populated already
        :param corr_dataframes: dictionary of saved dataframes from earlier in the same candle
        :param current_pair: current pair to which we will attach corr pair dataframe
        :return:
        :dataframe: current pair dataframe of populated indicators, concatenated with corr_pairs
        ready for training
        """
        pairs = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
        current_pair = current_pair.replace(':', '')
        for pair in pairs:
            pair = pair.replace(':', '')  # lightgbm does not work with colons
            if current_pair != pair:
                dataframe = dataframe.merge(corr_dataframes[pair], how='left', on='date')

        return dataframe

    def use_strategy_to_populate_indicators(
        self,
        strategy: IStrategy,
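An editorial sketch of the caching pattern the two methods above implement: corr-pair feature columns are filtered out once per candle, then re-attached to every other pair by a left merge on 'date' (column names below are invented for the example):

    import pandas as pd

    populated = pd.DataFrame({
        'date': pd.date_range('2022-11-01', periods=3, freq='D'),
        '%-BTC/USDT-rsi': [30.0, 50.0, 70.0],    # corr-pair feature column
        '%-ETH/USDT-rsi': [40.0, 55.0, 60.0],    # current-pair feature column
    })

    valid_strs = ['%-BTC/USDT', '%BTC/USDT', '%_BTC/USDT']
    pair_cols = ['date'] + [c for c in populated.columns
                            if any(s in c for s in valid_strs)]
    cached = populated.filter(pair_cols, axis=1)  # reusable BTC/USDT block

    other = pd.DataFrame({'date': populated['date'],
                          '%-ETH/USDT-ema': [1.0, 2.0, 3.0]})
    merged = other.merge(cached, how='left', on='date')
    assert '%-BTC/USDT-rsi' in merged.columns
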
@ -1138,6 +1209,7 @@ class FreqaiDataKitchen:
        base_dataframes: dict = {},
        pair: str = "",
        prediction_dataframe: DataFrame = pd.DataFrame(),
        do_corr_pairs: bool = True,
    ) -> DataFrame:
        """
        Use the user defined strategy for populating indicators during retrain

@ -1147,15 +1219,15 @@ class FreqaiDataKitchen:
        :param base_dataframes: dict = dict containing the current pair dataframes
                                (for user defined timeframes)
        :param metadata: dict = strategy furnished pair metadata
        :returns:
        :return:
        dataframe: DataFrame = dataframe containing populated indicators
        """

        # for prediction dataframe creation, we let dataprovider handle everything in the strategy
        # so we create empty dictionaries, which allows us to pass None to
        # `populate_any_indicators()`. Signaling we want the dp to give us the live dataframe.
        tfs = self.freqai_config["feature_parameters"].get("include_timeframes")
        tfs: List[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
        pairs = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
        pairs: List[str] = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
        if not prediction_dataframe.empty:
            dataframe = prediction_dataframe.copy()
            for tf in tfs:

@ -1178,19 +1250,27 @@ class FreqaiDataKitchen:
                    informative=base_dataframes[tf],
                    set_generalized_indicators=sgi
                )
            if pairs:
                for i in pairs:
                    if pair in i:
                        continue  # dont repeat anything from whitelist
                    dataframe = strategy.populate_any_indicators(
                        i,
                        dataframe.copy(),
                        tf,
                        informative=corr_dataframes[i][tf]
                    )

        # ensure corr pairs are always last
        for corr_pair in pairs:
            if pair == corr_pair:
                continue  # dont repeat anything from whitelist
            for tf in tfs:
                if pairs and do_corr_pairs:
                    dataframe = strategy.populate_any_indicators(
                        corr_pair,
                        dataframe.copy(),
                        tf,
                        informative=corr_dataframes[corr_pair][tf]
                    )

        self.get_unique_classes_from_labels(dataframe)

        dataframe = self.remove_special_chars_from_feature_names(dataframe)

        if self.config.get('reduce_df_footprint', False):
            dataframe = reduce_dataframe_footprint(dataframe)

        return dataframe

    def fit_labels(self) -> None:

@ -1257,14 +1337,16 @@ class FreqaiDataKitchen:
        append_df = pd.read_hdf(self.backtesting_results_path)
        return append_df

    def check_if_backtest_prediction_exists(
        self
    ) -> bool:
        """
        Check if a backtesting prediction already exists
        :param dk: FreqaiDataKitchen
        :return:
        :boolean: whether the prediction file exists or not.
        """

    def check_if_backtest_prediction_is_valid(
        self,
        len_backtest_df: int
    ) -> bool:
        """
        Check if a backtesting prediction already exists and if the predictions
        to append have the same size as the backtesting dataframe slice
        :param len_backtest_df: Length of the backtesting dataframe slice
        :return:
        :boolean: whether the prediction file is valid.
        """
        path_to_predictionfile = Path(self.full_path /
                                      self.backtest_predictions_folder /
@ -1272,10 +1354,134 @@ class FreqaiDataKitchen:
        self.backtesting_results_path = path_to_predictionfile

        file_exists = path_to_predictionfile.is_file()

        if file_exists:
            logger.info(f"Found backtesting prediction file at {path_to_predictionfile}")
            append_df = self.get_backtesting_prediction()
            if len(append_df) == len_backtest_df:
                logger.info(f"Found backtesting prediction file at {path_to_predictionfile}")
                return True
            else:
                logger.info("A new backtesting prediction file is required. "
                            "(Number of predictions is different from dataframe length).")
                return False
        else:
            logger.info(
                f"Could not find backtesting prediction file at {path_to_predictionfile}"
            )
            return file_exists
            return False

    def set_timerange_from_ready_models(self):
        backtesting_timerange, \
            assets_end_dates = (
                self.get_timerange_and_assets_end_dates_from_ready_models(self.full_path))

        self.backtest_live_models_data = {
            "backtesting_timerange": backtesting_timerange,
            "assets_end_dates": assets_end_dates
        }
        return

    def get_full_models_path(self, config: Config) -> Path:
        """
        Returns default FreqAI model path
        :param config: Configuration dictionary
        """
        freqai_config: Dict[str, Any] = config["freqai"]
        return Path(
            config["user_data_dir"] / "models" / str(freqai_config.get("identifier"))
        )

    def get_timerange_and_assets_end_dates_from_ready_models(
            self, models_path: Path) -> Tuple[TimeRange, Dict[str, Any]]:
        """
        Returns timerange information based on a FreqAI model directory
        :param models_path: FreqAI model path

        :return: a Tuple with (TimeRange calculated from the directory,
        Dict with per-pair model end training dates)
        """
        all_models_end_dates = []
        assets_end_dates: Dict[str, Any] = self.get_assets_timestamps_training_from_ready_models(
            models_path)
        for key in assets_end_dates:
            for model_end_date in assets_end_dates[key]:
                if model_end_date not in all_models_end_dates:
                    all_models_end_dates.append(model_end_date)

        if len(all_models_end_dates) == 0:
            raise OperationalException(
                'At least 1 saved model is required to '
                'run backtest with the freqai-backtest-live-models option'
            )

        if len(all_models_end_dates) == 1:
            logger.warning(
                "Only 1 model was found. Backtesting will run with the "
                "timerange from the end of the training date to the current date"
            )

        finish_timestamp = int(datetime.now(tz=timezone.utc).timestamp())
        if len(all_models_end_dates) > 1:
            # After the last model end date, reuse the spacing to the previous
            # model to finish the backtest
            all_models_end_dates.sort(reverse=True)
            finish_timestamp = all_models_end_dates[0] + \
                (all_models_end_dates[0] - all_models_end_dates[1])

        all_models_end_dates.append(finish_timestamp)
        all_models_end_dates.sort()
        start_date = (datetime(*datetime.fromtimestamp(min(all_models_end_dates),
                                                       timezone.utc).timetuple()[:3],
                               tzinfo=timezone.utc))
        end_date = (datetime(*datetime.fromtimestamp(max(all_models_end_dates),
                                                     timezone.utc).timetuple()[:3],
                             tzinfo=timezone.utc))

        # add 1 day to string timerange to ensure BT module will load all dataframe data
        end_date = end_date + timedelta(days=1)
        backtesting_timerange = TimeRange(
            'date', 'date', int(start_date.timestamp()), int(end_date.timestamp())
        )
        return backtesting_timerange, assets_end_dates

    def get_assets_timestamps_training_from_ready_models(
            self, models_path: Path) -> Dict[str, Any]:
        """
        Scan the models path and return all assets' end-of-training dates (timestamps)
        :param models_path: FreqAI model path

        :return: a Dict with asset and model end training dates info
        """
        assets_end_dates: Dict[str, Any] = {}
        if not models_path.is_dir():
            raise OperationalException(
                'Model folders not found. Saved models are required '
                'to run backtest with the freqai-backtest-live-models option'
            )
        for model_dir in models_path.iterdir():
            if str(model_dir.name).startswith("sub-train"):
                model_end_date = int(model_dir.name.split("_")[1])
                asset = model_dir.name.split("_")[0].replace("sub-train-", "")
                model_file_name = (
                    f"cb_{str(model_dir.name).replace('sub-train-', '').lower()}"
                    "_model.joblib"
                )

                model_path_file = Path(model_dir / model_file_name)
                if model_path_file.is_file():
                    if asset not in assets_end_dates:
                        assets_end_dates[asset] = []
                    assets_end_dates[asset].append(model_end_date)

        return assets_end_dates

    def remove_special_chars_from_feature_names(self, dataframe: pd.DataFrame) -> pd.DataFrame:
        """
        Remove all special characters from feature strings (:)
        :param dataframe: the dataframe that just finished indicator population. (unfiltered)
        :return: dataframe with cleaned feature names
        """

        spec_chars = [':']
        for c in spec_chars:
            dataframe.columns = dataframe.columns.str.replace(c, "")

        return dataframe
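A worked example (editorial) of the end-date extrapolation in get_timerange_and_assets_end_dates_from_ready_models() above: with more than one model, the backtest window is extended past the newest model by the spacing between the two newest models:

    end_dates = [1667260800, 1667865600]   # two model end timestamps, 7 days apart
    end_dates.sort(reverse=True)
    finish_timestamp = end_dates[0] + (end_dates[0] - end_dates[1])
    assert finish_timestamp - end_dates[0] == 7 * 86400   # one more 7-day slot
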
@ -1,12 +1,10 @@
import logging
import shutil
import threading
import time
from abc import ABC, abstractmethod
from collections import deque
from datetime import datetime, timezone
from pathlib import Path
from threading import Lock
from typing import Any, Dict, List, Literal, Tuple

import numpy as np

@ -15,13 +13,13 @@ from numpy.typing import NDArray
from pandas import DataFrame

from freqtrade.configuration import TimeRange
from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
from freqtrade.constants import Config
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import timeframe_to_seconds
from freqtrade.freqai.data_drawer import FreqaiDataDrawer
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.utils import plot_feature_importance
from freqtrade.freqai.utils import plot_feature_importance, record_params
from freqtrade.strategy.interface import IStrategy

@ -61,6 +59,7 @@ class IFreqaiModel(ABC):
            "data_split_parameters", {})
        self.model_training_parameters: Dict[str, Any] = config.get("freqai", {}).get(
            "model_training_parameters", {})
        self.identifier: str = self.freqai_info.get("identifier", "no_id_provided")
        self.retrain = False
        self.first = True
        self.set_full_path()

@ -69,23 +68,23 @@ class IFreqaiModel(ABC):
        if self.save_backtest_models:
            logger.info('Backtesting module configured to save all models.')
        self.dd = FreqaiDataDrawer(Path(self.full_path), self.config, self.follow_mode)
        self.identifier: str = self.freqai_info.get("identifier", "no_id_provided")
        # set current candle to arbitrary historical date
        self.current_candle: datetime = datetime.fromtimestamp(637887600, tz=timezone.utc)
        self.dd.current_candle = self.current_candle
        self.scanning = False
        self.ft_params = self.freqai_info["feature_parameters"]
        self.corr_pairlist: List[str] = self.ft_params.get("include_corr_pairlist", [])
        self.keras: bool = self.freqai_info.get("keras", False)
        if self.keras and self.ft_params.get("DI_threshold", 0):
            self.ft_params["DI_threshold"] = 0
            logger.warning("DI threshold is not configured for Keras models yet. Deactivating.")
        self.CONV_WIDTH = self.freqai_info.get("conv_width", 2)
        self.CONV_WIDTH = self.freqai_info.get('conv_width', 1)
        if self.ft_params.get("inlier_metric_window", 0):
            self.CONV_WIDTH = self.ft_params.get("inlier_metric_window", 0) * 2
        self.pair_it = 0
        self.pair_it_train = 0
        self.total_pairs = len(self.config.get("exchange", {}).get("pair_whitelist"))
        self.train_queue = self._set_train_queue()
        self.last_trade_database_summary: DataFrame = {}
        self.current_trade_database_summary: DataFrame = {}
        self.analysis_lock = Lock()
        self.inference_time: float = 0
        self.train_time: float = 0
        self.begin_time: float = 0

@ -93,10 +92,15 @@ class IFreqaiModel(ABC):
        self.base_tf_seconds = timeframe_to_seconds(self.config['timeframe'])
        self.continual_learning = self.freqai_info.get('continual_learning', False)
        self.plot_features = self.ft_params.get("plot_feature_importances", 0)
        self.corr_dataframes: Dict[str, DataFrame] = {}
        # get_corr_dataframes controls the caching of corr_dataframes
        # for improved performance. Careful with this boolean.
        self.get_corr_dataframes: bool = True
        self._threads: List[threading.Thread] = []
        self._stop_event = threading.Event()

        record_params(config, self.full_path)

    def __getstate__(self):
        """
        Return an empty state to be pickled in hyperopt
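An editorial sketch of the caching switch introduced above: the first pair processed on a new candle repopulates corr_dataframes, and every later pair in the same candle reuses the cache:

    class CorrCache:
        def __init__(self):
            self.corr_dataframes = {}             # cached corr-pair features
            self.get_corr_dataframes = True       # True -> recompute on next call

        def populate(self, compute):
            if self.get_corr_dataframes:
                self.corr_dataframes = compute()  # expensive indicator population
                self.get_corr_dataframes = False  # served from cache until new candle
            return self.corr_dataframes
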
@ -135,7 +139,11 @@ class IFreqaiModel(ABC):
        # the concatenated results for the full backtesting period back to the strategy.
        elif not self.follow_mode:
            self.dk = FreqaiDataKitchen(self.config, self.live, metadata["pair"])
            logger.info(f"Training {len(self.dk.training_timeranges)} timeranges")
            if self.dk.backtest_live_models:
                logger.info(
                    f"Backtesting {len(self.dk.backtesting_timeranges)} timeranges (live models)")
            else:
                logger.info(f"Training {len(self.dk.training_timeranges)} timeranges")
            dataframe = self.dk.use_strategy_to_populate_indicators(
                strategy, prediction_dataframe=dataframe, pair=metadata["pair"]
            )

@ -255,25 +263,20 @@ class IFreqaiModel(ABC):
            dataframe_train = dk.slice_dataframe(tr_train, dataframe)
            dataframe_backtest = dk.slice_dataframe(tr_backtest, dataframe)

            trained_timestamp = tr_train
            tr_train_startts_str = datetime.fromtimestamp(
                tr_train.startts,
                tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
            tr_train_stopts_str = datetime.fromtimestamp(
                tr_train.stopts,
                tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
            logger.info(
                f"Training {pair}, {self.pair_it}/{self.total_pairs} pairs"
                f" from {tr_train_startts_str} to {tr_train_stopts_str}, {train_it}/{total_trains} "
                "trains"
            )

            trained_timestamp_int = int(trained_timestamp.stopts)
            dk.set_paths(pair, trained_timestamp_int)

            dk.set_new_model_names(pair, trained_timestamp)

            if dk.check_if_backtest_prediction_exists():

            if not self.ensure_data_exists(dataframe_backtest, tr_backtest, pair):
                continue

            self.log_backtesting_progress(tr_train, pair, train_it, total_trains)

            timestamp_model_id = int(tr_train.stopts)
            if dk.backtest_live_models:
                timestamp_model_id = int(tr_backtest.startts)

            dk.set_paths(pair, timestamp_model_id)

            dk.set_new_model_names(pair, timestamp_model_id)

            if dk.check_if_backtest_prediction_is_valid(len(dataframe_backtest)):
                self.dd.load_metadata(dk)
                dk.find_features(dataframe_train)
                self.check_if_feature_list_matches_strategy(dk)

@ -285,7 +288,7 @@ class IFreqaiModel(ABC):
                dk.find_labels(dataframe_train)
                self.model = self.train(dataframe_train, pair, dk)
                self.dd.pair_dict[pair]["trained_timestamp"] = int(
                    trained_timestamp.stopts)
                    tr_train.stopts)
                if self.plot_features:
                    plot_feature_importance(self.model, pair, dk, self.plot_features)
                if self.save_backtest_models:

@@ -337,6 +340,7 @@ class IFreqaiModel(ABC):
        if self.dd.historic_data:
            self.dd.update_historic_data(strategy, dk)
            logger.debug(f'Updating historic data on pair {metadata["pair"]}')
+            self.track_current_candle()

        if not self.follow_mode:
@@ -363,10 +367,10 @@ class IFreqaiModel(ABC):
        # load the model and associated data into the data kitchen
        self.model = self.dd.load_data(metadata["pair"], dk)

-        with self.analysis_lock:
-            dataframe = self.dk.use_strategy_to_populate_indicators(
-                strategy, prediction_dataframe=dataframe, pair=metadata["pair"]
-            )
+        dataframe = dk.use_strategy_to_populate_indicators(
+            strategy, prediction_dataframe=dataframe, pair=metadata["pair"],
+            do_corr_pairs=self.get_corr_dataframes
+        )

        if not self.model:
            logger.warning(
@@ -375,6 +379,9 @@ class IFreqaiModel(ABC):
            self.dd.return_null_values_to_strategy(dataframe, dk)
            return dk

+        if self.corr_pairlist:
+            dataframe = self.cache_corr_pairlist_dfs(dataframe, dk)
+
        dk.find_labels(dataframe)

        self.build_strategy_return_arrays(dataframe, dk, metadata["pair"], trained_timestamp)
@@ -526,14 +533,13 @@ class IFreqaiModel(ABC):
        return file_exists

    def set_full_path(self) -> None:
+        """
+        Creates and sets the full path for the identifier
+        """
        self.full_path = Path(
-            self.config["user_data_dir"] / "models" / f"{self.freqai_info['identifier']}"
+            self.config["user_data_dir"] / "models" / f"{self.identifier}"
        )
        self.full_path.mkdir(parents=True, exist_ok=True)
-        shutil.copy(
-            self.config["config_files"][0],
-            Path(self.full_path, Path(self.config["config_files"][0]).name),
-        )

    def extract_data_and_train_model(
        self,
@@ -559,10 +565,9 @@ class IFreqaiModel(ABC):
            data_load_timerange, pair, dk
        )

-        with self.analysis_lock:
-            unfiltered_dataframe = dk.use_strategy_to_populate_indicators(
-                strategy, corr_dataframes, base_dataframes, pair
-            )
+        unfiltered_dataframe = dk.use_strategy_to_populate_indicators(
+            strategy, corr_dataframes, base_dataframes, pair
+        )

        unfiltered_dataframe = dk.slice_dataframe(new_trained_timerange, unfiltered_dataframe)
@@ -573,7 +578,7 @@ class IFreqaiModel(ABC):
        model = self.train(unfiltered_dataframe, pair, dk)

        self.dd.pair_dict[pair]["trained_timestamp"] = new_trained_timerange.stopts
-        dk.set_new_model_names(pair, new_trained_timerange)
+        dk.set_new_model_names(pair, new_trained_timerange.stopts)
        self.dd.save_data(model, pair, dk)

        if self.plot_features:
@@ -624,7 +629,7 @@ class IFreqaiModel(ABC):
            hist_preds_df['DI_values'] = 0

        for return_str in dk.data['extra_returns_per_train']:
-            hist_preds_df[return_str] = 0
+            hist_preds_df[return_str] = dk.data['extra_returns_per_train'][return_str]

        hist_preds_df['close_price'] = strat_df['close']
        hist_preds_df['date_pred'] = strat_df['date']
@@ -738,6 +743,74 @@ class IFreqaiModel(ABC):
                    f'Best approximation queue: {best_queue}')
        return best_queue

+    def cache_corr_pairlist_dfs(self, dataframe: DataFrame, dk: FreqaiDataKitchen) -> DataFrame:
+        """
+        Cache the corr_pairlist dfs to speed up performance for subsequent pairs during the
+        current candle.
+        :param dataframe: strategy fed dataframe
+        :param dk: datakitchen object for current asset
+        :return: dataframe to attach/extract cached corr_pair dfs to/from.
+        """
+
+        if self.get_corr_dataframes:
+            self.corr_dataframes = dk.extract_corr_pair_columns_from_populated_indicators(dataframe)
+            if not self.corr_dataframes:
+                logger.warning("Couldn't cache corr_pair dataframes for improved performance. "
+                               "Consider ensuring that the full coin/stake, e.g. XYZ/USD, "
+                               "is included in the column names when you are creating features "
+                               "in `populate_any_indicators()`.")
+            self.get_corr_dataframes = not bool(self.corr_dataframes)
+        elif self.corr_dataframes:
+            dataframe = dk.attach_corr_pair_columns(
+                dataframe, self.corr_dataframes, dk.pair)
+
+        return dataframe
+
+    def track_current_candle(self):
+        """
+        Checks if the latest candle appended by the datadrawer is
+        equivalent to the latest candle seen by FreqAI. If not, it
+        asks to refresh the cached corr_dfs, and resets the pair
+        counter.
+        """
+        if self.dd.current_candle > self.current_candle:
+            self.get_corr_dataframes = True
+            self.pair_it = 1
+            self.current_candle = self.dd.current_candle
+
+    def ensure_data_exists(self, dataframe_backtest: DataFrame,
+                           tr_backtest: TimeRange, pair: str) -> bool:
+        """
+        Check if the dataframe is empty; if it is, report useful information to the user.
+        :param dataframe_backtest: the backtesting dataframe, maybe empty.
+        :param tr_backtest: current backtesting timerange.
+        :param pair: current pair
+        :return: if the data exists or not
+        """
+        if self.config.get("freqai_backtest_live_models", False) and len(dataframe_backtest) == 0:
+            logger.info(f"No data found for pair {pair} "
+                        f"from {tr_backtest.start_fmt} to {tr_backtest.stop_fmt}. "
+                        "Probably more than one training within the same candle period.")
+            return False
+        return True
+
+    def log_backtesting_progress(self, tr_train: TimeRange, pair: str,
+                                 train_it: int, total_trains: int):
+        """
+        Log the backtesting progress so the user knows how many pairs have been trained and
+        how many more pairs/trains remain.
+        :param tr_train: the training timerange
+        :param train_it: the train iteration for the current pair (the sliding window progress)
+        :param pair: the current pair
+        :param total_trains: total trains (total number of slides for the sliding window)
+        """
+        if not self.config.get("freqai_backtest_live_models", False):
+            logger.info(
+                f"Training {pair}, {self.pair_it}/{self.total_pairs} pairs"
+                f" from {tr_train.start_fmt} "
+                f"to {tr_train.stop_fmt}, {train_it}/{total_trains} "
+                "trains"
+            )
+
    # Following methods which are overridden by user made prediction models.
    # See freqai/prediction_models/CatboostPredictionModel.py for an example.
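Note: for orientation, `log_backtesting_progress()` above renders one line per training window; with the format string shown, a run would log something like the following (values illustrative):

    Training BTC/USDT, 1/10 pairs from 2022-08-01 00:00:00 to 2022-08-22 00:00:00, 3/12 trains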
freqtrade/freqai/prediction_models/CatboostClassifierMultiTarget.py (new file)

@@ -0,0 +1,74 @@
+import logging
+import sys
+from pathlib import Path
+from typing import Any, Dict
+
+from catboost import CatBoostClassifier, Pool
+
+from freqtrade.freqai.base_models.BaseClassifierModel import BaseClassifierModel
+from freqtrade.freqai.base_models.FreqaiMultiOutputClassifier import FreqaiMultiOutputClassifier
+from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
+
+
+logger = logging.getLogger(__name__)
+
+
+class CatboostClassifierMultiTarget(BaseClassifierModel):
+    """
+    User created prediction model. The class needs to override three necessary
+    functions, predict(), train(), fit(). The class inherits ModelHandler which
+    has its own DataHandler where data is held, saved, loaded, and managed.
+    """
+
+    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
+        """
+        User sets up the training and test data to fit their desired model here
+        :param data_dictionary: the dictionary constructed by DataHandler to hold
+                                all the training and test data/labels.
+        """
+
+        cbc = CatBoostClassifier(
+            allow_writing_files=True,
+            loss_function='MultiClass',
+            train_dir=Path(dk.data_path),
+            **self.model_training_parameters,
+        )
+
+        X = data_dictionary["train_features"]
+        y = data_dictionary["train_labels"]
+
+        sample_weight = data_dictionary["train_weights"]
+
+        eval_sets = [None] * y.shape[1]
+
+        if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) != 0:
+            eval_sets = [None] * data_dictionary['test_labels'].shape[1]
+
+            for i in range(data_dictionary['test_labels'].shape[1]):
+                eval_sets[i] = Pool(
+                    data=data_dictionary["test_features"],
+                    label=data_dictionary["test_labels"].iloc[:, i],
+                    weight=data_dictionary["test_weights"],
+                )
+
+        init_model = self.get_init_model(dk.pair)
+
+        if init_model:
+            init_models = init_model.estimators_
+        else:
+            init_models = [None] * y.shape[1]
+
+        fit_params = []
+        for i in range(len(eval_sets)):
+            fit_params.append({
+                'eval_set': eval_sets[i], 'init_model': init_models[i],
+                'log_cout': sys.stdout, 'log_cerr': sys.stderr,
+            })
+
+        model = FreqaiMultiOutputClassifier(estimator=cbc)
+        thread_training = self.freqai_info.get('multitarget_parallel_training', False)
+        if thread_training:
+            model.n_jobs = y.shape[1]
+        model.fit(X=X, y=y, sample_weight=sample_weight, fit_params=fit_params)
+
+        return model
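Note: a hedged configuration sketch for wiring this model up. The keys mirror what the file reads via `self.freqai_info` and `self.model_training_parameters`; the exact placement follows the standard freqtrade config layout, and all values are illustrative:

    {
        "freqai": {
            "enabled": true,
            "identifier": "multi-target-cbc",
            "multitarget_parallel_training": true,
            "data_split_parameters": {"test_size": 0.1},
            "model_training_parameters": {"iterations": 100, "verbose": 0}
        }
    }

The model is then selected at startup, e.g. `freqtrade trade --freqaimodel CatboostClassifierMultiTarget`.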
freqtrade/freqai/prediction_models/LightGBMClassifierMultiTarget.py (new file)

@@ -0,0 +1,64 @@
+import logging
+from typing import Any, Dict
+
+from lightgbm import LGBMClassifier
+
+from freqtrade.freqai.base_models.BaseClassifierModel import BaseClassifierModel
+from freqtrade.freqai.base_models.FreqaiMultiOutputClassifier import FreqaiMultiOutputClassifier
+from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
+
+
+logger = logging.getLogger(__name__)
+
+
+class LightGBMClassifierMultiTarget(BaseClassifierModel):
+    """
+    User created prediction model. The class needs to override three necessary
+    functions, predict(), train(), fit(). The class inherits ModelHandler which
+    has its own DataHandler where data is held, saved, loaded, and managed.
+    """
+
+    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
+        """
+        User sets up the training and test data to fit their desired model here
+        :param data_dictionary: the dictionary constructed by DataHandler to hold
+                                all the training and test data/labels.
+        """
+
+        lgb = LGBMClassifier(**self.model_training_parameters)
+
+        X = data_dictionary["train_features"]
+        y = data_dictionary["train_labels"]
+        sample_weight = data_dictionary["train_weights"]
+
+        eval_weights = None
+        eval_sets = [None] * y.shape[1]
+
+        if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) != 0:
+            eval_weights = [data_dictionary["test_weights"]]
+            eval_sets = [(None, None)] * data_dictionary['test_labels'].shape[1]  # type: ignore
+            for i in range(data_dictionary['test_labels'].shape[1]):
+                eval_sets[i] = (  # type: ignore
+                    data_dictionary["test_features"],
+                    data_dictionary["test_labels"].iloc[:, i]
+                )
+
+        init_model = self.get_init_model(dk.pair)
+        if init_model:
+            init_models = init_model.estimators_
+        else:
+            init_models = [None] * y.shape[1]
+
+        fit_params = []
+        for i in range(len(eval_sets)):
+            fit_params.append(
+                {'eval_set': eval_sets[i], 'eval_sample_weight': eval_weights,
+                 'init_model': init_models[i]})
+
+        model = FreqaiMultiOutputClassifier(estimator=lgb)
+        thread_training = self.freqai_info.get('multitarget_parallel_training', False)
+        if thread_training:
+            model.n_jobs = y.shape[1]
+        model.fit(X=X, y=y, sample_weight=sample_weight, fit_params=fit_params)
+
+        return model
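Note: `FreqaiMultiOutputClassifier` itself is not part of this diff. A plausible sketch of what such a wrapper's `fit()` does with the per-target `fit_params` lists built in the two files above; this is an assumption modeled on sklearn's `MultiOutputClassifier`, not the actual implementation:

    from sklearn.base import clone

    def fit(self, X, y, sample_weight=None, fit_params=None):
        # One cloned estimator per label column, each receiving its own
        # eval_set / init_model from the matching fit_params entry.
        self.estimators_ = []
        for i in range(y.shape[1]):
            est = clone(self.estimator)
            est.fit(X, y.iloc[:, i], sample_weight=sample_weight, **fit_params[i])
            self.estimators_.append(est)
        return self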
freqtrade/freqai/utils.py

@@ -1,9 +1,11 @@
 import logging
 from datetime import datetime, timezone
-from typing import Any
+from pathlib import Path
+from typing import Any, Dict

 import numpy as np
 import pandas as pd
+import rapidjson

 from freqtrade.configuration import TimeRange
 from freqtrade.constants import Config
@@ -191,3 +193,41 @@ def plot_feature_importance(model: Any, pair: str, dk: FreqaiDataKitchen,
    fig.update_layout(title_text=f"Best and worst features by importance {pair}")
    label = label.replace('&', '').replace('%', '')  # escape two FreqAI specific characters
    store_plot_file(fig, f"{dk.model_filename}-{label}.html", dk.data_path)
+
+
+def record_params(config: Dict[str, Any], full_path: Path) -> None:
+    """
+    Records run params in the full path for reproducibility
+    """
+    params_record_path = full_path / "run_params.json"
+
+    run_params = {
+        "freqai": config.get('freqai', {}),
+        "timeframe": config.get('timeframe'),
+        "stake_amount": config.get('stake_amount'),
+        "stake_currency": config.get('stake_currency'),
+        "max_open_trades": config.get('max_open_trades'),
+        "pairs": config.get('exchange', {}).get('pair_whitelist')
+    }
+
+    with open(params_record_path, "w") as handle:
+        rapidjson.dump(
+            run_params,
+            handle,
+            indent=4,
+            default=str,
+            number_mode=rapidjson.NM_NATIVE | rapidjson.NM_NAN
+        )
+
+
+def get_timerange_backtest_live_models(config: Config) -> str:
+    """
+    Returns a formatted timerange for backtest live/ready models
+    :param config: Configuration dictionary
+
+    :return: a string timerange (format example: '20220801-20220822')
+    """
+    dk = FreqaiDataKitchen(config)
+    models_path = dk.get_full_models_path(config)
+    timerange, _ = dk.get_timerange_and_assets_end_dates_from_ready_models(models_path)
+    return timerange.timerange_str
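Note: `record_params()` leaves a `run_params.json` next to the models. It would contain something like the following (all values illustrative, taken from whatever the run's config held):

    {
        "freqai": {"identifier": "example", "train_period_days": 30},
        "timeframe": "5m",
        "stake_amount": "unlimited",
        "stake_currency": "USDT",
        "max_open_trades": 5,
        "pairs": ["BTC/USDT", "ETH/USDT"]
    }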
freqtrade/freqtradebot.py

@@ -191,10 +191,10 @@ class FreqtradeBot(LoggingMixin):
        # Check whether markets have to be reloaded and reload them when it's needed
        self.exchange.reload_markets()

-        self.update_closed_trades_without_assigned_fees()
+        self.update_trades_without_assigned_fees()

        # Query trades from persistence layer
-        trades = Trade.get_open_trades()
+        trades: List[Trade] = Trade.get_open_trades()

        self.active_pair_whitelist = self._refresh_active_whitelist(trades)
@@ -354,7 +354,7 @@ class FreqtradeBot(LoggingMixin):
        if self.trading_mode == TradingMode.FUTURES:
            self._schedule.run_pending()

-    def update_closed_trades_without_assigned_fees(self):
+    def update_trades_without_assigned_fees(self) -> None:
        """
        Update closed trades without close fees assigned.
        Only acts when Orders are in the database, otherwise the last order-id is unknown.
@@ -379,17 +379,18 @@ class FreqtradeBot(LoggingMixin):
                    stoploss_order=order.ft_order_side == 'stoploss',
                    send_msg=False)

-        trades: List[Trade] = Trade.get_open_trades_without_assigned_fees()
+        trades = Trade.get_open_trades_without_assigned_fees()
        for trade in trades:
-            if trade.is_open and not trade.fee_updated(trade.entry_side):
-                order = trade.select_order(trade.entry_side, False)
-                open_order = trade.select_order(trade.entry_side, True)
-                if order and open_order is None:
-                    logger.info(
-                        f"Updating {trade.entry_side}-fee on trade {trade} "
-                        f"for order {order.order_id}."
-                    )
-                    self.update_trade_state(trade, order.order_id, send_msg=False)
+            with self._exit_lock:
+                if trade.is_open and not trade.fee_updated(trade.entry_side):
+                    order = trade.select_order(trade.entry_side, False)
+                    open_order = trade.select_order(trade.entry_side, True)
+                    if order and open_order is None:
+                        logger.info(
+                            f"Updating {trade.entry_side}-fee on trade {trade} "
+                            f"for order {order.order_id}."
+                        )
+                        self.update_trade_state(trade, order.order_id, send_msg=False)

    def handle_insufficient_funds(self, trade: Trade):
        """
@@ -826,6 +827,8 @@ class FreqtradeBot(LoggingMixin):
                co = self.exchange.cancel_stoploss_order_with_result(
                    trade.stoploss_order_id, trade.pair, trade.amount)
                trade.update_order(co)
+                # Reset stoploss order id.
+                trade.stoploss_order_id = None
            except InvalidOrderException:
                logger.exception(f"Could not cancel stoploss order {trade.stoploss_order_id}")
        return trade
@@ -982,7 +985,7 @@ class FreqtradeBot(LoggingMixin):
    # SELL / exit positions / close trades logic and methods
    #

-    def exit_positions(self, trades: List[Any]) -> int:
+    def exit_positions(self, trades: List[Trade]) -> int:
        """
        Tries to execute exit orders for open trades (positions)
        """
@@ -1010,7 +1013,7 @@ class FreqtradeBot(LoggingMixin):

    def handle_trade(self, trade: Trade) -> bool:
        """
-        Sells/exits_short the current pair if the threshold is reached and updates the trade record.
+        Exits the current pair if the threshold is reached and updates the trade record.
        :return: True if trade has been sold/exited_short, False otherwise
        """
        if not trade.is_open:
@@ -1133,10 +1136,8 @@ class FreqtradeBot(LoggingMixin):
            trade.exit_reason = ExitType.STOPLOSS_ON_EXCHANGE.value
            self.update_trade_state(trade, trade.stoploss_order_id, stoploss_order,
                                    stoploss_order=True)
-            # Lock pair for one candle to prevent immediate rebuys
-            self.strategy.lock_pair(trade.pair, datetime.now(timezone.utc),
-                                    reason='Auto lock')
            self._notify_exit(trade, "stoploss", True)
+            self.handle_protections(trade.pair, trade.trade_direction)
            return True

        if trade.open_order_id or not trade.is_open:
@@ -1169,7 +1170,6 @@ class FreqtradeBot(LoggingMixin):
            if self.create_stoploss_order(trade=trade, stop_price=trade.stoploss_or_liquidation):
                return False
            else:
-                trade.stoploss_order_id = None
                logger.warning('Stoploss order was cancelled, but unable to recreate one.')

        # Finally we check if stoploss on exchange should be moved up because of trailing.
@@ -1595,11 +1595,6 @@ class FreqtradeBot(LoggingMixin):
        trade.close_rate_requested = limit
        trade.exit_reason = exit_reason

-        if not sub_trade_amt:
-            # Lock pair for one candle to prevent immediate re-trading
-            self.strategy.lock_pair(trade.pair, datetime.now(timezone.utc),
-                                    reason='Auto lock')
-
        self._notify_exit(trade, order_type, sub_trade=bool(sub_trade_amt), order=order_obj)
        # In case of market sell orders the order can be closed immediately
        if order.get('status', 'unknown') in ('closed', 'expired'):
@@ -1809,6 +1804,8 @@ class FreqtradeBot(LoggingMixin):
            self._notify_enter(trade, order, fill=True, sub_trade=sub_trade)

    def handle_protections(self, pair: str, side: LongShort) -> None:
+        # Lock pair for one candle to prevent immediate rebuys
+        self.strategy.lock_pair(pair, datetime.now(timezone.utc), reason='Auto lock')
        prot_trig = self.protections.stop_per_pair(pair, side=side)
        if prot_trig:
            msg = {'type': RPCMessageType.PROTECTION_TRIGGER, }
freqtrade/leverage/interest.py

@@ -35,9 +35,5 @@ def interest(
    elif exchange_name == "kraken":
        # Rounded based on https://kraken-fees-calculator.github.io/
        return borrowed * rate * (one + FtPrecise(ceil(hours / four)))
-    elif exchange_name == "ftx":
-        # As Explained under #Interest rates section in
-        # https://help.ftx.com/hc/en-us/articles/360053007671-Spot-Margin-Trading-Explainer
-        return borrowed * rate * FtPrecise(ceil(hours)) / twenty_four
    else:
        raise OperationalException(f"Leverage not available on {exchange_name} with freqtrade")
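Note: with the FTX branch removed, the remaining kraken branch charges interest once per opened 4-hour block, plus one up-front period. A quick worked example with illustrative numbers:

    from math import ceil

    borrowed, rate, hours = 100.0, 0.0001, 10     # rate is per 4-hour period
    interest = borrowed * rate * (1 + ceil(hours / 4))
    # ceil(10 / 4) = 3, so interest = 100 * 0.0001 * 4 = 0.04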
freqtrade/misc.py

@@ -10,7 +10,8 @@ from typing import Any, Dict, Iterator, List, Mapping, Union
 from typing.io import IO
 from urllib.parse import urlparse

-import pandas
+import orjson
+import pandas as pd
 import rapidjson

 from freqtrade.constants import DECIMAL_PER_COIN_FALLBACK, DECIMALS_PER_COIN
@@ -256,29 +257,37 @@ def parse_db_uri_for_logging(uri: str):
    return parsed_db_uri.geturl().replace(f':{pwd}@', ':*****@')


-def dataframe_to_json(dataframe: pandas.DataFrame) -> str:
+def dataframe_to_json(dataframe: pd.DataFrame) -> str:
    """
    Serialize a DataFrame for transmission over the wire using JSON
    :param dataframe: A pandas DataFrame
    :returns: A JSON string of the pandas DataFrame
    """
-    return dataframe.to_json(orient='split')
+    # https://github.com/pandas-dev/pandas/issues/24889
+    # https://github.com/pandas-dev/pandas/issues/40443
+    # We need to convert to a dict to avoid mem leak
+    def default(z):
+        if isinstance(z, pd.Timestamp):
+            return z.timestamp() * 1e3
+        raise TypeError
+
+    return str(orjson.dumps(dataframe.to_dict(orient='split'), default=default), 'utf-8')


-def json_to_dataframe(data: str) -> pandas.DataFrame:
+def json_to_dataframe(data: str) -> pd.DataFrame:
    """
    Deserialize JSON into a DataFrame
    :param data: A JSON string
    :returns: A pandas DataFrame from the JSON string
    """
-    dataframe = pandas.read_json(data, orient='split')
+    dataframe = pd.read_json(data, orient='split')
    if 'date' in dataframe.columns:
-        dataframe['date'] = pandas.to_datetime(dataframe['date'], unit='ms', utc=True)
+        dataframe['date'] = pd.to_datetime(dataframe['date'], unit='ms', utc=True)

    return dataframe


-def remove_entry_exit_signals(dataframe: pandas.DataFrame):
+def remove_entry_exit_signals(dataframe: pd.DataFrame):
    """
    Remove Entry and Exit signals from a DataFrame
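Note: a minimal round-trip sketch of the two helpers above, assuming a frame with a 'date' column as produced by freqtrade's data handlers:

    import pandas as pd
    from freqtrade.misc import dataframe_to_json, json_to_dataframe

    df = pd.DataFrame({
        'date': pd.to_datetime(['2022-11-01 00:00', '2022-11-01 00:05'], utc=True),
        'close': [16500.0, 16512.5],
    })
    payload = dataframe_to_json(df)        # Timestamps serialized as epoch milliseconds
    restored = json_to_dataframe(payload)  # 'date' parsed back to UTC datetimes
    assert restored['close'].equals(df['close'])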
freqtrade/optimize/backtesting.py

@@ -134,6 +134,10 @@ class Backtesting:
        self.fee = self.exchange.get_fee(symbol=self.pairlists.whitelist[0])
        self.precision_mode = self.exchange.precisionMode

+        if self.config.get('freqai_backtest_live_models', False):
+            from freqtrade.freqai.utils import get_timerange_backtest_live_models
+            self.config['timerange'] = get_timerange_backtest_live_models(self.config)
+
        self.timerange = TimeRange.parse_timerange(
            None if self.config.get('timerange') is None else str(self.config.get('timerange')))
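Note: this path is exercised when the backtest is started with the corresponding option, in which case the timerange is derived from the already-trained models rather than supplied by the user. The config key matches `self.config.get('freqai_backtest_live_models')` above; the CLI spelling is an assumption based on freqtrade's usual flag naming:

    freqtrade backtesting --strategy MyFreqaiStrategy \
        --freqaimodel LightGBMClassifierMultiTarget \
        --freqai-backtest-live-models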
@@ -162,7 +166,7 @@ class Backtesting:
        PairLocks.use_db = True
        Trade.use_db = True

-    def init_backtest_detail(self):
+    def init_backtest_detail(self) -> None:
        # Load detail timeframe if specified
        self.timeframe_detail = str(self.config.get('timeframe_detail', ''))
        if self.timeframe_detail:
@@ -1282,8 +1286,7 @@ class Backtesting:
    def _get_min_cached_backtest_date(self):
        min_backtest_date = None
        backtest_cache_age = self.config.get('backtest_cache', constants.BACKTEST_CACHE_DEFAULT)
-        if self.timerange.stopts == 0 or datetime.fromtimestamp(
-                self.timerange.stopts, tz=timezone.utc) > datetime.now(tz=timezone.utc):
+        if self.timerange.stopts == 0 or self.timerange.stopdt > datetime.now(tz=timezone.utc):
            logger.warning('Backtest result caching disabled due to use of open-ended timerange.')
        elif backtest_cache_age == 'day':
            min_backtest_date = datetime.now(tz=timezone.utc) - timedelta(days=1)
freqtrade/optimize/hyperopt_tools.py

@@ -17,6 +17,7 @@ from freqtrade.enums import HyperoptState
 from freqtrade.exceptions import OperationalException
 from freqtrade.misc import deep_merge_dicts, round_coin_value, round_dict, safe_value_fallback2
 from freqtrade.optimize.hyperopt_epoch_filters import hyperopt_filter_epochs
+from freqtrade.optimize.optimize_reports import generate_wins_draws_losses


 logger = logging.getLogger(__name__)
@@ -325,8 +326,10 @@ class HyperoptTools():

        # New mode, using backtest result for metrics
        trials['results_metrics.winsdrawslosses'] = trials.apply(
-            lambda x: f"{x['results_metrics.wins']} {x['results_metrics.draws']:>4} "
-                      f"{x['results_metrics.losses']:>4}", axis=1)
+            lambda x: generate_wins_draws_losses(
+                x['results_metrics.wins'], x['results_metrics.draws'],
+                x['results_metrics.losses']
+            ), axis=1)

        trials = trials[['Best', 'current_epoch', 'results_metrics.total_trades',
                         'results_metrics.winsdrawslosses',
@@ -337,7 +340,7 @@ class HyperoptTools():
                         'loss', 'is_initial_point', 'is_random', 'is_best']]

        trials.columns = [
-            'Best', 'Epoch', 'Trades', ' Win Draw Loss', 'Avg profit',
+            'Best', 'Epoch', 'Trades', ' Win Draw Loss Win%', 'Avg profit',
            'Total profit', 'Profit', 'Avg duration', 'max_drawdown', 'max_drawdown_account',
            'max_drawdown_abs', 'Objective', 'is_initial_point', 'is_random', 'is_best'
        ]
@@ -467,9 +470,9 @@ class HyperoptTools():

        base_metrics = ['Best', 'current_epoch', 'results_metrics.total_trades',
                        'results_metrics.profit_mean', 'results_metrics.profit_median',
-                        'results_metrics.profit_total',
-                        'Stake currency',
+                        'results_metrics.profit_total', 'Stake currency',
                        'results_metrics.profit_total_abs', 'results_metrics.holding_avg',
+                        'results_metrics.trade_count_long', 'results_metrics.trade_count_short',
                        'loss', 'is_initial_point', 'is_best']
        perc_multi = 100
|
||||||
trials = trials[base_metrics + param_metrics]
|
trials = trials[base_metrics + param_metrics]
|
||||||
|
|
||||||
base_columns = ['Best', 'Epoch', 'Trades', 'Avg profit', 'Median profit', 'Total profit',
|
base_columns = ['Best', 'Epoch', 'Trades', 'Avg profit', 'Median profit', 'Total profit',
|
||||||
'Stake currency', 'Profit', 'Avg duration', 'Objective',
|
'Stake currency', 'Profit', 'Avg duration',
|
||||||
|
'Trade count long', 'Trade count short',
|
||||||
|
'Objective',
|
||||||
'is_initial_point', 'is_best']
|
'is_initial_point', 'is_best']
|
||||||
param_columns = list(results[0]['params_dict'].keys())
|
param_columns = list(results[0]['params_dict'].keys())
|
||||||
trials.columns = base_columns + param_columns
|
trials.columns = base_columns + param_columns
|
||||||
|
|
|
freqtrade/optimize/optimize_reports.py

@@ -86,7 +86,7 @@ def _get_line_header(first_column: str, stake_currency: str,
            'Win Draw Loss Win%']


-def _generate_wins_draws_losses(wins, draws, losses):
+def generate_wins_draws_losses(wins, draws, losses):
    if wins > 0 and losses == 0:
        wl_ratio = '100'
    elif wins == 0:
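Note: the rename makes this helper public so hyperopt_tools can reuse it; it returns the combined 'Win Draw Loss Win%' table cell. From the two branches visible in this hunk (the general case is truncated here):

    generate_wins_draws_losses(5, 2, 0)  # losses == 0: Win% cell reads '100'
    generate_wins_draws_losses(0, 1, 3)  # wins == 0 branch applies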
@@ -600,7 +600,7 @@ def text_table_bt_results(pair_results: List[Dict[str, Any]], stake_currency: st
    output = [[
        t['key'], t['trades'], t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'],
        t['profit_total_pct'], t['duration_avg'],
-        _generate_wins_draws_losses(t['wins'], t['draws'], t['losses'])
+        generate_wins_draws_losses(t['wins'], t['draws'], t['losses'])
    ] for t in pair_results]
    # Ignore type as floatfmt does allow tuples but mypy does not know that
    return tabulate(output, headers=headers,

@@ -626,7 +626,7 @@ def text_table_exit_reason(exit_reason_stats: List[Dict[str, Any]], stake_curren

    output = [[
        t.get('exit_reason', t.get('sell_reason')), t['trades'],
-        _generate_wins_draws_losses(t['wins'], t['draws'], t['losses']),
+        generate_wins_draws_losses(t['wins'], t['draws'], t['losses']),
        t['profit_mean_pct'], t['profit_sum_pct'],
        round_coin_value(t['profit_total_abs'], stake_currency, False),
        t['profit_total_pct'],

@@ -656,7 +656,7 @@ def text_table_tags(tag_type: str, tag_results: List[Dict[str, Any]], stake_curr
        t['profit_total_abs'],
        t['profit_total_pct'],
        t['duration_avg'],
-        _generate_wins_draws_losses(
+        generate_wins_draws_losses(
            t['wins'],
            t['draws'],
            t['losses'])] for t in tag_results]

@@ -715,7 +715,7 @@ def text_table_strategy(strategy_results, stake_currency: str) -> str:
    output = [[
        t['key'], t['trades'], t['profit_mean_pct'], t['profit_sum_pct'], t['profit_total_abs'],
        t['profit_total_pct'], t['duration_avg'],
-        _generate_wins_draws_losses(t['wins'], t['draws'], t['losses']), drawdown]
+        generate_wins_draws_losses(t['wins'], t['draws'], t['losses']), drawdown]
        for t, drawdown in zip(strategy_results, drawdown)]
    # Ignore type as floatfmt does allow tuples but mypy does not know that
    return tabulate(output, headers=headers,
freqtrade/persistence/migrations.py

@@ -1,5 +1,5 @@
 import logging
-from typing import List
+from typing import List, Optional

 from sqlalchemy import inspect, select, text, tuple_, update

@@ -31,9 +31,9 @@ def get_backup_name(tabs: List[str], backup_prefix: str):
    return table_back_name


-def get_last_sequence_ids(engine, trade_back_name, order_back_name):
-    order_id: int = None
-    trade_id: int = None
+def get_last_sequence_ids(engine, trade_back_name: str, order_back_name: str):
+    order_id: Optional[int] = None
+    trade_id: Optional[int] = None

    if engine.name == 'postgresql':
        with engine.begin() as connection:
freqtrade/persistence/trade_model.py

@@ -90,6 +90,13 @@ class Order(_DECL_BASE):
    def safe_filled(self) -> float:
        return self.filled if self.filled is not None else self.amount or 0.0

+    @property
+    def safe_remaining(self) -> float:
+        return (
+            self.remaining if self.remaining is not None else
+            self.amount - (self.filled or 0.0)
+        )
+
    @property
    def safe_fee_base(self) -> float:
        return self.ft_fee_base or 0.0
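Note: a small sketch of the new `safe_remaining` fallback, using hypothetical order values:

    order.amount, order.filled, order.remaining = 1.0, 0.25, None
    order.safe_remaining  # -> 0.75: amount - filled, since the exchange sent no 'remaining'

    order.remaining = 0.5
    order.safe_remaining  # -> 0.5: the exchange-provided value wins when present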
@@ -667,7 +674,7 @@ class LocalTrade():
                self.close(order.safe_price)
            else:
                self.recalc_trade_from_orders()
-        elif order.ft_order_side == 'stoploss':
+        elif order.ft_order_side == 'stoploss' and order.status not in ('canceled', 'open'):
            self.stoploss_order_id = None
            self.close_rate_requested = self.stop_loss
            self.exit_reason = ExitType.STOPLOSS_ON_EXCHANGE.value
@@ -1144,7 +1151,8 @@ class Trade(_DECL_BASE, LocalTrade):

    id = Column(Integer, primary_key=True)

-    orders = relationship("Order", order_by="Order.id", cascade="all, delete-orphan", lazy="joined")
+    orders = relationship("Order", order_by="Order.id", cascade="all, delete-orphan",
+                          lazy="selectin", innerjoin=True)

    exchange = Column(String(25), nullable=False)
    pair = Column(String(25), nullable=False, index=True)
freqtrade/plugins/pairlist/IPairList.py

@@ -36,7 +36,6 @@ class IPairList(LoggingMixin, ABC):
        self._pairlistconfig = pairlistconfig
        self._pairlist_pos = pairlist_pos
        self.refresh_period = self._pairlistconfig.get('refresh_period', 1800)
-        self._last_refresh = 0
        LoggingMixin.__init__(self, logger, self.refresh_period)

    @property
freqtrade/plugins/pairlist/ShuffleFilter.py

@@ -3,16 +3,20 @@ Shuffle pair list filter
 """
 import logging
 import random
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Literal

 from freqtrade.constants import Config
 from freqtrade.enums import RunMode
+from freqtrade.exchange import timeframe_to_seconds
 from freqtrade.exchange.types import Tickers
 from freqtrade.plugins.pairlist.IPairList import IPairList
+from freqtrade.util.periodic_cache import PeriodicCache


 logger = logging.getLogger(__name__)

+ShuffleValues = Literal['candle', 'iteration']
+

 class ShuffleFilter(IPairList):
@@ -31,6 +35,9 @@ class ShuffleFilter(IPairList):
            logger.info(f"Backtesting mode detected, applying seed value: {self._seed}")

        self._random = random.Random(self._seed)
+        self._shuffle_freq: ShuffleValues = pairlistconfig.get('shuffle_frequency', 'candle')
+        self.__pairlist_cache = PeriodicCache(
+            maxsize=1000, ttl=timeframe_to_seconds(self._config['timeframe']))

    @property
    def needstickers(self) -> bool:
@@ -45,7 +52,7 @@ class ShuffleFilter(IPairList):
        """
        Short whitelist method description - used for startup-messages
        """
-        return (f"{self.name} - Shuffling pairs" +
+        return (f"{self.name} - Shuffling pairs every {self._shuffle_freq}" +
                (f", seed = {self._seed}." if self._seed is not None else "."))

    def filter_pairlist(self, pairlist: List[str], tickers: Tickers) -> List[str]:
@@ -56,7 +63,13 @@ class ShuffleFilter(IPairList):
        :param tickers: Tickers (from exchange.get_tickers). May be cached.
        :return: new whitelist
        """
+        pairlist_bef = tuple(pairlist)
+        pairlist_new = self.__pairlist_cache.get(pairlist_bef)
+        if pairlist_new and self._shuffle_freq == 'candle':
+            # Use cached pairlist.
+            return pairlist_new
        # Shuffle is done inplace
        self._random.shuffle(pairlist)
+        self.__pairlist_cache[pairlist_bef] = pairlist

        return pairlist
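Note: a hedged pairlist configuration using the new option. With 'candle', the cache above keeps one shuffle per candle; with 'iteration', the list is reshuffled every bot loop (values illustrative):

    "pairlists": [
        {"method": "VolumePairList", "number_assets": 20, "sort_key": "quoteVolume"},
        {"method": "ShuffleFilter", "shuffle_frequency": "candle", "seed": 42}
    ]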
freqtrade/rpc/api_server/api_ws.py

@@ -84,11 +84,8 @@ async def _process_consumer_request(
        # Limit the amount of candles per dataframe to 'limit' or 1500
        limit = max(data.get('limit', 1500), 1500)

-        # They requested the full historical analyzed dataframes
-        analyzed_df = rpc._ws_request_analyzed_df(limit)
-
-        # For every dataframe, send as a separate message
-        for _, message in analyzed_df.items():
+        # For every pair in the generator, send a separate message
+        for message in rpc._ws_request_analyzed_df(limit):
            response = WSAnalyzedDFMessage(data=message)
            await channel_manager.send_direct(channel, response.dict(exclude_none=True))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.info(f"Consumer connection failed - {channel}: {e}")
|
logger.info(f"Consumer connection failed - {channel}: {e}")
|
||||||
logger.debug(e, exc_info=e)
|
logger.debug(e, exc_info=e)
|
||||||
finally:
|
|
||||||
await channel_manager.on_disconnect(ws)
|
|
||||||
|
|
||||||
else:
|
|
||||||
if channel:
|
|
||||||
await channel_manager.on_disconnect(ws)
|
|
||||||
await ws.close()
|
|
||||||
|
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
# WebSocket was closed
|
# WebSocket was closed
|
||||||
|
@ -144,4 +134,5 @@ async def message_endpoint(
|
||||||
# Log tracebacks to keep track of what errors are happening
|
# Log tracebacks to keep track of what errors are happening
|
||||||
logger.exception(e)
|
logger.exception(e)
|
||||||
finally:
|
finally:
|
||||||
await channel_manager.on_disconnect(ws)
|
if channel:
|
||||||
|
await channel_manager.on_disconnect(ws)
|
||||||
|
|
|
freqtrade/rpc/api_server/webserver.py

@@ -2,7 +2,7 @@ import asyncio
 import logging
 from ipaddress import IPv4Address
 from threading import Thread
-from typing import Any, Dict
+from typing import Any, Dict, Optional

 import orjson
 import uvicorn

@@ -51,9 +51,9 @@ class ApiServer(RPCHandler):
    # Exchange - only available in webserver mode.
    _exchange = None
    # websocket message queue stuff
-    _ws_channel_manager = None
+    _ws_channel_manager: ChannelManager
    _ws_thread = None
-    _ws_loop = None
+    _ws_loop: Optional[asyncio.AbstractEventLoop] = None

    def __new__(cls, *args, **kwargs):
        """

@@ -71,7 +71,7 @@ class ApiServer(RPCHandler):
            return
        self._standalone: bool = standalone
        self._server = None
-        self._ws_queue = None
+        self._ws_queue: Optional[ThreadedQueue] = None
        self._ws_background_task = None

        ApiServer.__initialized = True
@@ -186,7 +186,7 @@ class ApiServer(RPCHandler):
        self._ws_background_task = asyncio.run_coroutine_threadsafe(
            self._broadcast_queue_data(), loop=self._ws_loop)

-    async def _broadcast_queue_data(self):
+    async def _broadcast_queue_data(self) -> None:
        # Instantiate the queue in this coroutine so it's attached to our loop
        self._ws_queue = ThreadedQueue()
        async_queue = self._ws_queue.async_q
@@ -194,9 +194,13 @@ class ApiServer(RPCHandler):
        try:
            while True:
                logger.debug("Getting queue messages...")
+                if (qsize := async_queue.qsize()) > 20:
+                    # If the queue becomes too big for too long, this may indicate a problem.
+                    logger.warning(f"Queue size now {qsize}")
                # Get data from queue
                message: WSMessageSchemaType = await async_queue.get()
                logger.debug(f"Found message of type: {message.get('type')}")
+                async_queue.task_done()
                # Broadcast it
                await self._ws_channel_manager.broadcast(message)
        except asyncio.CancelledError:
@@ -209,7 +213,11 @@ class ApiServer(RPCHandler):
        finally:
            # Disconnect channels and stop the loop on cancel
            await self._ws_channel_manager.disconnect_all()
-            self._ws_loop.stop()
+            if self._ws_loop:
+                self._ws_loop.stop()
+            # Avoid adding more items to the queue if they aren't
+            # going to get broadcasted.
+            self._ws_queue = None

    def start_api(self):
        """
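Note: `ThreadedQueue` bridges the bot's worker thread and the API server's event loop; the added `task_done()` pairs with each `get()` so the queue's `join()` accounting stays correct. A plausible usage sketch, assuming the janus library (which this module appears to wrap):

    import asyncio
    import janus

    async def main():
        queue = janus.Queue()              # must be created inside a running loop
        queue.sync_q.put({"type": "x"})    # thread-side producer API
        msg = await queue.async_q.get()    # async-side consumer
        queue.async_q.task_done()          # mark processed, mirroring the hunk above
        print(msg)

    asyncio.run(main())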
freqtrade/rpc/api_server/ws/channel.py

@@ -1,5 +1,6 @@
 import asyncio
 import logging
+import time
 from threading import RLock
 from typing import Any, Dict, List, Optional, Type, Union
 from uuid import uuid4

@@ -34,8 +35,6 @@ class WebSocketChannel:

        # The WebSocket object
        self._websocket = WebSocketProxy(websocket)
-        # The Serializing class for the WebSocket object
-        self._serializer_cls = serializer_cls

        self.drain_timeout = drain_timeout
        self.throttle = throttle
@@ -46,10 +45,10 @@ class WebSocketChannel:
        self._relay_task = asyncio.create_task(self.relay())

        # Internal event to signify a closed websocket
-        self._closed = False
+        self._closed = asyncio.Event()

        # Wrap the WebSocket in the Serializing class
-        self._wrapped_ws = self._serializer_cls(self._websocket)
+        self._wrapped_ws = serializer_cls(self._websocket)

    def __repr__(self):
        return f"WebSocketChannel({self.channel_id}, {self.remote_addr})"
@@ -73,13 +72,27 @@ class WebSocketChannel:
        Add the data to the queue to be sent.
        :returns: True if data added to queue, False otherwise
        """
-        try:
-            await asyncio.wait_for(
-                self.queue.put(data),
-                timeout=self.drain_timeout
-            )
+        # This block only runs if the queue is full; it will wait
+        # up to self.drain_timeout for the relay to drain the outgoing queue.
+        # We can't use asyncio.wait_for here because the queue may have been
+        # created with a different eventloop.
+        if not self.is_closed():
+            start = time.time()
+            while self.queue.full():
+                await asyncio.sleep(1)
+                if (time.time() - start) > self.drain_timeout:
+                    return False
+
+            # If for some reason the queue is still full, just return False
+            try:
+                self.queue.put_nowait(data)
+            except asyncio.QueueFull:
+                return False
+
+            # If we got here everything is ok
            return True
-        except asyncio.TimeoutError:
+        else:
            return False

    async def recv(self):
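Note: the rewrite above avoids `asyncio.wait_for(self.queue.put(...))` because awaiting a queue bound to a different event loop raises a "Future attached to a different loop" error; polling `queue.full()` plus the non-blocking `put_nowait()` stays loop-agnostic. Callers can then treat `send()` as best-effort (a usage sketch):

    # Best-effort send: drop the message if the peer can't drain its queue in time.
    if not await channel.send(message):
        logger.info(f"Channel {channel} backed up - message dropped")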
@@ -99,14 +112,19 @@ class WebSocketChannel:
        Close the WebSocketChannel
        """

-        self._closed = True
+        self._closed.set()
        self._relay_task.cancel()

+        try:
+            await self.raw_websocket.close()
+        except Exception:
+            pass
+
    def is_closed(self) -> bool:
        """
        Closed flag
        """
-        return self._closed
+        return self._closed.is_set()

    def set_subscriptions(self, subscriptions: List[str] = []) -> None:
        """
@@ -129,7 +147,7 @@ class WebSocketChannel:
        Relay messages from the channel's queue and send them out. This is started
        as a task.
        """
-        while True:
+        while not self._closed.is_set():
            message = await self.queue.get()
            try:
                await self._send(message)
freqtrade/rpc/external_message_consumer.py

@@ -31,6 +31,7 @@ class Producer(TypedDict):
    name: str
    host: str
    port: int
+    secure: bool
    ws_token: str
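Note: with the new `secure` flag, a consumer config entry might look like this. The structure follows the `Producer` TypedDict above; host and token values are illustrative:

    "external_message_consumer": {
        "enabled": true,
        "producers": [
            {
                "name": "default",
                "host": "producer.example.com",
                "port": 8080,
                "secure": true,
                "ws_token": "secret-ws-token"
            }
        ]
    }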
@@ -180,7 +181,8 @@ class ExternalMessageConsumer:
                host, port = producer['host'], producer['port']
                token = producer['ws_token']
                name = producer['name']
-                ws_url = f"ws://{host}:{port}/api/v1/message/ws?token={token}"
+                scheme = 'wss' if producer.get('secure', False) else 'ws'
+                ws_url = f"{scheme}://{host}:{port}/api/v1/message/ws?token={token}"

                # This will raise InvalidURI if the url is bad
                async with websockets.connect(
@@ -264,10 +266,10 @@ class ExternalMessageConsumer:
                 # We haven't received data yet. Check the connection and continue.
                 try:
                     # ping
-                    ping = await channel.ping()
+                    pong = await channel.ping()
+                    latency = (await asyncio.wait_for(pong, timeout=self.ping_timeout) * 1000)
 
-                    await asyncio.wait_for(ping, timeout=self.ping_timeout)
-                    logger.debug(f"Connection to {channel} still alive...")
+                    logger.info(f"Connection to {channel} still alive, latency: {latency}ms")
 
                     continue
                 except (websockets.exceptions.ConnectionClosed):
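With the websockets release pinned further down (10.4), `ping()` returns a pong waiter and awaiting that waiter yields the round-trip time in seconds, hence the `* 1000` to report milliseconds. A standalone sketch of the keepalive check under that assumption:

import asyncio
import logging

import websockets

logger = logging.getLogger(__name__)


async def check_alive(ws, ping_timeout: float = 2.0) -> bool:
    """Ping the connection and log the round trip in ms (sketch)."""
    try:
        pong = await ws.ping()
        # The pong waiter resolves to the latency in seconds (websockets >= 10.3).
        latency = await asyncio.wait_for(pong, timeout=ping_timeout) * 1000
        logger.info(f"Connection still alive, latency: {latency}ms")
        return True
    except (asyncio.TimeoutError, websockets.exceptions.ConnectionClosed):
        return False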
@@ -276,7 +278,7 @@ class ExternalMessageConsumer:
                     await asyncio.sleep(self.sleep_time)
                     break
                 except Exception as e:
-                    logger.warning(f"Ping error {channel} - retrying in {self.sleep_time}s")
+                    logger.warning(f"Ping error {channel} - {e} - retrying in {self.sleep_time}s")
                     logger.debug(e, exc_info=e)
                     await asyncio.sleep(self.sleep_time)
@@ -5,7 +5,7 @@ import logging
 from abc import abstractmethod
 from datetime import date, datetime, timedelta, timezone
 from math import isnan
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, Generator, List, Optional, Tuple, Union
 
 import arrow
 import psutil
@@ -218,9 +218,10 @@ class RPC:
                 stoploss_current_dist_pct=round(stoploss_current_dist_ratio * 100, 2),
                 stoploss_entry_dist=stoploss_entry_dist,
                 stoploss_entry_dist_ratio=round(stoploss_entry_dist_ratio, 8),
-                open_order='({} {} rem={:.8f})'.format(
-                    order.order_type, order.side, order.remaining
-                ) if order else None,
+                open_order=(
+                    f'({order.order_type} {order.side} rem={order.safe_remaining:.8f})' if
+                    order else None
+                ),
             ))
             results.append(trade_dict)
         return results
@@ -773,6 +774,9 @@ class RPC:
             is_short = trade.is_short
             if not self._freqtrade.strategy.position_adjustment_enable:
                 raise RPCException(f'position for {pair} already open - id: {trade.id}')
+            if trade.open_order_id is not None:
+                raise RPCException(f'position for {pair} already open - id: {trade.id} '
+                                   f'and has open order {trade.open_order_id}')
         else:
             if Trade.get_open_trade_count() >= self._config['max_open_trades']:
                 raise RPCException("Maximum number of trades is reached.")
@@ -785,17 +789,18 @@ class RPC:
         if not order_type:
             order_type = self._freqtrade.strategy.order_types.get(
                 'force_entry', self._freqtrade.strategy.order_types['entry'])
-        if self._freqtrade.execute_entry(pair, stake_amount, price,
-                                         ordertype=order_type, trade=trade,
-                                         is_short=is_short,
-                                         enter_tag=enter_tag,
-                                         leverage_=leverage,
-                                         ):
-            Trade.commit()
-            trade = Trade.get_trades([Trade.is_open.is_(True), Trade.pair == pair]).first()
-            return trade
-        else:
-            raise RPCException(f'Failed to enter position for {pair}.')
+        with self._freqtrade._exit_lock:
+            if self._freqtrade.execute_entry(pair, stake_amount, price,
+                                             ordertype=order_type, trade=trade,
+                                             is_short=is_short,
+                                             enter_tag=enter_tag,
+                                             leverage_=leverage,
+                                             ):
+                Trade.commit()
+                trade = Trade.get_trades([Trade.is_open.is_(True), Trade.pair == pair]).first()
+                return trade
+            else:
+                raise RPCException(f'Failed to enter position for {pair}.')
 
     def _rpc_delete(self, trade_id: int) -> Dict[str, Union[str, int]]:
         """
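Wrapping `execute_entry` in the bot's `_exit_lock` serializes a forced entry with exit processing, so the two cannot touch the same trade concurrently. A minimal sketch of that guard, assuming a `threading.Lock` like the bot's (`_exit_lock` below is a stand-in, not the full FreqtradeBot):

import threading

_exit_lock = threading.Lock()  # stand-in for FreqtradeBot._exit_lock


def force_entry(execute_entry):
    """Serialize the forced entry with exit handling, as in the hunk above."""
    with _exit_lock:
        # Anything that exits trades must also take _exit_lock, so the entry
        # and a concurrent exit can never interleave.
        return execute_entry()


print(force_entry(lambda: True))  # True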
@@ -1063,23 +1068,20 @@ class RPC:
             self,
             pairlist: List[str],
             limit: Optional[int]
-    ) -> Dict[str, Any]:
+    ) -> Generator[Dict[str, Any], None, None]:
         """ Get the analysed dataframes of each pair in the pairlist """
         timeframe = self._freqtrade.config['timeframe']
         candle_type = self._freqtrade.config.get('candle_type_def', CandleType.SPOT)
-        _data = {}
 
         for pair in pairlist:
             dataframe, last_analyzed = self.__rpc_analysed_dataframe_raw(pair, timeframe, limit)
 
-            _data[pair] = {
+            yield {
                 "key": (pair, timeframe, candle_type),
                 "df": dataframe,
                 "la": last_analyzed
             }
 
-        return _data
-
     def _ws_request_analyzed_df(self, limit: Optional[int]):
         """ Historical Analyzed Dataframes for WebSocket """
         whitelist = self._freqtrade.active_pair_whitelist
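Returning a generator instead of a pre-built dict means only one pair's analysed dataframe is materialized per iteration, which matters when the WebSocket sends large initial dataframes for many pairs. A minimal sketch of the pattern (placeholder loader, not the RPC class):

from typing import Any, Dict, Generator, List


def analysed_frames(pairlist: List[str]) -> Generator[Dict[str, Any], None, None]:
    """Yield one payload per pair instead of accumulating everything in a dict."""
    for pair in pairlist:
        dataframe = f"<analysed df for {pair}>"  # stands in for the real lookup
        yield {"key": pair, "df": dataframe}


# Consumers iterate lazily, so only one dataframe is held at a time:
for payload in analysed_frames(["BTC/USDT", "ETH/USDT"]):
    print(payload["key"])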
@@ -1061,7 +1061,8 @@ class Telegram(RPCHandler):
         try:
             self._rpc._rpc_force_entry(pair, price, order_side=order_side)
         except RPCException as e:
-            self._send_msg(str(e))
+            logger.exception("Forcebuy error!")
+            self._send_msg(str(e), ParseMode.HTML)
 
     def _force_enter_inline(self, update: Update, _: CallbackContext) -> None:
         if update.callback_query:
@@ -110,8 +110,6 @@ class FreqaiExampleHybridStrategy(IStrategy):
         :param informative: the dataframe associated with the informative pair
         """
 
-        coin = pair.split('/')[0]
-
         if informative is None:
             informative = self.dp.get_pair_dataframe(pair, tf)
 
@@ -119,13 +117,13 @@ class FreqaiExampleHybridStrategy(IStrategy):
         for t in self.freqai_info["feature_parameters"]["indicator_periods_candles"]:
 
             t = int(t)
-            informative[f"%-{coin}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t)
-            informative[f"%-{coin}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t)
-            informative[f"%-{coin}adx-period_{t}"] = ta.ADX(informative, timeperiod=t)
-            informative[f"%-{coin}sma-period_{t}"] = ta.SMA(informative, timeperiod=t)
-            informative[f"%-{coin}ema-period_{t}"] = ta.EMA(informative, timeperiod=t)
-            informative[f"%-{coin}roc-period_{t}"] = ta.ROC(informative, timeperiod=t)
-            informative[f"%-{coin}relative_volume-period_{t}"] = (
+            informative[f"%-{pair}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t)
+            informative[f"%-{pair}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t)
+            informative[f"%-{pair}adx-period_{t}"] = ta.ADX(informative, timeperiod=t)
+            informative[f"%-{pair}sma-period_{t}"] = ta.SMA(informative, timeperiod=t)
+            informative[f"%-{pair}ema-period_{t}"] = ta.EMA(informative, timeperiod=t)
+            informative[f"%-{pair}roc-period_{t}"] = ta.ROC(informative, timeperiod=t)
+            informative[f"%-{pair}relative_volume-period_{t}"] = (
                 informative["volume"] / informative["volume"].rolling(t).mean()
             )
@@ -53,7 +53,7 @@ class FreqaiExampleStrategy(IStrategy):
         """
         Function designed to automatically generate, name and merge features
         from user indicated timeframes in the configuration file. User controls the indicators
-        passed to the training/prediction by prepending indicators with `'%-' + coin `
+        passed to the training/prediction by prepending indicators with `f'%-{pair}`
         (see convention below). I.e. user should not prepend any supporting metrics
         (e.g. bb_lowerband below) with % unless they explicitly want to pass that metric to the
         model.
@@ -63,8 +63,6 @@ class FreqaiExampleStrategy(IStrategy):
         :param informative: the dataframe associated with the informative pair
         """
 
-        coin = pair.split('/')[0]
-
         if informative is None:
             informative = self.dp.get_pair_dataframe(pair, tf)
 
@@ -72,36 +70,36 @@ class FreqaiExampleStrategy(IStrategy):
         for t in self.freqai_info["feature_parameters"]["indicator_periods_candles"]:
 
             t = int(t)
-            informative[f"%-{coin}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t)
-            informative[f"%-{coin}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t)
-            informative[f"%-{coin}adx-period_{t}"] = ta.ADX(informative, timeperiod=t)
-            informative[f"%-{coin}sma-period_{t}"] = ta.SMA(informative, timeperiod=t)
-            informative[f"%-{coin}ema-period_{t}"] = ta.EMA(informative, timeperiod=t)
+            informative[f"%-{pair}rsi-period_{t}"] = ta.RSI(informative, timeperiod=t)
+            informative[f"%-{pair}mfi-period_{t}"] = ta.MFI(informative, timeperiod=t)
+            informative[f"%-{pair}adx-period_{t}"] = ta.ADX(informative, timeperiod=t)
+            informative[f"%-{pair}sma-period_{t}"] = ta.SMA(informative, timeperiod=t)
+            informative[f"%-{pair}ema-period_{t}"] = ta.EMA(informative, timeperiod=t)
 
             bollinger = qtpylib.bollinger_bands(
                 qtpylib.typical_price(informative), window=t, stds=2.2
             )
-            informative[f"{coin}bb_lowerband-period_{t}"] = bollinger["lower"]
-            informative[f"{coin}bb_middleband-period_{t}"] = bollinger["mid"]
-            informative[f"{coin}bb_upperband-period_{t}"] = bollinger["upper"]
+            informative[f"{pair}bb_lowerband-period_{t}"] = bollinger["lower"]
+            informative[f"{pair}bb_middleband-period_{t}"] = bollinger["mid"]
+            informative[f"{pair}bb_upperband-period_{t}"] = bollinger["upper"]
 
-            informative[f"%-{coin}bb_width-period_{t}"] = (
-                informative[f"{coin}bb_upperband-period_{t}"]
-                - informative[f"{coin}bb_lowerband-period_{t}"]
-            ) / informative[f"{coin}bb_middleband-period_{t}"]
-            informative[f"%-{coin}close-bb_lower-period_{t}"] = (
-                informative["close"] / informative[f"{coin}bb_lowerband-period_{t}"]
-            )
+            informative[f"%-{pair}bb_width-period_{t}"] = (
+                informative[f"{pair}bb_upperband-period_{t}"]
+                - informative[f"{pair}bb_lowerband-period_{t}"]
+            ) / informative[f"{pair}bb_middleband-period_{t}"]
+            informative[f"%-{pair}close-bb_lower-period_{t}"] = (
+                informative["close"] / informative[f"{pair}bb_lowerband-period_{t}"]
+            )
 
-            informative[f"%-{coin}roc-period_{t}"] = ta.ROC(informative, timeperiod=t)
+            informative[f"%-{pair}roc-period_{t}"] = ta.ROC(informative, timeperiod=t)
 
-            informative[f"%-{coin}relative_volume-period_{t}"] = (
+            informative[f"%-{pair}relative_volume-period_{t}"] = (
                 informative["volume"] / informative["volume"].rolling(t).mean()
             )
 
-        informative[f"%-{coin}pct-change"] = informative["close"].pct_change()
-        informative[f"%-{coin}raw_volume"] = informative["volume"]
-        informative[f"%-{coin}raw_price"] = informative["close"]
+        informative[f"%-{pair}pct-change"] = informative["close"].pct_change()
+        informative[f"%-{pair}raw_volume"] = informative["volume"]
+        informative[f"%-{pair}raw_price"] = informative["close"]
 
         indicators = [col for col in informative if col.startswith("%")]
         # This loop duplicates and shifts all indicators to add a sense of recency to data
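Both example strategies drop the `coin = pair.split('/')[0]` prefix in favour of the full pair name, so feature columns are keyed by the complete pair rather than just the base coin (avoiding collisions between e.g. BTC/USDT and BTC/BUSD). A toy illustration of the `%-` naming convention, with a small pandas frame standing in for the informative dataframe:

import pandas as pd

# Toy frame standing in for an informative OHLCV dataframe.
informative = pd.DataFrame({
    "close": [1.0, 1.1, 1.2, 1.15],
    "volume": [10.0, 12.0, 9.0, 11.0],
})
pair = "BTC/USDT"

# Columns prefixed with "%-" are picked up as model features by FreqAI;
# unprefixed helper columns (like the raw bollinger bands above) are not.
informative[f"%-{pair}pct-change"] = informative["close"].pct_change()
informative[f"%-{pair}raw_volume"] = informative["volume"]

features = [col for col in informative if col.startswith("%")]
print(features)  # ['%-BTC/USDTpct-change', '%-BTC/USDTraw_volume']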
@@ -150,14 +150,20 @@ class Worker:
         if timeframe:
             next_tf = timeframe_to_next_date(timeframe)
             # Maximum throttling should be until new candle arrives
-            # Offset of 0.2s is added to ensure a new candle has been issued.
-            next_tf_with_offset = next_tf.timestamp() - time.time() + timeframe_offset
+            # Offset is added to ensure a new candle has been issued.
+            next_tft = next_tf.timestamp() - time.time()
+            next_tf_with_offset = next_tft + timeframe_offset
+            if next_tft < sleep_duration and sleep_duration < next_tf_with_offset:
+                # Avoid hitting a new loop between the new candle and the candle with offset
+                sleep_duration = next_tf_with_offset
             sleep_duration = min(sleep_duration, next_tf_with_offset)
         sleep_duration = max(sleep_duration, 0.0)
         # next_iter = datetime.now(timezone.utc) + timedelta(seconds=sleep_duration)
 
         logger.debug(f"Throttling with '{func.__name__}()': sleep for {sleep_duration:.2f} s, "
-                     f"last iteration took {time_passed:.2f} s.")
+                     f"last iteration took {time_passed:.2f} s."
+                     # f"next: {next_iter}"
+                     )
         self._sleep(sleep_duration)
         return result
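The new guard keeps the worker from waking in the window between a candle boundary and the boundary-plus-offset, where the new candle may not have been issued yet. A numeric illustration of the arithmetic, under assumed values (plain Python, not the Worker API):

# The next candle opens in 4.9s, the safety offset is 0.3s, and the caller
# asked to sleep 5.0s. Without the guard the worker would wake at +5.0s,
# inside the (4.9s, 5.2s) window where the new candle may not exist yet.
sleep_duration = 5.0
next_tft = 4.9                                      # seconds to the candle boundary
timeframe_offset = 0.3
next_tf_with_offset = next_tft + timeframe_offset   # 5.2

if next_tft < sleep_duration and sleep_duration < next_tf_with_offset:
    # Push the wake-up past the offset so the new candle exists.
    sleep_duration = next_tf_with_offset

sleep_duration = min(sleep_duration, next_tf_with_offset)  # never oversleep the candle
sleep_duration = max(sleep_duration, 0.0)
print(sleep_duration)  # 5.2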
@@ -30,6 +30,8 @@ asyncio_mode = "auto"
 
 [tool.mypy]
 ignore_missing_imports = true
+namespace_packages = false
+implicit_optional = true
 warn_unused_ignores = true
 exclude = [
     '^build_helpers\.py$'
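mypy 0.991 (pinned below) turns `no_implicit_optional` on by default, so `implicit_optional = true` preserves the previous behaviour for signatures like the hypothetical one below (an assumed example, not freqtrade code):

# With implicit_optional = true, mypy keeps the old PEP 484 reading:
# a parameter with a None default is implicitly Optional.
def fetch_trades(limit: int = None):  # read as Optional[int]; mypy 0.991
    return limit                      # would reject this by default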
@@ -8,23 +8,25 @@
 coveralls==3.3.1
 flake8==5.0.4
 flake8-tidy-imports==4.8.0
-mypy==0.982
+mypy==0.991
 pre-commit==2.20.0
-pytest==7.1.3
-pytest-asyncio==0.20.1
+pytest==7.2.0
+pytest-asyncio==0.20.2
 pytest-cov==4.0.0
 pytest-mock==3.10.0
 pytest-random-order==1.0.4
 isort==5.10.1
 # For datetime mocking
 time-machine==2.8.2
+# fastapi testing
+httpx==0.23.1
 
 # Convert jupyter notebooks to markdown documents
-nbconvert==7.2.1
+nbconvert==7.2.5
 
 # mypy types
 types-cachetools==5.2.1
 types-filelock==3.2.7
-types-requests==2.28.11.2
+types-requests==2.28.11.5
 types-tabulate==0.9.0.0
-types-python-dateutil==2.8.19.2
+types-python-dateutil==2.8.19.4
@@ -1,10 +1,11 @@
 # Include all requirements to run the bot.
 -r requirements.txt
+-r requirements-plot.txt
 
 # Required for freqai
-scikit-learn==1.1.2
+scikit-learn==1.1.3
 joblib==1.2.0
-catboost==1.1; platform_machine != 'aarch64'
+catboost==1.1.1; platform_machine != 'aarch64'
 lightgbm==3.3.3
-xgboost==1.6.2
-tensorboard==2.10.1
+xgboost==1.7.1
+tensorboard==2.11.0
@@ -3,7 +3,7 @@
 
 # Required for hyperopt
 scipy==1.9.3
-scikit-learn==1.1.2
+scikit-learn==1.1.3
 scikit-optimize==0.9.0
 filelock==3.8.0
-progressbar2==4.1.1
+progressbar2==4.2.0
@@ -1,4 +1,4 @@
 # Include all requirements to run the bot.
 -r requirements.txt
 
-plotly==5.10.0
+plotly==5.11.0
@@ -1,29 +1,28 @@
-numpy==1.23.4
-pandas==1.5.1; platform_machine != 'armv7l'
-# Piwheels doesn't have 1.5.0 yet.
-pandas==1.4.3; platform_machine == 'armv7l'
+numpy==1.23.5
+pandas==1.5.1
 pandas-ta==0.3.14b
 
-ccxt==2.0.58
+ccxt==2.1.96
 # Pin cryptography for now due to rust build errors with piwheels
-cryptography==38.0.1
+cryptography==38.0.1; platform_machine == 'armv7l'
+cryptography==38.0.3; platform_machine != 'armv7l'
 aiohttp==3.8.3
-SQLAlchemy==1.4.42
+SQLAlchemy==1.4.44
 python-telegram-bot==13.14
 arrow==1.2.3
 cachetools==4.2.2
 requests==2.28.1
 urllib3==1.26.12
-jsonschema==4.16.0
+jsonschema==4.17.0
 TA-Lib==0.4.25
 technical==1.3.0
 tabulate==0.9.0
-pycoingecko==3.0.0
+pycoingecko==3.1.0
 jinja2==3.1.2
 tables==3.7.0
 blosc==1.10.6
 joblib==1.2.0
-pyarrow==9.0.0; platform_machine != 'armv7l'
+pyarrow==10.0.0; platform_machine != 'armv7l'
 
 # find first, C search in arrays
 py_find_1st==1.1.5
@@ -31,24 +30,24 @@ py_find_1st==1.1.5
 # Load ticker files 30% faster
 python-rapidjson==1.9
 # Properly format api responses
-orjson==3.8.0
+orjson==3.8.2
 
 # Notify systemd
 sdnotify==0.3.2
 
 # API Server
-fastapi==0.85.1
+fastapi==0.87.0
 pydantic==1.10.2
-uvicorn==0.18.3
+uvicorn==0.20.0
 pyjwt==2.6.0
 aiofiles==22.1.0
-psutil==5.9.2
+psutil==5.9.4
 
 # Support for colorized terminal output
-colorama==0.4.5
+colorama==0.4.6
 # Building config files interactively
 questionary==1.10.0
-prompt-toolkit==3.0.31
+prompt-toolkit==3.0.32
 # Extensions to datetime library
 python-dateutil==2.8.2
 
@@ -56,5 +55,5 @@ python-dateutil==2.8.2
 schedule==1.1.0
 
 #WS Messages
-websockets==10.3
+websockets==10.4
 janus==1.0.0
@@ -18,7 +18,6 @@ import orjson
 import pandas
 import rapidjson
 import websockets
-from dateutil.relativedelta import relativedelta
 
 
 logger = logging.getLogger("WebSocketClient")
 
@@ -28,7 +27,7 @@ logger = logging.getLogger("WebSocketClient")
 
 def setup_logging(filename: str):
     logging.basicConfig(
-        level=logging.INFO,
+        level=logging.DEBUG,
         format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
         handlers=[
             logging.FileHandler(filename),
@@ -75,16 +74,15 @@ def load_config(configfile):
 
 def readable_timedelta(delta):
     """
-    Convert a dateutil.relativedelta to a readable format
+    Convert a millisecond delta to a readable format
 
-    :param delta: A dateutil.relativedelta
+    :param delta: A delta between two timestamps in milliseconds
     :returns: The readable time difference string
     """
-    attrs = ['years', 'months', 'days', 'hours', 'minutes', 'seconds', 'microseconds']
-    return ", ".join([
-        '%d %s' % (getattr(delta, attr), attr if getattr(delta, attr) > 0 else attr[:-1])
-        for attr in attrs if getattr(delta, attr)
-    ])
+    seconds, milliseconds = divmod(delta, 1000)
+    minutes, seconds = divmod(seconds, 60)
+
+    return f"{int(minutes)}:{int(seconds)}.{int(milliseconds)}"
 
 # ----------------------------------------------------------------------------
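The rewritten helper is plain integer arithmetic; a quick check of the divmod chain (83 456 ms is 1 min 23.456 s):

def readable_timedelta(delta):
    # delta is the difference between two timestamps, in milliseconds
    seconds, milliseconds = divmod(delta, 1000)
    minutes, seconds = divmod(seconds, 60)
    return f"{int(minutes)}:{int(seconds)}.{int(milliseconds)}"

print(readable_timedelta(83456))  # -> "1:23.456"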
@@ -170,8 +168,8 @@ class ClientProtocol:
 
     def _calculate_time_difference(self):
         old_last_received_at = self._LAST_RECEIVED_AT
-        self._LAST_RECEIVED_AT = time.time() * 1e6
-        time_delta = relativedelta(microseconds=(self._LAST_RECEIVED_AT - old_last_received_at))
+        self._LAST_RECEIVED_AT = time.time() * 1e3
+        time_delta = self._LAST_RECEIVED_AT - old_last_received_at
 
         return readable_timedelta(time_delta)
@@ -201,6 +199,7 @@ async def create_client(
     host,
     port,
     token,
+    scheme='ws',
     name='default',
     protocol=ClientProtocol(),
     sleep_time=10,
@@ -213,13 +212,14 @@ async def create_client(
     :param host: The host
     :param port: The port
     :param token: The websocket auth token
+    :param scheme: `ws` for most connections, `wss` for ssl
     :param name: The name of the producer
    :param **kwargs: Any extra kwargs passed to websockets.connect
     """
 
     while 1:
         try:
-            websocket_url = f"ws://{host}:{port}/api/v1/message/ws?token={token}"
+            websocket_url = f"{scheme}://{host}:{port}/api/v1/message/ws?token={token}"
             logger.info(f"Attempting to connect to {name} @ {host}:{port}")
 
             async with websockets.connect(websocket_url, **kwargs) as ws:
@@ -242,12 +242,10 @@ async def create_client(
             ):
                 # Try pinging
                 try:
-                    pong = ws.ping()
-                    await asyncio.wait_for(
-                        pong,
-                        timeout=ping_timeout
-                    )
-                    logger.info("Connection still alive...")
+                    pong = await ws.ping()
+                    latency = (await asyncio.wait_for(pong, timeout=ping_timeout) * 1000)
+
+                    logger.info(f"Connection still alive, latency: {latency}ms")
 
                     continue
@@ -272,6 +270,7 @@ async def create_client(
             websockets.exceptions.ConnectionClosedError,
             websockets.exceptions.ConnectionClosedOK
         ):
+            logger.info("Connection was closed")
             # Just keep trying to connect again indefinitely
             await asyncio.sleep(sleep_time)
@@ -307,6 +306,7 @@ async def _main(args):
             producer['host'],
             producer['port'],
             producer['ws_token'],
+            'wss' if producer.get('secure', False) else 'ws',
             producer['name'],
             sleep_time=sleep_time,
             ping_timeout=ping_timeout,
2
setup.sh

@@ -82,7 +82,7 @@ function updateenv() {
     dev=$REPLY
     if [[ $REPLY =~ ^[Yy]$ ]]
     then
-        REQUIREMENTS_FREQAI="-r requirements-freqai.txt"
+        REQUIREMENTS_FREQAI="-r requirements-freqai.txt --use-pep517"
     fi
 
     ${PYTHON} -m pip install --upgrade -r ${REQUIREMENTS} ${REQUIREMENTS_HYPEROPT} ${REQUIREMENTS_PLOT} ${REQUIREMENTS_FREQAI}
@@ -30,7 +30,7 @@ def test_validate_is_int():
     assert not validate_is_int('-ee')
 
 
-@pytest.mark.parametrize('exchange', ['bittrex', 'binance', 'kraken', 'ftx'])
+@pytest.mark.parametrize('exchange', ['bittrex', 'binance', 'kraken'])
 def test_start_new_config(mocker, caplog, exchange):
     wt_mock = mocker.patch.object(Path, "write_text", MagicMock())
     mocker.patch.object(Path, "exists", MagicMock(return_value=True))
@@ -1271,7 +1271,7 @@ def test_hyperopt_list(mocker, capsys, caplog, saved_hyperopt_results, tmpdir):
     assert csv_file.is_file()
     line = csv_file.read_text()
     assert ('Best,1,2,-1.25%,-1.2222,-0.00125625,,-2.51,"3,930.0 m",0.43662' in line
-            or "Best,1,2,-1.25%,-1.2222,-0.00125625,,-2.51,2 days 17:30:00,0.43662" in line)
+            or "Best,1,2,-1.25%,-1.2222,-0.00125625,,-2.51,2 days 17:30:00,2,0,0.43662" in line)
     csv_file.unlink()
@@ -1748,28 +1748,7 @@ def limit_buy_order_canceled_empty(request):
     # https://docs.pytest.org/en/latest/example/parametrize.html#apply-indirect-on-particular-arguments
 
     exchange_name = request.param
-    if exchange_name == 'ftx':
-        return {
-            'info': {},
-            'id': '1234512345',
-            'clientOrderId': None,
-            'timestamp': arrow.utcnow().shift(minutes=-601).int_timestamp * 1000,
-            'datetime': arrow.utcnow().shift(minutes=-601).isoformat(),
-            'lastTradeTimestamp': None,
-            'symbol': 'LTC/USDT',
-            'type': 'limit',
-            'side': 'buy',
-            'price': 34.3225,
-            'amount': 0.55,
-            'cost': 0.0,
-            'average': None,
-            'filled': 0.0,
-            'remaining': 0.0,
-            'status': 'closed',
-            'fee': None,
-            'trades': None
-        }
-    elif exchange_name == 'kraken':
+    if exchange_name == 'kraken':
         return {
             'info': {},
             'id': 'AZNPFF-4AC4N-7MKTAT',
@@ -2700,7 +2679,7 @@ def saved_hyperopt_results():
         'params_dict': {
             'mfi-value': 15, 'fastd-value': 20, 'adx-value': 25, 'rsi-value': 28, 'mfi-enabled': False, 'fastd-enabled': True, 'adx-enabled': True, 'rsi-enabled': True, 'trigger': 'macd_cross_signal', 'sell-mfi-value': 88, 'sell-fastd-value': 97, 'sell-adx-value': 51, 'sell-rsi-value': 67, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper', 'roi_t1': 1190, 'roi_t2': 541, 'roi_t3': 408, 'roi_p1': 0.026035863879169705, 'roi_p2': 0.12508730043628782, 'roi_p3': 0.27766427921605896, 'stoploss': -0.2562930402099556},  # noqa: E501
         'params_details': {'buy': {'mfi-value': 15, 'fastd-value': 20, 'adx-value': 25, 'rsi-value': 28, 'mfi-enabled': False, 'fastd-enabled': True, 'adx-enabled': True, 'rsi-enabled': True, 'trigger': 'macd_cross_signal'}, 'sell': {'sell-mfi-value': 88, 'sell-fastd-value': 97, 'sell-adx-value': 51, 'sell-rsi-value': 67, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper'}, 'roi': {0: 0.4287874435315165, 408: 0.15112316431545753, 949: 0.026035863879169705, 2139: 0}, 'stoploss': {'stoploss': -0.2562930402099556}},  # noqa: E501
-        'results_metrics': {'total_trades': 2, 'wins': 0, 'draws': 0, 'losses': 2, 'profit_mean': -0.01254995, 'profit_median': -0.012222, 'profit_total': -0.00125625, 'profit_total_abs': -2.50999, 'max_drawdown': 0.23, 'max_drawdown_abs': -0.00125625, 'holding_avg': timedelta(minutes=3930.0), 'stake_currency': 'BTC', 'strategy_name': 'SampleStrategy'},  # noqa: E501
+        'results_metrics': {'total_trades': 2, 'trade_count_long': 2, 'trade_count_short': 0, 'wins': 0, 'draws': 0, 'losses': 2, 'profit_mean': -0.01254995, 'profit_median': -0.012222, 'profit_total': -0.00125625, 'profit_total_abs': -2.50999, 'max_drawdown': 0.23, 'max_drawdown_abs': -0.00125625, 'holding_avg': timedelta(minutes=3930.0), 'stake_currency': 'BTC', 'strategy_name': 'SampleStrategy'},  # noqa: E501
         'results_explanation': ' 2 trades. Avg profit -1.25%. Total profit -0.00125625 BTC ( -2.51Σ%). Avg duration 3930.0 min.',  # noqa: E501
         'total_profit': -0.00125625,
         'current_epoch': 1,
@@ -2717,7 +2696,7 @@ def saved_hyperopt_results():
         'sell': {'sell-mfi-value': 96, 'sell-fastd-value': 68, 'sell-adx-value': 63, 'sell-rsi-value': 81, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-sar_reversal'},  # noqa: E501
         'roi': {0: 0.4449309386008759, 140: 0.11955965746663, 823: 0.06403981740598495, 1157: 0},  # noqa: E501
         'stoploss': {'stoploss': -0.338070047333259}},
-        'results_metrics': {'total_trades': 1, 'wins': 0, 'draws': 0, 'losses': 1, 'profit_mean': 0.012357, 'profit_median': -0.012222, 'profit_total': 6.185e-05, 'profit_total_abs': 0.12357, 'max_drawdown': 0.23, 'max_drawdown_abs': -0.00125625, 'holding_avg': timedelta(minutes=1200.0)},  # noqa: E501
+        'results_metrics': {'total_trades': 1, 'trade_count_long': 1, 'trade_count_short': 0, 'wins': 0, 'draws': 0, 'losses': 1, 'profit_mean': 0.012357, 'profit_median': -0.012222, 'profit_total': 6.185e-05, 'profit_total_abs': 0.12357, 'max_drawdown': 0.23, 'max_drawdown_abs': -0.00125625, 'holding_avg': timedelta(minutes=1200.0)},  # noqa: E501
         'results_explanation': ' 1 trades. Avg profit 0.12%. Total profit 0.00006185 BTC ( 0.12Σ%). Avg duration 1200.0 min.',  # noqa: E501
         'total_profit': 6.185e-05,
         'current_epoch': 2,
@@ -2728,7 +2707,7 @@ def saved_hyperopt_results():
         'loss': 14.241196856510731,
         'params_dict': {'mfi-value': 25, 'fastd-value': 16, 'adx-value': 29, 'rsi-value': 20, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'macd_cross_signal', 'sell-mfi-value': 98, 'sell-fastd-value': 72, 'sell-adx-value': 51, 'sell-rsi-value': 82, 'sell-mfi-enabled': True, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-macd_cross_signal', 'roi_t1': 889, 'roi_t2': 533, 'roi_t3': 263, 'roi_p1': 0.04759065393663096, 'roi_p2': 0.1488819964638463, 'roi_p3': 0.4102801822104605, 'stoploss': -0.05394588767607611},  # noqa: E501
         'params_details': {'buy': {'mfi-value': 25, 'fastd-value': 16, 'adx-value': 29, 'rsi-value': 20, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'macd_cross_signal'}, 'sell': {'sell-mfi-value': 98, 'sell-fastd-value': 72, 'sell-adx-value': 51, 'sell-rsi-value': 82, 'sell-mfi-enabled': True, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-macd_cross_signal'}, 'roi': {0: 0.6067528326109377, 263: 0.19647265040047726, 796: 0.04759065393663096, 1685: 0}, 'stoploss': {'stoploss': -0.05394588767607611}},  # noqa: E501
-        'results_metrics': {'total_trades': 621, 'wins': 320, 'draws': 0, 'losses': 301, 'profit_mean': -0.043883302093397747, 'profit_median': -0.012222, 'profit_total': -0.13639474, 'profit_total_abs': -272.515306, 'max_drawdown': 0.25, 'max_drawdown_abs': -272.515306, 'holding_avg': timedelta(minutes=1691.207729468599)},  # noqa: E501
+        'results_metrics': {'total_trades': 621, 'trade_count_long': 621, 'trade_count_short': 0, 'wins': 320, 'draws': 0, 'losses': 301, 'profit_mean': -0.043883302093397747, 'profit_median': -0.012222, 'profit_total': -0.13639474, 'profit_total_abs': -272.515306, 'max_drawdown': 0.25, 'max_drawdown_abs': -272.515306, 'holding_avg': timedelta(minutes=1691.207729468599)},  # noqa: E501
         'results_explanation': ' 621 trades. Avg profit -0.44%. Total profit -0.13639474 BTC (-272.52Σ%). Avg duration 1691.2 min.',  # noqa: E501
         'total_profit': -0.13639474,
         'current_epoch': 3,
@@ -2739,14 +2718,14 @@ def saved_hyperopt_results():
         'loss': 100000,
         'params_dict': {'mfi-value': 13, 'fastd-value': 35, 'adx-value': 39, 'rsi-value': 29, 'mfi-enabled': True, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': True, 'trigger': 'macd_cross_signal', 'sell-mfi-value': 87, 'sell-fastd-value': 54, 'sell-adx-value': 63, 'sell-rsi-value': 93, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper', 'roi_t1': 1402, 'roi_t2': 676, 'roi_t3': 215, 'roi_p1': 0.06264755784937427, 'roi_p2': 0.14258587851894644, 'roi_p3': 0.20671291201040828, 'stoploss': -0.11818343570194478},  # noqa: E501
         'params_details': {'buy': {'mfi-value': 13, 'fastd-value': 35, 'adx-value': 39, 'rsi-value': 29, 'mfi-enabled': True, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': True, 'trigger': 'macd_cross_signal'}, 'sell': {'sell-mfi-value': 87, 'sell-fastd-value': 54, 'sell-adx-value': 63, 'sell-rsi-value': 93, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper'}, 'roi': {0: 0.411946348378729, 215: 0.2052334363683207, 891: 0.06264755784937427, 2293: 0}, 'stoploss': {'stoploss': -0.11818343570194478}},  # noqa: E501
-        'results_metrics': {'total_trades': 0, 'wins': 0, 'draws': 0, 'losses': 0, 'profit_mean': None, 'profit_median': None, 'profit_total': 0, 'profit': 0.0, 'holding_avg': timedelta()},  # noqa: E501
+        'results_metrics': {'total_trades': 0, 'trade_count_long': 0, 'trade_count_short': 0, 'wins': 0, 'draws': 0, 'losses': 0, 'profit_mean': None, 'profit_median': None, 'profit_total': 0, 'profit': 0.0, 'holding_avg': timedelta()},  # noqa: E501
         'results_explanation': ' 0 trades. Avg profit nan%. Total profit 0.00000000 BTC ( 0.00Σ%). Avg duration nan min.',  # noqa: E501
         'total_profit': 0, 'current_epoch': 4, 'is_initial_point': True, 'is_random': False, 'is_best': False  # noqa: E501
     }, {
         'loss': 0.22195522184191518,
         'params_dict': {'mfi-value': 17, 'fastd-value': 21, 'adx-value': 38, 'rsi-value': 33, 'mfi-enabled': True, 'fastd-enabled': False, 'adx-enabled': True, 'rsi-enabled': False, 'trigger': 'macd_cross_signal', 'sell-mfi-value': 87, 'sell-fastd-value': 82, 'sell-adx-value': 78, 'sell-rsi-value': 69, 'sell-mfi-enabled': True, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': False, 'sell-trigger': 'sell-macd_cross_signal', 'roi_t1': 1269, 'roi_t2': 601, 'roi_t3': 444, 'roi_p1': 0.07280999507931168, 'roi_p2': 0.08946698095898986, 'roi_p3': 0.1454876733325284, 'stoploss': -0.18181041180901014},  # noqa: E501
         'params_details': {'buy': {'mfi-value': 17, 'fastd-value': 21, 'adx-value': 38, 'rsi-value': 33, 'mfi-enabled': True, 'fastd-enabled': False, 'adx-enabled': True, 'rsi-enabled': False, 'trigger': 'macd_cross_signal'}, 'sell': {'sell-mfi-value': 87, 'sell-fastd-value': 82, 'sell-adx-value': 78, 'sell-rsi-value': 69, 'sell-mfi-enabled': True, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': False, 'sell-trigger': 'sell-macd_cross_signal'}, 'roi': {0: 0.3077646493708299, 444: 0.16227697603830155, 1045: 0.07280999507931168, 2314: 0}, 'stoploss': {'stoploss': -0.18181041180901014}},  # noqa: E501
-        'results_metrics': {'total_trades': 14, 'wins': 6, 'draws': 0, 'losses': 8, 'profit_mean': -0.003539515, 'profit_median': -0.012222, 'profit_total': -0.002480140000000001, 'profit_total_abs': -4.955321, 'max_drawdown': 0.34, 'max_drawdown_abs': -4.955321, 'holding_avg': timedelta(minutes=3402.8571428571427)},  # noqa: E501
+        'results_metrics': {'total_trades': 14, 'trade_count_long': 14, 'trade_count_short': 0, 'wins': 6, 'draws': 0, 'losses': 8, 'profit_mean': -0.003539515, 'profit_median': -0.012222, 'profit_total': -0.002480140000000001, 'profit_total_abs': -4.955321, 'max_drawdown': 0.34, 'max_drawdown_abs': -4.955321, 'holding_avg': timedelta(minutes=3402.8571428571427)},  # noqa: E501
         'results_explanation': ' 14 trades. Avg profit -0.35%. Total profit -0.00248014 BTC ( -4.96Σ%). Avg duration 3402.9 min.',  # noqa: E501
         'total_profit': -0.002480140000000001,
         'current_epoch': 5,
@@ -2757,7 +2736,7 @@ def saved_hyperopt_results():
         'loss': 0.545315889154162,
         'params_dict': {'mfi-value': 22, 'fastd-value': 43, 'adx-value': 46, 'rsi-value': 20, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': True, 'rsi-enabled': True, 'trigger': 'bb_lower', 'sell-mfi-value': 87, 'sell-fastd-value': 65, 'sell-adx-value': 94, 'sell-rsi-value': 63, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-macd_cross_signal', 'roi_t1': 319, 'roi_t2': 556, 'roi_t3': 216, 'roi_p1': 0.06251955472249589, 'roi_p2': 0.11659519602202795, 'roi_p3': 0.0953744132197762, 'stoploss': -0.024551752215582423},  # noqa: E501
         'params_details': {'buy': {'mfi-value': 22, 'fastd-value': 43, 'adx-value': 46, 'rsi-value': 20, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': True, 'rsi-enabled': True, 'trigger': 'bb_lower'}, 'sell': {'sell-mfi-value': 87, 'sell-fastd-value': 65, 'sell-adx-value': 94, 'sell-rsi-value': 63, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-macd_cross_signal'}, 'roi': {0: 0.2744891639643, 216: 0.17911475074452382, 772: 0.06251955472249589, 1091: 0}, 'stoploss': {'stoploss': -0.024551752215582423}},  # noqa: E501
-        'results_metrics': {'total_trades': 39, 'wins': 20, 'draws': 0, 'losses': 19, 'profit_mean': -0.0021400679487179478, 'profit_median': -0.012222, 'profit_total': -0.0041773, 'profit_total_abs': -8.346264999999997, 'max_drawdown': 0.45, 'max_drawdown_abs': -4.955321, 'holding_avg': timedelta(minutes=636.9230769230769)},  # noqa: E501
+        'results_metrics': {'total_trades': 39, 'trade_count_long': 39, 'trade_count_short': 0, 'wins': 20, 'draws': 0, 'losses': 19, 'profit_mean': -0.0021400679487179478, 'profit_median': -0.012222, 'profit_total': -0.0041773, 'profit_total_abs': -8.346264999999997, 'max_drawdown': 0.45, 'max_drawdown_abs': -4.955321, 'holding_avg': timedelta(minutes=636.9230769230769)},  # noqa: E501
         'results_explanation': ' 39 trades. Avg profit -0.21%. Total profit -0.00417730 BTC ( -8.35Σ%). Avg duration 636.9 min.',  # noqa: E501
         'total_profit': -0.0041773,
         'current_epoch': 6,
@@ -2770,7 +2749,7 @@ def saved_hyperopt_results():
         'params_details': {
             'buy': {'mfi-value': 13, 'fastd-value': 41, 'adx-value': 21, 'rsi-value': 29, 'mfi-enabled': False, 'fastd-enabled': True, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'bb_lower'}, 'sell': {'sell-mfi-value': 99, 'sell-fastd-value': 60, 'sell-adx-value': 81, 'sell-rsi-value': 69, 'sell-mfi-enabled': True, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': False, 'sell-trigger': 'sell-macd_cross_signal'}, 'roi': {0: 0.4837436938134452, 145: 0.10853310701097472, 765: 0.0586919200378493, 1536: 0},  # noqa: E501
         'stoploss': {'stoploss': -0.14613268022709905}},  # noqa: E501
-        'results_metrics': {'total_trades': 318, 'wins': 100, 'draws': 0, 'losses': 218, 'profit_mean': -0.0039833954716981146, 'profit_median': -0.012222, 'profit_total': -0.06339929, 'profit_total_abs': -126.67197600000004, 'max_drawdown': 0.50, 'max_drawdown_abs': -200.955321, 'holding_avg': timedelta(minutes=3140.377358490566)},  # noqa: E501
+        'results_metrics': {'total_trades': 318, 'trade_count_long': 318, 'trade_count_short': 0, 'wins': 100, 'draws': 0, 'losses': 218, 'profit_mean': -0.0039833954716981146, 'profit_median': -0.012222, 'profit_total': -0.06339929, 'profit_total_abs': -126.67197600000004, 'max_drawdown': 0.50, 'max_drawdown_abs': -200.955321, 'holding_avg': timedelta(minutes=3140.377358490566)},  # noqa: E501
         'results_explanation': ' 318 trades. Avg profit -0.40%. Total profit -0.06339929 BTC (-126.67Σ%). Avg duration 3140.4 min.',  # noqa: E501
         'total_profit': -0.06339929,
         'current_epoch': 7,
@@ -2781,7 +2760,7 @@ def saved_hyperopt_results():
         'loss': 20.0,  # noqa: E501
         'params_dict': {'mfi-value': 24, 'fastd-value': 43, 'adx-value': 33, 'rsi-value': 20, 'mfi-enabled': False, 'fastd-enabled': True, 'adx-enabled': True, 'rsi-enabled': True, 'trigger': 'sar_reversal', 'sell-mfi-value': 89, 'sell-fastd-value': 74, 'sell-adx-value': 70, 'sell-rsi-value': 70, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': False, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-sar_reversal', 'roi_t1': 1149, 'roi_t2': 375, 'roi_t3': 289, 'roi_p1': 0.05571820757172588, 'roi_p2': 0.0606240398618907, 'roi_p3': 0.1729012220156157, 'stoploss': -0.1588514289110401},  # noqa: E501
         'params_details': {'buy': {'mfi-value': 24, 'fastd-value': 43, 'adx-value': 33, 'rsi-value': 20, 'mfi-enabled': False, 'fastd-enabled': True, 'adx-enabled': True, 'rsi-enabled': True, 'trigger': 'sar_reversal'}, 'sell': {'sell-mfi-value': 89, 'sell-fastd-value': 74, 'sell-adx-value': 70, 'sell-rsi-value': 70, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': False, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-sar_reversal'}, 'roi': {0: 0.2892434694492323, 289: 0.11634224743361658, 664: 0.05571820757172588, 1813: 0}, 'stoploss': {'stoploss': -0.1588514289110401}},  # noqa: E501
-        'results_metrics': {'total_trades': 1, 'wins': 0, 'draws': 1, 'losses': 0, 'profit_mean': 0.0, 'profit_median': 0.0, 'profit_total': 0.0, 'profit_total_abs': 0.0, 'max_drawdown': 0.0, 'max_drawdown_abs': 0.52, 'holding_avg': timedelta(minutes=5340.0)},  # noqa: E501
+        'results_metrics': {'total_trades': 1, 'trade_count_long': 1, 'trade_count_short': 0, 'wins': 0, 'draws': 1, 'losses': 0, 'profit_mean': 0.0, 'profit_median': 0.0, 'profit_total': 0.0, 'profit_total_abs': 0.0, 'max_drawdown': 0.0, 'max_drawdown_abs': 0.52, 'holding_avg': timedelta(minutes=5340.0)},  # noqa: E501
         'results_explanation': ' 1 trades. Avg profit 0.00%. Total profit 0.00000000 BTC ( 0.00Σ%). Avg duration 5340.0 min.',  # noqa: E501
         'total_profit': 0.0,
         'current_epoch': 8,
@@ -2792,7 +2771,7 @@ def saved_hyperopt_results():
         'loss': 2.4731817780991223,
         'params_dict': {'mfi-value': 22, 'fastd-value': 20, 'adx-value': 29, 'rsi-value': 40, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'sar_reversal', 'sell-mfi-value': 97, 'sell-fastd-value': 65, 'sell-adx-value': 81, 'sell-rsi-value': 64, 'sell-mfi-enabled': True, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper', 'roi_t1': 1012, 'roi_t2': 584, 'roi_t3': 422, 'roi_p1': 0.036764323603472565, 'roi_p2': 0.10335480573205287, 'roi_p3': 0.10322347377503042, 'stoploss': -0.2780610808108503},  # noqa: E501
         'params_details': {'buy': {'mfi-value': 22, 'fastd-value': 20, 'adx-value': 29, 'rsi-value': 40, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'sar_reversal'}, 'sell': {'sell-mfi-value': 97, 'sell-fastd-value': 65, 'sell-adx-value': 81, 'sell-rsi-value': 64, 'sell-mfi-enabled': True, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper'}, 'roi': {0: 0.2433426031105559, 422: 0.14011912933552545, 1006: 0.036764323603472565, 2018: 0}, 'stoploss': {'stoploss': -0.2780610808108503}},  # noqa: E501
-        'results_metrics': {'total_trades': 229, 'wins': 150, 'draws': 0, 'losses': 79, 'profit_mean': -0.0038433433624454144, 'profit_median': -0.012222, 'profit_total': -0.044050070000000004, 'profit_total_abs': -88.01256299999999, 'max_drawdown': 0.41, 'max_drawdown_abs': -150.955321, 'holding_avg': timedelta(minutes=6505.676855895196)},  # noqa: E501
+        'results_metrics': {'total_trades': 229, 'trade_count_long': 229, 'trade_count_short': 0, 'wins': 150, 'draws': 0, 'losses': 79, 'profit_mean': -0.0038433433624454144, 'profit_median': -0.012222, 'profit_total': -0.044050070000000004, 'profit_total_abs': -88.01256299999999, 'max_drawdown': 0.41, 'max_drawdown_abs': -150.955321, 'holding_avg': timedelta(minutes=6505.676855895196)},  # noqa: E501
         'results_explanation': ' 229 trades. Avg profit -0.38%. Total profit -0.04405007 BTC ( -88.01Σ%). Avg duration 6505.7 min.',  # noqa: E501
         'total_profit': -0.044050070000000004,  # noqa: E501
         'current_epoch': 9,
@@ -2803,7 +2782,7 @@ def saved_hyperopt_results():
         'loss': -0.2604606005845212,  # noqa: E501
         'params_dict': {'mfi-value': 23, 'fastd-value': 24, 'adx-value': 22, 'rsi-value': 24, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': True, 'trigger': 'macd_cross_signal', 'sell-mfi-value': 97, 'sell-fastd-value': 70, 'sell-adx-value': 64, 'sell-rsi-value': 80, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-sar_reversal', 'roi_t1': 792, 'roi_t2': 464, 'roi_t3': 215, 'roi_p1': 0.04594053535385903, 'roi_p2': 0.09623192684243963, 'roi_p3': 0.04428219070850663, 'stoploss': -0.16992287161634415},  # noqa: E501
|
'params_dict': {'mfi-value': 23, 'fastd-value': 24, 'adx-value': 22, 'rsi-value': 24, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': True, 'trigger': 'macd_cross_signal', 'sell-mfi-value': 97, 'sell-fastd-value': 70, 'sell-adx-value': 64, 'sell-rsi-value': 80, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-sar_reversal', 'roi_t1': 792, 'roi_t2': 464, 'roi_t3': 215, 'roi_p1': 0.04594053535385903, 'roi_p2': 0.09623192684243963, 'roi_p3': 0.04428219070850663, 'stoploss': -0.16992287161634415}, # noqa: E501
|
||||||
'params_details': {'buy': {'mfi-value': 23, 'fastd-value': 24, 'adx-value': 22, 'rsi-value': 24, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': True, 'trigger': 'macd_cross_signal'}, 'sell': {'sell-mfi-value': 97, 'sell-fastd-value': 70, 'sell-adx-value': 64, 'sell-rsi-value': 80, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-sar_reversal'}, 'roi': {0: 0.18645465290480528, 215: 0.14217246219629864, 679: 0.04594053535385903, 1471: 0}, 'stoploss': {'stoploss': -0.16992287161634415}}, # noqa: E501
|
'params_details': {'buy': {'mfi-value': 23, 'fastd-value': 24, 'adx-value': 22, 'rsi-value': 24, 'mfi-enabled': False, 'fastd-enabled': False, 'adx-enabled': False, 'rsi-enabled': True, 'trigger': 'macd_cross_signal'}, 'sell': {'sell-mfi-value': 97, 'sell-fastd-value': 70, 'sell-adx-value': 64, 'sell-rsi-value': 80, 'sell-mfi-enabled': False, 'sell-fastd-enabled': True, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-sar_reversal'}, 'roi': {0: 0.18645465290480528, 215: 0.14217246219629864, 679: 0.04594053535385903, 1471: 0}, 'stoploss': {'stoploss': -0.16992287161634415}}, # noqa: E501
|
||||||
'results_metrics': {'total_trades': 4, 'wins': 0, 'draws': 0, 'losses': 4, 'profit_mean': 0.001080385, 'profit_median': -0.012222, 'profit_total': 0.00021629, 'profit_total_abs': 0.432154, 'max_drawdown': 0.13, 'max_drawdown_abs': -4.955321, 'holding_avg': timedelta(minutes=2850.0)}, # noqa: E501
|
'results_metrics': {'total_trades': 4, 'trade_count_long': 4, 'trade_count_short': 0, 'wins': 0, 'draws': 0, 'losses': 4, 'profit_mean': 0.001080385, 'profit_median': -0.012222, 'profit_total': 0.00021629, 'profit_total_abs': 0.432154, 'max_drawdown': 0.13, 'max_drawdown_abs': -4.955321, 'holding_avg': timedelta(minutes=2850.0)}, # noqa: E501
|
||||||
'results_explanation': ' 4 trades. Avg profit 0.11%. Total profit 0.00021629 BTC ( 0.43Σ%). Avg duration 2850.0 min.', # noqa: E501
|
'results_explanation': ' 4 trades. Avg profit 0.11%. Total profit 0.00021629 BTC ( 0.43Σ%). Avg duration 2850.0 min.', # noqa: E501
|
||||||
'total_profit': 0.00021629,
|
'total_profit': 0.00021629,
|
||||||
'current_epoch': 10,
|
'current_epoch': 10,
|
||||||
|
@ -2815,7 +2794,7 @@ def saved_hyperopt_results():
|
||||||
'params_dict': {'mfi-value': 20, 'fastd-value': 32, 'adx-value': 49, 'rsi-value': 23, 'mfi-enabled': True, 'fastd-enabled': True, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'bb_lower', 'sell-mfi-value': 75, 'sell-fastd-value': 56, 'sell-adx-value': 61, 'sell-rsi-value': 62, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-macd_cross_signal', 'roi_t1': 579, 'roi_t2': 614, 'roi_t3': 273, 'roi_p1': 0.05307643172744114, 'roi_p2': 0.1352282078262871, 'roi_p3': 0.1913307406325751, 'stoploss': -0.25728526022513887}, # noqa: E501
|
'params_dict': {'mfi-value': 20, 'fastd-value': 32, 'adx-value': 49, 'rsi-value': 23, 'mfi-enabled': True, 'fastd-enabled': True, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'bb_lower', 'sell-mfi-value': 75, 'sell-fastd-value': 56, 'sell-adx-value': 61, 'sell-rsi-value': 62, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-macd_cross_signal', 'roi_t1': 579, 'roi_t2': 614, 'roi_t3': 273, 'roi_p1': 0.05307643172744114, 'roi_p2': 0.1352282078262871, 'roi_p3': 0.1913307406325751, 'stoploss': -0.25728526022513887}, # noqa: E501
|
||||||
'params_details': {'buy': {'mfi-value': 20, 'fastd-value': 32, 'adx-value': 49, 'rsi-value': 23, 'mfi-enabled': True, 'fastd-enabled': True, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'bb_lower'}, 'sell': {'sell-mfi-value': 75, 'sell-fastd-value': 56, 'sell-adx-value': 61, 'sell-rsi-value': 62, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-macd_cross_signal'}, 'roi': {0: 0.3796353801863034, 273: 0.18830463955372825, 887: 0.05307643172744114, 1466: 0}, 'stoploss': {'stoploss': -0.25728526022513887}}, # noqa: E501
|
'params_details': {'buy': {'mfi-value': 20, 'fastd-value': 32, 'adx-value': 49, 'rsi-value': 23, 'mfi-enabled': True, 'fastd-enabled': True, 'adx-enabled': False, 'rsi-enabled': False, 'trigger': 'bb_lower'}, 'sell': {'sell-mfi-value': 75, 'sell-fastd-value': 56, 'sell-adx-value': 61, 'sell-rsi-value': 62, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-macd_cross_signal'}, 'roi': {0: 0.3796353801863034, 273: 0.18830463955372825, 887: 0.05307643172744114, 1466: 0}, 'stoploss': {'stoploss': -0.25728526022513887}}, # noqa: E501
|
||||||
# New Hyperopt mode!
|
# New Hyperopt mode!
|
||||||
'results_metrics': {'total_trades': 117, 'wins': 67, 'draws': 0, 'losses': 50, 'profit_mean': -0.012698609145299145, 'profit_median': -0.012222, 'profit_total': -0.07436117, 'profit_total_abs': -148.573727, 'max_drawdown': 0.52, 'max_drawdown_abs': -224.955321, 'holding_avg': timedelta(minutes=4282.5641025641025)}, # noqa: E501
|
'results_metrics': {'total_trades': 117, 'trade_count_long': 117, 'trade_count_short': 0, 'wins': 67, 'draws': 0, 'losses': 50, 'profit_mean': -0.012698609145299145, 'profit_median': -0.012222, 'profit_total': -0.07436117, 'profit_total_abs': -148.573727, 'max_drawdown': 0.52, 'max_drawdown_abs': -224.955321, 'holding_avg': timedelta(minutes=4282.5641025641025)}, # noqa: E501
|
||||||
'results_explanation': ' 117 trades. Avg profit -1.27%. Total profit -0.07436117 BTC (-148.57Σ%). Avg duration 4282.6 min.', # noqa: E501
|
'results_explanation': ' 117 trades. Avg profit -1.27%. Total profit -0.07436117 BTC (-148.57Σ%). Avg duration 4282.6 min.', # noqa: E501
|
||||||
'total_profit': -0.07436117,
|
'total_profit': -0.07436117,
|
||||||
'current_epoch': 11,
|
'current_epoch': 11,
|
||||||
|
@ -2826,7 +2805,7 @@ def saved_hyperopt_results():
|
||||||
'loss': 100000,
|
'loss': 100000,
|
||||||
'params_dict': {'mfi-value': 10, 'fastd-value': 36, 'adx-value': 31, 'rsi-value': 22, 'mfi-enabled': True, 'fastd-enabled': True, 'adx-enabled': True, 'rsi-enabled': False, 'trigger': 'sar_reversal', 'sell-mfi-value': 80, 'sell-fastd-value': 71, 'sell-adx-value': 60, 'sell-rsi-value': 85, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper', 'roi_t1': 1156, 'roi_t2': 581, 'roi_t3': 408, 'roi_p1': 0.06860454019988212, 'roi_p2': 0.12473718444931989, 'roi_p3': 0.2896360635226823, 'stoploss': -0.30889015124682806}, # noqa: E501
|
'params_dict': {'mfi-value': 10, 'fastd-value': 36, 'adx-value': 31, 'rsi-value': 22, 'mfi-enabled': True, 'fastd-enabled': True, 'adx-enabled': True, 'rsi-enabled': False, 'trigger': 'sar_reversal', 'sell-mfi-value': 80, 'sell-fastd-value': 71, 'sell-adx-value': 60, 'sell-rsi-value': 85, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper', 'roi_t1': 1156, 'roi_t2': 581, 'roi_t3': 408, 'roi_p1': 0.06860454019988212, 'roi_p2': 0.12473718444931989, 'roi_p3': 0.2896360635226823, 'stoploss': -0.30889015124682806}, # noqa: E501
|
||||||
'params_details': {'buy': {'mfi-value': 10, 'fastd-value': 36, 'adx-value': 31, 'rsi-value': 22, 'mfi-enabled': True, 'fastd-enabled': True, 'adx-enabled': True, 'rsi-enabled': False, 'trigger': 'sar_reversal'}, 'sell': {'sell-mfi-value': 80, 'sell-fastd-value': 71, 'sell-adx-value': 60, 'sell-rsi-value': 85, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper'}, 'roi': {0: 0.4829777881718843, 408: 0.19334172464920202, 989: 0.06860454019988212, 2145: 0}, 'stoploss': {'stoploss': -0.30889015124682806}}, # noqa: E501
|
'params_details': {'buy': {'mfi-value': 10, 'fastd-value': 36, 'adx-value': 31, 'rsi-value': 22, 'mfi-enabled': True, 'fastd-enabled': True, 'adx-enabled': True, 'rsi-enabled': False, 'trigger': 'sar_reversal'}, 'sell': {'sell-mfi-value': 80, 'sell-fastd-value': 71, 'sell-adx-value': 60, 'sell-rsi-value': 85, 'sell-mfi-enabled': False, 'sell-fastd-enabled': False, 'sell-adx-enabled': True, 'sell-rsi-enabled': True, 'sell-trigger': 'sell-bb_upper'}, 'roi': {0: 0.4829777881718843, 408: 0.19334172464920202, 989: 0.06860454019988212, 2145: 0}, 'stoploss': {'stoploss': -0.30889015124682806}}, # noqa: E501
|
||||||
'results_metrics': {'total_trades': 0, 'wins': 0, 'draws': 0, 'losses': 0, 'profit_mean': None, 'profit_median': None, 'profit_total': 0, 'profit_total_abs': 0.0, 'max_drawdown': 0.0, 'max_drawdown_abs': 0.0, 'holding_avg': timedelta()}, # noqa: E501
|
'results_metrics': {'total_trades': 0, 'trade_count_long': 0, 'trade_count_short': 0, 'wins': 0, 'draws': 0, 'losses': 0, 'profit_mean': None, 'profit_median': None, 'profit_total': 0, 'profit_total_abs': 0.0, 'max_drawdown': 0.0, 'max_drawdown_abs': 0.0, 'holding_avg': timedelta()}, # noqa: E501
|
||||||
'results_explanation': ' 0 trades. Avg profit nan%. Total profit 0.00000000 BTC ( 0.00Σ%). Avg duration nan min.', # noqa: E501
|
'results_explanation': ' 0 trades. Avg profit nan%. Total profit 0.00000000 BTC ( 0.00Σ%). Avg duration nan min.', # noqa: E501
|
||||||
'total_profit': 0,
|
'total_profit': 0,
|
||||||
'current_epoch': 12,
|
'current_epoch': 12,
|
||||||
|
|
|
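Across all five epochs above, the updated 'results_metrics' dicts add 'trade_count_long' and 'trade_count_short', which partition 'total_trades' by trade direction (every sample epoch is long-only, so 'trade_count_long' always equals 'total_trades'). A minimal consistency check over such epoch dicts — the helper name is illustrative, not part of the diff:

def check_trade_counts(epochs):
    # Each epoch's long/short counts must partition its total trade count.
    for epoch in epochs:
        m = epoch['results_metrics']
        assert m['total_trades'] == m['trade_count_long'] + m['trade_count_short']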
@@ -3,18 +3,19 @@ import logging
 from pathlib import Path
 from shutil import copyfile

+import numpy as np
 import pytest

 from freqtrade.configuration.timerange import TimeRange
 from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_format,
                                       ohlcv_fill_up_missing_data, ohlcv_to_dataframe,
-                                      trades_dict_to_list, trades_remove_duplicates,
-                                      trades_to_ohlcv, trim_dataframe)
+                                      reduce_dataframe_footprint, trades_dict_to_list,
+                                      trades_remove_duplicates, trades_to_ohlcv, trim_dataframe)
 from freqtrade.data.history import (get_timerange, load_data, load_pair_history,
                                     validate_backtest_data)
 from freqtrade.data.history.idatahandler import IDataHandler
 from freqtrade.enums import CandleType
-from tests.conftest import log_has, log_has_re
+from tests.conftest import generate_test_data, log_has, log_has_re
 from tests.data.test_history import _clean_test_file


@@ -344,3 +345,33 @@ def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, cand
         assert file.exists()
     for file in (files_new):
         assert not file.exists()
+
+
+def test_reduce_dataframe_footprint():
+    data = generate_test_data('15m', 40)
+
+    data['open_copy'] = data['open']
+    data['close_copy'] = data['close']
+    data['close_copy'] = data['close']
+
+    assert data['open'].dtype == np.float64
+    assert data['open_copy'].dtype == np.float64
+    assert data['close_copy'].dtype == np.float64
+
+    df2 = reduce_dataframe_footprint(data)
+
+    # Does not modify original dataframe
+    assert data['open'].dtype == np.float64
+    assert data['open_copy'].dtype == np.float64
+    assert data['close_copy'].dtype == np.float64
+
+    # skips ohlcv columns
+    assert df2['open'].dtype == np.float64
+    assert df2['high'].dtype == np.float64
+    assert df2['low'].dtype == np.float64
+    assert df2['close'].dtype == np.float64
+    assert df2['volume'].dtype == np.float64
+
+    # Changes dtype of returned dataframe
+    assert df2['open_copy'].dtype == np.float32
+    assert df2['close_copy'].dtype == np.float32
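The assertions above pin down the contract of the new reduce_dataframe_footprint helper: the five OHLCV columns keep float64, every other float column is downcast to float32, and the caller's dataframe is left untouched. A minimal sketch with those semantics, inferred from the test rather than taken from the actual implementation:

import numpy as np
import pandas as pd

def reduce_dataframe_footprint_sketch(df: pd.DataFrame) -> pd.DataFrame:
    # Copy first so the caller's dataframe keeps its original dtypes.
    out = df.copy()
    ohlcv = {'open', 'high', 'low', 'close', 'volume'}
    for col in out.columns:
        # Downcast only non-OHLCV float columns, roughly halving their memory use.
        if col not in ohlcv and out[col].dtype == np.float64:
            out[col] = out[col].astype(np.float32)
    return out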
@@ -1,6 +1,7 @@
 # pragma pylint: disable=missing-docstring, protected-access, C0103

 import re
+from datetime import datetime, timezone
 from pathlib import Path
 from unittest.mock import MagicMock

@@ -69,7 +70,7 @@ def test_datahandler_ohlcv_regex(filename, pair, timeframe, candletype):
     ('BTC_USDT_USDT', 'BTC/USDT:USDT'),  # Futures
     ('XRP_USDT_USDT', 'XRP/USDT:USDT'),  # futures
     ('BTC-PERP', 'BTC-PERP'),
-    ('BTC-PERP_USDT', 'BTC-PERP:USDT'),  # potential FTX case
+    ('BTC-PERP_USDT', 'BTC-PERP:USDT'),
     ('UNITTEST_USDT', 'UNITTEST/USDT'),
 ])
 def test_rebuild_pair_from_filename(input, expected):
@@ -154,6 +155,23 @@ def test_jsondatahandler_ohlcv_load(testdatadir, caplog):
     assert df.columns.equals(df1.columns)


+def test_datahandler_ohlcv_data_min_max(testdatadir):
+    dh = JsonDataHandler(testdatadir)
+    min_max = dh.ohlcv_data_min_max('UNITTEST/BTC', '5m', 'spot')
+    assert len(min_max) == 2
+
+    # Empty pair
+    min_max = dh.ohlcv_data_min_max('UNITTEST/BTC', '8m', 'spot')
+    assert len(min_max) == 2
+    assert min_max[0] == datetime.fromtimestamp(0, tz=timezone.utc)
+    assert min_max[0] == min_max[1]
+    # Empty pair2
+    min_max = dh.ohlcv_data_min_max('NOPAIR/XXX', '4m', 'spot')
+    assert len(min_max) == 2
+    assert min_max[0] == datetime.fromtimestamp(0, tz=timezone.utc)
+    assert min_max[0] == min_max[1]
+
+
 def test_datahandler__check_empty_df(testdatadir, caplog):
     dh = JsonDataHandler(testdatadir)
     expected_text = r"Price jump in UNITTEST/USDT, 1h, spot between"
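The new test fixes the fallback behaviour of ohlcv_data_min_max: for a pair/timeframe with no stored candles it still returns a two-element (min, max) tuple, with both entries pinned to the Unix epoch so callers can compare them without special-casing. A sketch of that contract, inferred from the assertions rather than the handler's real code:

from datetime import datetime, timezone

import pandas as pd

def ohlcv_data_min_max_sketch(df: pd.DataFrame):
    if df.empty:
        # No candles stored: both bounds collapse to the Unix epoch.
        epoch = datetime.fromtimestamp(0, tz=timezone.utc)
        return epoch, epoch
    return df['date'].min().to_pydatetime(), df['date'].max().to_pydatetime()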
@@ -162,9 +162,6 @@ def test_stoploss_adjust_binance(mocker, default_conf, sl1, sl2, sl3, side):
     }
     assert exchange.stoploss_adjust(sl1, order, side=side)
     assert not exchange.stoploss_adjust(sl2, order, side=side)
-    # Test with invalid order case
-    order['type'] = 'stop_loss'
-    assert not exchange.stoploss_adjust(sl3, order, side=side)


 def test_fill_leverage_tiers_binance(default_conf, mocker):
@@ -45,16 +45,6 @@ EXCHANGES = {
         'leverage_tiers_public': False,
         'leverage_in_spot_market': True,
     },
-    'ftx': {
-        'pair': 'BTC/USD',
-        'stake_currency': 'USD',
-        'hasQuoteVolume': True,
-        'timeframe': '5m',
-        'futures_pair': 'BTC/USD:USD',
-        'futures': False,
-        'leverage_tiers_public': False,  # TODO: Set to True once implemented on CCXT
-        'leverage_in_spot_market': True,
-    },
    'kucoin': {
         'pair': 'XRP/USDT',
         'stake_currency': 'USDT',
@@ -27,7 +27,7 @@ from tests.conftest import (generate_test_data_raw, get_mock_coro, get_patched_e


 # Make sure to always keep one exchange here which is NOT subclassed!!
-EXCHANGES = ['bittrex', 'binance', 'kraken', 'ftx', 'gateio']
+EXCHANGES = ['bittrex', 'binance', 'kraken', 'gateio']

 get_entry_rate_data = [
     ('other', 20, 19, 10, 0.0, 20),  # Full ask side
@@ -1207,12 +1207,17 @@ def test_create_dry_run_order_fees(
     assert order1['fee']['rate'] == fee


-@pytest.mark.parametrize("side,startprice,endprice", [
-    ("buy", 25.563, 25.566),
-    ("sell", 25.566, 25.563)
+@pytest.mark.parametrize("side,price,filled", [
+    # order_book_l2_usd spread:
+    # best ask: 25.566
+    # best bid: 25.563
+    ("buy", 25.563, False),
+    ("buy", 25.566, True),
+    ("sell", 25.566, False),
+    ("sell", 25.563, True),
 ])
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
-def test_create_dry_run_order_limit_fill(default_conf, mocker, side, startprice, endprice,
+def test_create_dry_run_order_limit_fill(default_conf, mocker, side, price, filled,
                                          exchange_name, order_book_l2_usd):
     default_conf['dry_run'] = True
     exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
@@ -1226,7 +1231,7 @@ def test_create_dry_run_order_limit_fill(default_conf, mocker, side, startprice,
         ordertype='limit',
         side=side,
         amount=1,
-        rate=startprice,
+        rate=price,
         leverage=1.0
     )
     assert order_book_l2_usd.call_count == 1
@@ -1235,22 +1240,17 @@ def test_create_dry_run_order_limit_fill(default_conf, mocker, side, startprice,
     assert order["side"] == side
     assert order["type"] == "limit"
     assert order["symbol"] == "LTC/USDT"
+    assert order["average"] == price
+    assert order['status'] == 'open' if not filled else 'closed'
     order_book_l2_usd.reset_mock()

+    # fetch order again...
     order_closed = exchange.fetch_dry_run_order(order['id'])
-    assert order_book_l2_usd.call_count == 1
-    assert order_closed['status'] == 'open'
-    assert not order['fee']
-    assert order_closed['filled'] == 0
+    assert order_book_l2_usd.call_count == (1 if not filled else 0)
+    assert order_closed['status'] == ('open' if not filled else 'closed')
+    assert order_closed['filled'] == (0 if not filled else 1)

     order_book_l2_usd.reset_mock()
-    order_closed['price'] = endprice
-
-    order_closed = exchange.fetch_dry_run_order(order['id'])
-    assert order_closed['status'] == 'closed'
-    assert order['fee']
-    assert order_closed['filled'] == 1
-    assert order_closed['filled'] == order_closed['amount']

     # Empty orderbook test
     mocker.patch('freqtrade.exchange.Exchange.fetch_l2_order_book',
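The reworked parametrization spells out the fill rule the dry-run simulation applies against the mocked order book (best ask 25.566, best bid 25.563): a limit buy fills immediately only once its price reaches the best ask, a limit sell only once its price reaches the best bid; otherwise the order stays open and is re-checked on the next fetch. The rule in isolation — the function name is illustrative, not freqtrade's internal API:

def limit_order_fills(side: str, limit_price: float, best_bid: float, best_ask: float) -> bool:
    # A buy crosses the book when it is willing to pay the best ask;
    # a sell crosses when it is willing to hit the best bid.
    if side == 'buy':
        return limit_price >= best_ask
    return limit_price <= best_bid

# Mirrors the four parametrized cases above:
assert limit_order_fills('buy', 25.563, 25.563, 25.566) is False
assert limit_order_fills('buy', 25.566, 25.563, 25.566) is True
assert limit_order_fills('sell', 25.566, 25.563, 25.566) is False
assert limit_order_fills('sell', 25.563, 25.563, 25.566) is True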
@@ -3162,19 +3162,16 @@ def test_cancel_stoploss_order(default_conf, mocker, exchange_name):
 def test_cancel_stoploss_order_with_result(default_conf, mocker, exchange_name):
     default_conf['dry_run'] = False
     mocker.patch('freqtrade.exchange.Exchange.fetch_stoploss_order', return_value={'for': 123})
-    mocker.patch('freqtrade.exchange.Ftx.fetch_stoploss_order', return_value={'for': 123})
     mocker.patch('freqtrade.exchange.Gateio.fetch_stoploss_order', return_value={'for': 123})
     exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)

     res = {'fee': {}, 'status': 'canceled', 'amount': 1234}
     mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', return_value=res)
-    mocker.patch('freqtrade.exchange.Ftx.cancel_stoploss_order', return_value=res)
     mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', return_value=res)
     co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555)
     assert co == res

     mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', return_value='canceled')
-    mocker.patch('freqtrade.exchange.Ftx.cancel_stoploss_order', return_value='canceled')
     mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', return_value='canceled')
     # Fall back to fetch_stoploss_order
     co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555)
@@ -3182,7 +3179,6 @@ def test_cancel_stoploss_order_with_result(default_conf, mocker, exchange_name):

     exc = InvalidOrderException("")
     mocker.patch('freqtrade.exchange.Exchange.fetch_stoploss_order', side_effect=exc)
-    mocker.patch('freqtrade.exchange.Ftx.fetch_stoploss_order', side_effect=exc)
     mocker.patch('freqtrade.exchange.Gateio.fetch_stoploss_order', side_effect=exc)
     co = exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=555)
     assert co['amount'] == 555
@@ -3191,7 +3187,6 @@ def test_cancel_stoploss_order_with_result(default_conf, mocker, exchange_name):
     with pytest.raises(InvalidOrderException):
         exc = InvalidOrderException("Did not find order")
         mocker.patch('freqtrade.exchange.Exchange.cancel_stoploss_order', side_effect=exc)
-        mocker.patch('freqtrade.exchange.Ftx.cancel_stoploss_order', side_effect=exc)
         mocker.patch('freqtrade.exchange.Gateio.cancel_stoploss_order', side_effect=exc)
         exchange = get_patched_exchange(mocker, default_conf, id=exchange_name)
         exchange.cancel_stoploss_order_with_result(order_id='_', pair='TKN/BTC', amount=123)
@@ -3253,9 +3248,6 @@ def test_fetch_order(default_conf, mocker, exchange_name, caplog):
 @pytest.mark.usefixtures("init_persistence")
 @pytest.mark.parametrize("exchange_name", EXCHANGES)
 def test_fetch_stoploss_order(default_conf, mocker, exchange_name):
-    # Don't test FTX here - that needs a separate test
-    if exchange_name == 'ftx':
-        return
     default_conf['dry_run'] = True
     order = MagicMock()
     order.myid = 123
@@ -3699,16 +3691,6 @@ def test_date_minus_candles():
     # no darkpools
     ("BTC/EUR.d", 'BTC', 'EUR', "kraken", True, False, False, 'spot',
      {"darkpool": True}, False),
-    ("BTC/USD", 'BTC', 'USD', "ftx", True, False, False, 'spot', {}, True),
-    ("USD/BTC", 'USD', 'BTC', "ftx", True, False, False, 'spot', {}, True),
-    # Can only trade spot markets
-    ("BTC/USD", 'BTC', 'USD', "ftx", False, False, True, 'spot', {}, False),
-    ("BTC/USD", 'BTC', 'USD', "ftx", False, False, True, 'futures', {}, True),
-    # Can only trade spot markets
-    ("BTC-PERP", 'BTC', 'USD', "ftx", False, False, True, 'spot', {}, False),
-    ("BTC-PERP", 'BTC', 'USD', "ftx", False, False, True, 'margin', {}, False),
-    ("BTC-PERP", 'BTC', 'USD', "ftx", False, False, True, 'futures', {}, True),

     ("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'spot', {}, False),
     ("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'margin', {}, False),
     ("BTC/USDT:USDT", 'BTC', 'USD', "okx", False, False, True, 'futures', {}, True),
@@ -3841,7 +3823,7 @@ def test_calculate_backoff(retrycount, max_retries, expected):
     assert calculate_backoff(retrycount, max_retries) == expected


-@pytest.mark.parametrize("exchange_name", ['binance', 'ftx'])
+@pytest.mark.parametrize("exchange_name", ['binance'])
 def test__get_funding_fees_from_exchange(default_conf, mocker, exchange_name):
     api_mock = MagicMock()
     api_mock.fetch_funding_history = MagicMock(return_value=[
@@ -3909,7 +3891,7 @@ def test__get_funding_fees_from_exchange(default_conf, mocker, exchange_name):
     )


-@pytest.mark.parametrize('exchange', ['binance', 'kraken', 'ftx'])
+@pytest.mark.parametrize('exchange', ['binance', 'kraken'])
 @pytest.mark.parametrize('stake_amount,leverage,min_stake_with_lev', [
     (9.0, 3.0, 3.0),
     (20.0, 5.0, 4.0),
@@ -3930,8 +3912,6 @@ def test_get_stake_amount_considering_leverage(

 @pytest.mark.parametrize("exchange_name,trading_mode", [
     ("binance", TradingMode.FUTURES),
-    ("ftx", TradingMode.MARGIN),
-    ("ftx", TradingMode.FUTURES)
 ])
 def test__set_leverage(mocker, default_conf, exchange_name, trading_mode):

@@ -3982,9 +3962,6 @@ def test_set_margin_mode(mocker, default_conf, margin_mode):
     ("kraken", TradingMode.SPOT, None, False),
     ("kraken", TradingMode.MARGIN, MarginMode.ISOLATED, True),
     ("kraken", TradingMode.FUTURES, MarginMode.ISOLATED, True),
-    ("ftx", TradingMode.SPOT, None, False),
-    ("ftx", TradingMode.MARGIN, MarginMode.ISOLATED, True),
-    ("ftx", TradingMode.FUTURES, MarginMode.ISOLATED, True),
     ("bittrex", TradingMode.SPOT, None, False),
     ("bittrex", TradingMode.MARGIN, MarginMode.CROSS, True),
     ("bittrex", TradingMode.MARGIN, MarginMode.ISOLATED, True),
@@ -4005,8 +3982,6 @@ def test_set_margin_mode(mocker, default_conf, margin_mode):
     ("binance", TradingMode.FUTURES, MarginMode.CROSS, True),
     ("kraken", TradingMode.MARGIN, MarginMode.CROSS, True),
     ("kraken", TradingMode.FUTURES, MarginMode.CROSS, True),
-    ("ftx", TradingMode.MARGIN, MarginMode.CROSS, True),
-    ("ftx", TradingMode.FUTURES, MarginMode.CROSS, True),
     ("gateio", TradingMode.MARGIN, MarginMode.CROSS, True),
     ("gateio", TradingMode.FUTURES, MarginMode.CROSS, True),

@@ -4015,8 +3990,6 @@ def test_set_margin_mode(mocker, default_conf, margin_mode):
     # ("binance", TradingMode.FUTURES, MarginMode.CROSS, False),
     # ("kraken", TradingMode.MARGIN, MarginMode.CROSS, False),
     # ("kraken", TradingMode.FUTURES, MarginMode.CROSS, False),
-    # ("ftx", TradingMode.MARGIN, MarginMode.CROSS, False),
-    # ("ftx", TradingMode.FUTURES, MarginMode.CROSS, False),
     # ("gateio", TradingMode.MARGIN, MarginMode.CROSS, False),
     # ("gateio", TradingMode.FUTURES, MarginMode.CROSS, False),
 ])
@@ -4046,7 +4019,6 @@ def test_validate_trading_mode_and_margin_mode(
     ("bibox", "futures", {"has": {"fetchCurrencies": False}, "options": {"defaultType": "swap"}}),
     ("bybit", "spot", {"options": {"defaultType": "spot"}}),
     ("bybit", "futures", {"options": {"defaultType": "linear"}}),
-    ("ftx", "futures", {"options": {"defaultType": "swap"}}),
     ("gateio", "futures", {"options": {"defaultType": "swap"}}),
     ("hitbtc", "futures", {"options": {"defaultType": "swap"}}),
     ("kraken", "futures", {"options": {"defaultType": "swap"}}),
@@ -4223,11 +4195,6 @@ def test_combine_funding_and_mark(
     # ('kraken', "2021-09-01 00:00:00", "2021-09-01 07:59:59", 30.0, -0.0012443999999999999),
     # ('kraken', "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, 0.0045759),
     # ('kraken', "2021-09-01 00:00:01", "2021-09-01 08:00:00", 30.0, -0.0008289),
-    ('ftx', 0, 2, "2021-09-01 00:10:00", "2021-09-01 00:30:00", 30.0, 0.0),
-    ('ftx', 0, 9, "2021-09-01 00:00:00", "2021-09-01 08:00:00", 30.0, 0.0010008),
-    ('ftx', 0, 13, "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, 0.0146691),
-    ('ftx', 0, 9, "2021-09-01 00:00:00", "2021-09-01 08:00:00", 50.0, 0.001668),
-    ('ftx', 1, 9, "2021-09-01 00:00:01", "2021-09-01 08:00:00", 30.0, 0.0019932),
     ('gateio', 0, 2, "2021-09-01 00:10:00", "2021-09-01 04:00:00", 30.0, 0.0),
     ('gateio', 0, 2, "2021-09-01 00:00:00", "2021-09-01 08:00:00", 30.0, -0.0009140999),
     ('gateio', 0, 2, "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, -0.0009140999),
@@ -4289,7 +4256,6 @@ def test__fetch_and_calculate_funding_fees(
     d2 = datetime.strptime(f"{d2} +0000", '%Y-%m-%d %H:%M:%S %z')
     funding_rate_history = {
         'binance': funding_rate_history_octohourly,
-        'ftx': funding_rate_history_hourly,
         'gateio': funding_rate_history_octohourly,
     }[exchange][rate_start:rate_end]
     api_mock = MagicMock()
@@ -5056,7 +5022,7 @@ def test_get_max_leverage_futures(default_conf, mocker, leverage_tiers):
         exchange.get_max_leverage("BTC/USDT", 1000000000.01)


-@pytest.mark.parametrize("exchange_name", ['bittrex', 'binance', 'kraken', 'ftx', 'gateio', 'okx'])
+@pytest.mark.parametrize("exchange_name", ['bittrex', 'binance', 'kraken', 'gateio', 'okx'])
 def test__get_params(mocker, default_conf, exchange_name):
     api_mock = MagicMock()
     mocker.patch('freqtrade.exchange.Exchange.exchange_has', return_value=True)
@@ -1,272 +0,0 @@
-from random import randint
-from unittest.mock import MagicMock
-
-import ccxt
-import pytest
-
-from freqtrade.exceptions import DependencyException, InvalidOrderException
-from freqtrade.exchange.common import API_FETCH_ORDER_RETRY_COUNT
-from tests.conftest import get_patched_exchange
-
-from .test_exchange import ccxt_exceptionhandlers
-
-
-STOPLOSS_ORDERTYPE = 'stop'
-
-
-@pytest.mark.parametrize('order_price,exchangelimitratio,side', [
-    (217.8, 1.05, "sell"),
-    (222.2, 0.95, "buy"),
-])
-def test_stoploss_order_ftx(default_conf, mocker, order_price, exchangelimitratio, side):
-    api_mock = MagicMock()
-    order_id = 'test_prod_buy_{}'.format(randint(0, 10 ** 6))
-
-    api_mock.create_order = MagicMock(return_value={
-        'id': order_id,
-        'info': {
-            'foo': 'bar'
-        }
-    })
-
-    default_conf['dry_run'] = False
-    mocker.patch('freqtrade.exchange.Exchange.amount_to_precision', lambda s, x, y: y)
-    mocker.patch('freqtrade.exchange.Exchange.price_to_precision', lambda s, x, y: y)
-
-    exchange = get_patched_exchange(mocker, default_conf, api_mock, 'ftx')
-
-    # stoploss_on_exchange_limit_ratio is irrelevant for ftx market orders
-    order = exchange.stoploss(
-        pair='ETH/BTC',
-        amount=1,
-        stop_price=190,
-        side=side,
-        order_types={'stoploss_on_exchange_limit_ratio': exchangelimitratio},
-        leverage=1.0
-    )
-
-    assert api_mock.create_order.call_args_list[0][1]['symbol'] == 'ETH/BTC'
-    assert api_mock.create_order.call_args_list[0][1]['type'] == STOPLOSS_ORDERTYPE
-    assert api_mock.create_order.call_args_list[0][1]['side'] == side
-    assert api_mock.create_order.call_args_list[0][1]['amount'] == 1
-    assert 'orderPrice' not in api_mock.create_order.call_args_list[0][1]['params']
-    assert 'stopPrice' in api_mock.create_order.call_args_list[0][1]['params']
-    assert api_mock.create_order.call_args_list[0][1]['params']['stopPrice'] == 190
-
-    assert api_mock.create_order.call_count == 1
-
-    api_mock.create_order.reset_mock()
-
-    order = exchange.stoploss(
-        pair='ETH/BTC',
-        amount=1,
-        stop_price=220,
-        order_types={},
-        side=side,
-        leverage=1.0
-    )
-
-    assert 'id' in order
-    assert 'info' in order
-    assert order['id'] == order_id
-    assert api_mock.create_order.call_args_list[0][1]['symbol'] == 'ETH/BTC'
-    assert api_mock.create_order.call_args_list[0][1]['type'] == STOPLOSS_ORDERTYPE
-    assert api_mock.create_order.call_args_list[0][1]['side'] == side
-    assert api_mock.create_order.call_args_list[0][1]['amount'] == 1
-    assert 'orderPrice' not in api_mock.create_order.call_args_list[0][1]['params']
-    assert api_mock.create_order.call_args_list[0][1]['params']['stopPrice'] == 220
-
-    api_mock.create_order.reset_mock()
-    order = exchange.stoploss(
-        pair='ETH/BTC',
-        amount=1,
-        stop_price=220,
-        order_types={'stoploss': 'limit'}, side=side,
-        leverage=1.0
-    )
-
-    assert 'id' in order
-    assert 'info' in order
-    assert order['id'] == order_id
-    assert api_mock.create_order.call_args_list[0][1]['symbol'] == 'ETH/BTC'
-    assert api_mock.create_order.call_args_list[0][1]['type'] == STOPLOSS_ORDERTYPE
-    assert api_mock.create_order.call_args_list[0][1]['side'] == side
-    assert api_mock.create_order.call_args_list[0][1]['amount'] == 1
-    assert 'orderPrice' in api_mock.create_order.call_args_list[0][1]['params']
-    assert api_mock.create_order.call_args_list[0][1]['params']['orderPrice'] == order_price
-    assert api_mock.create_order.call_args_list[0][1]['params']['stopPrice'] == 220
-
-    # test exception handling
-    with pytest.raises(DependencyException):
-        api_mock.create_order = MagicMock(side_effect=ccxt.InsufficientFunds("0 balance"))
-        exchange = get_patched_exchange(mocker, default_conf, api_mock, 'ftx')
-        exchange.stoploss(
-            pair='ETH/BTC',
-            amount=1,
-            stop_price=220,
-            order_types={},
-            side=side,
-            leverage=1.0
-        )
-
-    with pytest.raises(InvalidOrderException):
-        api_mock.create_order = MagicMock(
-            side_effect=ccxt.InvalidOrder("ftx Order would trigger immediately."))
-        exchange = get_patched_exchange(mocker, default_conf, api_mock, 'ftx')
-        exchange.stoploss(
-            pair='ETH/BTC',
-            amount=1,
-            stop_price=220,
-            order_types={},
-            side=side,
-            leverage=1.0
-        )
-
-    ccxt_exceptionhandlers(mocker, default_conf, api_mock, "ftx",
-                           "stoploss", "create_order", retries=1,
-                           pair='ETH/BTC', amount=1, stop_price=220, order_types={},
-                           side=side, leverage=1.0)
-
-
-@pytest.mark.parametrize('side', [("sell"), ("buy")])
-def test_stoploss_order_dry_run_ftx(default_conf, mocker, side):
-    api_mock = MagicMock()
-    default_conf['dry_run'] = True
-    mocker.patch('freqtrade.exchange.Exchange.amount_to_precision', lambda s, x, y: y)
-    mocker.patch('freqtrade.exchange.Exchange.price_to_precision', lambda s, x, y: y)
-
-    exchange = get_patched_exchange(mocker, default_conf, api_mock, 'ftx')
-
-    api_mock.create_order.reset_mock()
-
-    order = exchange.stoploss(
-        pair='ETH/BTC',
-        amount=1,
-        stop_price=220,
-        order_types={},
-        side=side,
-        leverage=1.0
-    )
-
-    assert 'id' in order
-    assert 'info' in order
-    assert 'type' in order
-
-    assert order['type'] == STOPLOSS_ORDERTYPE
-    assert order['price'] == 220
-    assert order['amount'] == 1
-
-
-@pytest.mark.parametrize('sl1,sl2,sl3,side', [
-    (1501, 1499, 1501, "sell"),
-    (1499, 1501, 1499, "buy")
-])
-def test_stoploss_adjust_ftx(mocker, default_conf, sl1, sl2, sl3, side):
-    exchange = get_patched_exchange(mocker, default_conf, id='ftx')
-    order = {
-        'type': STOPLOSS_ORDERTYPE,
-        'price': 1500,
-    }
-    assert exchange.stoploss_adjust(sl1, order, side=side)
-    assert not exchange.stoploss_adjust(sl2, order, side=side)
-    # Test with invalid order case ...
-    order['type'] = 'stop_loss_limit'
-    assert not exchange.stoploss_adjust(sl3, order, side=side)
-
-
-@pytest.mark.usefixtures("init_persistence")
-def test_fetch_stoploss_order_ftx(default_conf, mocker, limit_sell_order, limit_buy_order):
-    default_conf['dry_run'] = True
-    order = MagicMock()
-    order.myid = 123
-    exchange = get_patched_exchange(mocker, default_conf, id='ftx')
-    exchange._dry_run_open_orders['X'] = order
-    assert exchange.fetch_stoploss_order('X', 'TKN/BTC').myid == 123
-
-    with pytest.raises(InvalidOrderException, match=r'Tried to get an invalid dry-run-order.*'):
-        exchange.fetch_stoploss_order('Y', 'TKN/BTC')
-
-    default_conf['dry_run'] = False
-    api_mock = MagicMock()
-    api_mock.fetch_orders = MagicMock(return_value=[{'id': 'X', 'status': '456'}])
-    exchange = get_patched_exchange(mocker, default_conf, api_mock, id='ftx')
-    assert exchange.fetch_stoploss_order('X', 'TKN/BTC')['status'] == '456'
-
-    api_mock.fetch_orders = MagicMock(return_value=[{'id': 'Y', 'status': '456'}])
-    exchange = get_patched_exchange(mocker, default_conf, api_mock, id='ftx')
-    with pytest.raises(InvalidOrderException, match=r"Could not get stoploss order for id X"):
-        exchange.fetch_stoploss_order('X', 'TKN/BTC')['status']
-
-    # stoploss Limit order
-    api_mock.fetch_orders = MagicMock(return_value=[
-        {'id': 'X', 'status': 'closed',
-         'info': {
-             'orderId': 'mocked_limit_sell',
-         }}])
-    api_mock.fetch_order = MagicMock(return_value=limit_sell_order.copy())
-
-    # No orderId field - no call to fetch_order
-    resp = exchange.fetch_stoploss_order('X', 'TKN/BTC')
-    assert resp
-    assert api_mock.fetch_order.call_count == 1
-    assert resp['id_stop'] == 'mocked_limit_sell'
-    assert resp['id'] == 'X'
-    assert resp['type'] == 'stop'
-    assert resp['status_stop'] == 'triggered'
-
-    # Stoploss market order
-    # Contains no new Order, but "average" instead
-    order = {'id': 'X', 'status': 'closed', 'info': {'orderId': None}, 'average': 0.254}
-    api_mock.fetch_orders = MagicMock(return_value=[order])
-    api_mock.fetch_order.reset_mock()
-    api_mock.privateGetConditionalOrdersConditionalOrderIdTriggers = MagicMock(
-        return_value={'result': [
-            {'orderId': 'mocked_market_sell', 'type': 'market', 'side': 'sell', 'price': 0.254}
-        ]})
-    resp = exchange.fetch_stoploss_order('X', 'TKN/BTC')
-    assert resp
-    # fetch_order not called (no regular order ID)
-    assert api_mock.fetch_order.call_count == 1
-    api_mock.privateGetConditionalOrdersConditionalOrderIdTriggers.call_count == 1
-    expected_resp = limit_sell_order.copy()
-    expected_resp.update({
-        'id_stop': 'X',
-        'id': 'X',
-        'type': 'stop',
-        'status_stop': 'triggered',
-    })
-    assert expected_resp == resp
-
-    with pytest.raises(InvalidOrderException):
-        api_mock.fetch_orders = MagicMock(side_effect=ccxt.InvalidOrder("Order not found"))
-        exchange = get_patched_exchange(mocker, default_conf, api_mock, id='ftx')
-        exchange.fetch_stoploss_order(order_id='_', pair='TKN/BTC')
-    assert api_mock.fetch_orders.call_count == 1
-
-    ccxt_exceptionhandlers(mocker, default_conf, api_mock, 'ftx',
-                           'fetch_stoploss_order', 'fetch_orders',
-                           retries=API_FETCH_ORDER_RETRY_COUNT + 1,
-                           order_id='_', pair='TKN/BTC')
-
-
-def test_get_order_id(mocker, default_conf):
-    exchange = get_patched_exchange(mocker, default_conf, id='ftx')
-    order = {
-        'type': STOPLOSS_ORDERTYPE,
-        'price': 1500,
-        'id': '1111',
-        'id_stop': '1234',
-        'info': {
-        }
-    }
-    assert exchange.get_order_id_conditional(order) == '1234'
-
-    order = {
-        'type': 'limit',
-        'price': 1500,
-        'id': '1111',
-        'id_stop': '1234',
-        'info': {
-        }
-    }
-    assert exchange.get_order_id_conditional(order) == '1111'
@@ -113,5 +113,4 @@ def test_stoploss_adjust_huobi(mocker, default_conf):
     assert exchange.stoploss_adjust(1501, order, 'sell')
     assert not exchange.stoploss_adjust(1499, order, 'sell')
     # Test with invalid order case
-    order['type'] = 'stop_loss'
-    assert not exchange.stoploss_adjust(1501, order, 'sell')
+    assert exchange.stoploss_adjust(1501, order, 'sell')
@@ -3,8 +3,11 @@ from datetime import datetime, timezone
 from pathlib import Path
 from unittest.mock import PropertyMock

+import pytest
+
 from freqtrade.commands.optimize_commands import setup_optimize_configuration
 from freqtrade.enums import RunMode
+from freqtrade.exceptions import OperationalException
 from freqtrade.optimize.backtesting import Backtesting
 from tests.conftest import (CURRENT_TEST_STRATEGY, get_args, log_has_re, patch_exchange,
                             patched_configuration_load_config_file)
@@ -51,3 +54,32 @@ def test_freqai_backtest_load_data(freqai_conf, mocker, caplog):
     assert log_has_re('Increasing startup_candle_count for freqai to.*', caplog)

     Backtesting.cleanup()
+
+
+def test_freqai_backtest_live_models_model_not_found(freqai_conf, mocker, testdatadir, caplog):
+    patch_exchange(mocker)
+
+    now = datetime.now(timezone.utc)
+    mocker.patch('freqtrade.plugins.pairlistmanager.PairListManager.whitelist',
+                 PropertyMock(return_value=['HULUMULU/USDT', 'XRP/USDT']))
+    mocker.patch('freqtrade.optimize.backtesting.history.load_data')
+    mocker.patch('freqtrade.optimize.backtesting.history.get_timerange', return_value=(now, now))
+    freqai_conf["timerange"] = ""
+    patched_configuration_load_config_file(mocker, freqai_conf)
+
+    args = [
+        'backtesting',
+        '--config', 'config.json',
+        '--datadir', str(testdatadir),
+        '--strategy-path', str(Path(__file__).parents[1] / 'strategy/strats'),
+        '--timeframe', '5m',
+        '--freqai-backtest-live-models'
+    ]
+    args = get_args(args)
+    bt_config = setup_optimize_configuration(args, RunMode.BACKTEST)
+
+    with pytest.raises(OperationalException,
+                       match=r".* Saved models are required to run backtest .*"):
+        Backtesting(bt_config)
+
+    Backtesting.cleanup()
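The added test only pins down the observable behaviour: constructing Backtesting with --freqai-backtest-live-models and no trained models must raise OperationalException with a message matching "Saved models are required to run backtest". A hypothetical sketch of such a guard — the function name and directory check are assumptions, only the exception type and message fragment come from the test:

from pathlib import Path

from freqtrade.exceptions import OperationalException

def ensure_live_models_exist(models_path: Path) -> None:
    # Hypothetical guard: live-models backtesting derives its timerange
    # from previously trained models, so at least one must exist on disk.
    if not models_path.is_dir() or not any(models_path.iterdir()):
        raise OperationalException(
            "Saved models are required to run backtest "
            "with the freqai-backtest-live-models option")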
@@ -22,6 +22,7 @@ def test_update_historic_data(mocker, freqai_conf):
     historic_candles = len(freqai.dd.historic_data["ADA/BTC"]["5m"])
     dp_candles = len(strategy.dp.get_pair_dataframe("ADA/BTC", "5m"))
     candle_difference = dp_candles - historic_candles
+    freqai.dk.pair = "ADA/BTC"
     freqai.dd.update_historic_data(strategy, freqai.dk)

     updated_historic_candles = len(freqai.dd.historic_data["ADA/BTC"]["5m"])
@@ -1,13 +1,18 @@
 import shutil
 from datetime import datetime, timedelta, timezone
 from pathlib import Path
+from unittest.mock import MagicMock
 
 import pytest
 
+from freqtrade.configuration import TimeRange
+from freqtrade.data.dataprovider import DataProvider
 from freqtrade.exceptions import OperationalException
-from tests.conftest import log_has_re
-from tests.freqai.conftest import (get_patched_data_kitchen, make_data_dictionary,
-                                   make_unfiltered_dataframe)
+from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
+from freqtrade.freqai.utils import get_timerange_backtest_live_models
+from tests.conftest import get_patched_exchange, log_has_re
+from tests.freqai.conftest import (get_patched_data_kitchen, get_patched_freqai_strategy,
+                                   make_data_dictionary, make_unfiltered_dataframe)
 
 
 @pytest.mark.parametrize(
@@ -159,3 +164,98 @@ def test_make_train_test_datasets(mocker, freqai_conf):
     assert data_dictionary
     assert len(data_dictionary) == 7
     assert len(data_dictionary['train_features'].index) == 1916
+
+
+def test_get_pairs_timestamp_validation(mocker, freqai_conf):
+    exchange = get_patched_exchange(mocker, freqai_conf)
+    strategy = get_patched_freqai_strategy(mocker, freqai_conf)
+    strategy.dp = DataProvider(freqai_conf, exchange)
+    strategy.freqai_info = freqai_conf.get("freqai", {})
+    freqai = strategy.freqai
+    freqai.live = True
+    freqai.dk = FreqaiDataKitchen(freqai_conf)
+    freqai_conf['freqai'].update({"identifier": "invalid_id"})
+    model_path = freqai.dk.get_full_models_path(freqai_conf)
+    with pytest.raises(
+            OperationalException,
+            match=r'.*required to run backtest with the freqai-backtest-live-models.*'
+            ):
+        freqai.dk.get_assets_timestamps_training_from_ready_models(model_path)
+
+
+@pytest.mark.parametrize('model', [
+    'LightGBMRegressor'
+    ])
+def test_get_timerange_from_ready_models(mocker, freqai_conf, model):
+    freqai_conf.update({"freqaimodel": model})
+    freqai_conf.update({"timerange": "20180110-20180130"})
+    freqai_conf.update({"strategy": "freqai_test_strat"})
+
+    strategy = get_patched_freqai_strategy(mocker, freqai_conf)
+    exchange = get_patched_exchange(mocker, freqai_conf)
+    strategy.dp = DataProvider(freqai_conf, exchange)
+    strategy.freqai_info = freqai_conf.get("freqai", {})
+    freqai = strategy.freqai
+    freqai.live = True
+    freqai.dk = FreqaiDataKitchen(freqai_conf)
+    timerange = TimeRange.parse_timerange("20180101-20180130")
+    freqai.dd.load_all_pair_histories(timerange, freqai.dk)
+
+    freqai.dd.pair_dict = MagicMock()
+
+    data_load_timerange = TimeRange.parse_timerange("20180101-20180130")
+
+    # 1516233600 (2018-01-18 00:00) - Start Training 1
+    # 1516406400 (2018-01-20 00:00) - End Training 1 (Backtest slice 1)
+    # 1516579200 (2018-01-22 00:00) - End Training 2 (Backtest slice 2)
+    # 1516838400 (2018-01-25 00:00) - End Timerange
+
+    new_timerange = TimeRange("date", "date", 1516233600, 1516406400)
+    freqai.extract_data_and_train_model(
+        new_timerange, "ADA/BTC", strategy, freqai.dk, data_load_timerange)
+
+    new_timerange = TimeRange("date", "date", 1516406400, 1516579200)
+    freqai.extract_data_and_train_model(
+        new_timerange, "ADA/BTC", strategy, freqai.dk, data_load_timerange)
+
+    model_path = freqai.dk.get_full_models_path(freqai_conf)
+    (backtesting_timerange,
+     pairs_end_dates) = freqai.dk.get_timerange_and_assets_end_dates_from_ready_models(
+        models_path=model_path)
+
+    assert len(pairs_end_dates["ADA"]) == 2
+    assert backtesting_timerange.startts == 1516406400
+    assert backtesting_timerange.stopts == 1516838400
+
+    backtesting_string_timerange = get_timerange_backtest_live_models(freqai_conf)
+    assert backtesting_string_timerange == '20180120-20180125'
+
+
+@pytest.mark.parametrize('model', [
+    'LightGBMRegressor'
+    ])
+def test_get_full_model_path(mocker, freqai_conf, model):
+    freqai_conf.update({"freqaimodel": model})
+    freqai_conf.update({"timerange": "20180110-20180130"})
+    freqai_conf.update({"strategy": "freqai_test_strat"})
+
+    strategy = get_patched_freqai_strategy(mocker, freqai_conf)
+    exchange = get_patched_exchange(mocker, freqai_conf)
+    strategy.dp = DataProvider(freqai_conf, exchange)
+    strategy.freqai_info = freqai_conf.get("freqai", {})
+    freqai = strategy.freqai
+    freqai.live = True
+    freqai.dk = FreqaiDataKitchen(freqai_conf)
+    timerange = TimeRange.parse_timerange("20180110-20180130")
+    freqai.dd.load_all_pair_histories(timerange, freqai.dk)
+
+    freqai.dd.pair_dict = MagicMock()
+
+    data_load_timerange = TimeRange.parse_timerange("20180110-20180130")
+    new_timerange = TimeRange.parse_timerange("20180120-20180130")
+
+    freqai.extract_data_and_train_model(
+        new_timerange, "ADA/BTC", strategy, freqai.dk, data_load_timerange)
+
+    model_path = freqai.dk.get_full_models_path(freqai_conf)
+    assert model_path.is_dir() is True
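The epoch timestamps in the comments above, and the expected `startts`/`stopts`, can be checked directly; a small verification sketch, UTC throughout:

    from datetime import datetime, timezone


    def utc_ts(year, month, day):
        """Epoch seconds for midnight UTC on the given date."""
        return int(datetime(year, month, day, tzinfo=timezone.utc).timestamp())


    assert utc_ts(2018, 1, 18) == 1516233600  # Start Training 1
    assert utc_ts(2018, 1, 20) == 1516406400  # End Training 1 / backtest start
    assert utc_ts(2018, 1, 22) == 1516579200  # End Training 2
    assert utc_ts(2018, 1, 25) == 1516838400  # End of timerange / backtest stop

The derived string '20180120-20180125' is then simply the end of the first training window through the end of the configured timerange.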
@@ -27,13 +27,13 @@ def is_mac() -> bool:
     return "Darwin" in machine
 
 
-@pytest.mark.parametrize('model', [
-    'LightGBMRegressor',
-    'XGBoostRegressor',
-    'XGBoostRFRegressor',
-    'CatboostRegressor',
+@pytest.mark.parametrize('model, pca, dbscan, float32', [
+    ('LightGBMRegressor', True, False, True),
+    ('XGBoostRegressor', False, True, False),
+    ('XGBoostRFRegressor', False, False, False),
+    ('CatboostRegressor', False, False, False),
     ])
-def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model):
+def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model, pca, dbscan, float32):
     if is_arm() and model == 'CatboostRegressor':
         pytest.skip("CatBoost is not supported on ARM")
 
@@ -41,6 +41,9 @@ def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model):
     freqai_conf.update({"freqaimodel": model})
     freqai_conf.update({"timerange": "20180110-20180130"})
     freqai_conf.update({"strategy": "freqai_test_strat"})
+    freqai_conf['freqai']['feature_parameters'].update({"principal_component_analysis": pca})
+    freqai_conf['freqai']['feature_parameters'].update({"use_DBSCAN_to_remove_outliers": dbscan})
+    freqai_conf.update({"reduce_df_footprint": float32})
 
     strategy = get_patched_freqai_strategy(mocker, freqai_conf)
     exchange = get_patched_exchange(mocker, freqai_conf)
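The three new knobs map onto plain config entries; a hedged sketch of the resulting configuration shape for the first parametrize case, with key names taken from the test and the surrounding structure abbreviated:

    freqai_conf = {
        "reduce_df_footprint": True,   # downcast dataframe floats to shrink memory use
        "freqai": {
            "feature_parameters": {
                "principal_component_analysis": True,     # PCA on the feature set
                "use_DBSCAN_to_remove_outliers": False,   # DBSCAN-based outlier removal
            },
        },
    }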
@@ -75,17 +78,19 @@ def test_extract_data_and_train_model_Standard(mocker, freqai_conf, model):
     shutil.rmtree(Path(freqai.dk.full_path))
 
 
-@pytest.mark.parametrize('model', [
-    'LightGBMRegressorMultiTarget',
-    'XGBoostRegressorMultiTarget',
-    'CatboostRegressorMultiTarget',
+@pytest.mark.parametrize('model, strat', [
+    ('LightGBMRegressorMultiTarget', "freqai_test_multimodel_strat"),
+    ('XGBoostRegressorMultiTarget', "freqai_test_multimodel_strat"),
+    ('CatboostRegressorMultiTarget', "freqai_test_multimodel_strat"),
+    ('LightGBMClassifierMultiTarget', "freqai_test_multimodel_classifier_strat"),
+    ('CatboostClassifierMultiTarget', "freqai_test_multimodel_classifier_strat")
     ])
-def test_extract_data_and_train_model_MultiTargets(mocker, freqai_conf, model):
-    if is_arm() and model == 'CatboostRegressorMultiTarget':
+def test_extract_data_and_train_model_MultiTargets(mocker, freqai_conf, model, strat):
+    if is_arm() and 'Catboost' in model:
         pytest.skip("CatBoost is not supported on ARM")
 
     freqai_conf.update({"timerange": "20180110-20180130"})
-    freqai_conf.update({"strategy": "freqai_test_multimodel_strat"})
+    freqai_conf.update({"strategy": strat})
     freqai_conf.update({"freqaimodel": model})
     strategy = get_patched_freqai_strategy(mocker, freqai_conf)
     exchange = get_patched_exchange(mocker, freqai_conf)
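The ARM skip condition was loosened from an exact match to a substring test so it covers both the regressor and the newly added classifier variant; the equivalence is easy to see in isolation:

    models = [
        'LightGBMClassifierMultiTarget',
        'CatboostRegressorMultiTarget',
        'CatboostClassifierMultiTarget',
    ]

    # 'Catboost' in model is True for every CatBoost flavour, present or future,
    # while the old equality check matched only CatboostRegressorMultiTarget.
    skipped_on_arm = [m for m in models if 'Catboost' in m]
    assert skipped_on_arm == models[1:]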
@@ -192,6 +197,7 @@ def test_start_backtesting(mocker, freqai_conf, model, num_files, strat, caplog):
     corr_df, base_df = freqai.dd.get_base_and_corr_dataframes(sub_timerange, "LTC/BTC", freqai.dk)
 
     df = freqai.dk.use_strategy_to_populate_indicators(strategy, corr_df, base_df, "LTC/BTC")
+    df = freqai.cache_corr_pairlist_dfs(df, freqai.dk)
     for i in range(5):
         df[f'%-constant_{i}'] = i
         # df.loc[:, f'%-constant_{i}'] = i
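The name `cache_corr_pairlist_dfs` suggests that correlated-pair dataframes are now cached between backtest windows rather than rebuilt each time; the freqtrade internals are not shown in this diff, so the following is only a generic sketch of that pattern, with hypothetical names:

    _corr_cache: dict = {}


    def cache_corr_pairlist_dfs(key, build_df):
        """Build the correlated-pair dataframe once per key, then reuse it."""
        if key not in _corr_cache:
            _corr_cache[key] = build_df()
        return _corr_cache[key]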
@@ -234,6 +240,7 @@ def test_start_backtesting_subdaily_backtest_period(mocker, freqai_conf):
     metadata = {"pair": "LTC/BTC"}
     freqai.start_backtesting(df, metadata, freqai.dk)
     model_folders = [x for x in freqai.dd.full_path.iterdir() if x.is_dir()]
+
     assert len(model_folders) == 9
 
     shutil.rmtree(Path(freqai.dk.full_path))
@@ -336,6 +343,7 @@ def test_follow_mode(mocker, freqai_conf):
 
     df = strategy.dp.get_pair_dataframe('ADA/BTC', '5m')
 
+    freqai.dk.pair = "ADA/BTC"
     freqai.start_live(df, metadata, strategy, freqai.dk)
 
     assert len(freqai.dk.return_dataframe.index) == 5702
Some files were not shown because too many files have changed in this diff.