Merge branch 'develop' into improve-freqai-docs

This commit is contained in:
robcaulk 2022-09-25 20:53:36 +02:00
commit 117e510e61
66 changed files with 3315 additions and 578 deletions

View File

@ -272,6 +272,16 @@ jobs:
pip install pyaml
python build_helpers/pre_commit_update.py
pre-commit:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
- uses: pre-commit/action@v3.0.0
docs_check:
runs-on: ubuntu-20.04
steps:
@ -302,7 +312,7 @@ jobs:
# Notify only once - when CI completes (and after deploy) in case it's successful
notify-complete:
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check ]
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check, pre-commit ]
runs-on: ubuntu-20.04
# Discord notification can't handle schedule events
if: (github.event_name != 'schedule')
@ -327,7 +337,7 @@ jobs:
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
deploy:
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check ]
needs: [ build_linux, build_macos, build_windows, docs_check, mypy_version_check, pre-commit ]
runs-on: ubuntu-20.04
if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'

View File

@ -94,4 +94,4 @@
"internals": {
"process_throttle_secs": 5
}
}
}

View File

@ -172,7 +172,24 @@
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "freqtrader",
"password": "SuperSecurePassword"
"password": "SuperSecurePassword",
"ws_token": "secret_ws_t0ken."
},
"external_message_consumer": {
"enabled": false,
"producers": [
{
"name": "default",
"host": "127.0.0.2",
"port": 8080,
"ws_token": "secret_ws_t0ken."
}
],
"wait_timeout": 300,
"ping_timeout": 10,
"sleep_time": 10,
"remove_entry_exit_signals": false,
"message_size_limit": 8
},
"bot_name": "freqtrade",
"db_url": "sqlite:///tradesv3.sqlite",

View File

@ -6,4 +6,3 @@ FROM ${sourceimage}:${sourcetag}
COPY requirements-freqai.txt /freqtrade/
RUN pip install -r requirements-freqai.txt --user --no-cache-dir

View File

@ -225,14 +225,16 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `webhook.webhookexitcancel` | Payload to send on exit order cancel. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String
| `webhook.webhookexitfill` | Payload to send on exit order filled. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String
| `webhook.webhookstatus` | Payload to send on status calls. Only required if `webhook.enabled` is `true`. See the [webhook documentation](webhook-config.md) for more details. <br> **Datatype:** String
| | **Rest API / FreqUI**
| | **Rest API / FreqUI / Producer-Consumer**
| `api_server.enabled` | Enable usage of API Server. See the [API Server documentation](rest-api.md) for more details. <br> **Datatype:** Boolean
| `api_server.listen_ip_address` | Bind IP address. See the [API Server documentation](rest-api.md) for more details. <br> **Datatype:** IPv4
| `api_server.listen_port` | Bind Port. See the [API Server documentation](rest-api.md) for more details. <br>**Datatype:** Integer between 1024 and 65535
| `api_server.verbosity` | Logging verbosity. `info` will print all RPC calls, while `error` will only display errors. <br>**Datatype:** Enum, either `info` or `error`. Defaults to `info`.
| `api_server.username` | Username for API server. See the [API Server documentation](rest-api.md) for more details. <br>**Keep it secret, do not disclose it publicly.**<br> **Datatype:** String
| `api_server.password` | Password for API server. See the [API Server documentation](rest-api.md) for more details. <br>**Keep it secret, do not disclose it publicly.**<br> **Datatype:** String
| `api_server.ws_token` | API token for the Message WebSocket. See the [API Server documentation](rest-api.md) for more details. <br>**Keep it secret, do not disclose it publicly.** <br> **Datatype:** String
| `bot_name` | Name of the bot. Passed via API to a client - can be shown to distinguish / name bots.<br> *Defaults to `freqtrade`*<br> **Datatype:** String
| `external_message_consumer` | Enable consuming messages from other freqtrade instances. See [Producer/Consumer mode](producer-consumer.md) for more details. <br> **Datatype:** Dict
| | **Other**
| `initial_state` | Defines the initial application state. If set to stopped, then the bot has to be explicitly started via `/start` RPC command. <br>*Defaults to `stopped`.* <br> **Datatype:** Enum, either `stopped` or `running`
| `force_entry_enable` | Enables the RPC Commands to force a Trade entry. More information below. <br> **Datatype:** Boolean

View File

@ -26,7 +26,7 @@ usage: freqtrade download-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[--timerange TIMERANGE] [--dl-trades]
[--exchange EXCHANGE]
[-t TIMEFRAMES [TIMEFRAMES ...]] [--erase]
[--data-format-ohlcv {json,jsongz,hdf5}]
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
[--data-format-trades {json,jsongz,hdf5}]
[--trading-mode {spot,margin,futures}]
[--prepend]
@ -55,7 +55,7 @@ optional arguments:
list. Default: `1m 5m`.
--erase Clean all existing data for the selected
exchange/pairs/timeframes.
--data-format-ohlcv {json,jsongz,hdf5}
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data.
(default: `json`).
--data-format-trades {json,jsongz,hdf5}
@ -76,7 +76,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@ -179,9 +179,11 @@ freqtrade download-data --exchange binance --pairs ETH/USDT XRP/USDT BTC/USDT --
Freqtrade currently supports the following data-formats:
* `json` (plain "text" json files)
* `jsongz` (a gzip-zipped version of json files)
* `hdf5` (a high performance datastore)
* `json` - plain "text" json files
* `jsongz` - a gzip-zipped version of json files
* `hdf5` - a high performance datastore
* `feather` - a dataformat based on Apache Arrow (OHLCV only)
* `parquet` - a columnar datastore (OHLCV only)
By default, OHLCV data is stored as `json` data, while trades data is stored as `jsongz` data.
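For example, to download data directly in one of the newer formats, pass `--data-format-ohlcv` explicitly (a sketch; exchange, pair and timeframe below are placeholders to adjust to your setup):

``` bash
# Download 5m candles for BTC/USDT from binance, stored as feather
freqtrade download-data --exchange binance --pairs BTC/USDT -t 5m --data-format-ohlcv feather
```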
@ -200,38 +202,74 @@ If the default data-format has been changed during download, then the keys `data
!!! Note
You can convert between data-formats using the [convert-data](#sub-command-convert-data) and [convert-trade-data](#sub-command-convert-trade-data) methods.
#### Dataformat comparison
The following comparisons were made on the data listed below, using the Linux `time` command.
```
Found 6 pair / timeframe combinations.
+----------+-------------+--------+---------------------+---------------------+
| Pair | Timeframe | Type | From | To |
|----------+-------------+--------+---------------------+---------------------|
| BTC/USDT | 5m | spot | 2017-08-17 04:00:00 | 2022-09-13 19:25:00 |
| ETH/USDT | 1m | spot | 2017-08-17 04:00:00 | 2022-09-13 19:26:00 |
| BTC/USDT | 1m | spot | 2017-08-17 04:00:00 | 2022-09-13 19:30:00 |
| XRP/USDT | 5m | spot | 2018-05-04 08:10:00 | 2022-09-13 19:15:00 |
| XRP/USDT | 1m | spot | 2018-05-04 08:11:00 | 2022-09-13 19:22:00 |
| ETH/USDT | 5m | spot | 2017-08-17 04:00:00 | 2022-09-13 19:20:00 |
+----------+-------------+--------+---------------------+---------------------+
```
Timings were taken (in an admittedly unscientific way) with the following command, which forces the data to be read into memory.
``` bash
time freqtrade list-data --show-timerange --data-format-ohlcv <dataformat>
```
| Format | Size | Timing |
|------------|-------------|-------------|
| `json` | 149MB | 25.6s |
| `jsongz` | 39MB | 27s |
| `hdf5` | 145MB | 3.9s |
| `feather` | 72MB | 3.5s |
| `parquet` | 83MB | 3.8s |
Size has been taken from the BTC/USDT 1m spot combination for the timerange specified above.
To get the best mix of performance and size, we recommend using either `feather` or `parquet`.
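Converting an existing data directory is a one-off operation using the `convert-data` sub-command described below (a sketch, assuming your data was previously downloaded in the default `json` format):

``` bash
# Convert all downloaded OHLCV data from json to feather and remove the originals
freqtrade convert-data --format-from json --format-to feather --erase
```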
#### Sub-command convert data
```
usage: freqtrade convert-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[-p PAIRS [PAIRS ...]] --format-from
{json,jsongz,hdf5} --format-to
{json,jsongz,hdf5} [--erase]
[-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]]
{json,jsongz,hdf5,feather,parquet} --format-to
{json,jsongz,hdf5,feather,parquet} [--erase]
[--exchange EXCHANGE]
[-t TIMEFRAMES [TIMEFRAMES ...]]
[--trading-mode {spot,margin,futures}]
[--candle-types {spot,,futures,mark,index,premiumIndex,funding_rate} [{spot,,futures,mark,index,premiumIndex,funding_rate} ...]]
[--candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]]
optional arguments:
-h, --help show this help message and exit
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space-
separated.
--format-from {json,jsongz,hdf5}
--format-from {json,jsongz,hdf5,feather,parquet}
Source format for data conversion.
--format-to {json,jsongz,hdf5}
--format-to {json,jsongz,hdf5,feather,parquet}
Destination format for data conversion.
--erase Clean all existing data for the selected
exchange/pairs/timeframes.
-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...], --timeframes {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]
Specify which tickers to download. Space-separated
list. Default: `1m 5m`.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
--trading-mode {spot,margin,futures}
-t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...]
Specify which tickers to download. Space-separated
list. Default: `1m 5m`.
--trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
Select Trading mode
--candle-types {spot,,futures,mark,index,premiumIndex,funding_rate} [{spot,,futures,mark,index,premiumIndex,funding_rate} ...]
--candle-types {spot,futures,mark,index,premiumIndex,funding_rate} [{spot,futures,mark,index,premiumIndex,funding_rate} ...]
Select candle type to use
Common arguments:
@ -245,7 +283,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@ -267,20 +305,24 @@ freqtrade convert-data --format-from json --format-to jsongz --datadir ~/.freqtr
usage: freqtrade convert-trade-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[-p PAIRS [PAIRS ...]] --format-from
{json,jsongz,hdf5} --format-to
{json,jsongz,hdf5} [--erase]
{json,jsongz,hdf5,feather,parquet}
--format-to
{json,jsongz,hdf5,feather,parquet}
[--erase] [--exchange EXCHANGE]
optional arguments:
-h, --help show this help message and exit
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Show profits for only these pairs. Pairs are space-
Limit command to these pairs. Pairs are space-
separated.
--format-from {json,jsongz,hdf5}
--format-from {json,jsongz,hdf5,feather,parquet}
Source format for data conversion.
--format-to {json,jsongz,hdf5}
--format-to {json,jsongz,hdf5,feather,parquet}
Destination format for data conversion.
--erase Clean all existing data for the selected
exchange/pairs/timeframes.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
@ -293,7 +335,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@ -318,9 +360,9 @@ This command will allow you to repeat this last step for additional timeframes w
usage: freqtrade trades-to-ohlcv [-h] [-v] [--logfile FILE] [-V] [-c PATH]
[-d PATH] [--userdir PATH]
[-p PAIRS [PAIRS ...]]
[-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]]
[-t TIMEFRAMES [TIMEFRAMES ...]]
[--exchange EXCHANGE]
[--data-format-ohlcv {json,jsongz,hdf5}]
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
[--data-format-trades {json,jsongz,hdf5}]
optional arguments:
@ -328,12 +370,12 @@ optional arguments:
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space-
separated.
-t {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...], --timeframes {1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} [{1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,2w,1M,1y} ...]
-t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...]
Specify which tickers to download. Space-separated
list. Default: `1m 5m`.
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
--data-format-ohlcv {json,jsongz,hdf5}
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data.
(default: `json`).
--data-format-trades {json,jsongz,hdf5}
@ -351,7 +393,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
@ -371,7 +413,7 @@ You can get a list of downloaded data using the `list-data` sub-command.
```
usage: freqtrade list-data [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH]
[--userdir PATH] [--exchange EXCHANGE]
[--data-format-ohlcv {json,jsongz,hdf5}]
[--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
[-p PAIRS [PAIRS ...]]
[--trading-mode {spot,margin,futures}]
[--show-timerange]
@ -380,13 +422,13 @@ optional arguments:
-h, --help show this help message and exit
--exchange EXCHANGE Exchange name (default: `bittrex`). Only valid if no
config is provided.
--data-format-ohlcv {json,jsongz,hdf5}
--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
Storage format for downloaded candle (OHLCV) data.
(default: `json`).
-p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
Limit command to these pairs. Pairs are space-
separated.
--trading-mode {spot,margin,futures}
--trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
Select Trading mode
--show-timerange Show timerange available for available data. (May take
a while to calculate).
@ -402,7 +444,7 @@ Common arguments:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.

docs/producer-consumer.md (new file, 163 lines)
View File

@ -0,0 +1,163 @@
# Producer / Consumer mode
Freqtrade provides a mechanism whereby an instance (also called `consumer`) may listen to messages from an upstream freqtrade instance (also called `producer`) using the message websocket, mainly `analyzed_df` and `whitelist` messages. This allows the reuse of computed indicators (and signals) for pairs in multiple bots without needing to compute them multiple times.
See [Message Websocket](rest-api.md#message-websocket) in the Rest API docs for setting up the `api_server` configuration for your message websocket (this will be your producer).
!!! Note
We strongly recommend setting `ws_token` to something random and known only to yourself to avoid unauthorized access to your bot.
## Configuration
Enable subscribing to an instance by adding the `external_message_consumer` section to the consumer's config file.
```json
{
//...
"external_message_consumer": {
"enabled": true,
"producers": [
{
"name": "default", // This can be any name you'd like, default is "default"
"host": "127.0.0.1", // The host from your producer's api_server config
"port": 8080, // The port from your producer's api_server config
"ws_token": "sercet_Ws_t0ken" // The ws_token from your producer's api_server config
}
],
// The following configurations are optional, and usually not required
// "wait_timeout": 300,
// "ping_timeout": 10,
// "sleep_time": 10,
// "remove_entry_exit_signals": false,
// "message_size_limit": 8
}
//...
}
```
| Parameter | Description |
|------------|-------------|
| `enabled` | **Required.** Enable consumer mode. If set to `false`, all other settings in this section are ignored.<br>*Defaults to `false`.*<br> **Datatype:** Boolean.
| `producers` | **Required.** List of producers. <br> **Datatype:** Array.
| `producers.name` | **Required.** Name of this producer. This name must be used in calls to `get_producer_pairs()` and `get_producer_df()` if more than one producer is used.<br> **Datatype:** string
| `producers.host` | **Required.** The hostname or IP address of your producer.<br> **Datatype:** string
| `producers.port` | **Required.** The port matching the above host.<br>*Defaults to `8080`.*<br> **Datatype:** Integer
| `producers.ws_token` | **Required.** `ws_token` as configured on the producer.<br> **Datatype:** string
| | **Optional settings**
| `wait_timeout` | Timeout until we ping again if no message is received. <br>*Defaults to `300`.*<br> **Datatype:** Integer - in seconds.
| `ping_timeout` | Ping timeout <br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `sleep_time` | Sleep time before retrying to connect.<br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `remove_entry_exit_signals` | Remove signal columns from the dataframe (set them to 0) on dataframe receipt.<br>*Defaults to `false`.*<br> **Datatype:** Boolean.
| `message_size_limit` | Size limit per message.<br>*Defaults to `8`.*<br> **Datatype:** Integer - Megabytes.
Instead of (or in addition to) calculating indicators in `populate_indicators()`, the consumer instance listens to the connected producer instance's messages (or multiple producer instances in advanced configurations) and requests the producer's most recently analyzed dataframes for each pair in the active whitelist.
A consumer instance will then have a full copy of the analyzed dataframes without the need to calculate them itself.
## Examples
### Example - Producer Strategy
A simple strategy with multiple indicators. No special considerations are required in the strategy itself.
```py
class ProducerStrategy(IStrategy):
#...
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
Calculate indicators in the standard freqtrade way which can then be broadcast to other instances
"""
dataframe['rsi'] = ta.RSI(dataframe)
bollinger = qtpylib.bollinger_bands(qtpylib.typical_price(dataframe), window=20, stds=2)
dataframe['bb_lowerband'] = bollinger['lower']
dataframe['bb_middleband'] = bollinger['mid']
dataframe['bb_upperband'] = bollinger['upper']
dataframe['tema'] = ta.TEMA(dataframe, timeperiod=9)
return dataframe
def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
Populates the entry signal for the given dataframe
"""
dataframe.loc[
(
(qtpylib.crossed_above(dataframe['rsi'], self.buy_rsi.value)) &
(dataframe['tema'] <= dataframe['bb_middleband']) &
(dataframe['tema'] > dataframe['tema'].shift(1)) &
(dataframe['volume'] > 0)
),
'enter_long'] = 1
return dataframe
```
!!! Tip "FreqAI"
You can use this to set up [FreqAI](freqai.md) on a powerful machine, while running consumers on simple machines like Raspberry Pis, which can interpret the signals generated from the producer in different ways.
### Example - Consumer Strategy
A logically equivalent strategy that calculates no indicators itself, but has the same analyzed dataframes available to make trading decisions based on the indicators calculated by the producer. In this example the consumer has the same entry criteria; however, this is not required. The consumer may use different logic to enter/exit trades, using only the indicators it needs.
```py
class ConsumerStrategy(IStrategy):
#...
process_only_new_candles = False # required for consumers
_columns_to_expect = ['rsi_default', 'tema_default', 'bb_middleband_default']
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
Use the websocket api to get pre-populated indicators from another freqtrade instance.
Use `self.dp.get_producer_df(pair)` to get the dataframe
"""
pair = metadata['pair']
timeframe = self.timeframe
producer_pairs = self.dp.get_producer_pairs()
# You can specify which producer to get pairs from via:
# self.dp.get_producer_pairs("my_other_producer")
# This func returns the analyzed dataframe, and when it was analyzed
producer_dataframe, _ = self.dp.get_producer_df(pair)
# You can get other data if the producer makes it available:
# self.dp.get_producer_df(
# pair,
# timeframe="1h",
# candle_type=CandleType.SPOT,
# producer_name="my_other_producer"
# )
if not producer_dataframe.empty:
# If you plan on passing the producer's entry/exit signal directly,
# specify ffill=False or it will have unintended results
merged_dataframe = merge_informative_pair(dataframe, producer_dataframe,
timeframe, timeframe,
append_timeframe=False,
suffix="default")
return merged_dataframe
else:
dataframe[self._columns_to_expect] = 0
return dataframe
def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
"""
Populates the entry signal for the given dataframe
"""
# Use the dataframe columns as if we calculated them ourselves
dataframe.loc[
(
(qtpylib.crossed_above(dataframe['rsi_default'], self.buy_rsi.value)) &
(dataframe['tema_default'] <= dataframe['bb_middleband_default']) &
(dataframe['tema_default'] > dataframe['tema_default'].shift(1)) &
(dataframe['volume'] > 0)
),
'enter_long'] = 1
return dataframe
```
!!! Tip "Using upstream signals"
By setting `remove_entry_exit_signals=false`, you can also use the producer's signals directly. They should be available as `enter_long_default` (assuming `suffix="default"` was used) and can be used either as a signal directly or as an additional indicator.
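A minimal sketch of that approach, assuming `remove_entry_exit_signals` is `false` on the consumer and `enter_long_default` has been added to `_columns_to_expect` so the column always exists:

```py
def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    # Reuse the producer's entry signal as this bot's own signal.
    # Missing values (e.g. no producer data received yet) count as "no entry".
    dataframe['enter_long'] = dataframe['enter_long_default'].fillna(0)
    return dataframe
```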

View File

@ -31,7 +31,8 @@ Sample configuration:
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "Freqtrader",
"password": "SuperSecret1!"
"password": "SuperSecret1!",
"ws_token": "sercet_Ws_t0ken"
},
```
@ -66,7 +67,7 @@ secrets.token_hex()
!!! Danger "Password selection"
Please make sure to select a very strong, unique password to protect your bot from unauthorized access.
Also change `jwt_secret_key` to something random (no need to remember this, but it'll be used to encrypt your session, so it better be something unique!).
Also change `jwt_secret_key` to something random (no need to remember this, but it'll be used to encrypt your session, so it better be something unique!).
### Configuration with docker
@ -93,7 +94,6 @@ Make sure that the following 2 lines are available in your docker-compose file:
!!! Danger "Security warning"
By using `8080:8080` in the docker port mapping, the API will be available to everyone connecting to the server under the correct port, so others may be able to control your bot.
## Rest API
### Consuming the API
@ -274,7 +274,7 @@ reload_config
Reload configuration.
show_config
Returns part of the configuration, relevant for trading operations.
start
@ -322,6 +322,73 @@ whitelist
```
### Message WebSocket
The API Server includes a websocket endpoint for subscribing to RPC messages from the freqtrade Bot.
This can be used to consume real-time data from your bot, such as entry/exit fill messages, whitelist changes, populated indicators for pairs, and more.
This is also used to set up [Producer/Consumer mode](producer-consumer.md) in Freqtrade.
Assuming your REST API is set to `127.0.0.1` on port `8080`, the endpoint is available at `ws://localhost:8080/api/v1/message/ws`.
To access the websocket endpoint, the `ws_token` is required as a query parameter in the endpoint URL.
To generate a safe `ws_token` you can run the following code:
``` python
>>> import secrets
>>> secrets.token_urlsafe(25)
'hZ-y58LXyX_HZ8O1cJzVyN6ePWrLpNQv4Q'
```
You would then add that token under `ws_token` in your `api_server` config. Like so:
``` json
"api_server": {
"enabled": true,
"listen_ip_address": "127.0.0.1",
"listen_port": 8080,
"verbosity": "error",
"enable_openapi": false,
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "Freqtrader",
"password": "SuperSecret1!",
"ws_token": "hZ-y58LXyX_HZ8O1cJzVyN6ePWrLpNQv4Q" // <-----
},
```
You can now connect to the endpoint at `ws://localhost:8080/api/v1/message/ws?token=hZ-y58LXyX_HZ8O1cJzVyN6ePWrLpNQv4Q`.
!!! Danger "Reuse of example tokens"
Please do not use the above example token. To make sure you are secure, generate a completely new token.
#### Using the WebSocket
Once connected to the WebSocket, the bot will broadcast RPC messages to anyone who is subscribed to them. To subscribe to a list of messages, you must send a JSON request through the WebSocket like the one below. The `data` key must be a list of message type strings.
``` json
{
"type": "subscribe",
"data": ["whitelist", "analyzed_df"] // A list of string message types
}
```
For a list of message types, please refer to the `RPCMessageType` enum in `freqtrade/enums/rpcmessagetype.py`.
Now anytime those types of RPC messages are sent in the bot, you will receive them through the WebSocket as long as the connection is active. They typically take the same form as the request:
``` json
{
"type": "analyzed_df",
"data": {
"key": ["NEO/BTC", "5m", "spot"],
"df": {}, // The dataframe
"la": "2022-09-08 22:14:41.457786+00:00"
}
}
```
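Outside of freqtrade, the same subscription flow can be scripted with any WebSocket client. Below is a rough sketch assuming the third-party `websockets` package (`pip install websockets`); the URL and token are placeholders:

``` python
import asyncio
import json

import websockets  # third-party package, not part of freqtrade


async def consume():
    url = "ws://localhost:8080/api/v1/message/ws?token=<your-ws_token>"
    async with websockets.connect(url) as ws:
        # Subscribe to the message types we want to receive
        await ws.send(json.dumps({
            "type": "subscribe",
            "data": ["whitelist", "analyzed_df"]
        }))
        while True:
            # Print each broadcast message as it arrives
            message = json.loads(await ws.recv())
            print(f"Received a {message['type']} message")


asyncio.run(consume())
```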
### OpenAPI interface
To enable the built-in OpenAPI interface (Swagger UI), specify `"enable_openapi": true` in the `api_server` configuration.

View File

@ -106,6 +106,12 @@ def custom_exit(self, pair: str, trade: Trade, current_time: datetime, current_r
!!! Note
`enter_tag` is limited to 100 characters; any remaining data will be truncated.
!!! Warning
There is only one `enter_tag` column, which is used for both long and short trades.
As a consequence, this column must be treated as "last write wins" (it's just a dataframe column after all).
In fancy situations, where multiple signals collide (or if signals are deactivated again based on different conditions), this can lead to odd results with the wrong tag applied to an entry signal.
These results are a consequence of the strategy overwriting prior tags - where the last tag will "stick" and will be the one freqtrade will use.
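To illustrate the overwrite (the conditions `cond_a` and `cond_b` below are hypothetical placeholders for two overlapping entry conditions):

``` python
# Each condition writes its own tag:
dataframe.loc[cond_a, ['enter_long', 'enter_tag']] = (1, 'signal_a')
dataframe.loc[cond_b, ['enter_long', 'enter_tag']] = (1, 'signal_b')
# For candles where both conditions are True, the second assignment
# overwrites the first, so the entry is tagged 'signal_b'.
```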
## Exit tag
Similar to [Buy Tagging](#buy-tag), you can also specify a sell tag.

View File

@ -34,6 +34,7 @@ dependencies:
- schedule
- python-dateutil
- joblib
- pyarrow
# ============================

View File

@ -211,6 +211,7 @@ def ask_user_config() -> Dict[str, Any]:
)
# Force JWT token to be a random string
answers['api_server_jwt_key'] = secrets.token_hex()
answers['api_server_ws_token'] = secrets.token_urlsafe(25)
return answers

View File

@ -440,7 +440,7 @@ AVAILABLE_CLI_OPTIONS = {
"dataformat_trades": Arg(
'--data-format-trades',
help='Storage format for downloaded trades data. (default: `jsongz`).',
choices=constants.AVAILABLE_DATAHANDLERS,
choices=constants.AVAILABLE_DATAHANDLERS_TRADES,
),
"show_timerange": Arg(
'--show-timerange',

View File

@ -1,4 +1,5 @@
import logging
from collections import Counter
from copy import deepcopy
from typing import Any, Dict
@ -85,6 +86,7 @@ def validate_config_consistency(conf: Dict[str, Any], preliminary: bool = False)
_validate_unlimited_amount(conf)
_validate_ask_orderbook(conf)
_validate_freqai_hyperopt(conf)
_validate_consumers(conf)
validate_migrated_strategy_settings(conf)
# validate configuration before returning
@ -332,6 +334,23 @@ def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
'Using analyze-per-epoch parameter is not supported with a FreqAI strategy.')
def _validate_consumers(conf: Dict[str, Any]) -> None:
emc_conf = conf.get('external_message_consumer', {})
if emc_conf.get('enabled', False):
if len(emc_conf.get('producers', [])) < 1:
raise OperationalException("You must specify at least 1 Producer to connect to.")
producer_names = [p['name'] for p in emc_conf.get('producers', [])]
duplicates = [item for item, count in Counter(producer_names).items() if count > 1]
if duplicates:
raise OperationalException(
f"Producer names must be unique. Duplicate: {', '.join(duplicates)}")
if conf.get('process_only_new_candles', True):
# Warning here or require it?
logger.warning("To receive best performance with external data, "
"please set `process_only_new_candles` to False")
def _strategy_settings(conf: Dict[str, Any]) -> None:
process_deprecated_setting(conf, None, 'use_sell_signal', None, 'use_exit_signal')

View File

@ -36,7 +36,8 @@ AVAILABLE_PAIRLISTS = ['StaticPairList', 'VolumePairList',
'PrecisionFilter', 'PriceFilter', 'RangeStabilityFilter',
'ShuffleFilter', 'SpreadFilter', 'VolatilityFilter']
AVAILABLE_PROTECTIONS = ['CooldownPeriod', 'LowProfitPairs', 'MaxDrawdown', 'StoplossGuard']
AVAILABLE_DATAHANDLERS = ['json', 'jsongz', 'hdf5']
AVAILABLE_DATAHANDLERS_TRADES = ['json', 'jsongz', 'hdf5']
AVAILABLE_DATAHANDLERS = AVAILABLE_DATAHANDLERS_TRADES + ['feather', 'parquet']
BACKTEST_BREAKDOWNS = ['day', 'week', 'month']
BACKTEST_CACHE_AGE = ['none', 'day', 'week', 'month']
BACKTEST_CACHE_DEFAULT = 'day'
@ -243,6 +244,7 @@ CONF_SCHEMA = {
'exchange': {'$ref': '#/definitions/exchange'},
'edge': {'$ref': '#/definitions/edge'},
'freqai': {'$ref': '#/definitions/freqai'},
'external_message_consumer': {'$ref': '#/definitions/external_message_consumer'},
'experimental': {
'type': 'object',
'properties': {
@ -404,6 +406,7 @@ CONF_SCHEMA = {
},
'username': {'type': 'string'},
'password': {'type': 'string'},
'ws_token': {'type': ['string', 'array'], 'items': {'type': 'string'}},
'jwt_secret_key': {'type': 'string'},
'CORS_origins': {'type': 'array', 'items': {'type': 'string'}},
'verbosity': {'type': 'string', 'enum': ['error', 'info']},
@ -432,7 +435,7 @@ CONF_SCHEMA = {
},
'dataformat_trades': {
'type': 'string',
'enum': AVAILABLE_DATAHANDLERS,
'enum': AVAILABLE_DATAHANDLERS_TRADES,
'default': 'jsongz'
},
'position_adjustment_enable': {'type': 'boolean'},
@ -488,6 +491,47 @@ CONF_SCHEMA = {
},
'required': ['process_throttle_secs', 'allowed_risk']
},
'external_message_consumer': {
'type': 'object',
'properties': {
'enabled': {'type': 'boolean', 'default': False},
'producers': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'host': {'type': 'string'},
'port': {
'type': 'integer',
'default': 8080,
'minimum': 0,
'maximum': 65535
},
'ws_token': {'type': 'string'},
},
'required': ['name', 'host', 'ws_token']
}
},
'wait_timeout': {'type': 'integer', 'minimum': 0},
'sleep_time': {'type': 'integer', 'minimum': 0},
'ping_timeout': {'type': 'integer', 'minimum': 0},
'remove_entry_exit_signals': {'type': 'boolean', 'default': False},
'initial_candle_limit': {
'type': 'integer',
'minimum': 0,
'maximum': 1500,
'default': 1500
},
'message_size_limit': { # In megabytes
'type': 'integer',
'minimum': 1,
'maximum': 20,
'default': 8,
}
},
'required': ['producers']
},
"freqai": {
"type": "object",
"properties": {

View File

@ -14,9 +14,10 @@ from pandas import DataFrame
from freqtrade.configuration import TimeRange
from freqtrade.constants import Config, ListPairsWithTimeframes, PairWithTimeframe
from freqtrade.data.history import load_pair_history
from freqtrade.enums import CandleType, RunMode
from freqtrade.enums import CandleType, RPCMessageType, RunMode
from freqtrade.exceptions import ExchangeError, OperationalException
from freqtrade.exchange import Exchange, timeframe_to_seconds
from freqtrade.rpc import RPCManager
from freqtrade.util import PeriodicCache
@ -28,17 +29,33 @@ MAX_DATAFRAME_CANDLES = 1000
class DataProvider:
def __init__(self, config: Config, exchange: Optional[Exchange], pairlists=None) -> None:
def __init__(
self,
config: Config,
exchange: Optional[Exchange],
pairlists=None,
rpc: Optional[RPCManager] = None
) -> None:
self._config = config
self._exchange = exchange
self._pairlists = pairlists
self.__rpc = rpc
self.__cached_pairs: Dict[PairWithTimeframe, Tuple[DataFrame, datetime]] = {}
self.__slice_index: Optional[int] = None
self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {}
self.__producer_pairs_df: Dict[str,
Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]] = {}
self.__producer_pairs: Dict[str, List[str]] = {}
self._msg_queue: deque = deque()
self._default_candle_type = self._config.get('candle_type_def', CandleType.SPOT)
self._default_timeframe = self._config.get('timeframe', '1h')
self.__msg_cache = PeriodicCache(
maxsize=1000, ttl=timeframe_to_seconds(self._config.get('timeframe', '1h')))
maxsize=1000, ttl=timeframe_to_seconds(self._default_timeframe))
self.producers = self._config.get('external_message_consumer', {}).get('producers', [])
self.external_data_enabled = len(self.producers) > 0
def _set_dataframe_max_index(self, limit_index: int):
"""
@ -63,9 +80,110 @@ class DataProvider:
:param dataframe: analyzed dataframe
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
self.__cached_pairs[(pair, timeframe, candle_type)] = (
pair_key = (pair, timeframe, candle_type)
self.__cached_pairs[pair_key] = (
dataframe, datetime.now(timezone.utc))
# For multiple producers we will want to merge the pairlists instead of overwriting
def _set_producer_pairs(self, pairlist: List[str], producer_name: str = "default"):
"""
Set the pairs received to later be used.
:param pairlist: List of pairs
:param producer_name: Name of the producer the pairlist came from
"""
self.__producer_pairs[producer_name] = pairlist
def get_producer_pairs(self, producer_name: str = "default") -> List[str]:
"""
Get the pairs cached from the producer
:param producer_name: Name of the producer to get pairs from
:returns: List of pairs
"""
return self.__producer_pairs.get(producer_name, []).copy()
def _emit_df(
self,
pair_key: PairWithTimeframe,
dataframe: DataFrame
) -> None:
"""
Send this dataframe as an ANALYZED_DF message to RPC
:param pair_key: PairWithTimeframe tuple
:param dataframe: The analyzed dataframe to send
"""
if self.__rpc:
self.__rpc.send_msg(
{
'type': RPCMessageType.ANALYZED_DF,
'data': {
'key': pair_key,
'df': dataframe,
'la': datetime.now(timezone.utc)
}
}
)
def _add_external_df(
self,
pair: str,
dataframe: DataFrame,
last_analyzed: datetime,
timeframe: str,
candle_type: CandleType,
producer_name: str = "default"
) -> None:
"""
Add the pair data to this class from an external source.
:param pair: pair to add the data for
:param dataframe: Analyzed dataframe received from the producer
:param last_analyzed: Time the dataframe was last analyzed
:param timeframe: Timeframe the data is for
:param candle_type: Any of the enum CandleType (must match trading mode!)
:param producer_name: Name of the producer the data came from
"""
pair_key = (pair, timeframe, candle_type)
if producer_name not in self.__producer_pairs_df:
self.__producer_pairs_df[producer_name] = {}
_last_analyzed = datetime.now(timezone.utc) if not last_analyzed else last_analyzed
self.__producer_pairs_df[producer_name][pair_key] = (dataframe, _last_analyzed)
logger.debug(f"External DataFrame for {pair_key} from {producer_name} added.")
def get_producer_df(
self,
pair: str,
timeframe: Optional[str] = None,
candle_type: Optional[CandleType] = None,
producer_name: str = "default"
) -> Tuple[DataFrame, datetime]:
"""
Get the pair data from producers.
:param pair: pair to get the data for
:param timeframe: Timeframe to get data for
:param candle_type: Any of the enum CandleType (must match trading mode!)
:param producer_name: Name of the producer to get the data from
:returns: Tuple of the DataFrame and last analyzed timestamp
"""
_timeframe = self._default_timeframe if not timeframe else timeframe
_candle_type = self._default_candle_type if not candle_type else candle_type
pair_key = (pair, _timeframe, _candle_type)
# If we have no data from this Producer yet
if producer_name not in self.__producer_pairs_df:
# We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
# If we do have data from that Producer, but no data on this pair_key
if pair_key not in self.__producer_pairs_df[producer_name]:
# We don't have this data yet, return empty DataFrame and datetime (01-01-1970)
return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
# We have it, return this data
df, la = self.__producer_pairs_df[producer_name][pair_key]
return (df.copy(), la)
def add_pairlisthandler(self, pairlists) -> None:
"""
Allow adding pairlisthandler after initialization
@ -90,8 +208,10 @@ class DataProvider:
if saved_pair not in self.__cached_pairs_backtesting:
timerange = TimeRange.parse_timerange(None if self._config.get(
'timerange') is None else str(self._config.get('timerange')))
# Move informative start time respecting startup_candle_count
startup_candles = self.get_required_startup(str(timeframe))
# It is not necessary to add the training candles, as they
# were already added at the beginning of the backtest.
startup_candles = self.get_required_startup(str(timeframe), False)
tf_seconds = timeframe_to_seconds(str(timeframe))
timerange.subtract_start(tf_seconds * startup_candles)
self.__cached_pairs_backtesting[saved_pair] = load_pair_history(
@ -105,7 +225,7 @@ class DataProvider:
)
return self.__cached_pairs_backtesting[saved_pair].copy()
def get_required_startup(self, timeframe: str) -> int:
def get_required_startup(self, timeframe: str, add_train_candles: bool = True) -> int:
freqai_config = self._config.get('freqai', {})
if not freqai_config.get('enabled', False):
return self._config.get('startup_candle_count', 0)
@ -115,7 +235,9 @@ class DataProvider:
# make sure the startupcandles is at least the set maximum indicator periods
self._config['startup_candle_count'] = max(startup_candles, max(indicator_periods))
tf_seconds = timeframe_to_seconds(timeframe)
train_candles = freqai_config['train_period_days'] * 86400 / tf_seconds
train_candles = 0
if add_train_candles:
train_candles = freqai_config['train_period_days'] * 86400 / tf_seconds
total_candles = int(self._config['startup_candle_count'] + train_candles)
logger.info(f'Increasing startup_candle_count for freqai to {total_candles}')
return total_candles

View File

@ -0,0 +1,130 @@
import logging
from typing import Optional
from pandas import DataFrame, read_feather, to_datetime
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList
from freqtrade.enums import CandleType
from .idatahandler import IDataHandler
logger = logging.getLogger(__name__)
class FeatherDataHandler(IDataHandler):
_columns = DEFAULT_DATAFRAME_COLUMNS
def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
"""
Store ohlcv data in feather format.
:param pair: Pair - used to generate filename
:param timeframe: Timeframe - used to generate filename
:param data: Dataframe containing OHLCV data
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: None
"""
filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type)
self.create_dir_if_needed(filename)
data.reset_index(drop=True).loc[:, self._columns].to_feather(
filename, compression_level=9, compression='lz4')
def _ohlcv_load(self, pair: str, timeframe: str,
timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
Timerange trimming and dataframe validation happens outside of this method.
:param pair: Pair to load data
:param timeframe: Timeframe (e.g. "5m")
:param timerange: Limit data to be loaded to this timerange.
Optionally implemented by subclasses to avoid loading
all data where possible.
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: DataFrame with ohlcv data, or empty DataFrame
"""
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type)
if not filename.exists():
# Fallback mode for 1M files
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
if not filename.exists():
return DataFrame(columns=self._columns)
pairdata = read_feather(filename)
pairdata.columns = self._columns
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'],
unit='ms',
utc=True,
infer_datetime_format=True)
return pairdata
def ohlcv_append(
self,
pair: str,
timeframe: str,
data: DataFrame,
candle_type: CandleType
) -> None:
"""
Append data to existing data structures
:param pair: Pair
:param timeframe: Timeframe this ohlcv data is for
:param data: Data to append.
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
raise NotImplementedError()
def trades_store(self, pair: str, data: TradeList) -> None:
"""
Store trades data (list of Dicts) to file
:param pair: Pair - used for filename
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
# filename = self._pair_trades_filename(self._datadir, pair)
raise NotImplementedError()
# array = pa.array(data)
# array
# feather.write_feather(data, filename)
def trades_append(self, pair: str, data: TradeList):
"""
Append data to existing files
:param pair: Pair - used for filename
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
raise NotImplementedError()
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
"""
Load trades for a pair from file (not yet implemented for the feather format).
# TODO: respect timerange ...
:param pair: Load trades for this pair
:param timerange: Timerange to load trades for - currently not implemented
:return: List of trades
"""
raise NotImplementedError()
# filename = self._pair_trades_filename(self._datadir, pair)
# tradesdata = misc.file_load_json(filename)
# if not tradesdata:
# return []
# return tradesdata
@classmethod
def _get_file_extension(cls):
return "feather"

View File

@ -81,6 +81,7 @@ class HDF5DataHandler(IDataHandler):
raise ValueError("Wrong dataframe format")
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata = pairdata.reset_index(drop=True)
return pairdata
def ohlcv_append(

View File

@ -375,6 +375,12 @@ def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
elif datatype == 'hdf5':
from .hdf5datahandler import HDF5DataHandler
return HDF5DataHandler
elif datatype == 'feather':
from .featherdatahandler import FeatherDataHandler
return FeatherDataHandler
elif datatype == 'parquet':
from .parquetdatahandler import ParquetDataHandler
return ParquetDataHandler
else:
raise ValueError(f"No datahandler for datatype {datatype} available.")

View File

@ -0,0 +1,129 @@
import logging
from typing import Optional
from pandas import DataFrame, read_parquet, to_datetime
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, TradeList
from freqtrade.enums import CandleType
from .idatahandler import IDataHandler
logger = logging.getLogger(__name__)
class ParquetDataHandler(IDataHandler):
_columns = DEFAULT_DATAFRAME_COLUMNS
def ohlcv_store(
self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
"""
Store ohlcv data in parquet format.
:param pair: Pair - used to generate filename
:param timeframe: Timeframe - used to generate filename
:param data: Dataframe containing OHLCV data
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: None
"""
filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type)
self.create_dir_if_needed(filename)
data.reset_index(drop=True).loc[:, self._columns].to_parquet(filename)
def _ohlcv_load(self, pair: str, timeframe: str,
timerange: Optional[TimeRange], candle_type: CandleType
) -> DataFrame:
"""
Internal method used to load data for one pair from disk.
Implements the loading and conversion to a Pandas dataframe.
Timerange trimming and dataframe validation happens outside of this method.
:param pair: Pair to load data
:param timeframe: Timeframe (e.g. "5m")
:param timerange: Limit data to be loaded to this timerange.
Optionally implemented by subclasses to avoid loading
all data where possible.
:param candle_type: Any of the enum CandleType (must match trading mode!)
:return: DataFrame with ohlcv data, or empty DataFrame
"""
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type)
if not filename.exists():
# Fallback mode for 1M files
filename = self._pair_data_filename(
self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
if not filename.exists():
return DataFrame(columns=self._columns)
pairdata = read_parquet(filename)
pairdata.columns = self._columns
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'],
unit='ms',
utc=True,
infer_datetime_format=True)
return pairdata
def ohlcv_append(
self,
pair: str,
timeframe: str,
data: DataFrame,
candle_type: CandleType
) -> None:
"""
Append data to existing data structures
:param pair: Pair
:param timeframe: Timeframe this ohlcv data is for
:param data: Data to append.
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
raise NotImplementedError()
def trades_store(self, pair: str, data: TradeList) -> None:
"""
Store trades data (list of Dicts) to file
:param pair: Pair - used for filename
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
# filename = self._pair_trades_filename(self._datadir, pair)
raise NotImplementedError()
# array = pa.array(data)
# array
# feather.write_feather(data, filename)
def trades_append(self, pair: str, data: TradeList):
"""
Append data to existing files
:param pair: Pair - used for filename
:param data: List of Lists containing trade data,
column sequence as in DEFAULT_TRADES_COLUMNS
"""
raise NotImplementedError()
def _trades_load(self, pair: str, timerange: Optional[TimeRange] = None) -> TradeList:
"""
Load trades for a pair from file (not yet implemented for the parquet format).
# TODO: respect timerange ...
:param pair: Load trades for this pair
:param timerange: Timerange to load trades for - currently not implemented
:return: List of trades
"""
raise NotImplementedError()
# filename = self._pair_trades_filename(self._datadir, pair)
# tradesdata = misc.file_load_json(filename)
# if not tradesdata:
# return []
# return tradesdata
@classmethod
def _get_file_extension(cls):
return "parquet"

View File

@ -6,7 +6,7 @@ from freqtrade.enums.exittype import ExitType
from freqtrade.enums.hyperoptstate import HyperoptState
from freqtrade.enums.marginmode import MarginMode
from freqtrade.enums.ordertypevalue import OrderTypeValues
from freqtrade.enums.rpcmessagetype import RPCMessageType
from freqtrade.enums.rpcmessagetype import RPCMessageType, RPCRequestType
from freqtrade.enums.runmode import NON_UTIL_MODES, OPTIMIZE_MODES, TRADING_MODES, RunMode
from freqtrade.enums.signaltype import SignalDirection, SignalTagType, SignalType
from freqtrade.enums.state import State

View File

@ -1,7 +1,7 @@
from enum import Enum
class RPCMessageType(Enum):
class RPCMessageType(str, Enum):
STATUS = 'status'
WARNING = 'warning'
STARTUP = 'startup'
@ -19,8 +19,19 @@ class RPCMessageType(Enum):
STRATEGY_MSG = 'strategy_msg'
WHITELIST = 'whitelist'
ANALYZED_DF = 'analyzed_df'
def __repr__(self):
return self.value
def __str__(self):
return self.value
# Enum for parsing requests from ws consumers
class RPCRequestType(str, Enum):
SUBSCRIBE = 'subscribe'
WHITELIST = 'whitelist'
ANALYZED_DF = 'analyzed_df'

View File

@ -19209,4 +19209,4 @@
}
}
]
}
}

View File

@ -2891,7 +2891,7 @@ def amount_to_contracts(amount: float, contract_size: Optional[float]) -> float:
:return: num-contracts
"""
if contract_size and contract_size != 1:
return amount / contract_size
return float(FtPrecise(amount) / FtPrecise(contract_size))
else:
return amount
@ -2905,7 +2905,7 @@ def contracts_to_amount(num_contracts: float, contract_size: Optional[float]) ->
"""
if contract_size and contract_size != 1:
return num_contracts * contract_size
return float(FtPrecise(num_contracts) * FtPrecise(contract_size))
else:
return num_contracts

View File

@ -1,4 +1,5 @@
import logging
from time import time
from typing import Any, Tuple
import numpy as np
@ -32,7 +33,9 @@ class BaseClassifierModel(IFreqaiModel):
:model: Trained model which can be used to inference (self.predict)
"""
logger.info("-------------------- Starting training " f"{pair} --------------------")
logger.info(f"-------------------- Starting training {pair} --------------------")
start_time = time()
# filter the features requested by user in the configuration file and elegantly handle NaNs
features_filtered, labels_filtered = dk.filter_features(
@ -45,10 +48,10 @@ class BaseClassifierModel(IFreqaiModel):
start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
logger.info(f"-------------------- Training on data from {start_date} to "
f"{end_date}--------------------")
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)
@ -57,13 +60,16 @@ class BaseClassifierModel(IFreqaiModel):
self.data_cleaning_train(dk)
logger.info(
f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
)
logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")
model = self.fit(data_dictionary, dk)
logger.info(f"--------------------done training {pair}--------------------")
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
return model

View File

@ -1,4 +1,5 @@
import logging
from time import time
from typing import Any, Tuple
import numpy as np
@ -31,7 +32,9 @@ class BaseRegressionModel(IFreqaiModel):
:model: Trained model which can be used to inference (self.predict)
"""
logger.info("-------------------- Starting training " f"{pair} --------------------")
logger.info(f"-------------------- Starting training {pair} --------------------")
start_time = time()
# filter the features requested by user in the configuration file and elegantly handle NaNs
features_filtered, labels_filtered = dk.filter_features(
@ -44,10 +47,10 @@ class BaseRegressionModel(IFreqaiModel):
start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
logger.info(f"-------------------- Training on data from {start_date} to "
f"{end_date}--------------------")
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)
@ -56,13 +59,16 @@ class BaseRegressionModel(IFreqaiModel):
self.data_cleaning_train(dk)
logger.info(
f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
)
logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")
model = self.fit(data_dictionary, dk)
logger.info(f"--------------------done training {pair}--------------------")
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
return model

View File

@ -1,4 +1,5 @@
import logging
from time import time
from typing import Any
from pandas import DataFrame
@ -28,7 +29,9 @@ class BaseTensorFlowModel(IFreqaiModel):
:model: Trained model which can be used to inference (self.predict)
"""
logger.info("-------------------- Starting training " f"{pair} --------------------")
logger.info(f"-------------------- Starting training {pair} --------------------")
start_time = time()
# filter the features requested by user in the configuration file and elegantly handle NaNs
features_filtered, labels_filtered = dk.filter_features(
@ -41,10 +44,10 @@ class BaseTensorFlowModel(IFreqaiModel):
start_date = unfiltered_df["date"].iloc[0].strftime("%Y-%m-%d")
end_date = unfiltered_df["date"].iloc[-1].strftime("%Y-%m-%d")
logger.info(f"-------------------- Training on data from {start_date} to "
f"{end_date}--------------------")
f"{end_date} --------------------")
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get('fit_live_predictions', 0) or not self.live:
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)
@ -53,12 +56,15 @@ class BaseTensorFlowModel(IFreqaiModel):
self.data_cleaning_train(dk)
logger.info(
f'Training model on {len(dk.data_dictionary["train_features"].columns)}' " features"
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
)
logger.info(f'Training model on {len(data_dictionary["train_features"])} data points')
logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")
model = self.fit(data_dictionary, dk)
logger.info(f"--------------------done training {pair}--------------------")
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
return model

View File

@ -1,4 +1,3 @@
from joblib import Parallel
from sklearn.multioutput import MultiOutputRegressor, _fit_estimator
from sklearn.utils.fixes import delayed

View File

@ -313,6 +313,7 @@ class FreqaiDataDrawer:
"""
dk.find_features(dataframe)
dk.find_labels(dataframe)
full_labels = dk.label_list + dk.unique_class_list
@ -376,7 +377,27 @@ class FreqaiDataDrawer:
if self.config.get("freqai", {}).get("purge_old_models", False):
self.purge_old_models()
# Functions pulled back from FreqaiDataKitchen because they relied on DataDrawer
def save_metadata(self, dk: FreqaiDataKitchen) -> None:
"""
Saves only metadata for backtesting studies if user prefers
not to save model data. This saves tremendous amounts of space
for users generating huge studies.
This is only active when `save_backtest_models` is false (which is not the default).
"""
if not dk.data_path.is_dir():
dk.data_path.mkdir(parents=True, exist_ok=True)
save_path = Path(dk.data_path)
dk.data["data_path"] = str(dk.data_path)
dk.data["model_filename"] = str(dk.model_filename)
dk.data["training_features_list"] = list(dk.data_dictionary["train_features"].columns)
dk.data["label_list"] = dk.label_list
with open(save_path / f"{dk.model_filename}_metadata.json", "w") as fp:
rapidjson.dump(dk.data, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE)
return
def save_data(self, model: Any, coin: str, dk: FreqaiDataKitchen) -> None:
"""
@ -430,6 +451,16 @@ class FreqaiDataDrawer:
return
def load_metadata(self, dk: FreqaiDataKitchen) -> None:
"""
Load only metadata into datakitchen to increase performance during
presaved backtesting (prediction file loading).
"""
with open(dk.data_path / f"{dk.model_filename}_metadata.json", "r") as fp:
dk.data = json.load(fp)
dk.training_features_list = dk.data["training_features_list"]
dk.label_list = dk.data["label_list"]
def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any:
"""
loads all data required to make a prediction on a sub-train time range

View File

@ -466,27 +466,6 @@ class FreqaiDataKitchen:
return df
def remove_training_from_backtesting(
self
) -> DataFrame:
"""
Function which takes the backtesting time range and
removes training data from the dataframe, keeping only the
startup_candle_count candles
"""
startup_candle_count = self.config.get('startup_candle_count', 0)
tf = self.config['timeframe']
tr = self.config["timerange"]
backtesting_timerange = TimeRange.parse_timerange(tr)
if startup_candle_count > 0 and backtesting_timerange:
backtesting_timerange.subtract_start(timeframe_to_seconds(tf) * startup_candle_count)
start = datetime.fromtimestamp(backtesting_timerange.startts, tz=timezone.utc)
df = self.return_dataframe
df = df.loc[df["date"] >= start, :]
return df
def principal_component_analysis(self) -> None:
"""
Performs Principal Component Analysis on the data for dimensionality reduction
@ -775,12 +754,22 @@ class FreqaiDataKitchen:
def compute_inlier_metric(self, set_='train') -> None:
"""
Compute inlier metric from backwards distance distributions.
This metric defines how well features from a timepoint fit
into previous timepoints.
"""
def normalise(dataframe: DataFrame, key: str) -> DataFrame:
if set_ == 'train':
min_value = dataframe.min()
max_value = dataframe.max()
self.data[f'{key}_min'] = min_value
self.data[f'{key}_max'] = max_value
else:
min_value = self.data[f'{key}_min']
max_value = self.data[f'{key}_max']
return (dataframe - min_value) / (max_value - min_value)
no_prev_pts = self.freqai_config["feature_parameters"]["inlier_metric_window"]
if set_ == 'train':
@ -825,7 +814,12 @@ class FreqaiDataKitchen:
inliers = pd.DataFrame(index=distances.index)
for key in distances.keys():
current_distances = distances[key].dropna()
fit_params = stats.weibull_min.fit(current_distances)
current_distances = normalise(current_distances, key)
if set_ == 'train':
fit_params = stats.weibull_min.fit(current_distances)
self.data[f'{key}_fit_params'] = fit_params
else:
fit_params = self.data[f'{key}_fit_params']
quantiles = stats.weibull_min.cdf(current_distances, *fit_params)
df_inlier = pd.DataFrame(
@ -837,7 +831,7 @@ class FreqaiDataKitchen:
inlier_metric = pd.DataFrame(
data=inliers.sum(axis=1) / no_prev_pts,
columns=['inlier_metric'],
columns=['%-inlier_metric'],
index=compute_df.index
)
@ -887,11 +881,14 @@ class FreqaiDataKitchen:
"""
column_names = dataframe.columns
features = [c for c in column_names if "%" in c]
labels = [c for c in column_names if "&" in c]
if not features:
raise OperationalException("Could not find any features!")
self.training_features_list = features
def find_labels(self, dataframe: DataFrame) -> None:
column_names = dataframe.columns
labels = [c for c in column_names if "&" in c]
self.label_list = labels
def check_if_pred_in_training_spaces(self) -> None:
@ -979,8 +976,6 @@ class FreqaiDataKitchen:
to_keep = [col for col in dataframe.columns if not col.startswith("&")]
self.return_dataframe = pd.concat([dataframe[to_keep], self.full_df], axis=1)
self.return_dataframe = self.remove_training_from_backtesting()
self.full_df = DataFrame()
return
@ -1214,7 +1209,8 @@ class FreqaiDataKitchen:
def get_unique_classes_from_labels(self, dataframe: DataFrame) -> None:
self.find_features(dataframe)
# self.find_features(dataframe)
self.find_labels(dataframe)
for key in self.label_list:
if dataframe[key].dtype == object:
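The `normalise` closure added to `compute_inlier_metric` above persists the train-set extrema so that test-time data is scaled against the same bounds. A minimal standalone sketch of that contract (names here are illustrative, not part of the diff):

import pandas as pd

store = {}  # stands in for self.data

def normalise(series: pd.Series, key: str, is_train: bool) -> pd.Series:
    if is_train:
        store[f'{key}_min'], store[f'{key}_max'] = series.min(), series.max()
    lo, hi = store[f'{key}_min'], store[f'{key}_max']
    return (series - lo) / (hi - lo)

train = pd.Series([1.0, 2.0, 3.0])
test = pd.Series([2.5, 4.0])
normalise(train, 'dist', is_train=True)         # stores min=1.0, max=3.0
print(normalise(test, 'dist', is_train=False))  # scaled with the train extrema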

View File

@ -65,7 +65,7 @@ class IFreqaiModel(ABC):
self.first = True
self.set_full_path()
self.follow_mode: bool = self.freqai_info.get("follow_mode", False)
self.save_backtest_models: bool = self.freqai_info.get("save_backtest_models", False)
self.save_backtest_models: bool = self.freqai_info.get("save_backtest_models", True)
if self.save_backtest_models:
logger.info('Backtesting module configured to save all models.')
self.dd = FreqaiDataDrawer(Path(self.full_path), self.config, self.follow_mode)
@ -92,6 +92,7 @@ class IFreqaiModel(ABC):
self.begin_time_train: float = 0
self.base_tf_seconds = timeframe_to_seconds(self.config['timeframe'])
self.continual_learning = self.freqai_info.get('continual_learning', False)
self.plot_features = self.ft_params.get("plot_feature_importances", 0)
self._threads: List[threading.Thread] = []
self._stop_event = threading.Event()
@ -244,7 +245,8 @@ class IFreqaiModel(ABC):
# following tr_train. Both of these windows slide through the
# entire backtest
for tr_train, tr_backtest in zip(dk.training_timeranges, dk.backtesting_timeranges):
(_, _, _) = self.dd.get_pair_dict_info(metadata["pair"])
pair = metadata["pair"]
(_, _, _) = self.dd.get_pair_dict_info(pair)
train_it += 1
total_trains = len(dk.backtesting_timeranges)
self.training_timerange = tr_train
@ -259,40 +261,42 @@ class IFreqaiModel(ABC):
tr_train.stopts,
tz=timezone.utc).strftime(DATETIME_PRINT_FORMAT)
logger.info(
f"Training {metadata['pair']}, {self.pair_it}/{self.total_pairs} pairs"
f"Training {pair}, {self.pair_it}/{self.total_pairs} pairs"
f" from {tr_train_startts_str} to {tr_train_stopts_str}, {train_it}/{total_trains} "
"trains"
)
trained_timestamp_int = int(trained_timestamp.stopts)
dk.data_path = Path(
dk.full_path
/
f"sub-train-{metadata['pair'].split('/')[0]}_{trained_timestamp_int}"
dk.full_path / f"sub-train-{pair.split('/')[0]}_{trained_timestamp_int}"
)
dk.set_new_model_names(metadata["pair"], trained_timestamp)
dk.set_new_model_names(pair, trained_timestamp)
if dk.check_if_backtest_prediction_exists():
self.dd.load_metadata(dk)
self.check_if_feature_list_matches_strategy(dataframe_train, dk)
append_df = dk.get_backtesting_prediction()
dk.append_predictions(append_df)
else:
if not self.model_exists(
metadata["pair"], dk, trained_timestamp=trained_timestamp_int
):
if not self.model_exists(dk):
dk.find_features(dataframe_train)
self.model = self.train(dataframe_train, metadata["pair"], dk)
self.dd.pair_dict[metadata["pair"]]["trained_timestamp"] = int(
dk.find_labels(dataframe_train)
self.model = self.train(dataframe_train, pair, dk)
self.dd.pair_dict[pair]["trained_timestamp"] = int(
trained_timestamp.stopts)
if self.plot_features:
plot_feature_importance(self.model, pair, dk, self.plot_features)
if self.save_backtest_models:
logger.info('Saving backtest model to disk.')
self.dd.save_data(self.model, metadata["pair"], dk)
self.dd.save_data(self.model, pair, dk)
else:
logger.info('Saving metadata to disk.')
self.dd.save_metadata(dk)
else:
self.model = self.dd.load_data(metadata["pair"], dk)
self.check_if_feature_list_matches_strategy(dataframe_train, dk)
self.model = self.dd.load_data(pair, dk)
# self.check_if_feature_list_matches_strategy(dataframe_train, dk)
pred_df, do_preds = self.predict(dataframe_backtest, dk)
append_df = dk.get_predictions_to_append(pred_df, do_preds)
dk.append_predictions(append_df)
@ -371,8 +375,7 @@ class IFreqaiModel(ABC):
self.dd.return_null_values_to_strategy(dataframe, dk)
return dk
# ensure user is feeding the correct indicators to the model
self.check_if_feature_list_matches_strategy(dataframe, dk)
dk.find_labels(dataframe)
self.build_strategy_return_arrays(dataframe, dk, metadata["pair"], trained_timestamp)
@ -430,14 +433,16 @@ class IFreqaiModel(ABC):
if "training_features_list_raw" in dk.data:
feature_list = dk.data["training_features_list_raw"]
else:
feature_list = dk.training_features_list
feature_list = dk.data['training_features_list']
if dk.training_features_list != feature_list:
raise OperationalException(
"Trying to access pretrained model with `identifier` "
"but found different features furnished by current strategy. "
"Change `identifier` to train from scratch, or ensure the "
"strategy is furnishing the same features as the pretrained "
"model"
"model. In case of --strategy-list, please be aware that FreqAI "
"requires all strategies to maintain identical "
"populate_any_indicator() functions"
)
def data_cleaning_train(self, dk: FreqaiDataKitchen) -> None:
@ -489,7 +494,7 @@ class IFreqaiModel(ABC):
if ft_params.get(
"principal_component_analysis", False
):
dk.pca_transform(self.dk.data_dictionary['prediction_features'])
dk.pca_transform(dk.data_dictionary['prediction_features'])
if ft_params.get("use_SVM_to_remove_outliers", False):
dk.use_SVM_to_remove_outliers(predict=True)
@ -500,14 +505,10 @@ class IFreqaiModel(ABC):
if ft_params.get("use_DBSCAN_to_remove_outliers", False):
dk.use_DBSCAN_to_remove_outliers(predict=True)
def model_exists(
self,
pair: str,
dk: FreqaiDataKitchen,
trained_timestamp: int = None,
model_filename: str = "",
scanning: bool = False,
) -> bool:
# ensure user is feeding the correct indicators to the model
self.check_if_feature_list_matches_strategy(dk.data_dictionary['prediction_features'], dk)
def model_exists(self, dk: FreqaiDataKitchen) -> bool:
"""
Given a FreqaiDataKitchen, check if a model already exists
:param dk: FreqaiDataKitchen object
@ -515,11 +516,11 @@ class IFreqaiModel(ABC):
:return:
:boolean: whether the model file exists or not.
"""
path_to_modelfile = Path(dk.data_path / f"{model_filename}_model.joblib")
path_to_modelfile = Path(dk.data_path / f"{dk.model_filename}_model.joblib")
file_exists = path_to_modelfile.is_file()
if file_exists and not scanning:
if file_exists:
logger.info("Found model at %s", dk.data_path / dk.model_filename)
elif not scanning:
else:
logger.info("Could not find model at %s", dk.data_path / dk.model_filename)
return file_exists
@ -566,6 +567,7 @@ class IFreqaiModel(ABC):
# find the features indicated by strategy and store in datakitchen
dk.find_features(unfiltered_dataframe)
dk.find_labels(unfiltered_dataframe)
model = self.train(unfiltered_dataframe, pair, dk)
@ -573,8 +575,8 @@ class IFreqaiModel(ABC):
dk.set_new_model_names(pair, new_trained_timerange)
self.dd.save_data(model, pair, dk)
if self.freqai_info["feature_parameters"].get("plot_feature_importance", False):
plot_feature_importance(model, pair, dk)
if self.plot_features:
plot_feature_importance(model, pair, dk, self.plot_features)
if self.freqai_info.get("purge_old_models", False):
self.dd.purge_old_models()
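Note the rename from the boolean `plot_feature_importance` to `plot_feature_importances`, which is read as an integer and passed through to `plot_feature_importance()` as the number of features to plot. An illustrative config fragment, assuming the integer semantics shown above:

# Hypothetical config fragment; `plot_feature_importances` is now a count:
freqai_config = {
    "feature_parameters": {
        "plot_feature_importances": 25  # number of features to include in the plot
    }
}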

View File

@ -170,7 +170,7 @@ def plot_feature_importance(model: Any, pair: str, dk: FreqaiDataKitchen,
# Data preparation
fi_df = pd.DataFrame({
"feature_names": np.array(dk.training_features_list),
"feature_names": np.array(dk.data_dictionary['train_features'].columns),
"feature_importance": np.array(feature_importance)
})
fi_df_top = fi_df.nlargest(count_max, "feature_importance")[::-1]

View File

@ -29,6 +29,7 @@ from freqtrade.plugins.pairlistmanager import PairListManager
from freqtrade.plugins.protectionmanager import ProtectionManager
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
from freqtrade.rpc import RPCManager
from freqtrade.rpc.external_message_consumer import ExternalMessageConsumer
from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.util import FtPrecise
@ -72,6 +73,8 @@ class FreqtradeBot(LoggingMixin):
PairLocks.timeframe = self.config['timeframe']
self.pairlists = PairListManager(self.exchange, self.config)
# RPC runs in separate threads, can start handling external commands just after
# initialization, even before Freqtradebot has a chance to start its throttling,
# so anything in the Freqtradebot instance should be ready (initialized), including
@ -79,9 +82,7 @@ class FreqtradeBot(LoggingMixin):
# Keep this at the end of this initialization method.
self.rpc: RPCManager = RPCManager(self)
self.pairlists = PairListManager(self.exchange, self.config)
self.dataprovider = DataProvider(self.config, self.exchange, self.pairlists)
self.dataprovider = DataProvider(self.config, self.exchange, self.pairlists, self.rpc)
# Attach Dataprovider to strategy instance
self.strategy.dp = self.dataprovider
@ -92,6 +93,10 @@ class FreqtradeBot(LoggingMixin):
self.edge = Edge(self.config, self.exchange, self.strategy) if \
self.config.get('edge', {}).get('enabled', False) else None
# Init ExternalMessageConsumer if enabled
self.emc = ExternalMessageConsumer(self.config, self.dataprovider) if \
self.config.get('external_message_consumer', {}).get('enabled', False) else None
self.active_pair_whitelist = self._refresh_active_whitelist()
# Set initial bot state from config
@ -151,9 +156,11 @@ class FreqtradeBot(LoggingMixin):
finally:
self.strategy.ft_bot_cleanup()
self.rpc.cleanup()
Trade.commit()
self.exchange.close()
self.rpc.cleanup()
if self.emc:
self.emc.shutdown()
Trade.commit()
self.exchange.close()
def startup(self) -> None:
"""
@ -254,6 +261,7 @@ class FreqtradeBot(LoggingMixin):
pairs that have open trades.
"""
# Refresh whitelist
_prev_whitelist = self.pairlists.whitelist
self.pairlists.refresh_pairlist()
_whitelist = self.pairlists.whitelist
@ -266,6 +274,11 @@ class FreqtradeBot(LoggingMixin):
# Extend active-pair whitelist with pairs of open trades
# It ensures that candle (OHLCV) data are downloaded for open trades as well
_whitelist.extend([trade.pair for trade in trades if trade.pair not in _whitelist])
# Called last so the notification includes the pairs added for open trades
if _prev_whitelist != _whitelist:
self.rpc.send_msg({'type': RPCMessageType.WHITELIST, 'data': _whitelist})
return _whitelist
def get_free_open_trades(self) -> int:

View File

@ -10,9 +10,11 @@ from typing import Any, Iterator, List
from typing.io import IO
from urllib.parse import urlparse
import pandas
import rapidjson
from freqtrade.constants import DECIMAL_PER_COIN_FALLBACK, DECIMALS_PER_COIN
from freqtrade.enums import SignalTagType, SignalType
logger = logging.getLogger(__name__)
@ -249,3 +251,41 @@ def parse_db_uri_for_logging(uri: str):
return uri
pwd = parsed_db_uri.netloc.split(':')[1].split('@')[0]
return parsed_db_uri.geturl().replace(f':{pwd}@', ':*****@')
def dataframe_to_json(dataframe: pandas.DataFrame) -> str:
"""
Serialize a DataFrame for transmission over the wire using JSON
:param dataframe: A pandas DataFrame
:returns: A JSON string of the pandas DataFrame
"""
return dataframe.to_json(orient='split')
def json_to_dataframe(data: str) -> pandas.DataFrame:
"""
Deserialize JSON into a DataFrame
:param data: A JSON string
:returns: A pandas DataFrame from the JSON string
"""
dataframe = pandas.read_json(data, orient='split')
if 'date' in dataframe.columns:
dataframe['date'] = pandas.to_datetime(dataframe['date'], unit='ms', utc=True)
return dataframe
def remove_entry_exit_signals(dataframe: pandas.DataFrame):
"""
Remove Entry and Exit signals from a DataFrame
:param dataframe: The DataFrame to remove signals from
"""
dataframe[SignalType.ENTER_LONG.value] = 0
dataframe[SignalType.EXIT_LONG.value] = 0
dataframe[SignalType.ENTER_SHORT.value] = 0
dataframe[SignalType.EXIT_SHORT.value] = 0
dataframe[SignalTagType.ENTER_TAG.value] = None
dataframe[SignalTagType.EXIT_TAG.value] = None
return dataframe
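A quick round-trip through the helpers above (a sketch; the single-row frame is illustrative):

import pandas

df = pandas.DataFrame({
    'date': pandas.to_datetime(['2022-09-25 20:00:00'], utc=True),
    'close': [19000.5],
})
payload = dataframe_to_json(df)        # JSON string, orient='split'
restored = json_to_dataframe(payload)  # 'date' comes back as UTC datetimes
assert restored['close'].equals(df['close'])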

View File

@ -91,8 +91,8 @@ class Backtesting:
if self.config.get('strategy_list'):
if self.config.get('freqai', {}).get('enabled', False):
raise OperationalException(
"You can't use strategy_list and freqai at the same time.")
logger.warning("Using --strategy-list with FreqAI REQUIRES all strategies "
"to have identical populate_any_indicators.")
for strat in list(self.config['strategy_list']):
stratconf = deepcopy(self.config)
stratconf['strategy'] = strat
@ -139,9 +139,14 @@ class Backtesting:
# Get maximum required startup period
self.required_startup = max([strat.startup_candle_count for strat in self.strategylist])
self.exchange.validate_required_startup_candles(self.required_startup, self.timeframe)
if self.config.get('freqai', {}).get('enabled', False):
# For FreqAI, increase the required_startup to include the training data
self.required_startup = self.dataprovider.get_required_startup(self.timeframe)
# Add maximum startup candle count to configuration for informative pairs support
self.config['startup_candle_count'] = self.required_startup
self.exchange.validate_required_startup_candles(self.required_startup, self.timeframe)
self.trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT)
# strategies which define "can_short=True" will fail to load in Spot mode.
@ -217,7 +222,7 @@ class Backtesting:
pairs=self.pairlists.whitelist,
timeframe=self.timeframe,
timerange=self.timerange,
startup_candles=self.dataprovider.get_required_startup(self.timeframe),
startup_candles=self.config['startup_candle_count'],
fail_without_data=True,
data_format=self.config.get('dataformat_ohlcv', 'json'),
candle_type=self.config.get('candle_type_def', CandleType.SPOT)

View File

@ -1,8 +1,10 @@
import logging
import secrets
from datetime import datetime, timedelta
from typing import Any, Dict, Union
import jwt
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, Query, WebSocket, status
from fastapi.security import OAuth2PasswordBearer
from fastapi.security.http import HTTPBasic, HTTPBasicCredentials
@ -10,6 +12,8 @@ from freqtrade.rpc.api_server.api_schemas import AccessAndRefreshToken, AccessTo
from freqtrade.rpc.api_server.deps import get_api_config
logger = logging.getLogger(__name__)
ALGORITHM = "HS256"
router_login = APIRouter()
@ -25,7 +29,7 @@ httpbasic = HTTPBasic(auto_error=False)
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)
def get_user_from_token(token, secret_key: str, token_type: str = "access"):
def get_user_from_token(token, secret_key: str, token_type: str = "access") -> str:
credentials_exception = HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Could not validate credentials",
@ -44,6 +48,45 @@ def get_user_from_token(token, secret_key: str, token_type: str = "access"):
return username
# This should be reimplemented to better realign with the existing tools provided
# by FastAPI regarding API Tokens
# https://github.com/tiangolo/fastapi/blob/master/fastapi/security/api_key.py
async def validate_ws_token(
ws: WebSocket,
ws_token: Union[str, None] = Query(default=None, alias="token"),
api_config: Dict[str, Any] = Depends(get_api_config)
):
secret_ws_token = api_config.get('ws_token', None)
secret_jwt_key = api_config.get('jwt_secret_key', 'super-secret')
# Check if ws_token is/in secret_ws_token
if ws_token and secret_ws_token:
is_valid_ws_token = False
if isinstance(secret_ws_token, str):
is_valid_ws_token = secrets.compare_digest(secret_ws_token, ws_token)
elif isinstance(secret_ws_token, list):
is_valid_ws_token = any([
secrets.compare_digest(potential, ws_token)
for potential in secret_ws_token
])
if is_valid_ws_token:
return ws_token
# Check if ws_token is a JWT
try:
user = get_user_from_token(ws_token, secret_jwt_key)
return user
# If the token is a jwt, and it's valid return the user
except HTTPException:
pass
# No checks passed, deny the connection
logger.debug("Denying websocket request.")
# If it doesn't match, close the websocket connection
await ws.close(code=status.WS_1008_POLICY_VIOLATION)
def create_token(data: dict, secret_key: str, token_type: str = "access") -> str:
to_encode = data.copy()
if token_type == "access":
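Consumer-side, the token validated above is supplied as a query parameter on the websocket URL. A minimal sketch of a client connecting with it (host, port and token are placeholders; the "whitelist" string value is assumed from RPCRequestType):

import asyncio
import websockets

async def connect(token: str):
    url = f"ws://127.0.0.1:8080/api/v1/message/ws?token={token}"
    async with websockets.connect(url) as ws:
        await ws.send('{"type": "whitelist"}')
        print(await ws.recv())

# asyncio.run(connect("secret_ws_t0ken."))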

View File

@ -38,7 +38,8 @@ logger = logging.getLogger(__name__)
# 2.15: Add backtest history endpoints
# 2.16: Additional daily metrics
# 2.17: Forceentry - leverage, partial force_exit
API_VERSION = 2.17
# 2.20: Add websocket endpoints
API_VERSION = 2.20
# Public API, requires no auth.
router_public = APIRouter()

View File

@ -0,0 +1,140 @@
import logging
from typing import Any, Dict
from fastapi import APIRouter, Depends, WebSocketDisconnect
from fastapi.websockets import WebSocket, WebSocketState
from pydantic import ValidationError
from freqtrade.enums import RPCMessageType, RPCRequestType
from freqtrade.rpc.api_server.api_auth import validate_ws_token
from freqtrade.rpc.api_server.deps import get_channel_manager, get_rpc
from freqtrade.rpc.api_server.ws import WebSocketChannel
from freqtrade.rpc.api_server.ws_schemas import (WSAnalyzedDFMessage, WSMessageSchema,
WSRequestSchema, WSWhitelistMessage)
from freqtrade.rpc.rpc import RPC
logger = logging.getLogger(__name__)
# Private router, protected by API Key authentication
router = APIRouter()
async def is_websocket_alive(ws: WebSocket) -> bool:
"""
Check if a FastAPI Websocket is still open
"""
if (
ws.application_state == WebSocketState.CONNECTED and
ws.client_state == WebSocketState.CONNECTED
):
return True
return False
async def _process_consumer_request(
request: Dict[str, Any],
channel: WebSocketChannel,
rpc: RPC
):
"""
Validate and handle a request from a websocket consumer
"""
# Validate the request, making sure it matches the schema
try:
websocket_request = WSRequestSchema.parse_obj(request)
except ValidationError as e:
logger.error(f"Invalid request from {channel}: {e}")
return
type, data = websocket_request.type, websocket_request.data
response: WSMessageSchema
logger.debug(f"Request of type {type} from {channel}")
# If we have a request of type SUBSCRIBE, set the topics in this channel
if type == RPCRequestType.SUBSCRIBE:
# If the request is empty, do nothing
if not data:
return
# If all topics passed are a valid RPCMessageType, set subscriptions on channel
if all([any(x.value == topic for x in RPCMessageType) for topic in data]):
channel.set_subscriptions(data)
# We don't send a response for subscriptions
return
elif type == RPCRequestType.WHITELIST:
# Get whitelist
whitelist = rpc._ws_request_whitelist()
# Format response
response = WSWhitelistMessage(data=whitelist)
# Send it back
await channel.send(response.dict(exclude_none=True))
elif type == RPCRequestType.ANALYZED_DF:
limit = None
if data:
# Limit the amount of candles per dataframe to 'limit', capped at 1500
limit = min(data.get('limit', 1500), 1500)
# They requested the full historical analyzed dataframes
analyzed_df = rpc._ws_request_analyzed_df(limit)
# For every dataframe, send as a separate message
for _, message in analyzed_df.items():
response = WSAnalyzedDFMessage(data=message)
await channel.send(response.dict(exclude_none=True))
@router.websocket("/message/ws")
async def message_endpoint(
ws: WebSocket,
rpc: RPC = Depends(get_rpc),
channel_manager=Depends(get_channel_manager),
token: str = Depends(validate_ws_token)
):
"""
Message WebSocket endpoint, facilitates sending RPC messages
"""
try:
channel = await channel_manager.on_connect(ws)
if await is_websocket_alive(ws):
logger.info(f"Consumer connected - {channel}")
# Keep connection open until explicitly closed, and process requests
try:
while not channel.is_closed():
request = await channel.recv()
# Process the request here
await _process_consumer_request(request, channel, rpc)
except WebSocketDisconnect:
# Handle client disconnects
logger.info(f"Consumer disconnected - {channel}")
await channel_manager.on_disconnect(ws)
except Exception as e:
logger.info(f"Consumer connection failed - {channel}")
logger.exception(e)
# Handle cases like -
# RuntimeError('Cannot call "send" once a closed message has been sent')
await channel_manager.on_disconnect(ws)
else:
await ws.close()
except RuntimeError:
# WebSocket was closed
await channel_manager.on_disconnect(ws)
except Exception as e:
logger.error(f"Failed to serve - {ws.client}")
# Log tracebacks to keep track of what errors are happening
logger.exception(e)
await channel_manager.on_disconnect(ws)
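The request shapes handled by `_process_consumer_request` above, as plain payloads (values illustrative; the enum string values are assumed from RPCRequestType):

# Set the channel's subscriptions; no response is sent back:
{"type": "subscribe", "data": ["whitelist", "analyzed_df"]}
# Ask for the current whitelist; answered with a WSWhitelistMessage:
{"type": "whitelist", "data": None}
# Ask for historical analyzed dataframes, one message per pair:
{"type": "analyzed_df", "data": {"limit": 500}}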

View File

@ -41,6 +41,10 @@ def get_exchange(config=Depends(get_config)):
return ApiServer._exchange
def get_channel_manager():
return ApiServer._ws_channel_manager
def is_webserver_mode(config=Depends(get_config)):
if config['runmode'] != RunMode.WEBSERVER:
raise RPCException('Bot is not in the correct state')

View File

@ -1,16 +1,21 @@
import asyncio
import logging
from ipaddress import IPv4Address
from threading import Thread
from typing import Any, Dict
import orjson
import uvicorn
from fastapi import Depends, FastAPI
from fastapi.middleware.cors import CORSMiddleware
# Look into alternatives
from janus import Queue as ThreadedQueue
from starlette.responses import JSONResponse
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.rpc.api_server.uvicorn_threaded import UvicornServer
from freqtrade.rpc.api_server.ws import ChannelManager
from freqtrade.rpc.rpc import RPC, RPCException, RPCHandler
@ -44,6 +49,10 @@ class ApiServer(RPCHandler):
_config: Config = {}
# Exchange - only available in webserver mode.
_exchange = None
# websocket message queue stuff
_ws_channel_manager = None
_ws_thread = None
_ws_loop = None
def __new__(cls, *args, **kwargs):
"""
@ -61,17 +70,21 @@ class ApiServer(RPCHandler):
return
self._standalone: bool = standalone
self._server = None
self._ws_queue = None
self._ws_background_task = None
ApiServer.__initialized = True
api_config = self._config['api_server']
ApiServer._ws_channel_manager = ChannelManager()
self.app = FastAPI(title="Freqtrade API",
docs_url='/docs' if api_config.get('enable_openapi', False) else None,
redoc_url=None,
default_response_class=FTJSONResponse,
)
self.configure_app(self.app, self._config)
self.start_api()
def add_rpc_handler(self, rpc: RPC):
@ -93,6 +106,19 @@ class ApiServer(RPCHandler):
logger.info("Stopping API Server")
self._server.cleanup()
if self._ws_thread and self._ws_loop:
logger.info("Stopping API Server background tasks")
if self._ws_background_task:
# Cancel the queue task
self._ws_background_task.cancel()
self._ws_thread.join()
self._ws_thread = None
self._ws_loop = None
self._ws_background_task = None
@classmethod
def shutdown(cls):
cls.__initialized = False
@ -102,7 +128,9 @@ class ApiServer(RPCHandler):
cls._rpc = None
def send_msg(self, msg: Dict[str, str]) -> None:
pass
if self._ws_queue:
sync_q = self._ws_queue.sync_q
sync_q.put(msg)
def handle_rpc_exception(self, request, exc):
logger.exception(f"API Error calling: {exc}")
@ -116,6 +144,7 @@ class ApiServer(RPCHandler):
from freqtrade.rpc.api_server.api_backtest import router as api_backtest
from freqtrade.rpc.api_server.api_v1 import router as api_v1
from freqtrade.rpc.api_server.api_v1 import router_public as api_v1_public
from freqtrade.rpc.api_server.api_ws import router as ws_router
from freqtrade.rpc.api_server.web_ui import router_ui
app.include_router(api_v1_public, prefix="/api/v1")
@ -126,6 +155,7 @@ class ApiServer(RPCHandler):
app.include_router(api_backtest, prefix="/api/v1",
dependencies=[Depends(http_basic_or_jwt_token)],
)
app.include_router(ws_router, prefix="/api/v1")
app.include_router(router_login, prefix="/api/v1", tags=["auth"])
# UI Router MUST be last!
app.include_router(router_ui, prefix='')
@ -140,6 +170,48 @@ class ApiServer(RPCHandler):
app.add_exception_handler(RPCException, self.handle_rpc_exception)
def start_message_queue(self):
if self._ws_thread:
return
# Create a new loop, as it'll be just for the background thread
self._ws_loop = asyncio.new_event_loop()
# Start the thread
self._ws_thread = Thread(target=self._ws_loop.run_forever)
self._ws_thread.start()
# Finally, submit the coro to the thread
self._ws_background_task = asyncio.run_coroutine_threadsafe(
self._broadcast_queue_data(), loop=self._ws_loop)
async def _broadcast_queue_data(self):
# Instantiate the queue in this coroutine so it's attached to our loop
self._ws_queue = ThreadedQueue()
async_queue = self._ws_queue.async_q
try:
while True:
logger.debug("Getting queue messages...")
# Get data from queue
message = await async_queue.get()
logger.debug(f"Found message of type: {message.get('type')}")
# Broadcast it
await self._ws_channel_manager.broadcast(message)
# Sleep, make this configurable?
await asyncio.sleep(0.1)
except asyncio.CancelledError:
pass
# For testing, shouldn't happen when stable
except Exception as e:
logger.exception(f"Exception happened in background task: {e}")
finally:
# Disconnect channels and stop the loop on cancel
await self._ws_channel_manager.disconnect_all()
self._ws_loop.stop()
def start_api(self):
"""
Start API ... should be run in thread.
@ -177,6 +249,7 @@ class ApiServer(RPCHandler):
if self._standalone:
self._server.run()
else:
self.start_message_queue()
self._server.run_in_thread()
except Exception:
logger.exception("Api server failed to start.")
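The queue wiring above in one picture: producer threads use the synchronous side of the janus queue, while the dedicated websocket loop drains the asynchronous side and broadcasts (a sketch of the flow, not new API):

# bot / RPC thread:
#     ApiServer.send_msg(msg)  ->  self._ws_queue.sync_q.put(msg)
# websocket loop thread (_broadcast_queue_data):
#     message = await async_queue.get()
#     await self._ws_channel_manager.broadcast(message)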

View File

@ -0,0 +1,6 @@
# flake8: noqa: F401
# isort: off
from freqtrade.rpc.api_server.ws.types import WebSocketType
from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
from freqtrade.rpc.api_server.ws.serializer import HybridJSONWebSocketSerializer
from freqtrade.rpc.api_server.ws.channel import ChannelManager, WebSocketChannel

View File

@ -0,0 +1,178 @@
import logging
from threading import RLock
from typing import List, Optional, Type
from uuid import uuid4
from fastapi import WebSocket as FastAPIWebSocket
from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
from freqtrade.rpc.api_server.ws.serializer import (HybridJSONWebSocketSerializer,
WebSocketSerializer)
from freqtrade.rpc.api_server.ws.types import WebSocketType
logger = logging.getLogger(__name__)
class WebSocketChannel:
"""
Object to help facilitate managing a websocket connection
"""
def __init__(
self,
websocket: WebSocketType,
channel_id: Optional[str] = None,
serializer_cls: Type[WebSocketSerializer] = HybridJSONWebSocketSerializer
):
self.channel_id = channel_id if channel_id else uuid4().hex[:8]
# The WebSocket object
self._websocket = WebSocketProxy(websocket)
# The Serializing class for the WebSocket object
self._serializer_cls = serializer_cls
self._subscriptions: List[str] = []
# Internal event to signify a closed websocket
self._closed = False
# Wrap the WebSocket in the Serializing class
self._wrapped_ws = self._serializer_cls(self._websocket)
def __repr__(self):
return f"WebSocketChannel({self.channel_id}, {self.remote_addr})"
@property
def remote_addr(self):
return self._websocket.remote_addr
async def send(self, data):
"""
Send data on the wrapped websocket
"""
await self._wrapped_ws.send(data)
async def recv(self):
"""
Receive data on the wrapped websocket
"""
return await self._wrapped_ws.recv()
async def ping(self):
"""
Ping the websocket
"""
return await self._websocket.ping()
async def close(self):
"""
Close the WebSocketChannel
"""
self._closed = True
def is_closed(self) -> bool:
"""
Closed flag
"""
return self._closed
def set_subscriptions(self, subscriptions: List[str] = []) -> None:
"""
Set which subscriptions this channel is subscribed to
:param subscriptions: List of subscriptions, List[str]
"""
self._subscriptions = subscriptions
def subscribed_to(self, message_type: str) -> bool:
"""
Check if this channel is subscribed to the message_type
:param message_type: The message type to check
"""
return message_type in self._subscriptions
class ChannelManager:
def __init__(self):
self.channels = dict()
self._lock = RLock() # Re-entrant Lock
async def on_connect(self, websocket: WebSocketType):
"""
Wrap websocket connection into Channel and add to list
:param websocket: The WebSocket object to attach to the Channel
"""
if isinstance(websocket, FastAPIWebSocket):
try:
await websocket.accept()
except RuntimeError:
# The connection was closed before we could accept it
return
ws_channel = WebSocketChannel(websocket)
with self._lock:
self.channels[websocket] = ws_channel
return ws_channel
async def on_disconnect(self, websocket: WebSocketType):
"""
Call close on the channel if it's not already closed, and remove it from the channel list
:param websocket: The WebSocket object attached to the Channel
"""
with self._lock:
channel = self.channels.get(websocket)
if channel:
if not channel.is_closed():
await channel.close()
del self.channels[websocket]
async def disconnect_all(self):
"""
Disconnect all Channels
"""
with self._lock:
for websocket, channel in self.channels.items():
if not channel.is_closed():
await channel.close()
self.channels = dict()
async def broadcast(self, data):
"""
Broadcast data on all Channels
:param data: The data to send
"""
with self._lock:
message_type = data.get('type')
for websocket, channel in self.channels.items():
try:
if channel.subscribed_to(message_type):
await channel.send(data)
except RuntimeError:
# Handle cannot send after close cases
await self.on_disconnect(websocket)
async def send_direct(self, channel, data):
"""
Send data directly through direct_channel only
:param direct_channel: The WebSocketChannel object to send data through
:param data: The data to send
"""
await channel.send(data)
def has_channels(self):
"""
Flag for more than 0 channels
"""
return len(self.channels) > 0
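Putting the two classes together, the lifecycle from an endpoint's point of view looks roughly like this (a sketch assuming an async context and a connected FastAPI WebSocket `ws`):

manager = ChannelManager()

async def lifecycle(ws):
    channel = await manager.on_connect(ws)      # wrap + accept
    channel.set_subscriptions(['whitelist'])    # opt in to message types
    await manager.broadcast({'type': 'whitelist', 'data': ['BTC/USDT']})
    await manager.on_disconnect(ws)             # close + drop from the registry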

View File

@ -0,0 +1,69 @@
from typing import Any, Tuple, Union
from fastapi import WebSocket as FastAPIWebSocket
from websockets.client import WebSocketClientProtocol as WebSocket
from freqtrade.rpc.api_server.ws.types import WebSocketType
class WebSocketProxy:
"""
WebSocketProxy object to bring the FastAPIWebSocket and websockets.WebSocketClientProtocol
under the same API
"""
def __init__(self, websocket: WebSocketType):
self._websocket: Union[FastAPIWebSocket, WebSocket] = websocket
@property
def remote_addr(self) -> Tuple[Any, ...]:
if isinstance(self._websocket, WebSocket):
return self._websocket.remote_address
elif isinstance(self._websocket, FastAPIWebSocket):
if self._websocket.client:
client, port = self._websocket.client.host, self._websocket.client.port
return (client, port)
return ("unknown", 0)
async def send(self, data):
"""
Send data on the wrapped websocket
"""
if hasattr(self._websocket, "send_text"):
await self._websocket.send_text(data)
else:
await self._websocket.send(data)
async def recv(self):
"""
Receive data on the wrapped websocket
"""
if hasattr(self._websocket, "receive_text"):
return await self._websocket.receive_text()
else:
return await self._websocket.recv()
async def ping(self):
"""
Ping the websocket, not supported by FastAPI WebSockets
"""
if hasattr(self._websocket, "ping"):
return await self._websocket.ping()
return False
async def close(self, code: int = 1000):
"""
Close the websocket connection, only supported by FastAPI WebSockets
"""
if hasattr(self._websocket, "close"):
try:
return await self._websocket.close(code)
except RuntimeError:
pass
async def accept(self):
"""
Accept the WebSocket connection, only supported by FastAPI WebSockets
"""
if hasattr(self._websocket, "accept"):
return await self._websocket.accept()

View File

@ -0,0 +1,62 @@
import logging
from abc import ABC, abstractmethod
import orjson
import rapidjson
from pandas import DataFrame
from freqtrade.misc import dataframe_to_json, json_to_dataframe
from freqtrade.rpc.api_server.ws.proxy import WebSocketProxy
logger = logging.getLogger(__name__)
class WebSocketSerializer(ABC):
def __init__(self, websocket: WebSocketProxy):
self._websocket: WebSocketProxy = websocket
@abstractmethod
def _serialize(self, data):
raise NotImplementedError()
@abstractmethod
def _deserialize(self, data):
raise NotImplementedError()
async def send(self, data: bytes):
await self._websocket.send(self._serialize(data))
async def recv(self) -> bytes:
data = await self._websocket.recv()
return self._deserialize(data)
async def close(self, code: int = 1000):
await self._websocket.close(code)
class HybridJSONWebSocketSerializer(WebSocketSerializer):
def _serialize(self, data) -> str:
return str(orjson.dumps(data, default=_json_default), "utf-8")
def _deserialize(self, data: str):
# RapidJSON expects strings
return rapidjson.loads(data, object_hook=_json_object_hook)
# Support serializing pandas DataFrames
def _json_default(z):
if isinstance(z, DataFrame):
return {
'__type__': 'dataframe',
'__value__': dataframe_to_json(z)
}
raise TypeError
# Support deserializing JSON to pandas DataFrames
def _json_object_hook(z):
if z.get('__type__') == 'dataframe':
return json_to_dataframe(z.get('__value__'))
return z
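A round-trip through the two hooks above, showing that DataFrames survive the hybrid encoding (a self-contained sketch):

from pandas import DataFrame
import orjson
import rapidjson

msg = {'type': 'analyzed_df', 'df': DataFrame({'close': [1.0, 2.0]})}
raw = str(orjson.dumps(msg, default=_json_default), 'utf-8')
back = rapidjson.loads(raw, object_hook=_json_object_hook)
assert isinstance(back['df'], DataFrame)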

View File

@ -0,0 +1,8 @@
from typing import Any, Dict, TypeVar
from fastapi import WebSocket as FastAPIWebSocket
from websockets.client import WebSocketClientProtocol as WebSocket
WebSocketType = TypeVar("WebSocketType", FastAPIWebSocket, WebSocket)
MessageType = Dict[str, Any]

View File

@ -0,0 +1,63 @@
from datetime import datetime
from typing import Any, Dict, List, Optional
from pandas import DataFrame
from pydantic import BaseModel
from freqtrade.constants import PairWithTimeframe
from freqtrade.enums.rpcmessagetype import RPCMessageType, RPCRequestType
class BaseArbitraryModel(BaseModel):
class Config:
arbitrary_types_allowed = True
class WSRequestSchema(BaseArbitraryModel):
type: RPCRequestType
data: Optional[Any] = None
class WSMessageSchema(BaseArbitraryModel):
type: RPCMessageType
data: Optional[Any] = None
class Config:
extra = 'allow'
# ------------------------------ REQUEST SCHEMAS ----------------------------
class WSSubscribeRequest(WSRequestSchema):
type: RPCRequestType = RPCRequestType.SUBSCRIBE
data: List[RPCMessageType]
class WSWhitelistRequest(WSRequestSchema):
type: RPCRequestType = RPCRequestType.WHITELIST
data: None = None
class WSAnalyzedDFRequest(WSRequestSchema):
type: RPCRequestType = RPCRequestType.ANALYZED_DF
data: Dict[str, Any] = {"limit": 1500}
# ------------------------------ MESSAGE SCHEMAS ----------------------------
class WSWhitelistMessage(WSMessageSchema):
type: RPCMessageType = RPCMessageType.WHITELIST
data: List[str]
class WSAnalyzedDFMessage(WSMessageSchema):
class AnalyzedDFData(BaseArbitraryModel):
key: PairWithTimeframe
df: DataFrame
la: datetime
type: RPCMessageType = RPCMessageType.ANALYZED_DF
data: AnalyzedDFData
# --------------------------------------------------------------------------
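Validating an incoming payload against these models (a sketch; `parse_obj` raises `ValidationError` on a malformed message):

message = {'type': 'whitelist', 'data': ['BTC/USDT', 'ETH/USDT']}
parsed = WSWhitelistMessage.parse_obj(message)
assert parsed.type == RPCMessageType.WHITELIST
assert parsed.data == ['BTC/USDT', 'ETH/USDT']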

View File

@ -0,0 +1,335 @@
"""
ExternalMessageConsumer module
Main purpose is to connect to an external bot's message websocket to consume data
from it
"""
import asyncio
import logging
import socket
from threading import Thread
from typing import TYPE_CHECKING, Any, Callable, Dict, List, TypedDict
import websockets
from pydantic import ValidationError
from freqtrade.data.dataprovider import DataProvider
from freqtrade.enums import RPCMessageType
from freqtrade.misc import remove_entry_exit_signals
from freqtrade.rpc.api_server.ws import WebSocketChannel
from freqtrade.rpc.api_server.ws_schemas import (WSAnalyzedDFMessage, WSAnalyzedDFRequest,
WSMessageSchema, WSRequestSchema,
WSSubscribeRequest, WSWhitelistMessage,
WSWhitelistRequest)
if TYPE_CHECKING:
import websockets.connect
class Producer(TypedDict):
name: str
host: str
port: int
ws_token: str
logger = logging.getLogger(__name__)
class ExternalMessageConsumer:
"""
The main controller class for consuming external messages from
other freqtrade bots
"""
def __init__(
self,
config: Dict[str, Any],
dataprovider: DataProvider
):
self._config = config
self._dp = dataprovider
self._running = False
self._thread = None
self._loop = None
self._main_task = None
self._sub_tasks = None
self._emc_config = self._config.get('external_message_consumer', {})
self.enabled = self._emc_config.get('enabled', False)
self.producers: List[Producer] = self._emc_config.get('producers', [])
self.wait_timeout = self._emc_config.get('wait_timeout', 300) # in seconds
self.ping_timeout = self._emc_config.get('ping_timeout', 10) # in seconds
self.sleep_time = self._emc_config.get('sleep_time', 10) # in seconds
# The amount of candles per dataframe on the initial request
self.initial_candle_limit = self._emc_config.get('initial_candle_limit', 1500)
# Message size limit, in megabytes. Default 8MB; use bitwise operator << 20 to convert,
# as the websockets client expects bytes.
self.message_size_limit = (self._emc_config.get('message_size_limit', 8) << 20)
# Setting these explicitly as they probably shouldn't be changed by a user
# Unless we somehow integrate this with the strategy to allow creating
# callbacks for the messages
self.topics = [RPCMessageType.WHITELIST, RPCMessageType.ANALYZED_DF]
# Allow setting data for each initial request
self._initial_requests: List[WSRequestSchema] = [
WSSubscribeRequest(data=self.topics),
WSWhitelistRequest(),
WSAnalyzedDFRequest()
]
# Specify which function to use for which RPCMessageType
self._message_handlers: Dict[str, Callable[[str, WSMessageSchema], None]] = {
RPCMessageType.WHITELIST: self._consume_whitelist_message,
RPCMessageType.ANALYZED_DF: self._consume_analyzed_df_message,
}
self.start()
def start(self):
"""
Start the main internal loop in another thread to run coroutines
"""
if self._thread and self._loop:
return
logger.info("Starting ExternalMessageConsumer")
self._loop = asyncio.new_event_loop()
self._thread = Thread(target=self._loop.run_forever)
self._running = True
self._thread.start()
self._main_task = asyncio.run_coroutine_threadsafe(self._main(), loop=self._loop)
def shutdown(self):
"""
Shutdown the loop, thread, and tasks
"""
if self._thread and self._loop:
logger.info("Stopping ExternalMessageConsumer")
self._running = False
if self._sub_tasks:
# Cancel sub tasks
for task in self._sub_tasks:
task.cancel()
if self._main_task:
# Cancel the main task
self._main_task.cancel()
self._thread.join()
self._thread = None
self._loop = None
self._sub_tasks = None
self._main_task = None
async def _main(self):
"""
The main task coroutine
"""
lock = asyncio.Lock()
try:
# Create a connection to each producer
self._sub_tasks = [
self._loop.create_task(self._handle_producer_connection(producer, lock))
for producer in self.producers
]
await asyncio.gather(*self._sub_tasks)
except asyncio.CancelledError:
pass
finally:
# Stop the loop once we are done
self._loop.stop()
async def _handle_producer_connection(self, producer: Producer, lock: asyncio.Lock):
"""
Main connection loop for the consumer
:param producer: Dictionary containing producer info
:param lock: An asyncio Lock
"""
try:
await self._create_connection(producer, lock)
except asyncio.CancelledError:
# Exit silently
pass
async def _create_connection(self, producer: Producer, lock: asyncio.Lock):
"""
Actually creates and handles the websocket connection, pinging on timeout
and handling connection errors.
:param producer: Dictionary containing producer info
:param lock: An asyncio Lock
"""
while self._running:
try:
host, port = producer['host'], producer['port']
token = producer['ws_token']
name = producer['name']
ws_url = f"ws://{host}:{port}/api/v1/message/ws?token={token}"
# This will raise InvalidURI if the url is bad
async with websockets.connect(ws_url, max_size=self.message_size_limit) as ws:
channel = WebSocketChannel(ws, channel_id=name)
logger.info(f"Producer connection success - {channel}")
# Now request the initial data from this Producer
for request in self._initial_requests:
await channel.send(
request.dict(exclude_none=True)
)
# Now receive data; if none arrives within the time limit, ping
await self._receive_messages(channel, producer, lock)
except (websockets.exceptions.InvalidURI, ValueError) as e:
logger.error(f"{ws_url} is an invalid WebSocket URL - {e}")
break
except (
socket.gaierror,
ConnectionRefusedError,
websockets.exceptions.InvalidStatusCode,
websockets.exceptions.InvalidMessage
) as e:
logger.error(f"Connection Refused - {e} retrying in {self.sleep_time}s")
await asyncio.sleep(self.sleep_time)
continue
except (
websockets.exceptions.ConnectionClosedError,
websockets.exceptions.ConnectionClosedOK
):
# Just keep trying to connect again indefinitely
await asyncio.sleep(self.sleep_time)
continue
except Exception as e:
# An unforeseen error has occurred, log and continue
logger.error("Unexpected error has occurred:")
logger.exception(e)
continue
async def _receive_messages(
self,
channel: WebSocketChannel,
producer: Producer,
lock: asyncio.Lock
):
"""
Loop to handle receiving messages from a Producer
:param channel: The WebSocketChannel object for the WebSocket
:param producer: Dictionary containing producer info
:param lock: An asyncio Lock
"""
while self._running:
try:
message = await asyncio.wait_for(
channel.recv(),
timeout=self.wait_timeout
)
try:
async with lock:
# Handle the message
self.handle_producer_message(producer, message)
except Exception as e:
logger.exception(f"Error handling producer message: {e}")
except (asyncio.TimeoutError, websockets.exceptions.ConnectionClosed):
# We haven't received data yet. Check the connection and continue.
try:
# ping
ping = await channel.ping()
await asyncio.wait_for(ping, timeout=self.ping_timeout)
logger.debug(f"Connection to {channel} still alive...")
continue
except Exception as e:
logger.warning(f"Ping error {channel} - retrying in {self.sleep_time}s")
logger.debug(e, exc_info=e)
await asyncio.sleep(self.sleep_time)
break
def handle_producer_message(self, producer: Producer, message: Dict[str, Any]):
"""
Handles external messages from a Producer
"""
producer_name = producer.get('name', 'default')
try:
producer_message = WSMessageSchema.parse_obj(message)
except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}")
return
if not producer_message.data:
logger.error(f"Empty message received from `{producer_name}`")
return
logger.info(f"Received message of type `{producer_message.type}` from `{producer_name}`")
message_handler = self._message_handlers.get(producer_message.type)
if not message_handler:
logger.info(f"Received unhandled message: `{producer_message.data}`, ignoring...")
return
message_handler(producer_name, producer_message)
def _consume_whitelist_message(self, producer_name: str, message: WSMessageSchema):
try:
# Validate the message
whitelist_message = WSWhitelistMessage.parse_obj(message)
except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}")
return
# Add the pairlist data to the DataProvider
self._dp._set_producer_pairs(whitelist_message.data, producer_name=producer_name)
logger.debug(f"Consumed message from `{producer_name}` of type `RPCMessageType.WHITELIST`")
def _consume_analyzed_df_message(self, producer_name: str, message: WSMessageSchema):
try:
df_message = WSAnalyzedDFMessage.parse_obj(message)
except ValidationError as e:
logger.error(f"Invalid message from `{producer_name}`: {e}")
return
key = df_message.data.key
df = df_message.data.df
la = df_message.data.la
pair, timeframe, candle_type = key
# If set, remove the Entry and Exit signals from the Producer
if self._emc_config.get('remove_entry_exit_signals', False):
df = remove_entry_exit_signals(df)
# Add the dataframe to the dataprovider
self._dp._add_external_df(pair, df,
last_analyzed=la,
timeframe=timeframe,
candle_type=candle_type,
producer_name=producer_name)
logger.debug(
f"Consumed message from `{producer_name}` of type `RPCMessageType.ANALYZED_DF`")

View File

@ -1039,14 +1039,52 @@ class RPC:
def _rpc_analysed_dataframe(self, pair: str, timeframe: str,
limit: Optional[int]) -> Dict[str, Any]:
""" Analyzed dataframe in Dict form """
_data, last_analyzed = self.__rpc_analysed_dataframe_raw(pair, timeframe, limit)
return self._convert_dataframe_to_dict(self._freqtrade.config['strategy'],
pair, timeframe, _data, last_analyzed)
def __rpc_analysed_dataframe_raw(self, pair: str, timeframe: str,
limit: Optional[int]) -> Tuple[DataFrame, datetime]:
""" Get the dataframe and last analyze from the dataprovider """
_data, last_analyzed = self._freqtrade.dataprovider.get_analyzed_dataframe(
pair, timeframe)
_data = _data.copy()
if limit:
_data = _data.iloc[-limit:]
return self._convert_dataframe_to_dict(self._freqtrade.config['strategy'],
pair, timeframe, _data, last_analyzed)
return _data, last_analyzed
def _ws_all_analysed_dataframes(
self,
pairlist: List[str],
limit: Optional[int]
) -> Dict[str, Any]:
""" Get the analysed dataframes of each pair in the pairlist """
timeframe = self._freqtrade.config['timeframe']
candle_type = self._freqtrade.config.get('candle_type_def', CandleType.SPOT)
_data = {}
for pair in pairlist:
dataframe, last_analyzed = self.__rpc_analysed_dataframe_raw(pair, timeframe, limit)
_data[pair] = {
"key": (pair, timeframe, candle_type),
"df": dataframe,
"la": last_analyzed
}
return _data
def _ws_request_analyzed_df(self, limit: Optional[int]):
""" Historical Analyzed Dataframes for WebSocket """
whitelist = self._freqtrade.active_pair_whitelist
return self._ws_all_analysed_dataframes(whitelist, limit)
def _ws_request_whitelist(self):
""" Whitelist data for WebSocket """
return self._freqtrade.active_pair_whitelist
@staticmethod
def _rpc_analysed_history_full(config, pair: str, timeframe: str,

View File

@ -67,7 +67,8 @@ class RPCManager:
'status': 'stopping bot'
}
"""
logger.info('Sending rpc message: %s', msg)
if msg.get('type') is not RPCMessageType.ANALYZED_DF:
logger.info('Sending rpc message: %s', msg)
if 'pair' in msg:
msg.update({
'base_currency': self._rpc._freqtrade.exchange.get_pair_base_currency(msg['pair'])

View File

@ -16,6 +16,7 @@ from freqtrade.enums import (CandleType, ExitCheckTuple, ExitType, RunMode, Sign
SignalTagType, SignalType, TradingMode)
from freqtrade.exceptions import OperationalException, StrategyError
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date, timeframe_to_seconds
from freqtrade.misc import remove_entry_exit_signals
from freqtrade.persistence import Order, PairLocks, Trade
from freqtrade.strategy.hyper import HyperStrategyMixin
from freqtrade.strategy.informative_decorator import (InformativeData, PopulateIndicators,
@ -742,20 +743,19 @@ class IStrategy(ABC, HyperStrategyMixin):
# always run if process_only_new_candles is set to false
if (not self.process_only_new_candles or
self._last_candle_seen_per_pair.get(pair, None) != dataframe.iloc[-1]['date']):
# Defs that should only run on new candle data.
dataframe = self.analyze_ticker(dataframe, metadata)
self._last_candle_seen_per_pair[pair] = dataframe.iloc[-1]['date']
self.dp._set_cached_df(
pair, self.timeframe, dataframe,
candle_type=self.config.get('candle_type_def', CandleType.SPOT))
candle_type = self.config.get('candle_type_def', CandleType.SPOT)
self.dp._set_cached_df(pair, self.timeframe, dataframe, candle_type=candle_type)
self.dp._emit_df((pair, self.timeframe, candle_type), dataframe)
else:
logger.debug("Skipping TA Analysis for already analyzed candle")
dataframe[SignalType.ENTER_LONG.value] = 0
dataframe[SignalType.EXIT_LONG.value] = 0
dataframe[SignalType.ENTER_SHORT.value] = 0
dataframe[SignalType.EXIT_SHORT.value] = 0
dataframe[SignalTagType.ENTER_TAG.value] = None
dataframe[SignalTagType.EXIT_TAG.value] = None
dataframe = remove_entry_exit_signals(dataframe)
logger.debug("Loop Analysis Launched")

View File

@ -67,6 +67,7 @@
"verbosity": "error",
"enable_openapi": false,
"jwt_secret_key": "{{ api_server_jwt_key }}",
"ws_token": "{{ api_server_ws_token }}",
"CORS_origins": [],
"username": "{{ api_server_username }}",
"password": "{{ api_server_password }}"

View File

@ -28,9 +28,7 @@ nav:
- Configuration: freqai-configuration.md
- Parameter table: freqai-parameter-table.md
- Feature engineering: freqai-feature-engineering.md
# - Outlier detection: freqai-outlier-detection.md
- Running FreqAI: freqai-running.md
# - Data handling: freqai-data-handling.md
- Developer guide: freqai-developers.md
- Short / Leverage: leverage.md
- Utility Sub-commands: utils.md
@ -44,6 +42,7 @@ nav:
- Advanced Post-installation Tasks: advanced-setup.md
- Advanced Strategy: strategy-advanced.md
- Advanced Hyperopt: advanced-hyperopt.md
- Producer/Consumer mode: producer-consumer.md
- Edge Positioning: edge.md
- Sandbox Testing: sandbox-testing.md
- FAQ: faq.md

View File

@ -21,6 +21,7 @@ jinja2==3.1.2
tables==3.7.0
blosc==1.10.6
joblib==1.2.0
pyarrow==9.0.0
# find first, C search in arrays
py_find_1st==1.1.5
@ -50,3 +51,7 @@ python-dateutil==2.8.2
#Futures
schedule==1.1.0
#WS Messages
websockets==10.3
janus==1.0.0

View File

@ -49,4 +49,3 @@ exclude =
__pycache__,
.eggs,
user_data,

View File

@ -8,13 +8,11 @@ hyperopt = [
'scikit-learn',
'scikit-optimize>=0.7.0',
'filelock',
'joblib',
'progressbar2',
]
freqai = [
'scikit-learn',
'joblib',
'catboost; platform_machine != "aarch64"',
'lightgbm',
]
@ -74,12 +72,16 @@ setup(
'pandas',
'tables',
'blosc',
'joblib',
'pyarrow',
'fastapi',
'uvicorn',
'psutil',
'pyjwt',
'aiofiles',
'schedule'
'schedule',
'websockets',
'janus'
],
extras_require={
'dev': all_extra,

View File

@ -58,6 +58,11 @@ def log_has(line, logs):
return any(line == message for message in logs.messages)
def log_has_when(line, logs, when):
"""Check if line is found in caplog's messages during a specified stage"""
return any(line == message.message for message in logs.get_records(when))
def log_has_re(line, logs):
"""Check if line matches some caplog's message."""
return any(re.match(line, message) for message in logs.messages)

View File

@ -0,0 +1,436 @@
# pragma pylint: disable=missing-docstring, protected-access, C0103
import re
from pathlib import Path
from unittest.mock import MagicMock
import pytest
from pandas import DataFrame
from freqtrade.configuration import TimeRange
from freqtrade.constants import AVAILABLE_DATAHANDLERS
from freqtrade.data.history.featherdatahandler import FeatherDataHandler
from freqtrade.data.history.hdf5datahandler import HDF5DataHandler
from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler, get_datahandlerclass
from freqtrade.data.history.jsondatahandler import JsonDataHandler, JsonGzDataHandler
from freqtrade.data.history.parquetdatahandler import ParquetDataHandler
from freqtrade.enums import CandleType, TradingMode
from tests.conftest import log_has
def test_datahandler_ohlcv_get_pairs(testdatadir):
pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '5m', candle_type=CandleType.SPOT)
# Convert to set to avoid failures due to sorting
assert set(pairs) == {'UNITTEST/BTC', 'XLM/BTC', 'ETH/BTC', 'TRX/BTC', 'LTC/BTC',
'XMR/BTC', 'ZEC/BTC', 'ADA/BTC', 'ETC/BTC', 'NXT/BTC',
'DASH/BTC', 'XRP/ETH'}
pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '8m', candle_type=CandleType.SPOT)
assert set(pairs) == {'UNITTEST/BTC'}
pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '5m', candle_type=CandleType.SPOT)
assert set(pairs) == {'UNITTEST/BTC'}
pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.MARK)
assert set(pairs) == {'UNITTEST/USDT', 'XRP/USDT'}
pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.FUTURES)
assert set(pairs) == {'XRP/USDT'}
pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.MARK)
assert set(pairs) == {'UNITTEST/USDT:USDT'}
@pytest.mark.parametrize('filename,pair,timeframe,candletype', [
('XMR_BTC-5m.json', 'XMR_BTC', '5m', ''),
('XMR_USDT-1h.h5', 'XMR_USDT', '1h', ''),
('BTC-PERP-1h.h5', 'BTC-PERP', '1h', ''),
('BTC_USDT-2h.jsongz', 'BTC_USDT', '2h', ''),
('BTC_USDT-2h-mark.jsongz', 'BTC_USDT', '2h', 'mark'),
('XMR_USDT-1h-mark.h5', 'XMR_USDT', '1h', 'mark'),
('XMR_USDT-1h-random.h5', 'XMR_USDT', '1h', 'random'),
('BTC-PERP-1h-index.h5', 'BTC-PERP', '1h', 'index'),
('XMR_USDT_USDT-1h-mark.h5', 'XMR_USDT_USDT', '1h', 'mark'),
])
def test_datahandler_ohlcv_regex(filename, pair, timeframe, candletype):
regex = JsonDataHandler._OHLCV_REGEX
match = re.search(regex, filename)
assert len(match.groups()) > 1
assert match[1] == pair
assert match[2] == timeframe
assert match[3] == candletype
@pytest.mark.parametrize('input,expected', [
('XMR_USDT', 'XMR/USDT'),
('BTC_USDT', 'BTC/USDT'),
('USDT_BUSD', 'USDT/BUSD'),
('BTC_USDT_USDT', 'BTC/USDT:USDT'), # Futures
('XRP_USDT_USDT', 'XRP/USDT:USDT'), # futures
('BTC-PERP', 'BTC-PERP'),
('BTC-PERP_USDT', 'BTC-PERP:USDT'), # potential FTX case
('UNITTEST_USDT', 'UNITTEST/USDT'),
])
def test_rebuild_pair_from_filename(input, expected):
assert IDataHandler.rebuild_pair_from_filename(input) == expected
def test_datahandler_ohlcv_get_available_data(testdatadir):
paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir, TradingMode.SPOT)
# Convert to set to avoid failures due to sorting
assert set(paircombs) == {
('UNITTEST/BTC', '5m', CandleType.SPOT),
('ETH/BTC', '5m', CandleType.SPOT),
('XLM/BTC', '5m', CandleType.SPOT),
('TRX/BTC', '5m', CandleType.SPOT),
('LTC/BTC', '5m', CandleType.SPOT),
('XMR/BTC', '5m', CandleType.SPOT),
('ZEC/BTC', '5m', CandleType.SPOT),
('UNITTEST/BTC', '1m', CandleType.SPOT),
('ADA/BTC', '5m', CandleType.SPOT),
('ETC/BTC', '5m', CandleType.SPOT),
('NXT/BTC', '5m', CandleType.SPOT),
('DASH/BTC', '5m', CandleType.SPOT),
('XRP/ETH', '1m', CandleType.SPOT),
('XRP/ETH', '5m', CandleType.SPOT),
('UNITTEST/BTC', '30m', CandleType.SPOT),
('UNITTEST/BTC', '8m', CandleType.SPOT),
('NOPAIR/XXX', '4m', CandleType.SPOT),
}
paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir, TradingMode.FUTURES)
# Convert to set to avoid failures due to sorting
assert set(paircombs) == {
('UNITTEST/USDT', '1h', 'mark'),
('XRP/USDT', '1h', 'futures'),
('XRP/USDT', '1h', 'mark'),
('XRP/USDT', '8h', 'mark'),
('XRP/USDT', '8h', 'funding_rate'),
}
paircombs = JsonGzDataHandler.ohlcv_get_available_data(testdatadir, TradingMode.SPOT)
assert set(paircombs) == {('UNITTEST/BTC', '8m', CandleType.SPOT)}
paircombs = HDF5DataHandler.ohlcv_get_available_data(testdatadir, TradingMode.SPOT)
assert set(paircombs) == {('UNITTEST/BTC', '5m', CandleType.SPOT)}
def test_jsondatahandler_trades_get_pairs(testdatadir):
pairs = JsonGzDataHandler.trades_get_pairs(testdatadir)
# Convert to set to avoid failures due to sorting
assert set(pairs) == {'XRP/ETH', 'XRP/OLD'}
def test_jsondatahandler_ohlcv_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = JsonGzDataHandler(testdatadir)
assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
assert unlinkmock.call_count == 2
def test_jsondatahandler_ohlcv_load(testdatadir, caplog):
dh = JsonDataHandler(testdatadir)
df = dh.ohlcv_load('XRP/ETH', '5m', 'spot')
assert len(df) == 711
df_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', candle_type="mark")
assert len(df_mark) == 99
df_no_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', 'spot')
assert len(df_no_mark) == 0
# Failure case (empty array)
df1 = dh.ohlcv_load('NOPAIR/XXX', '4m', 'spot')
assert len(df1) == 0
assert log_has("Could not load data for NOPAIR/XXX.", caplog)
assert df.columns.equals(df1.columns)
@pytest.mark.parametrize('datahandler', ['feather', 'parquet'])
def test_datahandler_trades_not_supported(datahandler, testdatadir):
dh = get_datahandler(testdatadir, datahandler)
with pytest.raises(NotImplementedError):
dh.trades_load('UNITTEST/ETH')
with pytest.raises(NotImplementedError):
dh.trades_store('UNITTEST/ETH', MagicMock())
def test_jsondatahandler_trades_load(testdatadir, caplog):
dh = JsonGzDataHandler(testdatadir)
logmsg = "Old trades format detected - converting"
dh.trades_load('XRP/ETH')
assert not log_has(logmsg, caplog)
# Test conversion is happening
dh.trades_load('XRP/OLD')
assert log_has(logmsg, caplog)
def test_jsondatahandler_trades_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = JsonGzDataHandler(testdatadir)
assert not dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 1
@pytest.mark.parametrize('datahandler', AVAILABLE_DATAHANDLERS)
def test_datahandler_ohlcv_append(datahandler, testdatadir):
dh = get_datahandler(testdatadir, datahandler)
with pytest.raises(NotImplementedError):
dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame(), CandleType.SPOT)
with pytest.raises(NotImplementedError):
dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame(), CandleType.MARK)
@pytest.mark.parametrize('datahandler', AVAILABLE_DATAHANDLERS)
def test_datahandler_trades_append(datahandler, testdatadir):
dh = get_datahandler(testdatadir, datahandler)
with pytest.raises(NotImplementedError):
dh.trades_append('UNITTEST/ETH', [])
def test_hdf5datahandler_trades_get_pairs(testdatadir):
pairs = HDF5DataHandler.trades_get_pairs(testdatadir)
# Convert to set to avoid failures due to sorting
assert set(pairs) == {'XRP/ETH'}
def test_hdf5datahandler_trades_load(testdatadir):
dh = get_datahandler(testdatadir, 'hdf5')
trades = dh.trades_load('XRP/ETH')
assert isinstance(trades, list)
trades1 = dh.trades_load('UNITTEST/NONEXIST')
assert trades1 == []
# Data goes from 2019-10-11 to 2019-10-13
timerange = TimeRange.parse_timerange('20191011-20191012')
trades2 = dh._trades_load('XRP/ETH', timerange)
assert len(trades) > len(trades2)
# Check that ID is None (if it's NaN, it's wrong)
assert trades2[0][2] is None
# unfiltered load may contain trades before starttime
assert len([t for t in trades if t[0] < timerange.startts * 1000]) >= 0
# filtered list does not have trades before starttime
assert len([t for t in trades2 if t[0] < timerange.startts * 1000]) == 0
# unfiltered load has trades after endtime
assert len([t for t in trades if t[0] > timerange.stopts * 1000]) > 0
# filtered list does not have trades after endtime
assert len([t for t in trades2 if t[0] > timerange.stopts * 1000]) == 0
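# NOTE: the `* 1000` above is because trade rows carry millisecond
# timestamps in column 0, while TimeRange bounds (startts/stopts) are in
# seconds (inferred from these comparisons, not from the handler docs).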
def test_hdf5datahandler_trades_store(testdatadir, tmpdir):
tmpdir1 = Path(tmpdir)
dh = get_datahandler(testdatadir, 'hdf5')
trades = dh.trades_load('XRP/ETH')
dh1 = get_datahandler(tmpdir1, 'hdf5')
dh1.trades_store('XRP/NEW', trades)
file = tmpdir1 / 'XRP_NEW-trades.h5'
assert file.is_file()
# Load trades back
trades_new = dh1.trades_load('XRP/NEW')
assert len(trades_new) == len(trades)
assert trades[0][0] == trades_new[0][0]
assert trades[0][1] == trades_new[0][1]
# assert trades[0][2] == trades_new[0][2] # This is nan - so comparison does not make sense
assert trades[0][3] == trades_new[0][3]
assert trades[0][4] == trades_new[0][4]
assert trades[0][5] == trades_new[0][5]
assert trades[0][6] == trades_new[0][6]
assert trades[-1][0] == trades_new[-1][0]
assert trades[-1][1] == trades_new[-1][1]
# assert trades[-1][2] == trades_new[-1][2] # This is nan - so comparison does not make sense
assert trades[-1][3] == trades_new[-1][3]
assert trades[-1][4] == trades_new[-1][4]
assert trades[-1][5] == trades_new[-1][5]
assert trades[-1][6] == trades_new[-1][6]
def test_hdf5datahandler_trades_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = get_datahandler(testdatadir, 'hdf5')
assert not dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 1
@pytest.mark.parametrize('pair,timeframe,candle_type,candle_append,startdt,enddt', [
# Data goes from 2018-01-10 to 2018-01-30
('UNITTEST/BTC', '5m', 'spot', '', '2018-01-15', '2018-01-19'),
# Mark data goes from 2021-11-15 to 2021-11-19
('UNITTEST/USDT:USDT', '1h', 'mark', '-mark', '2021-11-16', '2021-11-18'),
])
def test_hdf5datahandler_ohlcv_load_and_resave(
testdatadir,
tmpdir,
pair,
timeframe,
candle_type,
candle_append,
startdt, enddt
):
tmpdir1 = Path(tmpdir)
tmpdir2 = tmpdir1
if candle_type not in ('', 'spot'):
tmpdir2 = tmpdir1 / 'futures'
tmpdir2.mkdir()
dh = get_datahandler(testdatadir, 'hdf5')
ohlcv = dh._ohlcv_load(pair, timeframe, None, candle_type=candle_type)
assert isinstance(ohlcv, DataFrame)
assert len(ohlcv) > 0
file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.h5"
assert not file.is_file()
dh1 = get_datahandler(tmpdir1, 'hdf5')
dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
assert file.is_file()
assert not ohlcv[ohlcv['date'] < startdt].empty
timerange = TimeRange.parse_timerange(f"{startdt.replace('-', '')}-{enddt.replace('-', '')}")
# Call private function to ensure timerange is filtered in hdf5
ohlcv = dh._ohlcv_load(pair, timeframe, timerange, candle_type=candle_type)
ohlcv1 = dh1._ohlcv_load('UNITTEST/NEW', timeframe, timerange, candle_type=candle_type)
assert len(ohlcv) == len(ohlcv1)
assert ohlcv.equals(ohlcv1)
assert ohlcv[ohlcv['date'] < startdt].empty
assert ohlcv[ohlcv['date'] > enddt].empty
# Try loading a non-existent file
ohlcv = dh.ohlcv_load('UNITTEST/NONEXIST', timeframe, candle_type=candle_type)
assert ohlcv.empty
@pytest.mark.parametrize('pair,timeframe,candle_type,candle_append,startdt,enddt', [
# Data goes from 2018-01-10 to 2018-01-30
('UNITTEST/BTC', '5m', 'spot', '', '2018-01-15', '2018-01-19'),
# Mark data goes from 2021-11-15 to 2021-11-19
('UNITTEST/USDT', '1h', 'mark', '-mark', '2021-11-16', '2021-11-18'),
])
@pytest.mark.parametrize('datahandler', ['hdf5', 'feather', 'parquet'])
def test_generic_datahandler_ohlcv_load_and_resave(
datahandler,
testdatadir,
tmpdir,
pair,
timeframe,
candle_type,
candle_append,
startdt, enddt
):
tmpdir1 = Path(tmpdir)
tmpdir2 = tmpdir1
if candle_type not in ('', 'spot'):
tmpdir2 = tmpdir1 / 'futures'
tmpdir2.mkdir()
# Load data from one common file
dhbase = get_datahandler(testdatadir, 'json')
ohlcv = dhbase._ohlcv_load(pair, timeframe, None, candle_type=candle_type)
assert isinstance(ohlcv, DataFrame)
assert len(ohlcv) > 0
# Get data to test
dh = get_datahandler(testdatadir, datahandler)
file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.{dh._get_file_extension()}"
assert not file.is_file()
dh1 = get_datahandler(tmpdir1, datahandler)
dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
assert file.is_file()
assert not ohlcv[ohlcv['date'] < startdt].empty
timerange = TimeRange.parse_timerange(f"{startdt.replace('-', '')}-{enddt.replace('-', '')}")
ohlcv = dhbase.ohlcv_load(pair, timeframe, timerange=timerange, candle_type=candle_type)
if datahandler == 'hdf5':
ohlcv1 = dh1._ohlcv_load('UNITTEST/NEW', timeframe, timerange, candle_type=candle_type)
if candle_type == 'mark':
ohlcv1['volume'] = 0.0
else:
ohlcv1 = dh1.ohlcv_load('UNITTEST/NEW', timeframe,
timerange=timerange, candle_type=candle_type)
assert len(ohlcv) == len(ohlcv1)
assert ohlcv.equals(ohlcv1)
assert ohlcv[ohlcv['date'] < startdt].empty
assert ohlcv[ohlcv['date'] > enddt].empty
# Try loading a non-existent file
ohlcv = dh.ohlcv_load('UNITTEST/NONEXIST', timeframe, candle_type=candle_type)
assert ohlcv.empty
def test_hdf5datahandler_ohlcv_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = get_datahandler(testdatadir, 'hdf5')
assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
assert unlinkmock.call_count == 2
def test_gethandlerclass():
cl = get_datahandlerclass('json')
assert cl == JsonDataHandler
assert issubclass(cl, IDataHandler)
cl = get_datahandlerclass('jsongz')
assert cl == JsonGzDataHandler
assert issubclass(cl, IDataHandler)
assert issubclass(cl, JsonDataHandler)
cl = get_datahandlerclass('hdf5')
assert cl == HDF5DataHandler
assert issubclass(cl, IDataHandler)
cl = get_datahandlerclass('feather')
assert cl == FeatherDataHandler
assert issubclass(cl, IDataHandler)
cl = get_datahandlerclass('parquet')
assert cl == ParquetDataHandler
assert issubclass(cl, IDataHandler)
with pytest.raises(ValueError, match=r"No datahandler for .*"):
get_datahandlerclass('DeadBeef')
def test_get_datahandler(testdatadir):
dh = get_datahandler(testdatadir, 'json')
assert type(dh) == JsonDataHandler
dh = get_datahandler(testdatadir, 'jsongz')
assert type(dh) == JsonGzDataHandler
dh1 = get_datahandler(testdatadir, 'jsongz', dh)
assert id(dh1) == id(dh)
dh = get_datahandler(testdatadir, 'hdf5')
assert type(dh) == HDF5DataHandler

View File

@ -144,6 +144,77 @@ def test_available_pairs(mocker, default_conf, ohlcv_history):
assert dp.available_pairs == [("XRP/BTC", timeframe), ("UNITTEST/BTC", timeframe), ]
def test_producer_pairs(mocker, default_conf, ohlcv_history):
dataprovider = DataProvider(default_conf, None)
producer = "default"
whitelist = ["XRP/BTC", "ETH/BTC"]
assert len(dataprovider.get_producer_pairs(producer)) == 0
dataprovider._set_producer_pairs(whitelist, producer)
assert len(dataprovider.get_producer_pairs(producer)) == 2
new_whitelist = ["BTC/USDT"]
dataprovider._set_producer_pairs(new_whitelist, producer)
assert dataprovider.get_producer_pairs(producer) == new_whitelist
assert dataprovider.get_producer_pairs("bad") == []
def test_get_producer_df(mocker, default_conf, ohlcv_history):
dataprovider = DataProvider(default_conf, None)
pair = 'BTC/USDT'
timeframe = default_conf['timeframe']
candle_type = CandleType.SPOT
empty_la = datetime.fromtimestamp(0, tz=timezone.utc)
now = datetime.now(timezone.utc)
# no data has been added, any request should return an empty dataframe
dataframe, la = dataprovider.get_producer_df(pair, timeframe, candle_type)
assert dataframe.empty
assert la == empty_la
# once data has been added, the request should return the added dataframe
dataprovider._add_external_df(pair, ohlcv_history, now, timeframe, candle_type)
dataframe, la = dataprovider.get_producer_df(pair, timeframe, candle_type)
assert len(dataframe) > 0
assert la > empty_la
# no data on this producer, should return empty dataframe
dataframe, la = dataprovider.get_producer_df(pair, producer_name='bad')
assert dataframe.empty
assert la == empty_la
# non-existent timeframe, should return an empty dataframe
dataframe, la = dataprovider.get_producer_df(pair, timeframe='1h')
assert dataframe.empty
assert la == empty_la
def test_emit_df(mocker, default_conf, ohlcv_history):
mocker.patch('freqtrade.rpc.rpc_manager.RPCManager.__init__', MagicMock())
rpc_mock = mocker.patch('freqtrade.rpc.rpc_manager.RPCManager', MagicMock())
send_mock = mocker.patch('freqtrade.rpc.rpc_manager.RPCManager.send_msg', MagicMock())
dataprovider = DataProvider(default_conf, exchange=None, rpc=rpc_mock)
dataprovider_no_rpc = DataProvider(default_conf, exchange=None)
pair = "BTC/USDT"
# No emit yet
assert send_mock.call_count == 0
# RPC is attached; calling emit should call send_msg
dataprovider._emit_df(pair, ohlcv_history)
assert send_mock.call_count == 1
# Without an RPC attached, emit should not call send_msg
dataprovider_no_rpc._emit_df(pair, ohlcv_history)
assert send_mock.call_count == 1
def test_refresh(mocker, default_conf, ohlcv_history):
refresh_mock = MagicMock()
mocker.patch("freqtrade.exchange.Exchange.refresh_latest_ohlcv", refresh_mock)

View File

@ -1,7 +1,6 @@
# pragma pylint: disable=missing-docstring, protected-access, C0103
import json
import re
import uuid
from pathlib import Path
from shutil import copyfile
@ -13,18 +12,17 @@ from pandas import DataFrame
from pandas.testing import assert_frame_equal
from freqtrade.configuration import TimeRange
from freqtrade.constants import AVAILABLE_DATAHANDLERS, DATETIME_PRINT_FORMAT
from freqtrade.constants import DATETIME_PRINT_FORMAT
from freqtrade.data.converter import ohlcv_to_dataframe
from freqtrade.data.history.hdf5datahandler import HDF5DataHandler
from freqtrade.data.history.history_utils import (_download_pair_history, _download_trades_history,
_load_cached_data_for_updating,
convert_trades_to_ohlcv, get_timerange, load_data,
load_pair_history, refresh_backtest_ohlcv_data,
refresh_backtest_trades_data, refresh_data,
validate_backtest_data)
from freqtrade.data.history.idatahandler import IDataHandler, get_datahandler, get_datahandlerclass
from freqtrade.data.history.idatahandler import get_datahandler
from freqtrade.data.history.jsondatahandler import JsonDataHandler, JsonGzDataHandler
from freqtrade.enums import CandleType, TradingMode
from freqtrade.enums import CandleType
from freqtrade.exchange import timeframe_to_minutes
from freqtrade.misc import file_dump_json
from freqtrade.resolvers import StrategyResolver
@ -32,25 +30,6 @@ from tests.conftest import (CURRENT_TEST_STRATEGY, get_patched_exchange, log_has
patch_exchange)
# Change this if modifying UNITTEST/BTC testdatafile
_BTC_UNITTEST_LENGTH = 13681
def _backup_file(file: Path, copy_file: bool = False) -> None:
"""
Backup existing file to avoid deleting the user file
:param file: complete path to the file
:param copy_file: keep file in place too.
:return: None
"""
file_swp = str(file) + '.swp'
if file.is_file():
file.rename(file_swp)
if copy_file:
copyfile(file_swp, file)
def _clean_test_file(file: Path) -> None:
"""
Restore the backed-up file to avoid deleting the user file
@ -67,7 +46,7 @@ def _clean_test_file(file: Path) -> None:
file_swp.rename(file)
def test_load_data_30min_timeframe(mocker, caplog, default_conf, testdatadir) -> None:
def test_load_data_30min_timeframe(caplog, testdatadir) -> None:
ld = load_pair_history(pair='UNITTEST/BTC', timeframe='30m', datadir=testdatadir)
assert isinstance(ld, DataFrame)
assert not log_has(
@ -76,7 +55,7 @@ def test_load_data_30min_timeframe(mocker, caplog, default_conf, testdatadir) ->
)
def test_load_data_7min_timeframe(mocker, caplog, default_conf, testdatadir) -> None:
def test_load_data_7min_timeframe(caplog, testdatadir) -> None:
ld = load_pair_history(pair='UNITTEST/BTC', timeframe='7m', datadir=testdatadir)
assert isinstance(ld, DataFrame)
assert ld.empty
@ -108,7 +87,7 @@ def test_load_data_mark(ohlcv_history, mocker, caplog, testdatadir) -> None:
)
def test_load_data_startup_candles(mocker, caplog, default_conf, testdatadir) -> None:
def test_load_data_startup_candles(mocker, testdatadir) -> None:
ltfmock = mocker.patch(
'freqtrade.data.history.jsondatahandler.JsonDataHandler._ohlcv_load',
MagicMock(return_value=DataFrame()))
@ -405,7 +384,7 @@ def test_load_partial_missing(testdatadir, caplog) -> None:
caplog)
def test_init(default_conf, mocker) -> None:
def test_init(default_conf) -> None:
assert {} == load_data(
datadir=Path(''),
pairs=[],
@ -685,340 +664,3 @@ def test_convert_trades_to_ohlcv(testdatadir, tmpdir, caplog):
convert_trades_to_ohlcv(['NoDatapair'], timeframes=['1m', '5m'],
datadir=tmpdir1, timerange=tr, erase=True)
assert log_has('Could not convert NoDatapair to OHLCV.', caplog)
def test_datahandler_ohlcv_get_pairs(testdatadir):
pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '5m', candle_type=CandleType.SPOT)
# Convert to set to avoid failures due to sorting
assert set(pairs) == {'UNITTEST/BTC', 'XLM/BTC', 'ETH/BTC', 'TRX/BTC', 'LTC/BTC',
'XMR/BTC', 'ZEC/BTC', 'ADA/BTC', 'ETC/BTC', 'NXT/BTC',
'DASH/BTC', 'XRP/ETH'}
pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '8m', candle_type=CandleType.SPOT)
assert set(pairs) == {'UNITTEST/BTC'}
pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '5m', candle_type=CandleType.SPOT)
assert set(pairs) == {'UNITTEST/BTC'}
pairs = JsonDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.MARK)
assert set(pairs) == {'UNITTEST/USDT', 'XRP/USDT'}
pairs = JsonGzDataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.FUTURES)
assert set(pairs) == {'XRP/USDT'}
pairs = HDF5DataHandler.ohlcv_get_pairs(testdatadir, '1h', candle_type=CandleType.MARK)
assert set(pairs) == {'UNITTEST/USDT:USDT'}
@pytest.mark.parametrize('filename,pair,timeframe,candletype', [
('XMR_BTC-5m.json', 'XMR_BTC', '5m', ''),
('XMR_USDT-1h.h5', 'XMR_USDT', '1h', ''),
('BTC-PERP-1h.h5', 'BTC-PERP', '1h', ''),
('BTC_USDT-2h.jsongz', 'BTC_USDT', '2h', ''),
('BTC_USDT-2h-mark.jsongz', 'BTC_USDT', '2h', 'mark'),
('XMR_USDT-1h-mark.h5', 'XMR_USDT', '1h', 'mark'),
('XMR_USDT-1h-random.h5', 'XMR_USDT', '1h', 'random'),
('BTC-PERP-1h-index.h5', 'BTC-PERP', '1h', 'index'),
('XMR_USDT_USDT-1h-mark.h5', 'XMR_USDT_USDT', '1h', 'mark'),
])
def test_datahandler_ohlcv_regex(filename, pair, timeframe, candletype):
regex = JsonDataHandler._OHLCV_REGEX
match = re.search(regex, filename)
assert len(match.groups()) > 1
assert match[1] == pair
assert match[2] == timeframe
assert match[3] == candletype
@pytest.mark.parametrize('input,expected', [
('XMR_USDT', 'XMR/USDT'),
('BTC_USDT', 'BTC/USDT'),
('USDT_BUSD', 'USDT/BUSD'),
('BTC_USDT_USDT', 'BTC/USDT:USDT'), # Futures
('XRP_USDT_USDT', 'XRP/USDT:USDT'), # Futures
('BTC-PERP', 'BTC-PERP'),
('BTC-PERP_USDT', 'BTC-PERP:USDT'), # potential FTX case
('UNITTEST_USDT', 'UNITTEST/USDT'),
])
def test_rebuild_pair_from_filename(input, expected):
assert IDataHandler.rebuild_pair_from_filename(input) == expected
def test_datahandler_ohlcv_get_available_data(testdatadir):
paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir, TradingMode.SPOT)
# Convert to set to avoid failures due to sorting
assert set(paircombs) == {
('UNITTEST/BTC', '5m', CandleType.SPOT),
('ETH/BTC', '5m', CandleType.SPOT),
('XLM/BTC', '5m', CandleType.SPOT),
('TRX/BTC', '5m', CandleType.SPOT),
('LTC/BTC', '5m', CandleType.SPOT),
('XMR/BTC', '5m', CandleType.SPOT),
('ZEC/BTC', '5m', CandleType.SPOT),
('UNITTEST/BTC', '1m', CandleType.SPOT),
('ADA/BTC', '5m', CandleType.SPOT),
('ETC/BTC', '5m', CandleType.SPOT),
('NXT/BTC', '5m', CandleType.SPOT),
('DASH/BTC', '5m', CandleType.SPOT),
('XRP/ETH', '1m', CandleType.SPOT),
('XRP/ETH', '5m', CandleType.SPOT),
('UNITTEST/BTC', '30m', CandleType.SPOT),
('UNITTEST/BTC', '8m', CandleType.SPOT),
('NOPAIR/XXX', '4m', CandleType.SPOT),
}
paircombs = JsonDataHandler.ohlcv_get_available_data(testdatadir, TradingMode.FUTURES)
# Convert to set to avoid failures due to sorting
assert set(paircombs) == {
('UNITTEST/USDT', '1h', 'mark'),
('XRP/USDT', '1h', 'futures'),
('XRP/USDT', '1h', 'mark'),
('XRP/USDT', '8h', 'mark'),
('XRP/USDT', '8h', 'funding_rate'),
}
paircombs = JsonGzDataHandler.ohlcv_get_available_data(testdatadir, TradingMode.SPOT)
assert set(paircombs) == {('UNITTEST/BTC', '8m', CandleType.SPOT)}
paircombs = HDF5DataHandler.ohlcv_get_available_data(testdatadir, TradingMode.SPOT)
assert set(paircombs) == {('UNITTEST/BTC', '5m', CandleType.SPOT)}
def test_jsondatahandler_trades_get_pairs(testdatadir):
pairs = JsonGzDataHandler.trades_get_pairs(testdatadir)
# Convert to set to avoid failures due to sorting
assert set(pairs) == {'XRP/ETH', 'XRP/OLD'}
def test_jsondatahandler_ohlcv_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = JsonGzDataHandler(testdatadir)
assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
assert unlinkmock.call_count == 2
def test_jsondatahandler_ohlcv_load(testdatadir, caplog):
dh = JsonDataHandler(testdatadir)
df = dh.ohlcv_load('XRP/ETH', '5m', 'spot')
assert len(df) == 711
df_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', candle_type="mark")
assert len(df_mark) == 99
df_no_mark = dh.ohlcv_load('UNITTEST/USDT', '1h', 'spot')
assert len(df_no_mark) == 0
# Failure case (empty array)
df1 = dh.ohlcv_load('NOPAIR/XXX', '4m', 'spot')
assert len(df1) == 0
assert log_has("Could not load data for NOPAIR/XXX.", caplog)
assert df.columns.equals(df1.columns)
def test_jsondatahandler_trades_load(testdatadir, caplog):
dh = JsonGzDataHandler(testdatadir)
logmsg = "Old trades format detected - converting"
dh.trades_load('XRP/ETH')
assert not log_has(logmsg, caplog)
# Test conversion is happening
dh.trades_load('XRP/OLD')
assert log_has(logmsg, caplog)
def test_jsondatahandler_trades_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = JsonGzDataHandler(testdatadir)
assert not dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 1
@pytest.mark.parametrize('datahandler', AVAILABLE_DATAHANDLERS)
def test_datahandler_ohlcv_append(datahandler, testdatadir):
dh = get_datahandler(testdatadir, datahandler)
with pytest.raises(NotImplementedError):
dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame(), CandleType.SPOT)
with pytest.raises(NotImplementedError):
dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame(), CandleType.MARK)
@pytest.mark.parametrize('datahandler', AVAILABLE_DATAHANDLERS)
def test_datahandler_trades_append(datahandler, testdatadir):
dh = get_datahandler(testdatadir, datahandler)
with pytest.raises(NotImplementedError):
dh.trades_append('UNITTEST/ETH', [])
def test_hdf5datahandler_trades_get_pairs(testdatadir):
pairs = HDF5DataHandler.trades_get_pairs(testdatadir)
# Convert to set to avoid failures due to sorting
assert set(pairs) == {'XRP/ETH'}
def test_hdf5datahandler_trades_load(testdatadir):
dh = HDF5DataHandler(testdatadir)
trades = dh.trades_load('XRP/ETH')
assert isinstance(trades, list)
trades1 = dh.trades_load('UNITTEST/NONEXIST')
assert trades1 == []
# Data goes from 2019-10-11 to 2019-10-13
timerange = TimeRange.parse_timerange('20191011-20191012')
trades2 = dh._trades_load('XRP/ETH', timerange)
assert len(trades) > len(trades2)
# Check that ID is None (if it's NaN, it's wrong)
assert trades2[0][2] is None
# unfiltered load may contain trades before starttime
assert len([t for t in trades if t[0] < timerange.startts * 1000]) >= 0
# filtered list does not have trades before starttime
assert len([t for t in trades2 if t[0] < timerange.startts * 1000]) == 0
# unfiltered load has trades after endtime
assert len([t for t in trades if t[0] > timerange.stopts * 1000]) > 0
# filtered list does not have trades after endtime
assert len([t for t in trades2 if t[0] > timerange.stopts * 1000]) == 0
def test_hdf5datahandler_trades_store(testdatadir, tmpdir):
tmpdir1 = Path(tmpdir)
dh = HDF5DataHandler(testdatadir)
trades = dh.trades_load('XRP/ETH')
dh1 = HDF5DataHandler(tmpdir1)
dh1.trades_store('XRP/NEW', trades)
file = tmpdir1 / 'XRP_NEW-trades.h5'
assert file.is_file()
# Load trades back
trades_new = dh1.trades_load('XRP/NEW')
assert len(trades_new) == len(trades)
assert trades[0][0] == trades_new[0][0]
assert trades[0][1] == trades_new[0][1]
# assert trades[0][2] == trades_new[0][2] # This is nan - so comparison does not make sense
assert trades[0][3] == trades_new[0][3]
assert trades[0][4] == trades_new[0][4]
assert trades[0][5] == trades_new[0][5]
assert trades[0][6] == trades_new[0][6]
assert trades[-1][0] == trades_new[-1][0]
assert trades[-1][1] == trades_new[-1][1]
# assert trades[-1][2] == trades_new[-1][2] # This is nan - so comparison does not make sense
assert trades[-1][3] == trades_new[-1][3]
assert trades[-1][4] == trades_new[-1][4]
assert trades[-1][5] == trades_new[-1][5]
assert trades[-1][6] == trades_new[-1][6]
def test_hdf5datahandler_trades_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = HDF5DataHandler(testdatadir)
assert not dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.trades_purge('UNITTEST/NONEXIST')
assert unlinkmock.call_count == 1
@pytest.mark.parametrize('pair,timeframe,candle_type,candle_append,startdt,enddt', [
# Data goes from 2018-01-10 to 2018-01-30
('UNITTEST/BTC', '5m', 'spot', '', '2018-01-15', '2018-01-19'),
# Mark data goes from 2021-11-15 to 2021-11-19
('UNITTEST/USDT:USDT', '1h', 'mark', '-mark', '2021-11-16', '2021-11-18'),
])
def test_hdf5datahandler_ohlcv_load_and_resave(
testdatadir,
tmpdir,
pair,
timeframe,
candle_type,
candle_append,
startdt, enddt
):
tmpdir1 = Path(tmpdir)
tmpdir2 = tmpdir1
if candle_type not in ('', 'spot'):
tmpdir2 = tmpdir1 / 'futures'
tmpdir2.mkdir()
dh = HDF5DataHandler(testdatadir)
ohlcv = dh._ohlcv_load(pair, timeframe, None, candle_type=candle_type)
assert isinstance(ohlcv, DataFrame)
assert len(ohlcv) > 0
file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.h5"
assert not file.is_file()
dh1 = HDF5DataHandler(tmpdir1)
dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
assert file.is_file()
assert not ohlcv[ohlcv['date'] < startdt].empty
timerange = TimeRange.parse_timerange(f"{startdt.replace('-', '')}-{enddt.replace('-', '')}")
# Call private function to ensure timerange is filtered in hdf5
ohlcv = dh._ohlcv_load(pair, timeframe, timerange, candle_type=candle_type)
ohlcv1 = dh1._ohlcv_load('UNITTEST/NEW', timeframe, timerange, candle_type=candle_type)
assert len(ohlcv) == len(ohlcv1)
assert ohlcv.equals(ohlcv1)
assert ohlcv[ohlcv['date'] < startdt].empty
assert ohlcv[ohlcv['date'] > enddt].empty
# Try loading a non-existent file
ohlcv = dh.ohlcv_load('UNITTEST/NONEXIST', timeframe, candle_type=candle_type)
assert ohlcv.empty
def test_hdf5datahandler_ohlcv_purge(mocker, testdatadir):
mocker.patch.object(Path, "exists", MagicMock(return_value=False))
unlinkmock = mocker.patch.object(Path, "unlink", MagicMock())
dh = HDF5DataHandler(testdatadir)
assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
assert not dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
assert unlinkmock.call_count == 0
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', '')
assert dh.ohlcv_purge('UNITTEST/NONEXIST', '5m', candle_type='mark')
assert unlinkmock.call_count == 2
def test_gethandlerclass():
cl = get_datahandlerclass('json')
assert cl == JsonDataHandler
assert issubclass(cl, IDataHandler)
cl = get_datahandlerclass('jsongz')
assert cl == JsonGzDataHandler
assert issubclass(cl, IDataHandler)
assert issubclass(cl, JsonDataHandler)
cl = get_datahandlerclass('hdf5')
assert cl == HDF5DataHandler
assert issubclass(cl, IDataHandler)
with pytest.raises(ValueError, match=r"No datahandler for .*"):
get_datahandlerclass('DeadBeef')
def test_get_datahandler(testdatadir):
dh = get_datahandler(testdatadir, 'json')
assert type(dh) == JsonDataHandler
dh = get_datahandler(testdatadir, 'jsongz')
assert type(dh) == JsonGzDataHandler
dh1 = get_datahandler(testdatadir, 'jsongz', dh)
assert id(dh1) == id(dh)
dh = get_datahandler(testdatadir, 'hdf5')
assert type(dh) == HDF5DataHandler

View File

@ -20,6 +20,7 @@ from freqtrade.exchange import (Binance, Bittrex, Exchange, Kraken, amount_to_pr
timeframe_to_prev_date, timeframe_to_seconds)
from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, API_RETRY_COUNT,
calculate_backoff, remove_credentials)
from freqtrade.exchange.exchange import amount_to_contract_precision
from freqtrade.resolvers.exchange_resolver import ExchangeResolver
from tests.conftest import get_mock_coro, get_patched_exchange, log_has, log_has_re, num_log_has_re
@ -4470,6 +4471,7 @@ def test__amount_to_contracts(
('ADA/USDT:USDT', 10.4445555, 10.4, 10.444),
('LTC/ETH', 30, 30, 30),
('LTC/USD', 30, 30, 30),
('ADA/USDT:USDT', 1.17, 1.1, 1.17),
# contract size of 10
('ETH/USDT:USDT', 10.111, 10.1, 10),
('ETH/USDT:USDT', 10.188, 10.1, 10),
@ -4497,6 +4499,20 @@ def test_amount_to_contract_precision(
assert result_size == expected_fut
@pytest.mark.parametrize('amount,precision,precision_mode,contract_size,expected', [
(1.17, 1.0, 4, 0.01, 1.17), # Tick size
(1.17, 1.0, 2, 0.01, 1.17), #
(1.16, 1.0, 4, 0.01, 1.16), #
(1.16, 1.0, 2, 0.01, 1.16), #
(1.13, 1.0, 2, 0.01, 1.13), #
(10.988, 1.0, 2, 10, 10),
(10.988, 1.0, 4, 10, 10),
])
def test_amount_to_contract_precision2(amount, precision, precision_mode, contract_size, expected):
res = amount_to_contract_precision(amount, precision, precision_mode, contract_size)
assert pytest.approx(res) == expected
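To make the expectations above concrete, here is a worked sketch under the assumption that the helper converts the amount to contracts, truncates at the contract precision (ccxt-style modes: 2 = decimal places, 4 = tick size), and converts back; this is an illustration, not freqtrade's implementation:
from decimal import Decimal, ROUND_DOWN

def contract_precision_sketch(amount, precision, precision_mode, contract_size):
    # Decimal avoids float artefacts when dividing by small contract sizes
    contracts = Decimal(str(amount)) / Decimal(str(contract_size))
    if precision_mode == 4:  # tick size
        tick = Decimal(str(precision))
        contracts = (contracts / tick).to_integral_value(rounding=ROUND_DOWN) * tick
    else:  # decimal places
        quantum = Decimal(1).scaleb(-int(precision))
        contracts = contracts.quantize(quantum, rounding=ROUND_DOWN)
    return float(contracts * Decimal(str(contract_size)))

# e.g. 10.988 with contract_size 10 -> 1.0988 contracts -> truncated to 1.0 -> 10
assert contract_precision_sketch(10.988, 1.0, 2, 10) == 10.0
assert contract_precision_sketch(1.17, 1.0, 4, 0.01) == 1.17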
@pytest.mark.parametrize('exchange_name,open_rate,is_short,trading_mode,margin_mode', [
# Bittrex
('bittrex', 2.0, False, 'spot', None),

View File

@ -3,21 +3,21 @@ from datetime import datetime, timezone
from pathlib import Path
from unittest.mock import PropertyMock
import pytest
from freqtrade.commands.optimize_commands import start_backtesting
from freqtrade.exceptions import OperationalException
from freqtrade.commands.optimize_commands import setup_optimize_configuration
from freqtrade.enums import RunMode
from freqtrade.optimize.backtesting import Backtesting
from tests.conftest import (CURRENT_TEST_STRATEGY, get_args, log_has_re, patch_exchange,
patched_configuration_load_config_file)
def test_freqai_backtest_start_backtest_list(freqai_conf, mocker, testdatadir):
def test_freqai_backtest_start_backtest_list(freqai_conf, mocker, testdatadir, caplog):
patch_exchange(mocker)
now = datetime.now(timezone.utc)
mocker.patch('freqtrade.plugins.pairlistmanager.PairListManager.whitelist',
PropertyMock(return_value=['HULUMULU/USDT', 'XRP/USDT']))
# mocker.patch('freqtrade.optimize.backtesting.Backtesting.backtest', backtestmock)
mocker.patch('freqtrade.optimize.backtesting.history.load_data')
mocker.patch('freqtrade.optimize.backtesting.history.get_timerange', return_value=(now, now))
patched_configuration_load_config_file(mocker, freqai_conf)
@ -30,9 +30,11 @@ def test_freqai_backtest_start_backtest_list(freqai_conf, mocker, testdatadir):
'--strategy-list', CURRENT_TEST_STRATEGY
]
args = get_args(args)
with pytest.raises(OperationalException,
match=r"You can't use strategy_list and freqai at the same time\."):
start_backtesting(args)
bt_config = setup_optimize_configuration(args, RunMode.BACKTEST)
Backtesting(bt_config)
assert log_has_re('Using --strategy-list with FreqAI REQUIRES all strategies to have identical '
'populate_any_indicators.', caplog)
Backtesting.cleanup()
def test_freqai_backtest_load_data(freqai_conf, mocker, caplog):

View File

@ -1,6 +1,7 @@
import re
from datetime import timedelta
from pathlib import Path
from shutil import copyfile
import joblib
import pandas as pd
@ -25,7 +26,22 @@ from freqtrade.optimize.optimize_reports import (_get_resample_from_period, gene
text_table_exit_reason, text_table_strategy)
from freqtrade.resolvers.strategy_resolver import StrategyResolver
from tests.conftest import CURRENT_TEST_STRATEGY
from tests.data.test_history import _backup_file, _clean_test_file
from tests.data.test_history import _clean_test_file
def _backup_file(file: Path, copy_file: bool = False) -> None:
"""
Backup existing file to avoid deleting the user file
:param file: complete path to the file
:param copy_file: keep file in place too.
:return: None
"""
file_swp = str(file) + '.swp'
if file.is_file():
file.rename(file_swp)
if copy_file:
copyfile(file_swp, file)
def test_text_table_bt_results():

View File

@ -3,6 +3,8 @@ Unit test file for rpc/api_server.py
"""
import json
import logging
import time
from datetime import datetime, timedelta, timezone
from pathlib import Path
from unittest.mock import ANY, MagicMock, PropertyMock
@ -10,7 +12,7 @@ from unittest.mock import ANY, MagicMock, PropertyMock
import pandas as pd
import pytest
import uvicorn
from fastapi import FastAPI
from fastapi import FastAPI, WebSocketDisconnect
from fastapi.exceptions import HTTPException
from fastapi.testclient import TestClient
from requests.auth import _basic_auth_str
@ -31,6 +33,7 @@ from tests.conftest import (CURRENT_TEST_STRATEGY, create_mock_trades, get_mock_
BASE_URI = "/api/v1"
_TEST_USER = "FreqTrader"
_TEST_PASS = "SuperSecurePassword1!"
_TEST_WS_TOKEN = "secret_Ws_t0ken"
@pytest.fixture
@ -44,17 +47,21 @@ def botclient(default_conf, mocker):
"CORS_origins": ['http://example.com'],
"username": _TEST_USER,
"password": _TEST_PASS,
"ws_token": _TEST_WS_TOKEN
}})
ftbot = get_patched_freqtradebot(mocker, default_conf)
rpc = RPC(ftbot)
mocker.patch('freqtrade.rpc.api_server.ApiServer.start_api', MagicMock())
apiserver = None
try:
apiserver = ApiServer(default_conf)
apiserver.add_rpc_handler(rpc)
yield ftbot, TestClient(apiserver.app)
# Cleanup ... ?
finally:
if apiserver:
apiserver.cleanup()
ApiServer.shutdown()
@ -154,6 +161,25 @@ def test_api_auth():
get_user_from_token(b'not_a_token', 'secret1234')
def test_api_ws_auth(botclient):
ftbot, client = botclient
def url(token): return f"/api/v1/message/ws?token={token}"
bad_token = "bad-ws_token"
with pytest.raises(WebSocketDisconnect):
with client.websocket_connect(url(bad_token)) as websocket:
websocket.receive()
good_token = _TEST_WS_TOKEN
with client.websocket_connect(url(good_token)) as websocket:
pass
jwt_secret = ftbot.config['api_server'].get('jwt_secret_key', 'super-secret')
jwt_token = create_token({'identity': {'u': 'Freqtrade'}}, jwt_secret)
with client.websocket_connect(url(jwt_token)) as websocket:
pass
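# NOTE: as exercised above, the websocket endpoint accepts either the
# configured ws_token or a JWT signed with jwt_secret_key, both passed via
# the ?token= query parameter; any other token disconnects the client.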
def test_api_unauthorized(botclient):
ftbot, client = botclient
rc = client.get(f"{BASE_URI}/ping")
@ -261,6 +287,7 @@ def test_api__init__(default_conf, mocker):
with pytest.raises(OperationalException, match="RPC Handler already attached."):
apiserver.add_rpc_handler(RPC(get_patched_freqtradebot(mocker, default_conf)))
apiserver.cleanup()
ApiServer.shutdown()
@ -388,6 +415,7 @@ def test_api_run(default_conf, mocker, caplog):
MagicMock(side_effect=Exception))
apiserver.start_api()
assert log_has("Api server failed to start.", caplog)
apiserver.cleanup()
ApiServer.shutdown()
@ -410,6 +438,7 @@ def test_api_cleanup(default_conf, mocker, caplog):
apiserver.cleanup()
assert apiserver._server.cleanup.call_count == 1
assert log_has("Stopping API Server", caplog)
assert log_has("Stopping API Server background tasks", caplog)
ApiServer.shutdown()
@ -1663,3 +1692,93 @@ def test_health(botclient):
ret = rc.json()
assert ret['last_process_ts'] == 0
assert ret['last_process'] == '1970-01-01T00:00:00+00:00'
def test_api_ws_subscribe(botclient, mocker):
ftbot, client = botclient
ws_url = f"/api/v1/message/ws?token={_TEST_WS_TOKEN}"
sub_mock = mocker.patch('freqtrade.rpc.api_server.ws.WebSocketChannel.set_subscriptions')
with client.websocket_connect(ws_url) as ws:
ws.send_json({'type': 'subscribe', 'data': ['whitelist']})
# Check call count is now 1 as we sent a valid subscribe request
assert sub_mock.call_count == 1
with client.websocket_connect(ws_url) as ws:
ws.send_json({'type': 'subscribe', 'data': 'whitelist'})
# Call count hasn't changed as the subscribe request was invalid
assert sub_mock.call_count == 1
def test_api_ws_requests(botclient, mocker, caplog):
caplog.set_level(logging.DEBUG)
ftbot, client = botclient
ws_url = f"/api/v1/message/ws?token={_TEST_WS_TOKEN}"
# Test whitelist request
with client.websocket_connect(ws_url) as ws:
ws.send_json({"type": "whitelist", "data": None})
response = ws.receive_json()
assert log_has_re(r"Request of type whitelist from.+", caplog)
assert response['type'] == "whitelist"
# Test analyzed_df request
with client.websocket_connect(ws_url) as ws:
ws.send_json({"type": "analyzed_df", "data": {}})
response = ws.receive_json()
assert log_has_re(r"Request of type analyzed_df from.+", caplog)
assert response['type'] == "analyzed_df"
caplog.clear()
# Test analyzed_df request with data
with client.websocket_connect(ws_url) as ws:
ws.send_json({"type": "analyzed_df", "data": {"limit": 100}})
response = ws.receive_json()
assert log_has_re(r"Request of type analyzed_df from.+", caplog)
assert response['type'] == "analyzed_df"
def test_api_ws_send_msg(default_conf, mocker, caplog):
try:
caplog.set_level(logging.DEBUG)
default_conf.update({"api_server": {"enabled": True,
"listen_ip_address": "127.0.0.1",
"listen_port": 8080,
"CORS_origins": ['http://example.com'],
"username": _TEST_USER,
"password": _TEST_PASS,
"ws_token": _TEST_WS_TOKEN
}})
mocker.patch('freqtrade.rpc.telegram.Updater')
mocker.patch('freqtrade.rpc.api_server.ApiServer.start_api')
apiserver = ApiServer(default_conf)
apiserver.add_rpc_handler(RPC(get_patched_freqtradebot(mocker, default_conf)))
apiserver.start_message_queue()
# Give the queue thread time to start
time.sleep(0.2)
# Test message_queue coro receives the message
test_message = {"type": "status", "data": "test"}
apiserver.send_msg(test_message)
time.sleep(0.1) # Not sure how else to wait for the coro to receive the data
assert log_has("Found message of type: status", caplog)
# Test that an exception is logged when an error occurs while sending
mocker.patch('freqtrade.rpc.api_server.ws.channel.ChannelManager.broadcast',
side_effect=Exception)
apiserver.send_msg(test_message)
time.sleep(0.1) # Not sure how else to wait for the coro to receive the data
assert log_has_re(r"Exception happened in background task.*", caplog)
finally:
apiserver.cleanup()
ApiServer.shutdown()

465
tests/rpc/test_rpc_emc.py Normal file
View File

@ -0,0 +1,465 @@
"""
Unit test file for rpc/external_message_consumer.py
"""
import asyncio
import functools
import logging
from datetime import datetime, timezone
from unittest.mock import MagicMock
import pytest
import websockets
from freqtrade.data.dataprovider import DataProvider
from freqtrade.rpc.external_message_consumer import ExternalMessageConsumer
from tests.conftest import log_has, log_has_re, log_has_when
_TEST_WS_TOKEN = "secret_Ws_t0ken"
_TEST_WS_HOST = "127.0.0.1"
_TEST_WS_PORT = 9989
@pytest.fixture
def patched_emc(default_conf, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": "null",
"port": 9891,
"ws_token": _TEST_WS_TOKEN
}
]
}
})
dataprovider = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dataprovider)
try:
yield emc
finally:
emc.shutdown()
def test_emc_start(patched_emc, caplog):
# Test if the message was printed
assert log_has_when("Starting ExternalMessageConsumer", caplog, "setup")
# Test if the thread and loop objects were created
assert patched_emc._thread and patched_emc._loop
# Test that calling start again does nothing
prev_thread = patched_emc._thread
patched_emc.start()
assert prev_thread == patched_emc._thread
def test_emc_shutdown(patched_emc, caplog):
patched_emc.shutdown()
assert log_has("Stopping ExternalMessageConsumer", caplog)
# Test the loop has stopped
assert patched_emc._loop is None
# Test if the thread has stopped
assert patched_emc._thread is None
caplog.clear()
patched_emc.shutdown()
# Test that shutdown doesn't run again as it was already called once
assert not log_has("Stopping ExternalMessageConsumer", caplog)
def test_emc_init(patched_emc):
# Test the settings were set correctly
assert patched_emc.initial_candle_limit <= 1500
assert patched_emc.wait_timeout > 0
assert patched_emc.sleep_time > 0
# Parametrize this?
def test_emc_handle_producer_message(patched_emc, caplog, ohlcv_history):
test_producer = {"name": "test", "url": "ws://test", "ws_token": "test"}
producer_name = test_producer['name']
caplog.set_level(logging.DEBUG)
# Test handle whitelist message
whitelist_message = {"type": "whitelist", "data": ["BTC/USDT"]}
patched_emc.handle_producer_message(test_producer, whitelist_message)
assert log_has(f"Received message of type `whitelist` from `{producer_name}`", caplog)
assert log_has(
f"Consumed message from `{producer_name}` of type `RPCMessageType.WHITELIST`", caplog)
# Test handle analyzed_df message
df_message = {
"type": "analyzed_df",
"data": {
"key": ("BTC/USDT", "5m", "spot"),
"df": ohlcv_history,
"la": datetime.now(timezone.utc)
}
}
patched_emc.handle_producer_message(test_producer, df_message)
assert log_has(f"Received message of type `analyzed_df` from `{producer_name}`", caplog)
assert log_has(
f"Consumed message from `{producer_name}` of type `RPCMessageType.ANALYZED_DF`", caplog)
# Test unhandled message
unhandled_message = {"type": "status", "data": "RUNNING"}
patched_emc.handle_producer_message(test_producer, unhandled_message)
assert log_has_re(r"Received unhandled message\: .*", caplog)
# Test malformed messages
caplog.clear()
malformed_message = {"type": "whitelist", "data": {"pair": "BTC/USDT"}}
patched_emc.handle_producer_message(test_producer, malformed_message)
assert log_has_re(r"Invalid message .+", caplog)
malformed_message = {
"type": "analyzed_df",
"data": {
"key": "BTC/USDT",
"df": ohlcv_history,
"la": datetime.now(timezone.utc)
}
}
patched_emc.handle_producer_message(test_producer, malformed_message)
assert log_has(f"Received message of type `analyzed_df` from `{producer_name}`", caplog)
assert log_has_re(r"Invalid message .+", caplog)
caplog.clear()
malformed_message = {"some": "stuff"}
patched_emc.handle_producer_message(test_producer, malformed_message)
assert log_has_re(r"Invalid message .+", caplog)
caplog.clear()
malformed_message = {"type": "whitelist", "data": None}
patched_emc.handle_producer_message(test_producer, malformed_message)
assert log_has_re(r"Empty message .+", caplog)
async def test_emc_create_connection_success(default_conf, caplog, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": _TEST_WS_HOST,
"port": _TEST_WS_PORT,
"ws_token": _TEST_WS_TOKEN
}
],
"wait_timeout": 60,
"ping_timeout": 60,
"sleep_timeout": 60
}
})
mocker.patch('freqtrade.rpc.external_message_consumer.ExternalMessageConsumer.start',
MagicMock())
dp = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dp)
test_producer = default_conf['external_message_consumer']['producers'][0]
lock = asyncio.Lock()
emc._running = True
async def eat(websocket):
emc._running = False
try:
async with websockets.serve(eat, _TEST_WS_HOST, _TEST_WS_PORT):
await emc._create_connection(test_producer, lock)
assert log_has_re(r"Producer connection success.+", caplog)
finally:
emc.shutdown()
async def test_emc_create_connection_invalid_port(default_conf, caplog, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": _TEST_WS_HOST,
"port": -1,
"ws_token": _TEST_WS_TOKEN
}
],
"wait_timeout": 60,
"ping_timeout": 60,
"sleep_timeout": 60
}
})
dp = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dp)
try:
await asyncio.sleep(0.01)
assert log_has_re(r".+ is an invalid WebSocket URL .+", caplog)
finally:
emc.shutdown()
async def test_emc_create_connection_invalid_host(default_conf, caplog, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": "10000.1241..2121/",
"port": _TEST_WS_PORT,
"ws_token": _TEST_WS_TOKEN
}
],
"wait_timeout": 60,
"ping_timeout": 60,
"sleep_timeout": 60
}
})
dp = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dp)
try:
await asyncio.sleep(0.01)
assert log_has_re(r".+ is an invalid WebSocket URL .+", caplog)
finally:
emc.shutdown()
async def test_emc_create_connection_error(default_conf, caplog, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": _TEST_WS_HOST,
"port": _TEST_WS_PORT,
"ws_token": _TEST_WS_TOKEN
}
],
"wait_timeout": 60,
"ping_timeout": 60,
"sleep_timeout": 60
}
})
# Test unexpected error
mocker.patch('websockets.connect', side_effect=RuntimeError)
dp = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dp)
try:
await asyncio.sleep(0.01)
assert log_has("Unexpected error has occurred:", caplog)
finally:
emc.shutdown()
async def test_emc_receive_messages_valid(default_conf, caplog, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": _TEST_WS_HOST,
"port": _TEST_WS_PORT,
"ws_token": _TEST_WS_TOKEN
}
],
"wait_timeout": 1,
"ping_timeout": 60,
"sleep_time": 60
}
})
mocker.patch('freqtrade.rpc.external_message_consumer.ExternalMessageConsumer.start',
MagicMock())
lock = asyncio.Lock()
test_producer = default_conf['external_message_consumer']['producers'][0]
dp = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dp)
loop = asyncio.get_event_loop()
def change_running(emc): emc._running = not emc._running
class TestChannel:
async def recv(self, *args, **kwargs):
return {"type": "whitelist", "data": ["BTC/USDT"]}
async def ping(self, *args, **kwargs):
return asyncio.Future()
try:
change_running(emc)
loop.call_soon(functools.partial(change_running, emc=emc))
await emc._receive_messages(TestChannel(), test_producer, lock)
assert log_has_re(r"Received message of type `whitelist`.+", caplog)
finally:
emc.shutdown()
async def test_emc_receive_messages_invalid(default_conf, caplog, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": _TEST_WS_HOST,
"port": _TEST_WS_PORT,
"ws_token": _TEST_WS_TOKEN
}
],
"wait_timeout": 1,
"ping_timeout": 60,
"sleep_time": 60
}
})
mocker.patch('freqtrade.rpc.external_message_consumer.ExternalMessageConsumer.start',
MagicMock())
lock = asyncio.Lock()
test_producer = default_conf['external_message_consumer']['producers'][0]
dp = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dp)
loop = asyncio.get_event_loop()
def change_running(emc): emc._running = not emc._running
class TestChannel:
async def recv(self, *args, **kwargs):
return {"type": ["BTC/USDT"]}
async def ping(self, *args, **kwargs):
return asyncio.Future()
try:
change_running(emc)
loop.call_soon(functools.partial(change_running, emc=emc))
await emc._receive_messages(TestChannel(), test_producer, lock)
assert log_has_re(r"Invalid message from.+", caplog)
finally:
emc.shutdown()
async def test_emc_receive_messages_timeout(default_conf, caplog, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": _TEST_WS_HOST,
"port": _TEST_WS_PORT,
"ws_token": _TEST_WS_TOKEN
}
],
"wait_timeout": 0.1,
"ping_timeout": 1,
"sleep_time": 1
}
})
mocker.patch('freqtrade.rpc.external_message_consumer.ExternalMessageConsumer.start',
MagicMock())
lock = asyncio.Lock()
test_producer = default_conf['external_message_consumer']['producers'][0]
dp = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dp)
loop = asyncio.get_event_loop()
def change_running(emc): emc._running = not emc._running
class TestChannel:
async def recv(self, *args, **kwargs):
await asyncio.sleep(0.2)
async def ping(self, *args, **kwargs):
return asyncio.Future()
try:
change_running(emc)
loop.call_soon(functools.partial(change_running, emc=emc))
await emc._receive_messages(TestChannel(), test_producer, lock)
assert log_has_re(r"Ping error.+", caplog)
finally:
emc.shutdown()
async def test_emc_receive_messages_handle_error(default_conf, caplog, mocker):
default_conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": _TEST_WS_HOST,
"port": _TEST_WS_PORT,
"ws_token": _TEST_WS_TOKEN
}
],
"wait_timeout": 1,
"ping_timeout": 1,
"sleep_time": 1
}
})
mocker.patch('freqtrade.rpc.external_message_consumer.ExternalMessageConsumer.start',
MagicMock())
lock = asyncio.Lock()
test_producer = default_conf['external_message_consumer']['producers'][0]
dp = DataProvider(default_conf, None, None, None)
emc = ExternalMessageConsumer(default_conf, dp)
emc.handle_producer_message = MagicMock(side_effect=Exception)
loop = asyncio.get_event_loop()
def change_running(emc): emc._running = not emc._running
class TestChannel:
async def recv(self, *args, **kwargs):
return {"type": "whitelist", "data": ["BTC/USDT"]}
async def ping(self, *args, **kwargs):
return asyncio.Future()
try:
change_running(emc)
loop.call_soon(functools.partial(change_running, emc=emc))
await emc._receive_messages(TestChannel(), test_producer, lock)
assert log_has_re(r"Error handling producer message.+", caplog)
finally:
emc.shutdown()

View File

@ -1089,6 +1089,58 @@ def test__validate_pricing_rules(default_conf, caplog) -> None:
validate_config_consistency(conf)
def test__validate_consumers(default_conf, caplog) -> None:
conf = deepcopy(default_conf)
conf.update({
"external_message_consumer": {
"enabled": True,
"producers": []
}
})
with pytest.raises(OperationalException,
match="You must specify at least 1 Producer to connect to."):
validate_config_consistency(conf)
conf = deepcopy(default_conf)
conf.update({
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": "127.0.0.1",
"port": 8081,
"ws_token": "secret_ws_t0ken."
}, {
"name": "default",
"host": "127.0.0.1",
"port": 8080,
"ws_token": "secret_ws_t0ken."
}
]}
})
with pytest.raises(OperationalException,
match="Producer names must be unique. Duplicate: default"):
validate_config_consistency(conf)
conf = deepcopy(default_conf)
conf.update({
"process_only_new_candles": True,
"external_message_consumer": {
"enabled": True,
"producers": [
{
"name": "default",
"host": "127.0.0.1",
"port": 8081,
"ws_token": "secret_ws_t0ken."
}
]}
})
validate_config_consistency(conf)
assert log_has_re("To receive best performance with external data.*", caplog)
def test_load_config_test_comments() -> None:
"""
Load config with comments

View File

@ -1319,9 +1319,9 @@ def test_create_stoploss_order_invalid_order(
assert create_order_mock.call_args[1]['amount'] == trade.amount
# Rpc is sending first buy, then sell
assert rpc_mock.call_count == 2
assert rpc_mock.call_args_list[1][0][0]['sell_reason'] == ExitType.EMERGENCY_EXIT.value
assert rpc_mock.call_args_list[1][0][0]['order_type'] == 'market'
assert rpc_mock.call_count == 3
assert rpc_mock.call_args_list[2][0][0]['sell_reason'] == ExitType.EMERGENCY_EXIT.value
assert rpc_mock.call_args_list[2][0][0]['order_type'] == 'market'
@pytest.mark.parametrize("is_short", [False, True])
@ -2439,7 +2439,7 @@ def test_manage_open_orders_entry_usercustom(
# Trade should be closed since the function returns true
freqtrade.manage_open_orders()
assert cancel_order_wr_mock.call_count == 1
assert rpc_mock.call_count == 1
assert rpc_mock.call_count == 2
trades = Trade.query.filter(Trade.open_order_id.is_(open_trade.open_order_id)).all()
nb_trades = len(trades)
assert nb_trades == 0
@ -2478,7 +2478,7 @@ def test_manage_open_orders_entry(
# check it does cancel buy orders over the time limit
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 1
assert rpc_mock.call_count == 1
assert rpc_mock.call_count == 2
trades = Trade.query.filter(Trade.open_order_id.is_(open_trade.open_order_id)).all()
nb_trades = len(trades)
assert nb_trades == 0
@ -2608,7 +2608,7 @@ def test_check_handle_cancelled_buy(
# check it does cancel buy orders over the time limit
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 0
assert rpc_mock.call_count == 1
assert rpc_mock.call_count == 2
trades = Trade.query.filter(Trade.open_order_id.is_(open_trade.open_order_id)).all()
assert len(trades) == 0
assert log_has_re(
@ -2639,7 +2639,7 @@ def test_manage_open_orders_buy_exception(
# check it does cancel buy orders over the time limit
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 0
assert rpc_mock.call_count == 0
assert rpc_mock.call_count == 1
trades = Trade.query.filter(Trade.open_order_id.is_(open_trade.open_order_id)).all()
nb_trades = len(trades)
assert nb_trades == 1
@ -2686,7 +2686,7 @@ def test_manage_open_orders_exit_usercustom(
# Return false - No impact
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 0
assert rpc_mock.call_count == 0
assert rpc_mock.call_count == 1
assert open_trade_usdt.is_open is False
assert freqtrade.strategy.check_exit_timeout.call_count == 1
assert freqtrade.strategy.check_entry_timeout.call_count == 0
@ -2696,7 +2696,7 @@ def test_manage_open_orders_exit_usercustom(
# Return Error - No impact
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 0
assert rpc_mock.call_count == 0
assert rpc_mock.call_count == 1
assert open_trade_usdt.is_open is False
assert freqtrade.strategy.check_exit_timeout.call_count == 1
assert freqtrade.strategy.check_entry_timeout.call_count == 0
@ -2706,7 +2706,7 @@ def test_manage_open_orders_exit_usercustom(
freqtrade.strategy.check_entry_timeout = MagicMock(return_value=True)
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 1
assert rpc_mock.call_count == 1
assert rpc_mock.call_count == 2
assert open_trade_usdt.is_open is True
assert freqtrade.strategy.check_exit_timeout.call_count == 1
assert freqtrade.strategy.check_entry_timeout.call_count == 0
@ -2766,7 +2766,7 @@ def test_manage_open_orders_exit(
# check it does cancel sell orders over the time limit
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 1
assert rpc_mock.call_count == 1
assert rpc_mock.call_count == 2
assert open_trade_usdt.is_open is True
# Custom user sell-timeout is never called
assert freqtrade.strategy.check_exit_timeout.call_count == 0
@ -2805,7 +2805,7 @@ def test_check_handle_cancelled_exit(
# check it does cancel sell orders over the time limit
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 0
assert rpc_mock.call_count == 1
assert rpc_mock.call_count == 2
assert open_trade_usdt.is_open is True
exit_name = 'Buy' if is_short else 'Sell'
assert log_has_re(f"{exit_name} order cancelled on exchange for Trade.*", caplog)
@ -2843,7 +2843,7 @@ def test_manage_open_orders_partial(
# note this is for a partially-complete buy order
freqtrade.manage_open_orders()
assert cancel_order_mock.call_count == 1
assert rpc_mock.call_count == 2
assert rpc_mock.call_count == 3
trades = Trade.query.filter(Trade.open_order_id.is_(open_trade.open_order_id)).all()
assert len(trades) == 1
assert trades[0].amount == 23.0
@ -2890,7 +2890,7 @@ def test_manage_open_orders_partial_fee(
assert log_has_re(r"Applying fee on amount for Trade.*", caplog)
assert cancel_order_mock.call_count == 1
assert rpc_mock.call_count == 2
assert rpc_mock.call_count == 3
trades = Trade.query.filter(Trade.open_order_id.is_(open_trade.open_order_id)).all()
assert len(trades) == 1
# Verify that trade has been updated
@ -2940,7 +2940,7 @@ def test_manage_open_orders_partial_except(
assert log_has_re(r"Could not update trade amount: .*", caplog)
assert cancel_order_mock.call_count == 1
assert rpc_mock.call_count == 2
assert rpc_mock.call_count == 3
trades = Trade.query.filter(Trade.open_order_id.is_(open_trade.open_order_id)).all()
assert len(trades) == 1
# Verify that trade has been updated
@ -3155,7 +3155,7 @@ def test_handle_cancel_exit_limit(mocker, default_conf_usdt, fee) -> None:
reason = CANCEL_REASON['TIMEOUT']
assert freqtrade.handle_cancel_exit(trade, order, reason)
assert cancel_order_mock.call_count == 1
assert send_msg_mock.call_count == 1
assert send_msg_mock.call_count == 2
assert trade.close_rate is None
assert trade.exit_reason is None
@ -3592,7 +3592,7 @@ def test_execute_trade_exit_with_stoploss_on_exchange(
trade.is_short = is_short
assert trade
assert cancel_order.call_count == 1
assert rpc_mock.call_count == 3
assert rpc_mock.call_count == 4
@pytest.mark.parametrize("is_short", [False, True])
@ -3662,11 +3662,11 @@ def test_may_execute_trade_exit_after_stoploss_on_exchange_hit(
assert trade.stoploss_order_id is None
assert trade.is_open is False
assert trade.exit_reason == ExitType.STOPLOSS_ON_EXCHANGE.value
assert rpc_mock.call_count == 3
assert rpc_mock.call_args_list[0][0][0]['type'] == RPCMessageType.ENTRY
assert rpc_mock.call_args_list[0][0][0]['amount'] > 20
assert rpc_mock.call_args_list[1][0][0]['type'] == RPCMessageType.ENTRY_FILL
assert rpc_mock.call_args_list[2][0][0]['type'] == RPCMessageType.EXIT_FILL
assert rpc_mock.call_count == 4
assert rpc_mock.call_args_list[1][0][0]['type'] == RPCMessageType.ENTRY
assert rpc_mock.call_args_list[1][0][0]['amount'] > 20
assert rpc_mock.call_args_list[2][0][0]['type'] == RPCMessageType.ENTRY_FILL
assert rpc_mock.call_args_list[3][0][0]['type'] == RPCMessageType.EXIT_FILL
@pytest.mark.parametrize(

View File

@ -7,10 +7,11 @@ from unittest.mock import MagicMock
import pytest
from freqtrade.misc import (decimals_per_coin, deep_merge_dicts, file_dump_json, file_load_json,
format_ms_time, pair_to_filename, parse_db_uri_for_logging, plural,
render_template, render_template_with_fallback, round_coin_value,
safe_value_fallback, safe_value_fallback2, shorten_date)
from freqtrade.misc import (dataframe_to_json, decimals_per_coin, deep_merge_dicts, file_dump_json,
file_load_json, format_ms_time, json_to_dataframe, pair_to_filename,
parse_db_uri_for_logging, plural, render_template,
render_template_with_fallback, round_coin_value, safe_value_fallback,
safe_value_fallback2, shorten_date)
def test_decimals_per_coin():
@ -219,3 +220,14 @@ def test_deep_merge_dicts():
res2['first']['rows']['test'] = 'asdf'
assert deep_merge_dicts(a, deepcopy(b), allow_null_overrides=False) == res2
def test_dataframe_json(ohlcv_history):
from pandas.testing import assert_frame_equal
json = dataframe_to_json(ohlcv_history)
dataframe = json_to_dataframe(json)
assert list(ohlcv_history.columns) == list(dataframe.columns)
assert len(ohlcv_history) == len(dataframe)
assert_frame_equal(ohlcv_history, dataframe)
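These helpers presumably back the analyzed_df websocket messages elsewhere in this change; a minimal round-trip sketch using only the signatures exercised above (the `candles` frame is a hypothetical stand-in for the fixture):
import pandas as pd
from freqtrade.misc import dataframe_to_json, json_to_dataframe

candles = pd.DataFrame({'date': pd.to_datetime(['2022-01-01'], utc=True),
                        'open': [1.0], 'high': [1.1], 'low': [0.9],
                        'close': [1.05], 'volume': [100.0]})
payload = dataframe_to_json(candles)    # DataFrame -> JSON string
restored = json_to_dataframe(payload)   # JSON string -> an equal DataFrame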