Merge pull request #8553 from freqtrade/new_release

New release 2023.4
Matthias 2023-04-25 16:15:47 +02:00 committed by GitHub
commit 8364fc1bd2
136 changed files with 3298 additions and 996 deletions

View File

@ -57,7 +57,7 @@ jobs:
- name: Installation - *nix
if: runner.os == 'Linux'
run: |
python -m pip install --upgrade pip wheel
python -m pip install --upgrade pip==23.0.1 wheel==0.38.4
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
export TA_LIBRARY_PATH=${HOME}/dependencies/lib
export TA_INCLUDE_PATH=${HOME}/dependencies/include
@ -163,7 +163,7 @@ jobs:
rm /usr/local/bin/python3.11-config || true
brew install hdf5 c-blosc
python -m pip install --upgrade pip wheel
python -m pip install --upgrade pip==23.0.1 wheel==0.38.4
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
export TA_LIBRARY_PATH=${HOME}/dependencies/lib
export TA_INCLUDE_PATH=${HOME}/dependencies/include
@ -352,7 +352,7 @@ jobs:
- name: Installation - *nix
if: runner.os == 'Linux'
run: |
python -m pip install --upgrade pip wheel
python -m pip install --upgrade pip==23.0.1 wheel==0.38.4
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
export TA_LIBRARY_PATH=${HOME}/dependencies/lib
export TA_INCLUDE_PATH=${HOME}/dependencies/include
@ -425,7 +425,7 @@ jobs:
python setup.py sdist bdist_wheel
- name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@v1.8.3
uses: pypa/gh-action-pypi-publish@v1.8.5
if: (github.event_name == 'release')
with:
user: __token__
@ -433,7 +433,7 @@ jobs:
repository_url: https://test.pypi.org/legacy/
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.8.3
uses: pypa/gh-action-pypi-publish@v1.8.5
if: (github.event_name == 'release')
with:
user: __token__

View File

@ -13,12 +13,12 @@ repos:
- id: mypy
exclude: build_helpers
additional_dependencies:
- types-cachetools==5.3.0.4
- types-cachetools==5.3.0.5
- types-filelock==3.2.7
- types-requests==2.28.11.16
- types-tabulate==0.9.0.1
- types-python-dateutil==2.8.19.10
- SQLAlchemy==2.0.7
- types-requests==2.28.11.17
- types-tabulate==0.9.0.2
- types-python-dateutil==2.8.19.12
- SQLAlchemy==2.0.10
# stages: [push]
- repo: https://github.com/pycqa/isort

View File

@ -1,4 +1,4 @@
FROM python:3.10.10-slim-bullseye as base
FROM python:3.10.11-slim-bullseye as base
# Setup env
ENV LANG C.UTF-8
@ -25,7 +25,7 @@ FROM base as python-deps
RUN apt-get update \
&& apt-get -y install build-essential libssl-dev git libffi-dev libgfortran5 pkg-config cmake gcc \
&& apt-get clean \
&& pip install --upgrade pip
&& pip install --upgrade pip==23.0.1 wheel==0.38.4
# Install TA-lib
COPY build_helpers/* /tmp/

View File

@ -210,6 +210,6 @@ To run this bot we recommend a cloud instance with a minimum of:
- [Python >= 3.8](http://docs.python-guide.org/en/latest/starting/installation/)
- [pip](https://pip.pypa.io/en/stable/installing/)
- [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
- [TA-Lib](https://mrjbq7.github.io/ta-lib/install.html)
- [TA-Lib](https://ta-lib.github.io/ta-lib-python/)
- [virtualenv](https://virtualenv.pypa.io/en/stable/installation.html) (Recommended)
- [Docker](https://www.docker.com/products/docker) (Recommended)

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,21 +1,21 @@
# Downloads don't work automatically, since the URL is regenerated via javascript.
# Downloaded from https://www.lfd.uci.edu/~gohlke/pythonlibs/#ta-lib
python -m pip install --upgrade pip wheel
python -m pip install --upgrade pip==23.0.1 wheel==0.38.4
$pyv = python -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"
if ($pyv -eq '3.8') {
pip install build_helpers\TA_Lib-0.4.25-cp38-cp38-win_amd64.whl
pip install build_helpers\TA_Lib-0.4.26-cp38-cp38-win_amd64.whl
}
if ($pyv -eq '3.9') {
pip install build_helpers\TA_Lib-0.4.25-cp39-cp39-win_amd64.whl
pip install build_helpers\TA_Lib-0.4.26-cp39-cp39-win_amd64.whl
}
if ($pyv -eq '3.10') {
pip install build_helpers\TA_Lib-0.4.25-cp310-cp310-win_amd64.whl
pip install build_helpers\TA_Lib-0.4.26-cp310-cp310-win_amd64.whl
}
if ($pyv -eq '3.11') {
pip install build_helpers\TA_Lib-0.4.25-cp311-cp311-win_amd64.whl
pip install build_helpers\TA_Lib-0.4.26-cp311-cp311-win_amd64.whl
}
pip install -r requirements-dev.txt
pip install -e .

View File

@ -12,6 +12,7 @@ TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
TAG_PLOT=${TAG}_plot
TAG_FREQAI=${TAG}_freqai
TAG_FREQAI_RL=${TAG_FREQAI}rl
TAG_FREQAI_TORCH=${TAG_FREQAI}torch
TAG_PI="${TAG}_pi"
TAG_ARM=${TAG}_arm
@ -42,9 +43,9 @@ if [ $? -ne 0 ]; then
return 1
fi
docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
docker build --cache-from freqtrade:${TAG_ARM} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .
# Tag image for upload and next build step
docker tag freqtrade:$TAG_ARM ${CACHE_IMAGE}:$TAG_ARM
@ -84,6 +85,10 @@ docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI}
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_RL}
# Create special Torch tag - which is identical to the RL tag.
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_TORCH} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_TORCH}
# copy images to ghcr.io
alias crane="docker run --rm -i -v $(pwd)/.crane:/home/nonroot/.docker/ gcr.io/go-containerregistry/crane"
@ -93,6 +98,7 @@ chmod a+rwx .crane
echo "${GHCR_TOKEN}" | crane auth login ghcr.io -u "${GHCR_USERNAME}" --password-stdin
crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_RL}
crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_TORCH}
crane copy ${IMAGE_NAME}:${TAG_FREQAI} ${GHCR_IMAGE_NAME}:${TAG_FREQAI}
crane copy ${IMAGE_NAME}:${TAG_PLOT} ${GHCR_IMAGE_NAME}:${TAG_PLOT}
crane copy ${IMAGE_NAME}:${TAG} ${GHCR_IMAGE_NAME}:${TAG}

View File

@ -58,9 +58,9 @@ fi
# Tag image for upload and next build step
docker tag freqtrade:$TAG ${CACHE_IMAGE}:$TAG
docker build --cache-from freqtrade:${TAG} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG} -t freqtrade:${TAG_PLOT} -f docker/Dockerfile.plot .
docker build --cache-from freqtrade:${TAG} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG} -t freqtrade:${TAG_FREQAI} -f docker/Dockerfile.freqai .
docker build --cache-from freqtrade:${TAG_FREQAI} --build-arg sourceimage=${CACHE_IMAGE} --build-arg sourcetag=${TAG_FREQAI} -t freqtrade:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_PLOT} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_FREQAI} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI} -t freqtrade:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .
docker tag freqtrade:$TAG_PLOT ${CACHE_IMAGE}:$TAG_PLOT
docker tag freqtrade:$TAG_FREQAI ${CACHE_IMAGE}:$TAG_FREQAI

Binary file not shown (new image, 18 KiB).

View File

@ -274,19 +274,20 @@ A backtesting result will look like this:
| XRP/BTC | 35 | 0.66 | 22.96 | 0.00114897 | 11.48 | 3:49:00 | 12 0 23 34.3 |
| ZEC/BTC | 22 | -0.46 | -10.18 | -0.00050971 | -5.09 | 2:22:00 | 7 0 15 31.8 |
| TOTAL | 429 | 0.36 | 152.41 | 0.00762792 | 76.20 | 4:12:00 | 186 0 243 43.4 |
========================================================= EXIT REASON STATS ==========================================================
| Exit Reason | Exits | Wins | Draws | Losses |
|:-------------------|--------:|------:|-------:|--------:|
| trailing_stop_loss | 205 | 150 | 0 | 55 |
| stop_loss | 166 | 0 | 0 | 166 |
| exit_signal | 56 | 36 | 0 | 20 |
| force_exit | 2 | 0 | 0 | 2 |
====================================================== LEFT OPEN TRADES REPORT ======================================================
| Pair | Entries | Avg Profit % | Cum Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Win Draw Loss Win% |
|:---------|---------:|---------------:|---------------:|-----------------:|---------------:|:---------------|--------------------:|
| ADA/BTC | 1 | 0.89 | 0.89 | 0.00004434 | 0.44 | 6:00:00 | 1 0 0 100 |
| LTC/BTC | 1 | 0.68 | 0.68 | 0.00003421 | 0.34 | 2:00:00 | 1 0 0 100 |
| TOTAL | 2 | 0.78 | 1.57 | 0.00007855 | 0.78 | 4:00:00 | 2 0 0 100 |
==================== EXIT REASON STATS ====================
| Exit Reason | Exits | Wins | Draws | Losses |
|:-------------------|--------:|------:|-------:|--------:|
| trailing_stop_loss | 205 | 150 | 0 | 55 |
| stop_loss | 166 | 0 | 0 | 166 |
| exit_signal | 56 | 36 | 0 | 20 |
| force_exit | 2 | 0 | 0 | 2 |
================== SUMMARY METRICS ==================
| Metric | Value |
|-----------------------------+---------------------|

View File

@ -138,7 +138,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `stake_currency` | **Required.** Crypto-currency used for trading. <br> **Datatype:** String
| `stake_amount` | **Required.** Amount of crypto-currency your bot will use for each trade. Set it to `"unlimited"` to allow the bot to use all available balance. [More information below](#configuring-amount-per-trade). <br> **Datatype:** Positive float or `"unlimited"`.
| `tradable_balance_ratio` | Ratio of the total account balance the bot is allowed to trade. [More information below](#configuring-amount-per-trade). <br>*Defaults to `0.99` (99%).*<br> **Datatype:** Positive float between `0.1` and `1.0`.
| `available_capital` | Available starting capital for the bot. Useful when running multiple bots on the same exchange account.[More information below](#configuring-amount-per-trade). <br> **Datatype:** Positive float.
| `available_capital` | Available starting capital for the bot. Useful when running multiple bots on the same exchange account. [More information below](#configuring-amount-per-trade). <br> **Datatype:** Positive float.
| `amend_last_stake_amount` | Use reduced last stake amount if necessary. [More information below](#configuring-amount-per-trade). <br>*Defaults to `false`.* <br> **Datatype:** Boolean
| `last_stake_amount_min_ratio` | Defines minimum stake amount that has to be left and executed. Applies only to the last stake amount when it's amended to a reduced value (i.e. if `amend_last_stake_amount` is set to `true`). [More information below](#configuring-amount-per-trade). <br>*Defaults to `0.5`.* <br> **Datatype:** Float (as ratio)
| `amount_reserve_percent` | Reserve some amount in min pair stake amount. The bot will reserve `amount_reserve_percent` + stoploss value when calculating min pair stake amount in order to avoid possible trade refusals. <br>*Defaults to `0.05` (5%).* <br> **Datatype:** Positive Float as ratio.
@ -155,25 +155,25 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `trailing_stop_positive_offset` | Offset on when to apply `trailing_stop_positive`. Percentage value which should be positive. More details in the [stoploss documentation](stoploss.md#trailing-stop-loss-only-once-the-trade-has-reached-a-certain-offset). [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `0.0` (no offset).* <br> **Datatype:** Float
| `trailing_only_offset_is_reached` | Only apply trailing stoploss when the offset is reached. [stoploss documentation](stoploss.md). [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `false`.* <br> **Datatype:** Boolean
| `fee` | Fee used during backtesting / dry-runs. Should normally not be configured, which has freqtrade fall back to the exchange default fee. Set as ratio (e.g. 0.001 = 0.1%). Fee is applied twice for each trade, once when buying, once when selling. <br> **Datatype:** Float (as ratio)
| `futures_funding_rate` | User-specified funding rate to be used when historical funding rates are not available from the exchange. This does not overwrite real historical rates. It is recommended that this be set to 0 unless you are testing a specific coin and you understand how the funding rate will affect freqtrade's profit calculations. [More information here](leverage.md#unavailable-funding-rates) <br>*Defaults to None.*<br> **Datatype:** Float
| `futures_funding_rate` | User-specified funding rate to be used when historical funding rates are not available from the exchange. This does not overwrite real historical rates. It is recommended that this be set to 0 unless you are testing a specific coin and you understand how the funding rate will affect freqtrade's profit calculations. [More information here](leverage.md#unavailable-funding-rates) <br>*Defaults to `None`.*<br> **Datatype:** Float
| `trading_mode` | Specifies if you want to trade regularly, trade with leverage, or trade contracts whose prices are derived from matching cryptocurrency prices. [leverage documentation](leverage.md). <br>*Defaults to `"spot"`.* <br> **Datatype:** String
| `margin_mode` | When trading with leverage, this determines if the collateral owned by the trader will be shared or isolated to each trading pair [leverage documentation](leverage.md). <br> **Datatype:** String
| `liquidation_buffer` | A ratio specifying how large of a safety net to place between the liquidation price and the stoploss to prevent a position from reaching the liquidation price [leverage documentation](leverage.md). <br>*Defaults to `0.05`.* <br> **Datatype:** Float
| | **Unfilled timeout**
| `unfilledtimeout.entry` | **Required.** How long (in minutes or seconds) the bot will wait for an unfilled entry order to complete, after which the order will be cancelled and repeated at current (new) price, as long as there is a signal. [Strategy Override](#parameters-in-the-strategy).<br> **Datatype:** Integer
| `unfilledtimeout.exit` | **Required.** How long (in minutes or seconds) the bot will wait for an unfilled exit order to complete, after which the order will be cancelled and repeated at current (new) price, as long as there is a signal. [Strategy Override](#parameters-in-the-strategy).<br> **Datatype:** Integer
| `unfilledtimeout.unit` | Unit to use in unfilledtimeout setting. Note: If you set unfilledtimeout.unit to "seconds", "internals.process_throttle_secs" must be inferior or equal to timeout [Strategy Override](#parameters-in-the-strategy). <br> *Defaults to `minutes`.* <br> **Datatype:** String
| `unfilledtimeout.unit` | Unit to use in unfilledtimeout setting. Note: If you set unfilledtimeout.unit to "seconds", "internals.process_throttle_secs" must be inferior or equal to timeout [Strategy Override](#parameters-in-the-strategy). <br> *Defaults to `"minutes"`.* <br> **Datatype:** String
| `unfilledtimeout.exit_timeout_count` | How many times can exit orders time out. Once this number of timeouts is reached, an emergency exit is triggered. 0 to disable and allow unlimited order cancels. [Strategy Override](#parameters-in-the-strategy).<br>*Defaults to `0`.* <br> **Datatype:** Integer
| | **Pricing**
| `entry_pricing.price_side` | Select the side of the spread the bot should look at to get the entry rate. [More information below](#buy-price-side).<br> *Defaults to `same`.* <br> **Datatype:** String (either `ask`, `bid`, `same` or `other`).
| `entry_pricing.price_side` | Select the side of the spread the bot should look at to get the entry rate. [More information below](#entry-price).<br> *Defaults to `"same"`.* <br> **Datatype:** String (either `ask`, `bid`, `same` or `other`).
| `entry_pricing.price_last_balance` | **Required.** Interpolate the bidding price. More information [below](#entry-price-without-orderbook-enabled).
| `entry_pricing.use_order_book` | Enable entering using the rates in [Order Book Entry](#entry-price-with-orderbook-enabled). <br> *Defaults to `True`.*<br> **Datatype:** Boolean
| `entry_pricing.use_order_book` | Enable entering using the rates in [Order Book Entry](#entry-price-with-orderbook-enabled). <br> *Defaults to `true`.*<br> **Datatype:** Boolean
| `entry_pricing.order_book_top` | Bot will use the top N rate in Order Book "price_side" to enter a trade. I.e. a value of 2 will allow the bot to pick the 2nd entry in [Order Book Entry](#entry-price-with-orderbook-enabled). <br>*Defaults to `1`.* <br> **Datatype:** Positive Integer
| `entry_pricing. check_depth_of_market.enabled` | Do not enter if the difference of buy orders and sell orders is met in Order Book. [Check market depth](#check-depth-of-market). <br>*Defaults to `false`.* <br> **Datatype:** Boolean
| `entry_pricing. check_depth_of_market.bids_to_ask_delta` | The difference ratio of buy orders and sell orders found in Order Book. A value below 1 means sell order size is greater, while value greater than 1 means buy order size is higher. [Check market depth](#check-depth-of-market) <br> *Defaults to `0`.* <br> **Datatype:** Float (as ratio)
| `exit_pricing.price_side` | Select the side of the spread the bot should look at to get the exit rate. [More information below](#exit-price-side).<br> *Defaults to `same`.* <br> **Datatype:** String (either `ask`, `bid`, `same` or `other`).
| `exit_pricing.price_side` | Select the side of the spread the bot should look at to get the exit rate. [More information below](#exit-price-side).<br> *Defaults to `"same"`.* <br> **Datatype:** String (either `ask`, `bid`, `same` or `other`).
| `exit_pricing.price_last_balance` | Interpolate the exiting price. More information [below](#exit-price-without-orderbook-enabled).
| `exit_pricing.use_order_book` | Enable exiting of open trades using [Order Book Exit](#exit-price-with-orderbook-enabled). <br> *Defaults to `True`.*<br> **Datatype:** Boolean
| `exit_pricing.use_order_book` | Enable exiting of open trades using [Order Book Exit](#exit-price-with-orderbook-enabled). <br> *Defaults to `true`.*<br> **Datatype:** Boolean
| `exit_pricing.order_book_top` | Bot will use the top N rate in Order Book "price_side" to exit. I.e. a value of 2 will allow the bot to pick the 2nd ask rate in [Order Book Exit](#exit-price-with-orderbook-enabled)<br>*Defaults to `1`.* <br> **Datatype:** Positive Integer
| `custom_price_max_distance_ratio` | Configure maximum distance ratio between current and custom entry or exit price. <br>*Defaults to `0.02` (2%).*<br> **Datatype:** Positive float
| | **TODO**
@ -199,10 +199,10 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `exchange.ccxt_sync_config` | Additional CCXT parameters passed to the regular (sync) ccxt instance. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://ccxt.readthedocs.io/en/latest/manual.html#instantiation) <br> **Datatype:** Dict
| `exchange.ccxt_async_config` | Additional CCXT parameters passed to the async ccxt instance. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://ccxt.readthedocs.io/en/latest/manual.html#instantiation) <br> **Datatype:** Dict
| `exchange.markets_refresh_interval` | The interval in minutes in which markets are reloaded. <br>*Defaults to `60` minutes.* <br> **Datatype:** Positive Integer
| `exchange.skip_pair_validation` | Skip pairlist validation on startup.<br>*Defaults to `false`<br> **Datatype:** Boolean
| `exchange.skip_open_order_update` | Skips open order updates on startup should the exchange cause problems. Only relevant in live conditions.<br>*Defaults to `false`<br> **Datatype:** Boolean
| `exchange.skip_pair_validation` | Skip pairlist validation on startup.<br>*Defaults to `false`*<br> **Datatype:** Boolean
| `exchange.skip_open_order_update` | Skips open order updates on startup should the exchange cause problems. Only relevant in live conditions.<br>*Defaults to `false`*<br> **Datatype:** Boolean
| `exchange.unknown_fee_rate` | Fallback value to use when calculating trading fees. This can be useful for exchanges which have fees in non-tradable currencies. The value provided here will be multiplied with the "fee cost".<br>*Defaults to `None`*<br> **Datatype:** float
| `exchange.log_responses` | Log relevant exchange responses. For debug mode only - use with care.<br>*Defaults to `false`<br> **Datatype:** Boolean
| `exchange.log_responses` | Log relevant exchange responses. For debug mode only - use with care.<br>*Defaults to `false`*<br> **Datatype:** Boolean
| `experimental.block_bad_exchanges` | Block exchanges known to not work with freqtrade. Leave on default unless you want to test if that exchange works now. <br>*Defaults to `true`.* <br> **Datatype:** Boolean
| | **Plugins**
| `edge.*` | Please refer to [edge configuration document](edge.md) for detailed explanation of all possible configuration options.
@ -213,7 +213,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `telegram.token` | Your Telegram bot token. Only required if `telegram.enabled` is `true`. <br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
| `telegram.chat_id` | Your personal Telegram account id. Only required if `telegram.enabled` is `true`. <br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
| `telegram.balance_dust_level` | Dust-level (in stake currency) - currencies with a balance below this will not be shown by `/balance`. <br> **Datatype:** float
| `telegram.reload` | Allow "reload" buttons on telegram messages. <br>*Defaults to `True`.<br> **Datatype:** boolean
| `telegram.reload` | Allow "reload" buttons on telegram messages. <br>*Defaults to `true`.*<br> **Datatype:** boolean
| `telegram.notification_settings.*` | Detailed notification settings. Refer to the [telegram documentation](telegram-usage.md) for details.<br> **Datatype:** dictionary
| `telegram.allow_custom_messages` | Enable the sending of Telegram messages from strategies via the dataprovider.send_msg() function. <br> **Datatype:** Boolean
| | **Webhook**

View File

@ -142,6 +142,13 @@ To fix this, redefine order types in the strategy to use "limit" instead of "mar
The same fix should be applied in the configuration file, if order types are defined in your custom config rather than in the strategy.
### I'm trying to start the bot live, but get an API permission error
Errors like `Invalid API-key, IP, or permissions for action` mean exactly what they say.
Your API key is either invalid (copy/paste error? check for leading/trailing spaces in the config), expired, or the IP you're running the bot from is not enabled in the exchange's API console.
Usually, the permission "Spot Trading" (or the equivalent on the exchange you use) will be necessary.
Futures will usually have to be enabled specifically.
### How do I search the bot logs for something?
By default, the bot writes its log to the stderr stream. This is implemented this way so that you can easily separate the bot's diagnostic messages from Backtesting, Edge and Hyperopt results, the output of other Freqtrade utility sub-commands, and the output of any custom `print()` calls you may have inserted into your strategy. So if you need to search the log messages with the grep utility, you need to redirect stderr to stdout and disregard stdout.
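As a minimal sketch (the config path and search term are illustrative, not prescribed):

```bash
# 2>&1 sends stderr (the log) to the pipe; >/dev/null then discards the
# original stdout, so grep only sees the log stream.
freqtrade trade --config user_data/config.json 2>&1 >/dev/null | grep 'Entry signal'
```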

View File

@ -52,7 +52,7 @@ The FreqAI strategy requires including the following lines of code in the standa
return dataframe
def feature_engineering_expand_all(self, dataframe, period, **kwargs):
def feature_engineering_expand_all(self, dataframe: DataFrame, period, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This function will automatically expand the defined features on the config defined
@ -77,7 +77,7 @@ The FreqAI strategy requires including the following lines of code in the standa
return dataframe
def feature_engineering_expand_basic(self, dataframe, **kwargs):
def feature_engineering_expand_basic(self, dataframe: DataFrame, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This function will automatically expand the defined features on the config defined
@ -101,7 +101,7 @@ The FreqAI strategy requires including the following lines of code in the standa
dataframe["%-raw_price"] = dataframe["close"]
return dataframe
def feature_engineering_standard(self, dataframe, **kwargs):
def feature_engineering_standard(self, dataframe: DataFrame, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This optional function will be called once with the dataframe of the base timeframe.
@ -122,7 +122,7 @@ The FreqAI strategy requires including the following lines of code in the standa
dataframe["%-hour_of_day"] = (dataframe["date"].dt.hour + 1) / 25
return dataframe
def set_freqai_targets(self, dataframe, **kwargs):
def set_freqai_targets(self, dataframe: DataFrame, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
Required function to set the targets for the model.
@ -139,6 +139,7 @@ The FreqAI strategy requires including the following lines of code in the standa
/ dataframe["close"]
- 1
)
return dataframe
```
Notice how the `feature_engineering_*()` functions are where [features](freqai-feature-engineering.md#feature-engineering) are added. Meanwhile, `set_freqai_targets()` adds the labels/targets. A full example strategy is available in `templates/FreqaiExampleStrategy.py`.
@ -236,3 +237,161 @@ If you want to predict multiple targets you must specify all labels in the same
df['&s-up_or_down'] = np.where( df["close"].shift(-100) > df["close"], 'up', 'down')
df['&s-up_or_down'] = np.where( df["close"].shift(-100) == df["close"], 'same', df['&s-up_or_down'])
```
## PyTorch Module
### Quick start
The easiest way to quickly run a PyTorch model is with the following command (for a regression task):
```bash
freqtrade trade --config config_examples/config_freqai.example.json --strategy FreqaiExampleStrategy --freqaimodel PyTorchMLPRegressor --strategy-path freqtrade/templates
```
!!! note "Installation/docker"
The PyTorch module requires large packages such as `torch`, which should be explicitly requested during `./setup.sh -i` by answering "y" to the question "Do you also want dependencies for freqai-rl or PyTorch (~700mb additional space required) [y/N]?".
Users who prefer docker should ensure they use the docker image appended with `_freqaitorch`.
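For example, pulling the torch-enabled image for this release could look like this (the image name is assumed to be the official `freqtradeorg/freqtrade` Docker Hub repository; the tag name is inferred from the build scripts earlier in this diff, where `TAG_FREQAI_TORCH=${TAG_FREQAI}torch`):

```bash
# The *_freqaitorch tag is identical to the corresponding RL tag.
docker pull freqtradeorg/freqtrade:2023.4_freqaitorch
```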
### Structure
#### Model
You can construct your own Neural Network architecture in PyTorch by simply defining your `nn.Module` class inside your custom [`IFreqaiModel` file](#using-different-prediction-models) and then using that class in your `def train()` function. Here is an example of a logistic regression model implemented in PyTorch (intended for use with the `nn.BCELoss` criterion) for classification tasks.
```python
class LogisticRegression(nn.Module):
def __init__(self, input_size: int):
super().__init__()
# Define your layers
self.linear = nn.Linear(input_size, 1)
self.activation = nn.Sigmoid()
def forward(self, x: torch.Tensor) -> torch.Tensor:
# Define the forward pass
out = self.linear(x)
out = self.activation(out)
return out
class MyCoolPyTorchClassifier(BasePyTorchClassifier):
"""
This is a custom IFreqaiModel showing how a user might setup their own
custom Neural Network architecture for their training.
"""
@property
def data_convertor(self) -> PyTorchDataConvertor:
return DefaultPyTorchDataConvertor(target_tensor_type=torch.float)
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
class_names = self.get_class_names()
self.convert_label_column_to_int(data_dictionary, dk, class_names)
n_features = data_dictionary["train_features"].shape[-1]
model = LogisticRegression(
            input_size=n_features
)
model.to(self.device)
optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
        criterion = torch.nn.BCELoss()  # matches the model's sigmoid output and float targets
init_model = self.get_init_model(dk.pair)
trainer = PyTorchModelTrainer(
model=model,
optimizer=optimizer,
criterion=criterion,
model_meta_data={"class_names": class_names},
device=self.device,
init_model=init_model,
data_convertor=self.data_convertor,
**self.trainer_kwargs,
)
trainer.fit(data_dictionary, self.splits)
return trainer
```
#### Trainer
The `PyTorchModelTrainer` performs the idiomatic PyTorch training loop:
it defines the model, loss function, and optimizer, moves them to the appropriate device (GPU or CPU), and then, inside the loop, iterates through the batches in the dataloader, moves the data to the device, computes the prediction and loss, backpropagates, and updates the model parameters using the optimizer (a minimal sketch follows below).
In addition, the trainer is responsible for the following:
- saving and loading the model
- converting the data from `pandas.DataFrame` to `torch.Tensor`.
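For orientation, here is a minimal sketch of such a loop (simplified; the real `PyTorchModelTrainer` also handles batching helpers, evaluation, and checkpointing):

```python
import torch


def train_loop(model, criterion, optimizer, dataloader, device, max_iters=100):
    """Run a plain PyTorch training loop for max_iters optimizer steps."""
    model.train()
    step = 0
    while step < max_iters:
        for xb, yb in dataloader:
            xb, yb = xb.to(device), yb.to(device)  # move the batch to GPU/CPU
            loss = criterion(model(xb), yb)        # forward pass + loss
            optimizer.zero_grad()
            loss.backward()                        # backpropagate
            optimizer.step()                       # update parameters
            step += 1
            if step >= max_iters:
                break
```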
#### Integration with Freqai module
Like all FreqAI models, PyTorch models inherit `IFreqaiModel`. `IFreqaiModel` declares three abstract methods: `train`, `fit`, and `predict`. These methods are implemented in three levels of hierarchy.
From top to bottom:
1. `BasePyTorchModel` - Implements the `train` method. All `BasePyTorch*` classes inherit it. It is responsible for general data preparation (e.g., data normalization) and for calling the `fit` method. It sets the `device` attribute used by child classes and the `model_type` attribute used by the parent class.
2. `BasePyTorch*` - Implements the `predict` method. Here, the `*` represents a group of algorithms, such as classifiers or regressors. It is responsible for data preprocessing, predicting, and postprocessing if needed.
3. `PyTorch*Classifier` / `PyTorch*Regressor` - Implements the `fit` method. It is responsible for the main training flow, where the trainer and model objects are initialized.
![image](assets/freqai_pytorch-diagram.png)
#### Full example
Building a PyTorch regressor using an MLP (multilayer perceptron) model, the MSELoss criterion, and the AdamW optimizer.
```python
class PyTorchMLPRegressor(BasePyTorchRegressor):
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
n_features = data_dictionary["train_features"].shape[-1]
model = PyTorchMLPModel(
input_dim=n_features,
output_dim=1,
**self.model_kwargs
)
model.to(self.device)
optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
criterion = torch.nn.MSELoss()
init_model = self.get_init_model(dk.pair)
trainer = PyTorchModelTrainer(
model=model,
optimizer=optimizer,
criterion=criterion,
device=self.device,
init_model=init_model,
target_tensor_type=torch.float,
**self.trainer_kwargs,
)
trainer.fit(data_dictionary)
return trainer
```
Here we create a `PyTorchMLPRegressor` class that implements the `fit` method. The `fit` method specifies the training building blocks: model, optimizer, criterion, and trainer. We inherit both `BasePyTorchRegressor` and `BasePyTorchModel`, where the former implements the `predict` method that is suitable for our regression task, and the latter implements the `train` method.
??? Note "Setting Class Names for Classifiers"
When using classifiers, the user must declare the class names (or targets) by overriding the `IFreqaiModel.class_names` attribute. This is achieved by setting `self.freqai.class_names` in the FreqAI strategy inside the `set_freqai_targets` method.
For example, if you are using a binary classifier to predict price movements as up or down, you can set the class names as follows:
```python
def set_freqai_targets(self, dataframe: DataFrame, metadata: Dict, **kwargs) -> DataFrame:
self.freqai.class_names = ["down", "up"]
dataframe['&s-up_or_down'] = np.where(dataframe["close"].shift(-100) >
dataframe["close"], 'up', 'down')
return dataframe
```
To see a full example, you can refer to the [classifier test strategy class](https://github.com/freqtrade/freqtrade/blob/develop/tests/strategy/strats/freqai_test_classifier.py).

View File

@ -6,8 +6,8 @@ Low level feature engineering is performed in the user strategy within a set of
| Function | Description |
|---------------|-------------|
| `feature_engineering__expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
| `feature_engineering__expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
| `feature_engineering_expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
| `feature_engineering_standard()` | This optional function will be called once with the dataframe of the base timeframe. This is the final function to be called, which means that the dataframe entering this function will contain all the features and columns from the base asset created by the other `feature_engineering_expand` functions. This function is a good place to do custom exotic feature extractions (e.g. tsfresh). This function is also a good place for any feature that should not be auto-expanded upon (e.g., day of the week).
| `set_freqai_targets()` | Required function to set the targets for the model. All targets must be prepended with `&` to be recognized by the FreqAI internals.
@ -16,7 +16,7 @@ Meanwhile, high level feature engineering is handled within `"feature_parameters
It is advisable to start from the template `feature_engineering_*` functions in the source provided example strategy (found in `templates/FreqaiExampleStrategy.py`) to ensure that the feature definitions are following the correct conventions. Here is an example of how to set the indicators and labels in the strategy:
```python
def feature_engineering_expand_all(self, dataframe, period, metadata, **kwargs):
def feature_engineering_expand_all(self, dataframe: DataFrame, period, metadata, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This function will automatically expand the defined features on the config defined
@ -67,7 +67,7 @@ It is advisable to start from the template `feature_engineering_*` functions in
return dataframe
def feature_engineering_expand_basic(self, dataframe, metadata, **kwargs):
def feature_engineering_expand_basic(self, dataframe: DataFrame, metadata, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This function will automatically expand the defined features on the config defined
@ -96,7 +96,7 @@ It is advisable to start from the template `feature_engineering_*` functions in
dataframe["%-raw_price"] = dataframe["close"]
return dataframe
def feature_engineering_standard(self, dataframe, metadata, **kwargs):
def feature_engineering_standard(self, dataframe: DataFrame, metadata, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This optional function will be called once with the dataframe of the base timeframe.
@ -122,7 +122,7 @@ It is advisable to start from the template `feature_engineering_*` functions in
dataframe["%-hour_of_day"] = (dataframe["date"].dt.hour + 1) / 25
return dataframe
def set_freqai_targets(self, dataframe, metadata, **kwargs):
def set_freqai_targets(self, dataframe: DataFrame, metadata, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
Required function to set the targets for the model.
@ -181,15 +181,14 @@ You can ask for each of the defined features to be included also for informative
In total, the number of features created in the presented example strategy is: length of `include_timeframes` * no. of features in `feature_engineering_expand_*()` * length of `include_corr_pairlist` * no. of `include_shifted_candles` * length of `indicator_periods_candles`
$= 3 * 3 * 3 * 2 * 2 = 108$.
### Gain finer control over `feature_engineering_*` functions with `metadata`
All `feature_engineering_*` and `set_freqai_targets()` functions are passed a `metadata` dictionary which contains information about the `pair`, `tf` (timeframe), and `period` that FreqAI is automating for feature building. As such, a user can use `metadata` inside `feature_engineering_*` functions as criteria for blocking/reserving features for certain timeframes, periods, pairs etc.
```py
def feature_engineering_expand_all(self, dataframe, period, metadata, **kwargs):
    if metadata["tf"] == "1h":
        dataframe["%-roc-period"] = ta.ROC(dataframe, timeperiod=period)
```
```python
def feature_engineering_expand_all(self, dataframe: DataFrame, period, metadata, **kwargs) -> DataFrame:
    if metadata["tf"] == "1h":
        dataframe["%-roc-period"] = ta.ROC(dataframe, timeperiod=period)
```
This will block `ta.ROC()` from being added to any timeframes other than `"1h"`.

View File

@ -85,6 +85,28 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| `net_arch` | Network architecture which is well described in [`stable_baselines3` doc](https://stable-baselines3.readthedocs.io/en/master/guide/custom_policy.html#examples). In summary: `[<shared layers>, dict(vf=[<non-shared value network layers>], pi=[<non-shared policy network layers>])]`. By default this is set to `[128, 128]`, which defines 2 shared hidden layers with 128 units each.
| `randomize_starting_position` | Randomize the starting point of each episode to avoid overfitting. <br> **Datatype:** bool. <br> Default: `False`.
| `drop_ohlc_from_features` | Do not include the normalized ohlc data in the feature set passed to the agent during training (ohlc will still be used for driving the environment in all cases) <br> **Datatype:** Boolean. <br> **Default:** `False`
| `progress_bar` | Display a progress bar with the current progress, elapsed time and estimated remaining time. <br> **Datatype:** Boolean. <br> Default: `False`.
### PyTorch parameters
#### general
| Parameter | Description |
|------------|-------------|
| | **Model training parameters within the `freqai.model_training_parameters` sub dictionary**
| `learning_rate` | Learning rate to be passed to the optimizer. <br> **Datatype:** float. <br> Default: `3e-4`.
| `model_kwargs` | Parameters to be passed to the model class. <br> **Datatype:** dict. <br> Default: `{}`.
| `trainer_kwargs` | Parameters to be passed to the trainer class. <br> **Datatype:** dict. <br> Default: `{}`.
#### trainer_kwargs
| Parameter | Description |
|------------|-------------|
| | **Model training parameters within the `freqai.model_training_parameters.trainer_kwargs` sub dictionary**
| `max_iters` | The number of training iterations to run. An iteration here refers to the number of times `self.optimizer.step()` is called; used to calculate `n_epochs`. <br> **Datatype:** int. <br> Default: `100`.
| `batch_size` | The size of the batches to use during training. <br> **Datatype:** int. <br> Default: `64`.
| `max_n_eval_batches` | The maximum number of batches to use for evaluation. <br> **Datatype:** int, optional. <br> Default: `None`.
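A hedged sketch of where these settings live in the configuration (the values shown are the defaults from the tables above, not recommendations):

```json
{
    "freqai": {
        "model_training_parameters": {
            "learning_rate": 3e-4,
            "model_kwargs": {},
            "trainer_kwargs": {
                "max_iters": 100,
                "batch_size": 64,
                "max_n_eval_batches": null
            }
        }
    }
}
```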
### Additional parameters

View File

@ -37,7 +37,7 @@ freqtrade trade --freqaimodel ReinforcementLearner --strategy MyRLStrategy --con
where `ReinforcementLearner` will use the templated `ReinforcementLearner` from `freqai/prediction_models/ReinforcementLearner` (or a custom user defined one located in `user_data/freqaimodels`). The strategy, on the other hand, follows the same base [feature engineering](freqai-feature-engineering.md) with `feature_engineering_*` as a typical Regressor. The difference lies in the creation of the targets, Reinforcement Learning doesn't require them. However, FreqAI requires a default (neutral) value to be set in the action column:
```python
def set_freqai_targets(self, dataframe, **kwargs):
def set_freqai_targets(self, dataframe: DataFrame, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
Required function to set the targets for the model.
@ -53,17 +53,19 @@ where `ReinforcementLearner` will use the templated `ReinforcementLearner` from
# For RL, there are no direct targets to set. This is filler (neutral)
# until the agent sends an action.
dataframe["&-action"] = 0
return dataframe
```
Most of the function remains the same as for typical Regressors; however, the function below shows how the strategy must pass the raw price data to the agent so that it has access to raw OHLCV in the training environment:
```python
def feature_engineering_standard(self, dataframe, **kwargs):
def feature_engineering_standard(self, dataframe: DataFrame, **kwargs) -> DataFrame:
# The following features are necessary for RL models
dataframe[f"%-raw_close"] = dataframe["close"]
dataframe[f"%-raw_open"] = dataframe["open"]
dataframe[f"%-raw_high"] = dataframe["high"]
dataframe[f"%-raw_low"] = dataframe["low"]
return dataframe
```
Finally, there is no explicit "label" to make - instead it is necessary to assign the `&-action` column which will contain the agent's actions when accessed in `populate_entry/exit_trends()`. In the present example, the neutral action is set to 0. This value should align with the environment used. FreqAI provides two environments, both of which use 0 as the neutral action.
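As a hedged sketch of consuming that column in the strategy (this assumes an action mapping where `1` means "enter long" and that the model also emits a `do_predict` column; adapt the mapping to the environment you use):

```python
from functools import reduce

from pandas import DataFrame


def populate_entry_trend(self, df: DataFrame, metadata: dict) -> DataFrame:
    # Only enter when the model flags the prediction as usable (do_predict == 1)
    # and the agent chose the assumed "enter long" action (== 1).
    enter_long_conditions = [df["do_predict"] == 1, df["&-action"] == 1]
    df.loc[
        reduce(lambda x, y: x & y, enter_long_conditions), ["enter_long", "enter_tag"]
    ] = (1, "long")
    return df
```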
@ -180,7 +182,7 @@ As you begin to modify the strategy and the prediction model, you will quickly r
# you can use feature values from dataframe
# Assumes the shifted RSI indicator has been generated in the strategy.
rsi_now = self.raw_features[f"%-rsi-period-10_shift-1_{pair}_"
rsi_now = self.raw_features[f"%-rsi-period_10_shift-1_{pair}_"
f"{self.config['timeframe']}"].iloc[self._current_tick]
# reward agent for entering trades

View File

@ -52,7 +52,7 @@ These requirements apply to both [Script Installation](#script-installation) and
* [pip](https://pip.pypa.io/en/stable/installing/)
* [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
* [virtualenv](https://virtualenv.pypa.io/en/stable/installation.html) (Recommended)
* [TA-Lib](https://mrjbq7.github.io/ta-lib/install.html) (install instructions [below](#install-ta-lib))
* [TA-Lib](https://ta-lib.github.io/ta-lib-python/) (install instructions [below](#install-ta-lib))
### Install code
@ -210,7 +210,7 @@ sudo ./build_helpers/install_ta-lib.sh
##### TA-Lib manual installation
Official webpage: https://mrjbq7.github.io/ta-lib/install.html
[Official installation guide](https://ta-lib.github.io/ta-lib-python/install.html)
```bash
wget http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz

View File

@ -49,7 +49,7 @@ Enable subscribing to an instance by adding the `external_message_consumer` sect
| `wait_timeout` | Timeout until we ping again if no message is received. <br>*Defaults to `300`.*<br> **Datatype:** Integer - in seconds.
| `ping_timeout` | Ping timeout <br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `sleep_time` | Sleep time before retrying to connect.<br>*Defaults to `10`.*<br> **Datatype:** Integer - in seconds.
| `remove_entry_exit_signals` | Remove signal columns from the dataframe (set them to 0) on dataframe receipt.<br>*Defaults to `False`.*<br> **Datatype:** Boolean.
| `remove_entry_exit_signals` | Remove signal columns from the dataframe (set them to 0) on dataframe receipt.<br>*Defaults to `false`.*<br> **Datatype:** Boolean.
| `message_size_limit` | Size limit per message<br>*Defaults to `8`.*<br> **Datatype:** Integer - Megabytes.
Instead of (or as well as) calculating indicators in `populate_indicators()`, the follower instance listens on the connection to a producer instance's messages (or multiple producer instances in advanced configurations) and requests the producer's most recently analyzed dataframes for each pair in the active whitelist.
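A hedged configuration sketch of the consumer side described above (host, port, and token are placeholders; the remaining values match the defaults in the table):

```json
{
    "external_message_consumer": {
        "enabled": true,
        "producers": [
            {
                "name": "default",
                "host": "127.0.0.1",
                "port": 8080,
                "ws_token": "replace-with-producer-ws-token"
            }
        ],
        "wait_timeout": 300,
        "ping_timeout": 10,
        "sleep_time": 10,
        "remove_entry_exit_signals": false,
        "message_size_limit": 8
    }
}
```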

View File

@ -1,6 +1,6 @@
markdown==3.3.7
mkdocs==1.4.2
mkdocs-material==9.1.4
mkdocs-material==9.1.7
mdx_truly_sane_lists==1.3
pymdown-extensions==9.10
pymdown-extensions==9.11
jinja2==3.1.2

View File

@ -9,9 +9,6 @@ This same command can also be used to update freqUI, should there be a new relea
Once the bot is started in trade / dry-run mode (with `freqtrade trade`) - the UI will be available under the configured port below (usually `http://127.0.0.1:8080`).
!!! info "Alpha release"
FreqUI is still considered an alpha release - if you encounter bugs or inconsistencies please open a [FreqUI issue](https://github.com/freqtrade/frequi/issues/new/choose).
!!! Note "developers"
Developers should not use this method, but instead use the method described in the [freqUI repository](https://github.com/freqtrade/frequi) to get the source-code of freqUI.

View File

@ -23,10 +23,22 @@ These modes can be configured with these values:
'stoploss_on_exchange_limit_ratio': 0.99
```
!!! Note
Stoploss on exchange is only supported for Binance (stop-loss-limit), Huobi (stop-limit), Kraken (stop-loss-market, stop-loss-limit), Gate (stop-limit), and Kucoin (stop-limit and stop-market) as of now.
<ins>Do not set too low/tight stoploss value if using stop loss on exchange!</ins>
If set to low/tight then you have greater risk of missing fill on the order and stoploss will not work.
Stoploss on exchange is only supported for the following exchanges, and not all exchanges support both stop-limit and stop-market.
The order type will be ignored if only one mode is available.
| Exchange | stop-loss type |
|----------|----------------|
| Binance | limit |
| Binance Futures | market, limit |
| Huobi | limit |
| Kraken | market, limit |
| Gate | limit |
| Okx | limit |
| Kucoin | stop-limit, stop-market |
!!! Note "Tight stoploss"
<ins>Do not set too low/tight stoploss value when using stop loss on exchange!</ins>
If set to low/tight you will have greater risk of missing fill on the order and stoploss will not work.
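A hedged strategy-level sketch tying these values together (assuming an exchange from the table above that supports limit stoploss orders):

```python
# Inside the strategy class: place the stoploss on the exchange as a limit
# order, with the limit price 1% below the stop trigger price.
order_types = {
    "entry": "limit",
    "exit": "limit",
    "stoploss": "limit",
    "stoploss_on_exchange": True,
    "stoploss_on_exchange_limit_ratio": 0.99,
}
```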
### stoploss_on_exchange and stoploss_on_exchange_limit_ratio
@ -197,11 +209,6 @@ You can also keep a static stoploss until the offset is reached, and then trail
If `trailing_only_offset_is_reached = True` then the trailing stoploss is only activated once the offset is reached. Until then, the stoploss remains at the configured `stoploss`.
This option can be used with or without `trailing_stop_positive`, but uses `trailing_stop_positive_offset` as offset.
``` python
trailing_stop_positive_offset = 0.011
trailing_only_offset_is_reached = True
```
Configuration (offset is buy-price + 3%):
``` python

View File

@ -1,21 +1,21 @@
# Advanced Strategies
This page explains some advanced concepts available for strategies.
If you're just getting started, please be familiar with the methods described in the [Strategy Customization](strategy-customization.md) documentation and with the [Freqtrade basics](bot-basics.md) first.
If you're just getting started, please familiarize yourself with the [Freqtrade basics](bot-basics.md) and methods described in [Strategy Customization](strategy-customization.md) first.
[Freqtrade basics](bot-basics.md) describes in which sequence each method described below is called, which can be helpful to understand which method to use for your custom needs.
The call sequence of the methods described here is covered under [bot execution logic](bot-basics.md#bot-execution-logic). Those docs are also helpful in deciding which method is most suitable for your customisation needs.
!!! Note
All callback methods described below should only be implemented in a strategy if they are actually used.
Callback methods should *only* be implemented if a strategy uses them.
!!! Tip
You can get a strategy template containing all below methods by running `freqtrade new-strategy --strategy MyAwesomeStrategy --template advanced`
Start off with a strategy template containing all available callback methods by running `freqtrade new-strategy --strategy MyAwesomeStrategy --template advanced`
## Storing information
Storing information can be accomplished by creating a new dictionary within the strategy class.
The name of the variable can be chosen at will, but should be prefixed with `cust_` to avoid naming collisions with predefined strategy variables.
The name of the variable can be chosen at will, but should be prefixed with `custom_` to avoid naming collisions with predefined strategy variables.
```python
class AwesomeStrategy(IStrategy):

View File

@ -43,7 +43,7 @@ class AwesomeStrategy(IStrategy):
if self.config['runmode'].value in ('live', 'dry_run'):
# Assign this to the class by using self.*
# can then be used by populate_* methods
self.cust_remote_data = requests.get('https://some_remote_source.example.com')
self.custom_remote_data = requests.get('https://some_remote_source.example.com')
```
@ -352,7 +352,7 @@ class AwesomeStrategy(IStrategy):
# Convert absolute price to percentage relative to current_rate
if stoploss_price < current_rate:
return (stoploss_price / current_rate) - 1
return stoploss_from_absolute(stoploss_price, current_rate, is_short=trade.is_short)
# return maximum stoploss value, keeping current stoploss price unchanged
return 1

View File

@ -578,7 +578,7 @@ def populate_any_indicators(
Features will now expand automatically. As such, the expansion loops, as well as the `{pair}` / `{timeframe}` parts will need to be removed.
``` python linenums="1"
def feature_engineering_expand_all(self, dataframe, period, **kwargs):
def feature_engineering_expand_all(self, dataframe: DataFrame, period, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This function will automatically expand the defined features on the config defined
@ -638,7 +638,7 @@ Features will now expand automatically. As such, the expansion loops, as well as
Basic features. Make sure to remove the `{pair}` part from your features.
``` python linenums="1"
def feature_engineering_expand_basic(self, dataframe, **kwargs):
def feature_engineering_expand_basic(self, dataframe: DataFrame, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This function will automatically expand the defined features on the config defined
@ -673,7 +673,7 @@ Basic features. Make sure to remove the `{pair}` part from your features.
### FreqAI - feature engineering standard
``` python linenums="1"
def feature_engineering_standard(self, dataframe, **kwargs):
def feature_engineering_standard(self, dataframe: DataFrame, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This optional function will be called once with the dataframe of the base timeframe.
@ -704,7 +704,7 @@ Basic features. Make sure to remove the `{pair}` part from your features.
Targets now get their own, dedicated method.
``` python linenums="1"
def set_freqai_targets(self, dataframe, **kwargs):
def set_freqai_targets(self, dataframe: DataFrame, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
Required function to set the targets for the model.

View File

@ -279,6 +279,7 @@ Return a summary of your profit/loss and performance.
> ∙ `33.095 EUR`
>
> **Total Trade Count:** `138`
> **Bot started:** `2022-07-11 18:40:44`
> **First Trade opened:** `3 days ago`
> **Latest Trade opened:** `2 minutes ago`
> **Avg. Duration:** `2:33:45`
@ -292,6 +293,7 @@ The relative profit of `15.2 Σ%` is based on the starting capital - so in th
Starting capital is either taken from the `available_capital` setting, or calculated by using current wallet size - profits.
Profit Factor is calculated as gross profits / gross losses - and should serve as an overall metric for the strategy.
Max drawdown corresponds to the backtesting metric `Absolute Drawdown (Account)` - calculated as `(Absolute Drawdown) / (DrawdownHigh + startingBalance)`.
Bot started date will refer to the date the bot was first started. For older bots, this will default to the first trade's open date.
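A quick worked sketch of the two formulas above (all numbers are illustrative):

```python
# Profit factor: gross profits divided by gross losses.
gross_profits, gross_losses = 600.0, 400.0
profit_factor = gross_profits / gross_losses  # 1.5

# Max drawdown (account): absolute drawdown relative to the equity peak.
starting_balance, drawdown_high, absolute_drawdown = 1000.0, 200.0, 150.0
max_drawdown = absolute_drawdown / (drawdown_high + starting_balance)  # 0.125, i.e. 12.5%
```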
### /forceexit <trade_id>

View File

@ -24,9 +24,9 @@ git clone https://github.com/freqtrade/freqtrade.git
Install ta-lib according to the [ta-lib documentation](https://github.com/mrjbq7/ta-lib#windows).
As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), there is also a repository of unofficial pre-compiled windows Wheels [here](https://www.lfd.uci.edu/~gohlke/pythonlibs/#ta-lib), which need to be downloaded and installed using `pip install TA_Lib-0.4.25-cp38-cp38-win_amd64.whl` (make sure to use the version matching your python version).
As compiling from source on Windows has heavy dependencies (requires a partial Visual Studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the latest Python versions (3.8, 3.9, 3.10 and 3.11) and for 64bit Windows.
These wheels are also used by CI running on Windows, and are therefore tested together with freqtrade.
Freqtrade provides these dependencies for the latest 3 Python versions (3.8, 3.9, 3.10 and 3.11) and for 64bit Windows.
Other versions must be downloaded from the above link.
``` powershell
@ -45,8 +45,6 @@ freqtrade
The above installation script assumes you're using powershell on a 64bit windows.
Commands for the legacy CMD windows console may differ.
> Thanks [Owdr](https://github.com/Owdr) for the commands. Source: [Issue #222](https://github.com/freqtrade/freqtrade/issues/222)
### Error during installation on Windows
``` bash

View File

@ -1,5 +1,5 @@
""" Freqtrade bot """
__version__ = '2023.3'
__version__ = '2023.4'
if 'dev' in __version__:
from pathlib import Path

View File

@ -46,7 +46,7 @@ ARGS_LIST_FREQAIMODELS = ["freqaimodel_path", "print_one_column", "print_coloriz
ARGS_LIST_HYPEROPTS = ["hyperopt_path", "print_one_column", "print_colorized"]
ARGS_BACKTEST_SHOW = ["exportfilename", "backtest_show_pair_list"]
ARGS_BACKTEST_SHOW = ["exportfilename", "backtest_show_pair_list", "backtest_breakdown"]
ARGS_LIST_EXCHANGES = ["print_one_column", "list_exchanges_all"]

View File

@ -116,7 +116,7 @@ class TimeRange:
:param text: value from --timerange
:return: Start and End range period
"""
if text is None:
if not text:
return TimeRange(None, None, 0, 0)
syntax = [(r'^-(\d{8})$', (None, 'date')),
(r'^(\d{8})-$', ('date', None)),

View File

@ -64,6 +64,7 @@ USERPATH_FREQAIMODELS = 'freqaimodels'
TELEGRAM_SETTING_OPTIONS = ['on', 'off', 'silent']
WEBHOOK_FORMAT_OPTIONS = ['form', 'json', 'raw']
FULL_DATAFRAME_THRESHOLD = 100
CUSTOM_TAG_MAX_LENGTH = 255
ENV_VAR_PREFIX = 'FREQTRADE__'
@ -598,7 +599,8 @@ CONF_SCHEMA = {
"model_type": {"type": "string", "default": "PPO"},
"policy_type": {"type": "string", "default": "MlpPolicy"},
"net_arch": {"type": "array", "default": [128, 128]},
"randomize_startinng_position": {"type": "boolean", "default": False},
"randomize_starting_position": {"type": "boolean", "default": False},
"progress_bar": {"type": "boolean", "default": True},
"model_reward_parameters": {
"type": "object",
"properties": {

View File

@ -246,14 +246,8 @@ def _load_backtest_data_df_compatibility(df: pd.DataFrame) -> pd.DataFrame:
"""
Compatibility support for older backtest data.
"""
df['open_date'] = pd.to_datetime(df['open_date'],
utc=True,
infer_datetime_format=True
)
df['close_date'] = pd.to_datetime(df['close_date'],
utc=True,
infer_datetime_format=True
)
df['open_date'] = pd.to_datetime(df['open_date'], utc=True)
df['close_date'] = pd.to_datetime(df['close_date'], utc=True)
# Compatibility support for pre short Columns
if 'is_short' not in df.columns:
df['is_short'] = False

View File

@ -34,7 +34,7 @@ def ohlcv_to_dataframe(ohlcv: list, timeframe: str, pair: str, *,
cols = DEFAULT_DATAFRAME_COLUMNS
df = DataFrame(ohlcv, columns=cols)
df['date'] = to_datetime(df['date'], unit='ms', utc=True, infer_datetime_format=True)
df['date'] = to_datetime(df['date'], unit='ms', utc=True)
# Some exchanges return int values for Volume and even for OHLC.
# Convert them since TA-LIB indicators used in the strategy assume floats
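The `infer_datetime_format` removals in this and the surrounding hunks follow its deprecation in pandas 2.0, where format inference became the default; the keyword can simply be dropped (editor's sketch):
``` python
import pandas as pd

# infer_datetime_format is deprecated in pandas >= 2.0 (inference is the
# default behaviour), so the keyword is simply removed:
df = pd.DataFrame({"date": [1680000000000], "close": [28000.0]})
df["date"] = pd.to_datetime(df["date"], unit="ms", utc=True)
print(df["date"].iloc[0])  # 2023-03-28 10:40:00+00:00
```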

View File

@ -63,10 +63,7 @@ class FeatherDataHandler(IDataHandler):
pairdata.columns = self._columns
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'],
unit='ms',
utc=True,
infer_datetime_format=True)
pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
return pairdata
def ohlcv_append(

View File

@ -75,10 +75,7 @@ class JsonDataHandler(IDataHandler):
return DataFrame(columns=self._columns)
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'],
unit='ms',
utc=True,
infer_datetime_format=True)
pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
return pairdata
def ohlcv_append(

View File

@ -62,10 +62,7 @@ class ParquetDataHandler(IDataHandler):
pairdata.columns = self._columns
pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
'low': 'float', 'close': 'float', 'volume': 'float'})
pairdata['date'] = to_datetime(pairdata['date'],
unit='ms',
utc=True,
infer_datetime_format=True)
pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
return pairdata
def ohlcv_append(

View File

@ -6,17 +6,18 @@ from freqtrade.exchange.exchange import Exchange
from freqtrade.exchange.binance import Binance
from freqtrade.exchange.bitpanda import Bitpanda
from freqtrade.exchange.bittrex import Bittrex
from freqtrade.exchange.bitvavo import Bitvavo
from freqtrade.exchange.bybit import Bybit
from freqtrade.exchange.coinbasepro import Coinbasepro
from freqtrade.exchange.exchange_utils import (amount_to_contract_precision, amount_to_contracts,
amount_to_precision, available_exchanges,
ccxt_exchanges, contracts_to_amount,
date_minus_candles, is_exchange_known_ccxt,
market_is_active, price_to_precision,
timeframe_to_minutes, timeframe_to_msecs,
timeframe_to_next_date, timeframe_to_prev_date,
timeframe_to_seconds, validate_exchange,
validate_exchanges)
from freqtrade.exchange.exchange_utils import (ROUND_DOWN, ROUND_UP, amount_to_contract_precision,
amount_to_contracts, amount_to_precision,
available_exchanges, ccxt_exchanges,
contracts_to_amount, date_minus_candles,
is_exchange_known_ccxt, market_is_active,
price_to_precision, timeframe_to_minutes,
timeframe_to_msecs, timeframe_to_next_date,
timeframe_to_prev_date, timeframe_to_seconds,
validate_exchange, validate_exchanges)
from freqtrade.exchange.gate import Gate
from freqtrade.exchange.hitbtc import Hitbtc
from freqtrade.exchange.huobi import Huobi

File diff suppressed because it is too large

View File

@ -0,0 +1,23 @@
"""Kucoin exchange subclass."""
import logging
from typing import Dict
from freqtrade.exchange import Exchange
logger = logging.getLogger(__name__)
class Bitvavo(Exchange):
"""Bitvavo exchange class.
Contains adjustments needed for Freqtrade to work with this exchange.
Please note that this exchange is not included in the list of exchanges
officially supported by the Freqtrade development team. So some features
may still not work as expected.
"""
_ft_has: Dict = {
"ohlcv_candle_limit": 1440,
}
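The `_ft_has` dict above only declares what differs from the base class defaults; an editor's sketch of how such overrides layer (the real merge lives in freqtrade's `Exchange` base class):
``` python
# Sketch: per-exchange _ft_has overrides layered over the defaults.
_ft_has_default = {"stoploss_on_exchange": False, "ohlcv_candle_limit": 500}
_ft_has_bitvavo = {"ohlcv_candle_limit": 1440}

effective = {**_ft_has_default, **_ft_has_bitvavo}
print(effective)  # {'stoploss_on_exchange': False, 'ohlcv_candle_limit': 1440}
```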

View File

@ -30,13 +30,14 @@ from freqtrade.exceptions import (DDosProtection, ExchangeError, InsufficientFun
RetryableOrderError, TemporaryError)
from freqtrade.exchange.common import (API_FETCH_ORDER_RETRY_COUNT, remove_credentials, retrier,
retrier_async)
from freqtrade.exchange.exchange_utils import (CcxtModuleType, amount_to_contract_precision,
amount_to_contracts, amount_to_precision,
contracts_to_amount, date_minus_candles,
is_exchange_known_ccxt, market_is_active,
price_to_precision, timeframe_to_minutes,
timeframe_to_msecs, timeframe_to_next_date,
timeframe_to_prev_date, timeframe_to_seconds)
from freqtrade.exchange.exchange_utils import (ROUND, ROUND_DOWN, ROUND_UP, CcxtModuleType,
amount_to_contract_precision, amount_to_contracts,
amount_to_precision, contracts_to_amount,
date_minus_candles, is_exchange_known_ccxt,
market_is_active, price_to_precision,
timeframe_to_minutes, timeframe_to_msecs,
timeframe_to_next_date, timeframe_to_prev_date,
timeframe_to_seconds)
from freqtrade.exchange.types import OHLCVResponse, OrderBook, Ticker, Tickers
from freqtrade.misc import (chunks, deep_merge_dicts, file_dump_json, file_load_json,
safe_value_fallback2)
@ -59,6 +60,7 @@ class Exchange:
# or by specifying them in the configuration.
_ft_has_default: Dict = {
"stoploss_on_exchange": False,
"stop_price_param": "stopPrice",
"order_time_in_force": ["GTC"],
"ohlcv_params": {},
"ohlcv_candle_limit": 500,
@ -734,12 +736,14 @@ class Exchange:
"""
return amount_to_precision(amount, self.get_precision_amount(pair), self.precisionMode)
def price_to_precision(self, pair: str, price: float) -> float:
def price_to_precision(self, pair: str, price: float, *, rounding_mode: int = ROUND) -> float:
"""
Returns the price rounded up to the precision the Exchange accepts.
Rounds up
Returns the price rounded to the precision the Exchange accepts.
The default price_rounding_mode in conf is ROUND.
For stoploss calculations, must use ROUND_UP for longs, and ROUND_DOWN for shorts.
"""
return price_to_precision(price, self.get_precision_price(pair), self.precisionMode)
return price_to_precision(price, self.get_precision_price(pair),
self.precisionMode, rounding_mode=rounding_mode)
def price_get_one_pip(self, pair: str, price: float) -> float:
"""
@ -762,12 +766,12 @@ class Exchange:
return self._get_stake_amount_limit(pair, price, stoploss, 'min', leverage)
def get_max_pair_stake_amount(self, pair: str, price: float, leverage: float = 1.0) -> float:
max_stake_amount = self._get_stake_amount_limit(pair, price, 0.0, 'max')
max_stake_amount = self._get_stake_amount_limit(pair, price, 0.0, 'max', leverage)
if max_stake_amount is None:
# * Should never be executed
raise OperationalException(f'{self.name}.get_max_pair_stake_amount should '
'never set max_stake_amount to None')
return max_stake_amount / leverage
return max_stake_amount
def _get_stake_amount_limit(
self,
@ -785,43 +789,41 @@ class Exchange:
except KeyError:
raise ValueError(f"Can't get market information for symbol {pair}")
if isMin:
# reserve some percent defined in config (5% default) + stoploss
margin_reserve: float = 1.0 + self._config.get('amount_reserve_percent',
DEFAULT_AMOUNT_RESERVE_PERCENT)
stoploss_reserve = (
margin_reserve / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
)
# it should not be more than 50%
stoploss_reserve = max(min(stoploss_reserve, 1.5), 1)
else:
margin_reserve = 1.0
stoploss_reserve = 1.0
stake_limits = []
limits = market['limits']
if (limits['cost'][limit] is not None):
stake_limits.append(
self._contracts_to_amount(
pair,
limits['cost'][limit]
)
self._contracts_to_amount(pair, limits['cost'][limit]) * stoploss_reserve
)
if (limits['amount'][limit] is not None):
stake_limits.append(
self._contracts_to_amount(
pair,
limits['amount'][limit] * price
)
self._contracts_to_amount(pair, limits['amount'][limit]) * price * margin_reserve
)
if not stake_limits:
return None if isMin else float('inf')
# reserve some percent defined in config (5% default) + stoploss
amount_reserve_percent = 1.0 + self._config.get('amount_reserve_percent',
DEFAULT_AMOUNT_RESERVE_PERCENT)
amount_reserve_percent = (
amount_reserve_percent / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
)
# it should not be more than 50%
amount_reserve_percent = max(min(amount_reserve_percent, 1.5), 1)
# The value returned should satisfy both limits: for amount (base currency) and
# for cost (quote, stake currency), so max() is used here.
# See also #2575 at github.
return self._get_stake_amount_considering_leverage(
max(stake_limits) * amount_reserve_percent,
max(stake_limits) if isMin else min(stake_limits),
leverage or 1.0
) if isMin else min(stake_limits)
)
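A worked example of the reserve math above (editor's illustration, made-up numbers): with the default 5% `amount_reserve_percent` and a 10% stoploss, the minimum stake is scaled by about 1.17.
``` python
# Made-up numbers illustrating the min-stake reserve calculation above.
amount_reserve_percent = 0.05     # config default
stoploss = -0.10                  # 10% stoploss

margin_reserve = 1.0 + amount_reserve_percent            # 1.05
stoploss_reserve = margin_reserve / (1 - abs(stoploss))  # 1.05 / 0.9 ~= 1.1667
stoploss_reserve = max(min(stoploss_reserve, 1.5), 1)    # capped at +50%

min_cost_limit = 10.0  # exchange minimum order cost, in stake currency
print(min_cost_limit * stoploss_reserve)  # ~= 11.67
```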
def _get_stake_amount_considering_leverage(self, stake_amount: float, leverage: float) -> float:
"""
@ -884,7 +886,7 @@ class Exchange:
'filled': _amount,
'remaining': 0.0,
'status': "closed",
'cost': (dry_order['amount'] * average) / leverage
'cost': (dry_order['amount'] * average)
})
# market orders will always incur taker fees
dry_order = self.add_dry_order_fee(pair, dry_order, 'taker')
@ -1114,11 +1116,11 @@ class Exchange:
"""
if not self._ft_has.get('stoploss_on_exchange'):
raise OperationalException(f"stoploss is not implemented for {self.name}.")
price_param = self._ft_has['stop_price_param']
return (
order.get('stopPrice', None) is None
or ((side == "sell" and stop_loss > float(order['stopPrice'])) or
(side == "buy" and stop_loss < float(order['stopPrice'])))
order.get(price_param, None) is None
or ((side == "sell" and stop_loss > float(order[price_param])) or
(side == "buy" and stop_loss < float(order[price_param])))
)
def _get_stop_order_type(self, user_order_type) -> Tuple[str, str]:
@ -1158,8 +1160,8 @@ class Exchange:
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
params = self._params.copy()
# Verify if stopPrice works for your exchange!
params.update({'stopPrice': stop_price})
# Verify if stopPrice works for your exchange, else configure stop_price_param
params.update({self._ft_has['stop_price_param']: stop_price})
return params
@retrier(retries=0)
@ -1185,12 +1187,12 @@ class Exchange:
user_order_type = order_types.get('stoploss', 'market')
ordertype, user_order_type = self._get_stop_order_type(user_order_type)
stop_price_norm = self.price_to_precision(pair, stop_price)
round_mode = ROUND_DOWN if side == 'buy' else ROUND_UP
stop_price_norm = self.price_to_precision(pair, stop_price, rounding_mode=round_mode)
limit_rate = None
if user_order_type == 'limit':
limit_rate = self._get_stop_limit_rate(stop_price, order_types, side)
limit_rate = self.price_to_precision(pair, limit_rate)
limit_rate = self.price_to_precision(pair, limit_rate, rounding_mode=round_mode)
if self._config['dry_run']:
dry_order = self.create_dry_run_order(
@ -2369,12 +2371,12 @@ class Exchange:
# Must fetch the leverage tiers for each market separately
# * This is slow(~45s) on Okx, makes ~90 api calls to load all linear swap markets
markets = self.markets
symbols = []
for symbol, market in markets.items():
symbols = [
symbol for symbol, market in markets.items()
if (self.market_is_future(market)
and market['quote'] == self._config['stake_currency']):
symbols.append(symbol)
and market['quote'] == self._config['stake_currency'])
]
tiers: Dict[str, List[Dict]] = {}
@ -2394,25 +2396,26 @@ class Exchange:
else:
logger.info("Using cached leverage_tiers.")
async def gather_results():
async def gather_results(input_coro):
return await asyncio.gather(*input_coro, return_exceptions=True)
for input_coro in chunks(coros, 100):
with self._loop_lock:
results = self.loop.run_until_complete(gather_results())
results = self.loop.run_until_complete(gather_results(input_coro))
for symbol, res in results:
tiers[symbol] = res
for res in results:
if isinstance(res, Exception):
logger.warning(f"Leverage tier exception: {repr(res)}")
continue
symbol, tier = res
tiers[symbol] = tier
if len(coros) > 0:
self.cache_leverage_tiers(tiers, self._config['stake_currency'])
logger.info(f"Done initializing {len(symbols)} markets.")
return tiers
else:
return {}
else:
return {}
return {}
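The chunked-gather pattern above, isolated into a runnable editor's sketch (not freqtrade's exact helpers):
``` python
import asyncio

def chunks(lst, n):
    """Yield successive n-sized chunks (stand-in for freqtrade.misc.chunks)."""
    for i in range(0, len(lst), n):
        yield lst[i:i + n]

async def fetch_tier(symbol):
    if symbol == "BAD/USDT:USDT":
        raise RuntimeError("exchange hiccup")
    return symbol, [{"tier": 1}]

async def gather_results(input_coro):
    # return_exceptions=True keeps one failing call from aborting the batch
    return await asyncio.gather(*input_coro, return_exceptions=True)

tiers = {}
coros = [fetch_tier(s) for s in ["BTC/USDT:USDT", "BAD/USDT:USDT"]]
for batch in chunks(coros, 100):
    for res in asyncio.run(gather_results(batch)):
        if isinstance(res, Exception):
            print(f"Leverage tier exception: {res!r}")
            continue
        symbol, tier = res
        tiers[symbol] = tier
print(tiers)  # {'BTC/USDT:USDT': [{'tier': 1}]}
```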
def cache_leverage_tiers(self, tiers: Dict[str, List[Dict]], stake_currency: str) -> None:
@ -2428,14 +2431,17 @@ class Exchange:
def load_cached_leverage_tiers(self, stake_currency: str) -> Optional[Dict[str, List[Dict]]]:
filename = self._config['datadir'] / "futures" / f"leverage_tiers_{stake_currency}.json"
if filename.is_file():
tiers = file_load_json(filename)
updated = tiers.get('updated')
if updated:
updated_dt = parser.parse(updated)
if updated_dt < datetime.now(timezone.utc) - timedelta(weeks=4):
logger.info("Cached leverage tiers are outdated. Will update.")
return None
return tiers['data']
try:
tiers = file_load_json(filename)
updated = tiers.get('updated')
if updated:
updated_dt = parser.parse(updated)
if updated_dt < datetime.now(timezone.utc) - timedelta(weeks=4):
logger.info("Cached leverage tiers are outdated. Will update.")
return None
return tiers['data']
except Exception:
logger.exception("Error loading cached leverage tiers. Refreshing.")
return None
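The 4-week staleness rule above, in isolation (editor's sketch):
``` python
from datetime import datetime, timedelta, timezone

from dateutil import parser

# Sketch of the cache staleness check above: refresh after four weeks.
cached = {"updated": "2023-01-01T00:00:00+00:00", "data": {"BTC/USDT:USDT": []}}
updated_dt = parser.parse(cached["updated"])
stale = updated_dt < datetime.now(timezone.utc) - timedelta(weeks=4)
print("refresh" if stale else "use cache")
```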
def fill_leverage_tiers(self) -> None:

View File

@ -2,11 +2,12 @@
Exchange support utils
"""
from datetime import datetime, timedelta, timezone
from math import ceil
from math import ceil, floor
from typing import Any, Dict, List, Optional, Tuple
import ccxt
from ccxt import ROUND_DOWN, ROUND_UP, TICK_SIZE, TRUNCATE, decimal_to_precision
from ccxt import (DECIMAL_PLACES, ROUND, ROUND_DOWN, ROUND_UP, SIGNIFICANT_DIGITS, TICK_SIZE,
TRUNCATE, decimal_to_precision)
from freqtrade.exchange.common import BAD_EXCHANGES, EXCHANGE_HAS_OPTIONAL, EXCHANGE_HAS_REQUIRED
from freqtrade.util import FtPrecise
@ -219,35 +220,51 @@ def amount_to_contract_precision(
return amount
def price_to_precision(price: float, price_precision: Optional[float],
precisionMode: Optional[int]) -> float:
def price_to_precision(
price: float,
price_precision: Optional[float],
precisionMode: Optional[int],
*,
rounding_mode: int = ROUND,
) -> float:
"""
Returns the price rounded up to the precision the Exchange accepts.
Returns the price rounded to the precision the Exchange accepts.
Partial Re-implementation of ccxt internal method decimal_to_precision(),
which does not support rounding up
which does not support rounding up.
For stoploss calculations, must use ROUND_UP for longs, and ROUND_DOWN for shorts.
TODO: If ccxt supports ROUND_UP for decimal_to_precision(), we could remove this and
align with amount_to_precision().
!!! Rounds up
:param price: price to convert
:param price_precision: price precision to use. Used from markets[pair]['precision']['price']
:param precisionMode: precision mode to use. Should be used from precisionMode
one of ccxt's DECIMAL_PLACES, SIGNIFICANT_DIGITS, or TICK_SIZE
:param rounding_mode: rounding mode to use. Defaults to ROUND
:return: price rounded to the precision the Exchange accepts
"""
if price_precision is not None and precisionMode is not None:
# price = float(decimal_to_precision(price, rounding_mode=ROUND,
# precision=price_precision,
# counting_mode=self.precisionMode,
# ))
if precisionMode == TICK_SIZE:
if rounding_mode == ROUND:
ticks = price / price_precision
rounded_ticks = round(ticks)
return rounded_ticks * price_precision
precision = FtPrecise(price_precision)
price_str = FtPrecise(price)
missing = price_str % precision
if not missing == FtPrecise("0"):
price = round(float(str(price_str - missing + precision)), 14)
else:
symbol_prec = price_precision
big_price = price * pow(10, symbol_prec)
price = ceil(big_price) / pow(10, symbol_prec)
return round(float(str(price_str - missing + precision)), 14)
return price
elif precisionMode in (SIGNIFICANT_DIGITS, DECIMAL_PLACES):
ndigits = round(price_precision)
if rounding_mode == ROUND:
return round(price, ndigits)
ticks = price * (10**ndigits)
if rounding_mode == ROUND_UP:
return ceil(ticks) / (10**ndigits)
if rounding_mode == TRUNCATE:
return int(ticks) / (10**ndigits)
if rounding_mode == ROUND_DOWN:
return floor(ticks) / (10**ndigits)
raise ValueError(f"Unknown rounding_mode {rounding_mode}")
raise ValueError(f"Unknown precisionMode {precisionMode}")
return price
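A few concrete values for the DECIMAL_PLACES branch above (editor's illustration with `price_precision=2`):
``` python
from math import ceil, floor

price, ndigits = 2.34559, 2
ticks = price * (10 ** ndigits)        # 234.559
print(round(price, ndigits))           # ROUND      -> 2.35
print(ceil(ticks) / (10 ** ndigits))   # ROUND_UP   -> 2.35
print(floor(ticks) / (10 ** ndigits))  # ROUND_DOWN -> 2.34
print(int(ticks) / (10 ** ndigits))    # TRUNCATE   -> 2.34
```
For stoploss orders the calling code above picks ROUND_UP for longs and ROUND_DOWN for shorts, so the stop never lands on the wrong side of the intended price.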

View File

@ -12,6 +12,7 @@ from freqtrade.exceptions import (DDosProtection, InsufficientFundsError, Invali
OperationalException, TemporaryError)
from freqtrade.exchange import Exchange
from freqtrade.exchange.common import retrier
from freqtrade.exchange.exchange_utils import ROUND_DOWN, ROUND_UP
from freqtrade.exchange.types import Tickers
@ -109,6 +110,7 @@ class Kraken(Exchange):
if self.trading_mode == TradingMode.FUTURES:
params.update({'reduceOnly': True})
round_mode = ROUND_DOWN if side == 'buy' else ROUND_UP
if order_types.get('stoploss', 'market') == 'limit':
ordertype = "stop-loss-limit"
limit_price_pct = order_types.get('stoploss_on_exchange_limit_ratio', 0.99)
@ -116,11 +118,11 @@ class Kraken(Exchange):
limit_rate = stop_price * limit_price_pct
else:
limit_rate = stop_price * (2 - limit_price_pct)
params['price2'] = self.price_to_precision(pair, limit_rate)
params['price2'] = self.price_to_precision(pair, limit_rate, rounding_mode=round_mode)
else:
ordertype = "stop-loss"
stop_price = self.price_to_precision(pair, stop_price)
stop_price = self.price_to_precision(pair, stop_price, rounding_mode=round_mode)
if self._config['dry_run']:
dry_order = self.create_dry_run_order(

View File

@ -28,6 +28,7 @@ class Okx(Exchange):
"funding_fee_timeframe": "8h",
"stoploss_order_types": {"limit": "limit"},
"stoploss_on_exchange": True,
"stop_price_param": "stopLossPrice",
}
_ft_has_futures: Dict = {
"tickers_have_quoteVolume": False,
@ -162,29 +163,12 @@ class Okx(Exchange):
return pair_tiers[-1]['maxNotional'] / leverage
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
params = self._params.copy()
# Verify if stopPrice works for your exchange!
params.update({'stopLossPrice': stop_price})
params = super()._get_stop_params(side, ordertype, stop_price)
if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
params['tdMode'] = self.margin_mode.value
params['posSide'] = self._get_posSide(side, True)
return params
def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
"""
OKX uses non-default stoploss price naming.
"""
if not self._ft_has.get('stoploss_on_exchange'):
raise OperationalException(f"stoploss is not implemented for {self.name}.")
return (
order.get('stopLossPrice', None) is None
or ((side == "sell" and stop_loss > float(order['stopLossPrice'])) or
(side == "buy" and stop_loss < float(order['stopLossPrice'])))
)
def fetch_stoploss_order(self, order_id: str, pair: str, params: Dict = {}) -> Dict:
if self._config['dry_run']:
return self.fetch_dry_run_order(order_id)

View File

@ -66,7 +66,7 @@ class Base3ActionRLEnv(BaseEnvironment):
elif action == Actions.Sell.value and not self.can_short:
self._update_total_profit()
self._position = Positions.Neutral
trade_type = "neutral"
trade_type = "exit"
self._last_trade_tick = None
else:
print("case not defined")
@ -74,7 +74,7 @@ class Base3ActionRLEnv(BaseEnvironment):
if trade_type is not None:
self.trade_history.append(
{'price': self.current_price(), 'index': self._current_tick,
'type': trade_type})
'type': trade_type, 'profit': self.get_unrealized_profit()})
if (self._total_profit < self.max_drawdown or
self._total_unrealized_profit < self.max_drawdown):

View File

@ -52,16 +52,6 @@ class Base4ActionRLEnv(BaseEnvironment):
trade_type = None
if self.is_tradesignal(action):
"""
Action: Neutral, position: Long -> Close Long
Action: Neutral, position: Short -> Close Short
Action: Long, position: Neutral -> Open Long
Action: Long, position: Short -> Close Short and Open Long
Action: Short, position: Neutral -> Open Short
Action: Short, position: Long -> Close Long and Open Short
"""
if action == Actions.Neutral.value:
self._position = Positions.Neutral
@ -69,16 +59,16 @@ class Base4ActionRLEnv(BaseEnvironment):
self._last_trade_tick = None
elif action == Actions.Long_enter.value:
self._position = Positions.Long
trade_type = "long"
trade_type = "enter_long"
self._last_trade_tick = self._current_tick
elif action == Actions.Short_enter.value:
self._position = Positions.Short
trade_type = "short"
trade_type = "enter_short"
self._last_trade_tick = self._current_tick
elif action == Actions.Exit.value:
self._update_total_profit()
self._position = Positions.Neutral
trade_type = "neutral"
trade_type = "exit"
self._last_trade_tick = None
else:
print("case not defined")
@ -86,7 +76,7 @@ class Base4ActionRLEnv(BaseEnvironment):
if trade_type is not None:
self.trade_history.append(
{'price': self.current_price(), 'index': self._current_tick,
'type': trade_type})
'type': trade_type, 'profit': self.get_unrealized_profit()})
if (self._total_profit < self.max_drawdown or
self._total_unrealized_profit < self.max_drawdown):

View File

@ -53,16 +53,6 @@ class Base5ActionRLEnv(BaseEnvironment):
trade_type = None
if self.is_tradesignal(action):
"""
Action: Neutral, position: Long -> Close Long
Action: Neutral, position: Short -> Close Short
Action: Long, position: Neutral -> Open Long
Action: Long, position: Short -> Close Short and Open Long
Action: Short, position: Neutral -> Open Short
Action: Short, position: Long -> Close Long and Open Short
"""
if action == Actions.Neutral.value:
self._position = Positions.Neutral
@ -70,21 +60,21 @@ class Base5ActionRLEnv(BaseEnvironment):
self._last_trade_tick = None
elif action == Actions.Long_enter.value:
self._position = Positions.Long
trade_type = "long"
trade_type = "enter_long"
self._last_trade_tick = self._current_tick
elif action == Actions.Short_enter.value:
self._position = Positions.Short
trade_type = "short"
trade_type = "enter_short"
self._last_trade_tick = self._current_tick
elif action == Actions.Long_exit.value:
self._update_total_profit()
self._position = Positions.Neutral
trade_type = "neutral"
trade_type = "exit_long"
self._last_trade_tick = None
elif action == Actions.Short_exit.value:
self._update_total_profit()
self._position = Positions.Neutral
trade_type = "neutral"
trade_type = "exit_short"
self._last_trade_tick = None
else:
print("case not defined")
@ -92,7 +82,7 @@ class Base5ActionRLEnv(BaseEnvironment):
if trade_type is not None:
self.trade_history.append(
{'price': self.current_price(), 'index': self._current_tick,
'type': trade_type})
'type': trade_type, 'profit': self.get_unrealized_profit()})
if (self._total_profit < self.max_drawdown or
self._total_unrealized_profit < self.max_drawdown):
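The net effect on the recorded history across these RL environments is explicit action names plus the unrealized profit at the time of the event (illustrative entries, made-up values):
``` python
# Illustrative trade_history entries after this change (made-up values).
trade_history = [
    {'price': 27650.0, 'index': 512, 'type': 'enter_long', 'profit': 0.0},
    {'price': 27912.0, 'index': 530, 'type': 'exit_long', 'profit': 0.0095},
]
```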

View File

@ -0,0 +1,147 @@
import logging
from typing import Dict, List, Tuple
import numpy as np
import numpy.typing as npt
import pandas as pd
import torch
from pandas import DataFrame
from torch.nn import functional as F
from freqtrade.exceptions import OperationalException
from freqtrade.freqai.base_models.BasePyTorchModel import BasePyTorchModel
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
logger = logging.getLogger(__name__)
class BasePyTorchClassifier(BasePyTorchModel):
"""
A PyTorch implementation of a classifier.
User must implement the fit method
Important!
- User must declare the target class names in the strategy,
under IStrategy.set_freqai_targets method.
for example, in your strategy:
```
def set_freqai_targets(self, dataframe: DataFrame, metadata: Dict, **kwargs):
self.freqai.class_names = ["down", "up"]
dataframe['&s-up_or_down'] = np.where(dataframe["close"].shift(-100) >
dataframe["close"], 'up', 'down')
return dataframe
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.class_name_to_index = None
self.index_to_class_name = None
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.
:return:
:pred_df: dataframe containing the predictions
:do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove
data (NaNs) or felt uncertain about data (PCA and DI index)
:raises ValueError: if 'class_names' doesn't exist in model meta_data.
"""
class_names = self.model.model_meta_data.get("class_names", None)
if not class_names:
raise ValueError(
"Missing class names. "
"self.model.model_meta_data['class_names'] is None."
)
if not self.class_name_to_index:
self.init_class_names_to_index_mapping(class_names)
dk.find_features(unfiltered_df)
filtered_df, _ = dk.filter_features(
unfiltered_df, dk.training_features_list, training_filter=False
)
filtered_df = dk.normalize_data_from_metadata(filtered_df)
dk.data_dictionary["prediction_features"] = filtered_df
self.data_cleaning_predict(dk)
x = self.data_convertor.convert_x(
dk.data_dictionary["prediction_features"],
device=self.device
)
logits = self.model.model(x)
probs = F.softmax(logits, dim=-1)
predicted_classes = torch.argmax(probs, dim=-1)
predicted_classes_str = self.decode_class_names(predicted_classes)
pred_df_prob = DataFrame(probs.detach().numpy(), columns=class_names)
pred_df = DataFrame(predicted_classes_str, columns=[dk.label_list[0]])
pred_df = pd.concat([pred_df, pred_df_prob], axis=1)
return (pred_df, dk.do_predict)
def encode_class_names(
self,
data_dictionary: Dict[str, pd.DataFrame],
dk: FreqaiDataKitchen,
class_names: List[str],
):
"""
Encode class names, str -> int,
assuming the first column of the *_labels dataframe is the target
column containing the class names.
"""
target_column_name = dk.label_list[0]
for split in self.splits:
label_df = data_dictionary[f"{split}_labels"]
self.assert_valid_class_names(label_df[target_column_name], class_names)
label_df[target_column_name] = list(
map(lambda x: self.class_name_to_index[x], label_df[target_column_name])
)
@staticmethod
def assert_valid_class_names(
target_column: pd.Series,
class_names: List[str]
):
non_defined_labels = set(target_column) - set(class_names)
if len(non_defined_labels) != 0:
raise OperationalException(
f"Found non defined labels: {non_defined_labels}, ",
f"expecting labels: {class_names}"
)
def decode_class_names(self, class_ints: torch.Tensor) -> List[str]:
"""
Decode class names, int -> str.
"""
return list(map(lambda x: self.index_to_class_name[x.item()], class_ints))
def init_class_names_to_index_mapping(self, class_names):
self.class_name_to_index = {s: i for i, s in enumerate(class_names)}
self.index_to_class_name = {i: s for i, s in enumerate(class_names)}
logger.info(f"encoded class name to index: {self.class_name_to_index}")
def convert_label_column_to_int(
self,
data_dictionary: Dict[str, pd.DataFrame],
dk: FreqaiDataKitchen,
class_names: List[str]
):
self.init_class_names_to_index_mapping(class_names)
self.encode_class_names(data_dictionary, dk, class_names)
def get_class_names(self) -> List[str]:
if not self.class_names:
raise ValueError(
"self.class_names is empty, "
"set self.freqai.class_names = ['class a', 'class b', 'class c'] "
"inside IStrategy.set_freqai_targets method."
)
return self.class_names
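For intuition, the name/index mapping the helpers above maintain, re-created standalone (editor's sketch):
``` python
import torch

class_names = ["down", "up"]
class_name_to_index = {s: i for i, s in enumerate(class_names)}
index_to_class_name = {i: s for i, s in enumerate(class_names)}

logits = torch.tensor([[0.2, 1.5], [2.0, -1.0]])
predicted = torch.argmax(logits, dim=-1)  # tensor([1, 0])
print([index_to_class_name[i.item()] for i in predicted])  # ['up', 'down']
```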

View File

@ -0,0 +1,83 @@
import logging
from abc import ABC, abstractmethod
from time import time
from typing import Any
import torch
from pandas import DataFrame
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.freqai_interface import IFreqaiModel
from freqtrade.freqai.torch.PyTorchDataConvertor import PyTorchDataConvertor
logger = logging.getLogger(__name__)
class BasePyTorchModel(IFreqaiModel, ABC):
"""
Base class for PyTorch type models.
User *must* inherit from this class and set fit() and predict() and
data_convertor property.
"""
def __init__(self, **kwargs):
super().__init__(config=kwargs["config"])
self.dd.model_type = "pytorch"
self.device = "cuda" if torch.cuda.is_available() else "cpu"
test_size = self.freqai_info.get('data_split_parameters', {}).get('test_size')
self.splits = ["train", "test"] if test_size != 0 else ["train"]
def train(
self, unfiltered_df: DataFrame, pair: str, dk: FreqaiDataKitchen, **kwargs
) -> Any:
"""
Filter the training data and train a model on it. Train makes heavy use of the datakitchen
for storing, saving, loading, and analyzing the data.
:param unfiltered_df: Full dataframe for the current training period
:return:
:model: Trained model which can be used for inference (self.predict)
"""
logger.info(f"-------------------- Starting training {pair} --------------------")
start_time = time()
features_filtered, labels_filtered = dk.filter_features(
unfiltered_df,
dk.training_features_list,
dk.label_list,
training_filter=True,
)
# split data into train/test data.
data_dictionary = dk.make_train_test_datasets(features_filtered, labels_filtered)
if not self.freqai_info.get("fit_live_predictions", 0) or not self.live:
dk.fit_labels()
# normalize all data based on train_dataset only
data_dictionary = dk.normalize_data(data_dictionary)
# optional additional data cleaning/analysis
self.data_cleaning_train(dk)
logger.info(
f"Training model on {len(dk.data_dictionary['train_features'].columns)} features"
)
logger.info(f"Training model on {len(data_dictionary['train_features'])} data points")
model = self.fit(data_dictionary, dk)
end_time = time()
logger.info(f"-------------------- Done training {pair} "
f"({end_time - start_time:.2f} secs) --------------------")
return model
@property
@abstractmethod
def data_convertor(self) -> PyTorchDataConvertor:
"""
a class responsible for converting `*_features` & `*_labels` pandas dataframes
to pytorch tensors.
"""
raise NotImplementedError("Abstract property")
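The split selection in `__init__` above in isolation (editor's sketch): the "test" split is skipped entirely when `data_split_parameters.test_size` is 0.
``` python
# Sketch of the splits logic above.
freqai_info = {"data_split_parameters": {"test_size": 0}}
test_size = freqai_info.get('data_split_parameters', {}).get('test_size')
splits = ["train", "test"] if test_size != 0 else ["train"]
print(splits)  # ['train']
```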

View File

@ -0,0 +1,50 @@
import logging
from typing import Tuple
import numpy as np
import numpy.typing as npt
from pandas import DataFrame
from freqtrade.freqai.base_models.BasePyTorchModel import BasePyTorchModel
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
logger = logging.getLogger(__name__)
class BasePyTorchRegressor(BasePyTorchModel):
"""
A PyTorch implementation of a regressor.
User must implement the fit method
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.
:return:
:pred_df: dataframe containing the predictions
:do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove
data (NaNs) or felt uncertain about data (PCA and DI index)
"""
dk.find_features(unfiltered_df)
filtered_df, _ = dk.filter_features(
unfiltered_df, dk.training_features_list, training_filter=False
)
filtered_df = dk.normalize_data_from_metadata(filtered_df)
dk.data_dictionary["prediction_features"] = filtered_df
self.data_cleaning_predict(dk)
x = self.data_convertor.convert_x(
dk.data_dictionary["prediction_features"],
device=self.device
)
y = self.model.model(x)
y = y.cpu()
pred_df = DataFrame(y.detach().numpy(), columns=[dk.label_list[0]])
return (pred_df, dk.do_predict)

View File

@ -446,7 +446,7 @@ class FreqaiDataDrawer:
dump(model, save_path / f"{dk.model_filename}_model.joblib")
elif self.model_type == 'keras':
model.save(save_path / f"{dk.model_filename}_model.h5")
elif 'stable_baselines' in self.model_type or 'sb3_contrib' == self.model_type:
elif self.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
model.save(save_path / f"{dk.model_filename}_model.zip")
if dk.svm_model is not None:
@ -496,7 +496,7 @@ class FreqaiDataDrawer:
dk.training_features_list = dk.data["training_features_list"]
dk.label_list = dk.data["label_list"]
def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any:
def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any: # noqa: C901
"""
loads all data required to make a prediction on a sub-train time range
:returns:
@ -537,6 +537,11 @@ class FreqaiDataDrawer:
self.model_type, self.freqai_info['rl_config']['model_type'])
MODELCLASS = getattr(mod, self.freqai_info['rl_config']['model_type'])
model = MODELCLASS.load(dk.data_path / f"{dk.model_filename}_model")
elif self.model_type == 'pytorch':
import torch
zip = torch.load(dk.data_path / f"{dk.model_filename}_model.zip")
model = zip["pytrainer"]
model = model.load_from_checkpoint(zip)
if Path(dk.data_path / f"{dk.model_filename}_svm_model.joblib").is_file():
dk.svm_model = load(dk.data_path / f"{dk.model_filename}_svm_model.joblib")

View File

@ -1291,7 +1291,7 @@ class FreqaiDataKitchen:
return dataframe
def use_strategy_to_populate_indicators(
def use_strategy_to_populate_indicators( # noqa: C901
self,
strategy: IStrategy,
corr_dataframes: dict = {},
@ -1362,12 +1362,12 @@ class FreqaiDataKitchen:
dataframe = self.populate_features(dataframe.copy(), corr_pair, strategy,
corr_dataframes, base_dataframes, True)
dataframe = strategy.set_freqai_targets(dataframe.copy(), metadata=metadata)
if self.live:
dataframe = strategy.set_freqai_targets(dataframe.copy(), metadata=metadata)
dataframe = self.remove_special_chars_from_feature_names(dataframe)
self.get_unique_classes_from_labels(dataframe)
dataframe = self.remove_special_chars_from_feature_names(dataframe)
if self.config.get('reduce_df_footprint', False):
dataframe = reduce_dataframe_footprint(dataframe)

View File

@ -83,6 +83,7 @@ class IFreqaiModel(ABC):
self.CONV_WIDTH = self.freqai_info.get('conv_width', 1)
if self.ft_params.get("inlier_metric_window", 0):
self.CONV_WIDTH = self.ft_params.get("inlier_metric_window", 0) * 2
self.class_names: List[str] = [] # used in classification subclasses
self.pair_it = 0
self.pair_it_train = 0
self.total_pairs = len(self.config.get("exchange", {}).get("pair_whitelist"))
@ -306,7 +307,7 @@ class IFreqaiModel(ABC):
if check_features:
self.dd.load_metadata(dk)
dataframe_dummy_features = self.dk.use_strategy_to_populate_indicators(
strategy, prediction_dataframe=dataframe.tail(1), pair=metadata["pair"]
strategy, prediction_dataframe=dataframe.tail(1), pair=pair
)
dk.find_features(dataframe_dummy_features)
self.check_if_feature_list_matches_strategy(dk)
@ -316,7 +317,7 @@ class IFreqaiModel(ABC):
else:
if populate_indicators:
dataframe = self.dk.use_strategy_to_populate_indicators(
strategy, prediction_dataframe=dataframe, pair=metadata["pair"]
strategy, prediction_dataframe=dataframe, pair=pair
)
populate_indicators = False
@ -332,6 +333,10 @@ class IFreqaiModel(ABC):
dataframe_train = dk.slice_dataframe(tr_train, dataframe_base_train)
dataframe_backtest = dk.slice_dataframe(tr_backtest, dataframe_base_backtest)
dataframe_train = dk.remove_special_chars_from_feature_names(dataframe_train)
dataframe_backtest = dk.remove_special_chars_from_feature_names(dataframe_backtest)
dk.get_unique_classes_from_labels(dataframe_train)
if not self.model_exists(dk):
dk.find_features(dataframe_train)
dk.find_labels(dataframe_train)
@ -567,8 +572,9 @@ class IFreqaiModel(ABC):
file_type = ".joblib"
elif self.dd.model_type == 'keras':
file_type = ".h5"
elif 'stable_baselines' in self.dd.model_type or 'sb3_contrib' == self.dd.model_type:
elif self.dd.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
file_type = ".zip"
path_to_modelfile = Path(dk.data_path / f"{dk.model_filename}_model{file_type}")
file_exists = path_to_modelfile.is_file()
if file_exists:

View File

@ -14,16 +14,20 @@ logger = logging.getLogger(__name__)
class CatboostClassifier(BaseClassifierModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
train_data = Pool(

View File

@ -15,16 +15,20 @@ logger = logging.getLogger(__name__)
class CatboostClassifierMultiTarget(BaseClassifierModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
cbc = CatBoostClassifier(

View File

@ -14,16 +14,20 @@ logger = logging.getLogger(__name__)
class CatboostRegressor(BaseRegressionModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
train_data = Pool(

View File

@ -15,16 +15,20 @@ logger = logging.getLogger(__name__)
class CatboostRegressorMultiTarget(BaseRegressionModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
cbr = CatBoostRegressor(

View File

@ -12,16 +12,20 @@ logger = logging.getLogger(__name__)
class LightGBMClassifier(BaseClassifierModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) == 0:

View File

@ -13,16 +13,20 @@ logger = logging.getLogger(__name__)
class LightGBMClassifierMultiTarget(BaseClassifierModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
lgb = LGBMClassifier(**self.model_training_parameters)

View File

@ -12,18 +12,20 @@ logger = logging.getLogger(__name__)
class LightGBMRegressor(BaseRegressionModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
Most regressors use the same function names and arguments e.g. user
can drop in LGBMRegressor in place of CatBoostRegressor and all data
management will be properly handled by Freqai.
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
if self.freqai_info.get('data_split_parameters', {}).get('test_size', 0.1) == 0:

View File

@ -13,16 +13,20 @@ logger = logging.getLogger(__name__)
class LightGBMRegressorMultiTarget(BaseRegressionModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
lgb = LGBMRegressor(**self.model_training_parameters)

View File

@ -0,0 +1,89 @@
from typing import Any, Dict
import torch
from freqtrade.freqai.base_models.BasePyTorchClassifier import BasePyTorchClassifier
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.torch.PyTorchDataConvertor import (DefaultPyTorchDataConvertor,
PyTorchDataConvertor)
from freqtrade.freqai.torch.PyTorchMLPModel import PyTorchMLPModel
from freqtrade.freqai.torch.PyTorchModelTrainer import PyTorchModelTrainer
class PyTorchMLPClassifier(BasePyTorchClassifier):
"""
This class implements the fit method of IFreqaiModel.
In the fit method we initialize the model and trainer objects.
The only requirement from the model is to be aligned to the PyTorchClassifier
predict method, which expects the model to predict a tensor of type long.
Parameters are passed via `model_training_parameters` under the freqai
section in the config file, e.g.:
{
...
"freqai": {
...
"model_training_parameters" : {
"learning_rate": 3e-4,
"trainer_kwargs": {
"max_iters": 5000,
"batch_size": 64,
"max_n_eval_batches": null,
},
"model_kwargs": {
"hidden_dim": 512,
"dropout_percent": 0.2,
"n_layer": 1,
},
}
}
}
"""
@property
def data_convertor(self) -> PyTorchDataConvertor:
return DefaultPyTorchDataConvertor(
target_tensor_type=torch.long,
squeeze_target_tensor=True
)
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
:raises ValueError: If self.class_names is not defined in the parent class.
"""
class_names = self.get_class_names()
self.convert_label_column_to_int(data_dictionary, dk, class_names)
n_features = data_dictionary["train_features"].shape[-1]
model = PyTorchMLPModel(
input_dim=n_features,
output_dim=len(class_names),
**self.model_kwargs
)
model.to(self.device)
optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
criterion = torch.nn.CrossEntropyLoss()
init_model = self.get_init_model(dk.pair)
trainer = PyTorchModelTrainer(
model=model,
optimizer=optimizer,
criterion=criterion,
model_meta_data={"class_names": class_names},
device=self.device,
init_model=init_model,
data_convertor=self.data_convertor,
**self.trainer_kwargs,
)
trainer.fit(data_dictionary, self.splits)
return trainer

View File

@ -0,0 +1,83 @@
from typing import Any, Dict
import torch
from freqtrade.freqai.base_models.BasePyTorchRegressor import BasePyTorchRegressor
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.torch.PyTorchDataConvertor import (DefaultPyTorchDataConvertor,
PyTorchDataConvertor)
from freqtrade.freqai.torch.PyTorchMLPModel import PyTorchMLPModel
from freqtrade.freqai.torch.PyTorchModelTrainer import PyTorchModelTrainer
class PyTorchMLPRegressor(BasePyTorchRegressor):
"""
This class implements the fit method of IFreqaiModel.
In the fit method we initialize the model and trainer objects.
The only requirement from the model is to be aligned to the PyTorchRegressor
predict method, which expects the model to predict a tensor of type float.
The trainer defines the training loop.
Parameters are passed via `model_training_parameters` under the freqai
section in the config file, e.g.:
{
...
"freqai": {
...
"model_training_parameters" : {
"learning_rate": 3e-4,
"trainer_kwargs": {
"max_iters": 5000,
"batch_size": 64,
"max_n_eval_batches": null,
},
"model_kwargs": {
"hidden_dim": 512,
"dropout_percent": 0.2,
"n_layer": 1,
},
}
}
}
"""
@property
def data_convertor(self) -> PyTorchDataConvertor:
return DefaultPyTorchDataConvertor(target_tensor_type=torch.float)
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
n_features = data_dictionary["train_features"].shape[-1]
model = PyTorchMLPModel(
input_dim=n_features,
output_dim=1,
**self.model_kwargs
)
model.to(self.device)
optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
criterion = torch.nn.MSELoss()
init_model = self.get_init_model(dk.pair)
trainer = PyTorchModelTrainer(
model=model,
optimizer=optimizer,
criterion=criterion,
device=self.device,
init_model=init_model,
data_convertor=self.data_convertor,
**self.trainer_kwargs,
)
trainer.fit(data_dictionary, self.splits)
return trainer

View File

@ -71,7 +71,8 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
model.learn(
total_timesteps=int(total_timesteps),
callback=[self.eval_callback, self.tensorboard_callback]
callback=[self.eval_callback, self.tensorboard_callback],
progress_bar=self.rl_config.get('progress_bar', False)
)
if Path(dk.data_path / "best_model.zip").is_file():

View File

@ -18,16 +18,20 @@ logger = logging.getLogger(__name__)
class XGBoostClassifier(BaseClassifierModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
X = data_dictionary["train_features"].to_numpy()

View File

@ -18,16 +18,20 @@ logger = logging.getLogger(__name__)
class XGBoostRFClassifier(BaseClassifierModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
X = data_dictionary["train_features"].to_numpy()

View File

@ -12,16 +12,20 @@ logger = logging.getLogger(__name__)
class XGBoostRFRegressor(BaseRegressionModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
X = data_dictionary["train_features"]

View File

@ -12,16 +12,20 @@ logger = logging.getLogger(__name__)
class XGBoostRegressor(BaseRegressionModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
X = data_dictionary["train_features"]

View File

@ -13,16 +13,20 @@ logger = logging.getLogger(__name__)
class XGBoostRegressorMultiTarget(BaseRegressionModel):
"""
User created prediction model. The class needs to override three necessary
functions, predict(), train(), fit(). The class inherits ModelHandler which
has its own DataHandler where data is held, saved, loaded, and managed.
User created prediction model. The class inherits IFreqaiModel, which
means it has full access to all Frequency AI functionality. Typically,
users would use this to override the common `fit()`, `train()`, or
`predict()` methods to add their custom data handling tools or change
various aspects of the training that cannot be configured via the
top level config.json file.
"""
def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param data_dictionary: the dictionary holding all data for train, test,
labels, weights
:param dk: The datakitchen object for the current coin/model
"""
xgb = XGBRegressor(**self.model_training_parameters)
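To make the fit() contract described in these docstrings concrete, here is a minimal sketch of a user-supplied regressor. It assumes the data_dictionary keys used throughout this diff (train_features, train_labels, train_weights) and the usual BaseRegressionModel import path; treat it as an illustration of the override pattern, not the shipped implementation.

from typing import Any, Dict

from xgboost import XGBRegressor

from freqtrade.freqai.base_models.BaseRegressionModel import BaseRegressionModel
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen


class MyXGBoostRegressor(BaseRegressionModel):

    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
        # Pull the pre-built train split out of the data dictionary.
        X = data_dictionary["train_features"]
        y = data_dictionary["train_labels"]
        weights = data_dictionary["train_weights"]

        # model_training_parameters comes from the freqai section of config.json.
        model = XGBRegressor(**self.model_training_parameters)
        model.fit(X=X, y=y, sample_weight=weights)
        return model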

View File

@ -0,0 +1,67 @@
from abc import ABC, abstractmethod
from typing import List, Optional
import pandas as pd
import torch
class PyTorchDataConvertor(ABC):
"""
This class is responsible for converting `*_features` & `*_labels` pandas dataframes
to pytorch tensors.
"""
@abstractmethod
def convert_x(self, df: pd.DataFrame, device: Optional[str] = None) -> List[torch.Tensor]:
"""
:param df: "*_features" dataframe.
:param device: The device to use for training (e.g. 'cpu', 'cuda').
"""
@abstractmethod
def convert_y(self, df: pd.DataFrame, device: Optional[str] = None) -> List[torch.Tensor]:
"""
:param df: "*_labels" dataframe.
:param device: The device to use for training (e.g. 'cpu', 'cuda').
"""
class DefaultPyTorchDataConvertor(PyTorchDataConvertor):
"""
A default conversion that preserves the feature dataframe's shape.
"""
def __init__(
self,
target_tensor_type: Optional[torch.dtype] = None,
squeeze_target_tensor: bool = False
):
"""
:param target_tensor_type: type of target tensor, for classification use
torch.long, for regressor use torch.float or torch.double.
:param squeeze_target_tensor: controls the target shape, used for loss functions
that require 0D or 1D targets.
"""
self._target_tensor_type = target_tensor_type
self._squeeze_target_tensor = squeeze_target_tensor
def convert_x(self, df: pd.DataFrame, device: Optional[str] = None) -> List[torch.Tensor]:
x = torch.from_numpy(df.values).float()
if device:
x = x.to(device)
return [x]
def convert_y(self, df: pd.DataFrame, device: Optional[str] = None) -> List[torch.Tensor]:
y = torch.from_numpy(df.values)
if self._target_tensor_type:
y = y.to(self._target_tensor_type)
if self._squeeze_target_tensor:
y = y.squeeze()
if device:
y = y.to(device)
return [y]
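A short usage sketch of the convertor above, assuming it is importable and using a toy dataframe; the expected shapes and dtypes follow directly from the implementation.

import pandas as pd
import torch

features = pd.DataFrame({"f0": [0.1, 0.2, 0.3], "f1": [1.0, 2.0, 3.0]})
labels = pd.DataFrame({"target": [0, 1, 0]})

convertor = DefaultPyTorchDataConvertor(
    target_tensor_type=torch.long,  # classification-style integer targets
    squeeze_target_tensor=True,     # for criterions that expect 1D targets
)
(x,) = convertor.convert_x(features, device="cpu")
(y,) = convertor.convert_y(labels, device="cpu")
assert x.shape == (3, 2) and x.dtype == torch.float32
assert y.shape == (3,) and y.dtype == torch.int64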

View File

@ -0,0 +1,97 @@
import logging
from typing import List
import torch
from torch import nn
logger = logging.getLogger(__name__)
class PyTorchMLPModel(nn.Module):
"""
A multi-layer perceptron (MLP) model implemented using PyTorch.
This class mainly serves as a simple example of integrating PyTorch models
into freqai. It is not optimized at all and should not be used for production purposes.
:param input_dim: The number of input features. This parameter specifies the number
of features in the input data that the MLP will use to make predictions.
:param output_dim: The number of output classes. This parameter specifies the number
of classes that the MLP will predict.
:param hidden_dim: The number of hidden units in each layer. This parameter controls
the complexity of the MLP and determines how many nonlinear relationships the MLP
can represent. Increasing the number of hidden units can increase the capacity of
the MLP to model complex patterns, but it also increases the risk of overfitting
the training data. Default: 256
:param dropout_percent: The dropout rate for regularization. This parameter specifies
the probability of dropping out a neuron during training to prevent overfitting.
The dropout rate should be tuned carefully to balance between underfitting and
overfitting. Default: 0.2
:param n_layer: The number of layers in the MLP. This parameter specifies the number
of layers in the MLP architecture. Adding more layers to the MLP can increase its
capacity to model complex patterns, but it also increases the risk of overfitting
the training data. Default: 1
:returns: The output of the MLP, with shape (batch_size, output_dim)
"""
def __init__(self, input_dim: int, output_dim: int, **kwargs):
super().__init__()
hidden_dim: int = kwargs.get("hidden_dim", 256)
dropout_percent: float = kwargs.get("dropout_percent", 0.2)
n_layer: int = kwargs.get("n_layer", 1)
self.input_layer = nn.Linear(input_dim, hidden_dim)
self.blocks = nn.Sequential(*[Block(hidden_dim, dropout_percent) for _ in range(n_layer)])
self.output_layer = nn.Linear(hidden_dim, output_dim)
self.relu = nn.ReLU()
self.dropout = nn.Dropout(p=dropout_percent)
def forward(self, tensors: List[torch.Tensor]) -> torch.Tensor:
x: torch.Tensor = tensors[0]
x = self.relu(self.input_layer(x))
x = self.dropout(x)
x = self.blocks(x)
x = self.output_layer(x)
return x
class Block(nn.Module):
"""
A building block for a multi-layer perceptron (MLP).
:param hidden_dim: The number of hidden units in the feedforward network.
:param dropout_percent: The dropout rate for regularization.
:returns: torch.Tensor with shape (batch_size, hidden_dim)
"""
def __init__(self, hidden_dim: int, dropout_percent: float):
super().__init__()
self.ff = FeedForward(hidden_dim)
self.dropout = nn.Dropout(p=dropout_percent)
self.ln = nn.LayerNorm(hidden_dim)
def forward(self, x: torch.Tensor) -> torch.Tensor:
x = self.ff(self.ln(x))
x = self.dropout(x)
return x
class FeedForward(nn.Module):
"""
A simple fully-connected feedforward neural network block.
:param hidden_dim: The number of hidden units in the block.
:return: torch.Tensor with shape (batch_size, hidden_dim)
"""
def __init__(self, hidden_dim: int):
super().__init__()
self.net = nn.Sequential(
nn.Linear(hidden_dim, hidden_dim),
nn.ReLU(),
)
def forward(self, x: torch.Tensor) -> torch.Tensor:
return self.net(x)
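A minimal forward-pass check for the model above (dimensions are arbitrary); note that forward() takes a list of tensors and uses the first one.

import torch

model = PyTorchMLPModel(input_dim=10, output_dim=1, hidden_dim=64, n_layer=2)
x = torch.randn(32, 10)  # (batch_size, input_dim)
out = model([x])
assert out.shape == (32, 1)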

View File

@ -0,0 +1,208 @@
import logging
import math
from pathlib import Path
from typing import Any, Dict, List, Optional
import pandas as pd
import torch
from torch import nn
from torch.optim import Optimizer
from torch.utils.data import DataLoader, TensorDataset
from freqtrade.freqai.torch.PyTorchDataConvertor import PyTorchDataConvertor
from freqtrade.freqai.torch.PyTorchTrainerInterface import PyTorchTrainerInterface
logger = logging.getLogger(__name__)
class PyTorchModelTrainer(PyTorchTrainerInterface):
def __init__(
self,
model: nn.Module,
optimizer: Optimizer,
criterion: nn.Module,
device: str,
init_model: Dict,
data_convertor: PyTorchDataConvertor,
model_meta_data: Dict[str, Any] = {},
**kwargs
):
"""
:param model: The PyTorch model to be trained.
:param optimizer: The optimizer to use for training.
:param criterion: The loss function to use for training.
:param device: The device to use for training (e.g. 'cpu', 'cuda').
:param init_model: A dictionary containing the initial model/optimizer
state_dict and model_meta_data saved by self.save() method.
:param model_meta_data: Additional metadata about the model (optional).
:param data_convertor: converter from pd.DataFrame to torch.Tensor.
:param max_iters: The number of training iterations to run. An iteration
here is one call to self.optimizer.step(); used to calculate n_epochs.
:param batch_size: The size of the batches to use during training.
:param max_n_eval_batches: The maximum number of batches to use for evaluation.
"""
self.model = model
self.optimizer = optimizer
self.criterion = criterion
self.model_meta_data = model_meta_data
self.device = device
self.max_iters: int = kwargs.get("max_iters", 100)
self.batch_size: int = kwargs.get("batch_size", 64)
self.max_n_eval_batches: Optional[int] = kwargs.get("max_n_eval_batches", None)
self.data_convertor = data_convertor
if init_model:
self.load_from_checkpoint(init_model)
def fit(self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]):
"""
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param splits: splits to use in training; splits must contain "train". An
optional "test" split can be added by setting
freqai.data_split_parameters.test_size > 0 in the config file.
- Calculates the predicted output for the batch using the PyTorch model.
- Calculates the loss between the predicted and actual output using a loss function.
- Computes the gradients of the loss with respect to the model's parameters using
backpropagation.
- Updates the model's parameters using an optimizer.
"""
data_loaders_dictionary = self.create_data_loaders_dictionary(data_dictionary, splits)
epochs = self.calc_n_epochs(
n_obs=len(data_dictionary["train_features"]),
batch_size=self.batch_size,
n_iters=self.max_iters
)
for epoch in range(1, epochs + 1):
# training
losses = []
for i, batch_data in enumerate(data_loaders_dictionary["train"]):
# move batch tensors to the training device (`.to()` is not in-place)
batch_data = [t.to(self.device) for t in batch_data]
xb = batch_data[:-1]
yb = batch_data[-1]
yb_pred = self.model(xb)
loss = self.criterion(yb_pred, yb)
self.optimizer.zero_grad(set_to_none=True)
loss.backward()
self.optimizer.step()
losses.append(loss.item())
train_loss = sum(losses) / len(losses)
log_message = f"epoch {epoch}/{epochs}: train loss {train_loss:.4f}"
# evaluation
if "test" in splits:
test_loss = self.estimate_loss(
data_loaders_dictionary,
self.max_n_eval_batches,
"test"
)
log_message += f" ; test loss {test_loss:.4f}"
logger.info(log_message)
@torch.no_grad()
def estimate_loss(
self,
data_loader_dictionary: Dict[str, DataLoader],
max_n_eval_batches: Optional[int],
split: str,
) -> float:
self.model.eval()
losses = []
for i, batch_data in enumerate(data_loader_dictionary[split]):
if max_n_eval_batches and i >= max_n_eval_batches:
break
# move batch tensors to the training device (`.to()` is not in-place)
batch_data = [t.to(self.device) for t in batch_data]
xb = batch_data[:-1]
yb = batch_data[-1]
yb_pred = self.model(xb)
loss = self.criterion(yb_pred, yb)
losses.append(loss.item())
self.model.train()
return sum(losses) / len(losses)
def create_data_loaders_dictionary(
self,
data_dictionary: Dict[str, pd.DataFrame],
splits: List[str]
) -> Dict[str, DataLoader]:
"""
Converts the input dataframes to PyTorch tensors and wraps each split in a DataLoader.
"""
data_loader_dictionary = {}
for split in splits:
x = self.data_convertor.convert_x(data_dictionary[f"{split}_features"], self.device)
y = self.data_convertor.convert_y(data_dictionary[f"{split}_labels"], self.device)
dataset = TensorDataset(*x, *y)
data_loader = DataLoader(
dataset,
batch_size=self.batch_size,
shuffle=True,
drop_last=True,
num_workers=0,
)
data_loader_dictionary[split] = data_loader
return data_loader_dictionary
@staticmethod
def calc_n_epochs(n_obs: int, batch_size: int, n_iters: int) -> int:
"""
Calculates the number of epochs required to reach the maximum number
of iterations specified in the model training parameters.
The motivation here is that `max_iters` is easier to optimize and keep stable
across different values of n_obs - the number of data points.
"""
n_batches = n_obs // batch_size  # drop_last=True, so partial batches are discarded
epochs = math.ceil(n_iters / n_batches)
if epochs <= 10:
logger.warning("User set `max_iters` in such a way that the trainer will only perform "
f" {epochs} epochs. Please consider increasing this value accordingly")
if epochs <= 1:
logger.warning("Epochs set to 1. Please review your `max_iters` value")
epochs = 1
return epochs
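A worked example of the epoch arithmetic above, with arbitrary numbers:

import math

n_obs, batch_size, n_iters = 1024, 64, 600
n_batches = n_obs // batch_size          # 16 full batches per epoch
epochs = math.ceil(n_iters / n_batches)  # ceil(600 / 16) = 38
assert (n_batches, epochs) == (16, 38)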
def save(self, path: Path):
"""
- Saving any nn.Module state_dict
- Saving model_meta_data, this dict should contain any additional data that the
user needs to store, e.g. class_names for classification models.
"""
torch.save({
"model_state_dict": self.model.state_dict(),
"optimizer_state_dict": self.optimizer.state_dict(),
"model_meta_data": self.model_meta_data,
"pytrainer": self
}, path)
def load(self, path: Path):
checkpoint = torch.load(path)
return self.load_from_checkpoint(checkpoint)
def load_from_checkpoint(self, checkpoint: Dict):
"""
When using continual_learning, DataDrawer will load the dictionary
(containing state dicts and model_meta_data) by calling torch.load(path).
You can access this dict from any class that inherits IFreqaiModel by calling
the get_init_model method.
"""
self.model.load_state_dict(checkpoint["model_state_dict"])
self.optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
self.model_meta_data = checkpoint["model_meta_data"]
return self
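Putting the trainer together, a hypothetical construction and save/load round trip could look as follows; every argument here is a stand-in, and init_model=None simply skips checkpoint restoration since the constructor only loads a truthy init_model.

from pathlib import Path

import torch
from torch import nn

model = PyTorchMLPModel(input_dim=10, output_dim=1)
trainer = PyTorchModelTrainer(
    model=model,
    optimizer=torch.optim.AdamW(model.parameters(), lr=3e-4),
    criterion=nn.MSELoss(),
    device="cpu",
    init_model=None,
    data_convertor=DefaultPyTorchDataConvertor(target_tensor_type=torch.float),
    max_iters=100,
    batch_size=64,
)
trainer.save(Path("model_checkpoint.pt"))
restored = trainer.load(Path("model_checkpoint.pt"))  # state dicts reloaded in place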

View File

@ -0,0 +1,53 @@
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Dict, List
import pandas as pd
import torch
from torch import nn
class PyTorchTrainerInterface(ABC):
@abstractmethod
def fit(self, data_dictionary: Dict[str, pd.DataFrame], splits: List[str]) -> None:
"""
:param data_dictionary: the dictionary constructed by DataHandler to hold
all the training and test data/labels.
:param splits: splits to use in training; splits must contain "train". An
optional "test" split can be added by setting
freqai.data_split_parameters.test_size > 0 in the config file.
- Calculates the predicted output for the batch using the PyTorch model.
- Calculates the loss between the predicted and actual output using a loss function.
- Computes the gradients of the loss with respect to the model's parameters using
backpropagation.
- Updates the model's parameters using an optimizer.
"""
@abstractmethod
def save(self, path: Path) -> None:
"""
- Saving any nn.Module state_dict
- Saving model_meta_data, this dict should contain any additional data that the
user needs to store, e.g. class_names for classification models.
"""
def load(self, path: Path) -> nn.Module:
"""
:param path: path to zip file.
:returns: pytorch model.
"""
checkpoint = torch.load(path)
return self.load_from_checkpoint(checkpoint)
@abstractmethod
def load_from_checkpoint(self, checkpoint: Dict) -> nn.Module:
"""
When using continual_learning, DataDrawer will load the dictionary
(containing state dicts and model_meta_data) by calling torch.load(path).
You can access this dict from any class that inherits IFreqaiModel by calling
the get_init_model method.
:param checkpoint: dict containing the model & optimizer state dicts,
model_meta_data, etc.
"""

View File

View File

@ -21,10 +21,12 @@ from freqtrade.enums import (ExitCheckTuple, ExitType, RPCMessageType, RunMode,
State, TradingMode)
from freqtrade.exceptions import (DependencyException, ExchangeError, InsufficientFundsError,
InvalidOrderException, PricingError)
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_next_date, timeframe_to_seconds
from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, timeframe_to_minutes, timeframe_to_next_date,
timeframe_to_seconds)
from freqtrade.misc import safe_value_fallback, safe_value_fallback2
from freqtrade.mixins import LoggingMixin
from freqtrade.persistence import Order, PairLocks, Trade, init_db
from freqtrade.persistence.key_value_store import set_startup_time
from freqtrade.plugins.pairlistmanager import PairListManager
from freqtrade.plugins.protectionmanager import ProtectionManager
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
@ -181,6 +183,7 @@ class FreqtradeBot(LoggingMixin):
performs startup tasks
"""
migrate_binance_futures_names(self.config)
set_startup_time()
self.rpc.startup_messages(self.config, self.pairlists, self.protections)
# Update older trades with precision and precision mode
@ -853,7 +856,8 @@ class FreqtradeBot(LoggingMixin):
logger.info(f"Canceling stoploss on exchange for {trade}")
co = self.exchange.cancel_stoploss_order_with_result(
trade.stoploss_order_id, trade.pair, trade.amount)
trade.update_order(co)
self.update_trade_state(trade, trade.stoploss_order_id, co, stoploss_order=True)
# Reset stoploss order id.
trade.stoploss_order_id = None
except InvalidOrderException:
@ -945,7 +949,7 @@ class FreqtradeBot(LoggingMixin):
return enter_limit_requested, stake_amount, leverage
def _notify_enter(self, trade: Trade, order: Order, order_type: Optional[str] = None,
def _notify_enter(self, trade: Trade, order: Order, order_type: str,
fill: bool = False, sub_trade: bool = False) -> None:
"""
Sends rpc notification when an entry order occurred.
@ -1171,7 +1175,8 @@ class FreqtradeBot(LoggingMixin):
logger.warning('Unable to fetch stoploss order: %s', exception)
if stoploss_order:
trade.update_order(stoploss_order)
self.update_trade_state(trade, trade.stoploss_order_id, stoploss_order,
stoploss_order=True)
# We check if stoploss order is fulfilled
if stoploss_order and stoploss_order['status'] in ('closed', 'triggered'):
@ -1235,7 +1240,9 @@ class FreqtradeBot(LoggingMixin):
:param order: Current on exchange stoploss order
:return: None
"""
stoploss_norm = self.exchange.price_to_precision(trade.pair, trade.stoploss_or_liquidation)
stoploss_norm = self.exchange.price_to_precision(
trade.pair, trade.stoploss_or_liquidation,
rounding_mode=ROUND_DOWN if trade.is_short else ROUND_UP)
if self.exchange.stoploss_adjust(stoploss_norm, order, side=trade.exit_side):
# we check if the update is necessary
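The rounding direction used here keeps the rounded stop on the conservative side of the raw value: a long's stop sits below the current price and is rounded up, a short's sits above and is rounded down, so the stop never ends up looser than requested. A standalone sketch of the effect, where round_to_step is a hypothetical stand-in for the exchange's price_to_precision:

import math

def round_to_step(price: float, step: float, up: bool) -> float:
    # Snap `price` to a multiple of the precision step, in the given direction.
    fn = math.ceil if up else math.floor
    return fn(price / step) * step

raw_stop = 99.996  # hypothetical stop price with a 0.01 precision step
print(round_to_step(raw_stop, 0.01, up=True))   # long:  100.0
print(round_to_step(raw_stop, 0.01, up=False))  # short: 99.99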
@ -1418,7 +1425,7 @@ class FreqtradeBot(LoggingMixin):
corder = order
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
logger.info('%s order %s for %s.', side, reason, trade)
logger.info(f'{side} order {reason} for {trade}.')
# Using filled to determine the filled amount
filled_amount = safe_value_fallback2(corder, order, 'filled', 'filled')
@ -1478,8 +1485,8 @@ class FreqtradeBot(LoggingMixin):
return False
try:
order = self.exchange.cancel_order_with_result(order['id'], trade.pair,
trade.amount)
order = self.exchange.cancel_order_with_result(
order['id'], trade.pair, trade.amount)
except InvalidOrderException:
logger.exception(
f"Could not cancel {trade.exit_side} order {trade.open_order_id}")
@ -1491,17 +1498,18 @@ class FreqtradeBot(LoggingMixin):
# Order might be filled above in odd timing issues.
if order.get('status') in ('canceled', 'cancelled'):
trade.exit_reason = None
trade.open_order_id = None
else:
trade.exit_reason = exit_reason_prev
cancelled = True
else:
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
trade.exit_reason = None
trade.open_order_id = None
self.update_trade_state(trade, trade.open_order_id, order)
self.update_trade_state(trade, order['id'], order)
logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
trade.open_order_id = None
trade.close_rate = None
trade.close_rate_requested = None
@ -1778,11 +1786,11 @@ class FreqtradeBot(LoggingMixin):
return False
# Update trade with order values
logger.info(f'Found open order for {trade}')
if not stoploss_order:
logger.info(f'Found open order for {trade}')
try:
order = action_order or self.exchange.fetch_order_or_stoploss_order(order_id,
trade.pair,
stoploss_order)
order = action_order or self.exchange.fetch_order_or_stoploss_order(
order_id, trade.pair, stoploss_order)
except InvalidOrderException as exception:
logger.warning('Unable to fetch order %s: %s', order_id, exception)
return False
@ -1847,7 +1855,7 @@ class FreqtradeBot(LoggingMixin):
self.handle_protections(trade.pair, trade.trade_direction)
elif send_msg and not trade.open_order_id and not stoploss_order:
# Enter fill
self._notify_enter(trade, order, fill=True, sub_trade=sub_trade)
self._notify_enter(trade, order, order.order_type, fill=True, sub_trade=sub_trade)
def handle_protections(self, pair: str, side: LongShort) -> None:
# Lock pair for one candle to prevent immediate rebuys

View File

@ -1,24 +1,11 @@
import logging
import sys
from logging import Formatter
from logging.handlers import BufferingHandler, RotatingFileHandler, SysLogHandler
from logging.handlers import RotatingFileHandler, SysLogHandler
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
class FTBufferingHandler(BufferingHandler):
def flush(self):
"""
Override Flush behaviour - we keep half of the configured capacity
otherwise, we have moments with "empty" logs.
"""
self.acquire()
try:
# Keep half of the records in buffer.
self.buffer = self.buffer[-int(self.capacity / 2):]
finally:
self.release()
from freqtrade.loggers.buffering_handler import FTBufferingHandler
from freqtrade.loggers.std_err_stream_handler import FTStdErrStreamHandler
logger = logging.getLogger(__name__)
@ -69,7 +56,7 @@ def setup_logging_pre() -> None:
logging.basicConfig(
level=logging.INFO,
format=LOGFORMAT,
handlers=[logging.StreamHandler(sys.stderr), bufferHandler]
handlers=[FTStdErrStreamHandler(), bufferHandler]
)

View File

@ -0,0 +1,15 @@
from logging.handlers import BufferingHandler
class FTBufferingHandler(BufferingHandler):
def flush(self):
"""
Override Flush behaviour - we keep half of the configured capacity
otherwise, we have moments with "empty" logs.
"""
self.acquire()
try:
# Keep half of the records in buffer.
self.buffer = self.buffer[-int(self.capacity / 2):]
finally:
self.release()

View File

@ -0,0 +1,26 @@
import sys
from logging import Handler
class FTStdErrStreamHandler(Handler):
def flush(self):
"""
Override Flush behaviour - flush the underlying stderr stream directly;
this handler deliberately keeps no stream reference.
"""
self.acquire()
try:
sys.stderr.flush()
finally:
self.release()
def emit(self, record):
try:
msg = self.format(record)
# Don't keep a reference to stderr - this can be problematic with progressbars.
sys.stderr.write(msg + '\n')
self.flush()
except RecursionError:
raise
except Exception:
self.handleError(record)
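Wiring the handler into the root logger is straightforward; a minimal sketch mirroring what setup_logging_pre does above:

import logging

handler = FTStdErrStreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
logging.getLogger().addHandler(handler)
logging.getLogger(__name__).info("hello")  # written straight to sys.stderr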

View File

@ -13,13 +13,13 @@ from math import ceil
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
import progressbar
import rapidjson
from colorama import Fore, Style
from colorama import init as colorama_init
from joblib import Parallel, cpu_count, delayed, dump, load, wrap_non_picklable_objects
from joblib.externals import cloudpickle
from pandas import DataFrame
from rich.progress import (BarColumn, MofNCompleteColumn, Progress, TaskProgressColumn, TextColumn,
TimeElapsedColumn, TimeRemainingColumn)
from freqtrade.constants import DATETIME_PRINT_FORMAT, FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
from freqtrade.data.converter import trim_dataframes
@ -44,8 +44,6 @@ with warnings.catch_warnings():
from skopt import Optimizer
from skopt.space import Dimension
progressbar.streams.wrap_stderr()
progressbar.streams.wrap_stdout()
logger = logging.getLogger(__name__)
@ -381,7 +379,8 @@ class Hyperopt:
strat_stats = generate_strategy_stats(
self.pairlist, self.backtesting.strategy.get_strategy_name(),
backtesting_results, min_date, max_date, market_change=self.market_change
backtesting_results, min_date, max_date, market_change=self.market_change,
is_hyperopt=True,
)
results_explanation = HyperoptTools.format_results_explanation_string(
strat_stats, self.config['stake_currency'])
@ -520,29 +519,6 @@ class Hyperopt:
else:
return self.opt.ask(n_points=n_points), [False for _ in range(n_points)]
def get_progressbar_widgets(self):
if self.print_colorized:
widgets = [
' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
' (', progressbar.Percentage(), ')] ',
progressbar.Bar(marker=progressbar.AnimatedMarker(
fill='\N{FULL BLOCK}',
fill_wrap=Fore.GREEN + '{}' + Fore.RESET,
marker_wrap=Style.BRIGHT + '{}' + Style.RESET_ALL,
)),
' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
]
else:
widgets = [
' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
' (', progressbar.Percentage(), ')] ',
progressbar.Bar(marker=progressbar.AnimatedMarker(
fill='\N{FULL BLOCK}',
)),
' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
]
return widgets
def evaluate_result(self, val: Dict[str, Any], current: int, is_random: bool):
"""
Evaluate results returned from generate_optimizer
@ -602,11 +578,19 @@ class Hyperopt:
logger.info(f'Effective number of parallel workers used: {jobs}')
# Define progressbar
widgets = self.get_progressbar_widgets()
with progressbar.ProgressBar(
max_value=self.total_epochs, redirect_stdout=False, redirect_stderr=False,
widgets=widgets
with Progress(
TextColumn("[progress.description]{task.description}"),
BarColumn(bar_width=None),
MofNCompleteColumn(),
TaskProgressColumn(),
"",
TimeElapsedColumn(),
"",
TimeRemainingColumn(),
expand=True,
) as pbar:
task = pbar.add_task("Epochs", total=self.total_epochs)
start = 0
if self.analyze_per_epoch:
@ -616,7 +600,7 @@ class Hyperopt:
f_val0 = self.generate_optimizer(asked[0])
self.opt.tell(asked, [f_val0['loss']])
self.evaluate_result(f_val0, 1, is_random[0])
pbar.update(1)
pbar.update(task, advance=1)
start += 1
evals = ceil((self.total_epochs - start) / jobs)
@ -630,14 +614,12 @@ class Hyperopt:
f_val = self.run_optimizer_parallel(parallel, asked)
self.opt.tell(asked, [v['loss'] for v in f_val])
# Calculate progressbar outputs
for j, val in enumerate(f_val):
# Use human-friendly indexes here (starting from 1)
current = i * jobs + j + 1 + start
self.evaluate_result(val, current, is_random[j])
pbar.update(current)
pbar.update(task, advance=1)
except KeyboardInterrupt:
print('User interrupted..')

View File

@ -23,6 +23,8 @@ logger = logging.getLogger(__name__)
NON_OPT_PARAM_APPENDIX = " # value loaded from strategy"
HYPER_PARAMS_FILE_FORMAT = rapidjson.NM_NATIVE | rapidjson.NM_NAN
def hyperopt_serializer(x):
if isinstance(x, np.integer):
@ -76,9 +78,18 @@ class HyperoptTools():
with filename.open('w') as f:
rapidjson.dump(final_params, f, indent=2,
default=hyperopt_serializer,
number_mode=rapidjson.NM_NATIVE | rapidjson.NM_NAN
number_mode=HYPER_PARAMS_FILE_FORMAT
)
@staticmethod
def load_params(filename: Path) -> Dict:
"""
Load parameters from file
"""
with filename.open('r') as f:
params = rapidjson.load(f, number_mode=HYPER_PARAMS_FILE_FORMAT)
return params
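Sharing one number-mode constant guarantees that dump and load agree on number handling: NM_NATIVE serializes numbers with native Python semantics and NM_NAN permits NaN/Infinity. A minimal round trip:

import rapidjson

HYPER_PARAMS_FILE_FORMAT = rapidjson.NM_NATIVE | rapidjson.NM_NAN

params = {"stoploss": -0.05, "max_open_trades": 3, "gap": float("nan")}
text = rapidjson.dumps(params, number_mode=HYPER_PARAMS_FILE_FORMAT)
restored = rapidjson.loads(text, number_mode=HYPER_PARAMS_FILE_FORMAT)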
@staticmethod
def try_export_params(config: Config, strategy_name: str, params: Dict):
if params.get(FTHYPT_FILEVERSION, 1) >= 2 and not config.get('disableparamexport', False):
@ -189,7 +200,7 @@ class HyperoptTools():
for s in ['buy', 'sell', 'protection',
'roi', 'stoploss', 'trailing', 'max_open_trades']:
HyperoptTools._params_update_for_json(result_dict, params, non_optimized, s)
print(rapidjson.dumps(result_dict, default=str, number_mode=rapidjson.NM_NATIVE))
print(rapidjson.dumps(result_dict, default=str, number_mode=HYPER_PARAMS_FILE_FORMAT))
else:
HyperoptTools._params_pretty_print(params, 'buy', "Buy hyperspace params:",

View File

@ -7,8 +7,8 @@ from typing import Any, Dict, List, Union
from pandas import DataFrame, to_datetime
from tabulate import tabulate
from freqtrade.constants import (DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN, UNLIMITED_STAKE_AMOUNT,
Config, IntOrInf)
from freqtrade.constants import (BACKTEST_BREAKDOWNS, DATETIME_PRINT_FORMAT, LAST_BT_RESULT_FN,
UNLIMITED_STAKE_AMOUNT, Config, IntOrInf)
from freqtrade.data.metrics import (calculate_cagr, calculate_calmar, calculate_csum,
calculate_expectancy, calculate_market_change,
calculate_max_drawdown, calculate_sharpe, calculate_sortino)
@ -273,7 +273,8 @@ def _get_resample_from_period(period: str) -> str:
if period == 'day':
return '1d'
if period == 'week':
return '1w'
# Weekly defaulting to Monday.
return '1W-MON'
if period == 'month':
return '1M'
raise ValueError(f"Period {period} is not supported.")
@ -295,6 +296,7 @@ def generate_periodic_breakdown_stats(trade_list: List, period: str) -> List[Dic
stats.append(
{
'date': name.strftime('%d/%m/%Y'),
'date_ts': int(name.to_pydatetime().timestamp() * 1000),
'profit_abs': profit_abs,
'wins': wins,
'draws': draws,
@ -304,6 +306,13 @@ def generate_periodic_breakdown_stats(trade_list: List, period: str) -> List[Dic
return stats
def generate_all_periodic_breakdown_stats(trade_list: List) -> Dict[str, List]:
result = {}
for period in BACKTEST_BREAKDOWNS:
result[period] = generate_periodic_breakdown_stats(trade_list, period)
return result
def generate_trading_stats(results: DataFrame) -> Dict[str, Any]:
""" Generate overall trade statistics """
if len(results) == 0:
@ -380,7 +389,8 @@ def generate_strategy_stats(pairlist: List[str],
strategy: str,
content: Dict[str, Any],
min_date: datetime, max_date: datetime,
market_change: float
market_change: float,
is_hyperopt: bool = False,
) -> Dict[str, Any]:
"""
:param pairlist: List of pairs to backtest
@ -415,6 +425,11 @@ def generate_strategy_stats(pairlist: List[str],
daily_stats = generate_daily_stats(results)
trade_stats = generate_trading_stats(results)
periodic_breakdown = {}
if not is_hyperopt:
periodic_breakdown = {'periodic_breakdown': generate_all_periodic_breakdown_stats(results)}
best_pair = max([pair for pair in pair_results if pair['key'] != 'TOTAL'],
key=lambda x: x['profit_sum']) if len(pair_results) > 1 else None
worst_pair = min([pair for pair in pair_results if pair['key'] != 'TOTAL'],
@ -433,7 +448,6 @@ def generate_strategy_stats(pairlist: List[str],
'results_per_enter_tag': enter_tag_results,
'exit_reason_summary': exit_reason_stats,
'left_open_trades': left_open_results,
# 'days_breakdown_stats': days_breakdown_stats,
'total_trades': len(results),
'trade_count_long': len(results.loc[~results['is_short']]),
@ -498,6 +512,7 @@ def generate_strategy_stats(pairlist: List[str],
'exit_profit_only': config['exit_profit_only'],
'exit_profit_offset': config['exit_profit_offset'],
'ignore_roi_if_entry_signal': config['ignore_roi_if_entry_signal'],
**periodic_breakdown,
**daily_stats,
**trade_stats
}
@ -865,6 +880,11 @@ def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency:
print(' BACKTESTING REPORT '.center(len(table.splitlines()[0]), '='))
print(table)
table = text_table_bt_results(results['left_open_trades'], stake_currency=stake_currency)
if isinstance(table, str) and len(table) > 0:
print(' LEFT OPEN TRADES REPORT '.center(len(table.splitlines()[0]), '='))
print(table)
if (results.get('results_per_enter_tag') is not None
or results.get('results_per_buy_tag') is not None):
# results_per_buy_tag is deprecated and should be removed 2 versions after short golive.
@ -884,14 +904,12 @@ def show_backtest_result(strategy: str, results: Dict[str, Any], stake_currency:
print(' EXIT REASON STATS '.center(len(table.splitlines()[0]), '='))
print(table)
table = text_table_bt_results(results['left_open_trades'], stake_currency=stake_currency)
if isinstance(table, str) and len(table) > 0:
print(' LEFT OPEN TRADES REPORT '.center(len(table.splitlines()[0]), '='))
print(table)
for period in backtest_breakdown:
days_breakdown_stats = generate_periodic_breakdown_stats(
trade_list=results['trades'], period=period)
if period in results.get('periodic_breakdown', {}):
days_breakdown_stats = results['periodic_breakdown'][period]
else:
days_breakdown_stats = generate_periodic_breakdown_stats(
trade_list=results['trades'], period=period)
table = text_table_periodic_breakdown(days_breakdown_stats=days_breakdown_stats,
stake_currency=stake_currency, period=period)
if isinstance(table, str) and len(table) > 0:
@ -917,11 +935,11 @@ def show_backtest_results(config: Config, backtest_stats: Dict):
strategy, results, stake_currency,
config.get('backtest_breakdown', []))
if len(backtest_stats['strategy']) > 1:
if len(backtest_stats['strategy']) > 0:
# Print Strategy summary table
table = text_table_strategy(backtest_stats['strategy_comparison'], stake_currency)
print(f"{results['backtest_start']} -> {results['backtest_end']} |"
print(f"Backtested {results['backtest_start']} -> {results['backtest_end']} |"
f" Max open trades : {results['max_open_trades']}")
print(' STRATEGY SUMMARY '.center(len(table.splitlines()[0]), '='))
print(table)

View File

@ -1,5 +1,6 @@
# flake8: noqa: F401
from freqtrade.persistence.key_value_store import KeyStoreKeys, KeyValueStore
from freqtrade.persistence.models import init_db
from freqtrade.persistence.pairlock_middleware import PairLocks
from freqtrade.persistence.trade_model import LocalTrade, Order, Trade

View File

@ -0,0 +1,179 @@
from datetime import datetime, timezone
from enum import Enum
from typing import ClassVar, Optional, Union
from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column
from freqtrade.persistence.base import ModelBase, SessionType
ValueTypes = Union[str, datetime, float, int]
class ValueTypesEnum(str, Enum):
STRING = 'str'
DATETIME = 'datetime'
FLOAT = 'float'
INT = 'int'
class KeyStoreKeys(str, Enum):
BOT_START_TIME = 'bot_start_time'
STARTUP_TIME = 'startup_time'
class _KeyValueStoreModel(ModelBase):
"""
KeyValueStore database model.
"""
__tablename__ = 'KeyValueStore'
session: ClassVar[SessionType]
id: Mapped[int] = mapped_column(primary_key=True)
key: Mapped[KeyStoreKeys] = mapped_column(String(25), nullable=False, index=True)
value_type: Mapped[ValueTypesEnum] = mapped_column(String(20), nullable=False)
string_value: Mapped[Optional[str]]
datetime_value: Mapped[Optional[datetime]]
float_value: Mapped[Optional[float]]
int_value: Mapped[Optional[int]]
class KeyValueStore():
"""
Generic bot-wide, persistent key-value store.
Can be used to store generic values, e.g. the very first bot startup time.
Supports the types str, datetime, float and int.
"""
@staticmethod
def store_value(key: KeyStoreKeys, value: ValueTypes) -> None:
"""
Store the given value for the given key.
:param key: Key to store the value for - can be used in get-value to retrieve the key
:param value: Value to store - can be str, datetime, float or int
"""
kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
_KeyValueStoreModel.key == key).first()
if kv is None:
kv = _KeyValueStoreModel(key=key)
if isinstance(value, str):
kv.value_type = ValueTypesEnum.STRING
kv.string_value = value
elif isinstance(value, datetime):
kv.value_type = ValueTypesEnum.DATETIME
kv.datetime_value = value
elif isinstance(value, float):
kv.value_type = ValueTypesEnum.FLOAT
kv.float_value = value
elif isinstance(value, int):
kv.value_type = ValueTypesEnum.INT
kv.int_value = value
else:
raise ValueError(f'Unsupported value type {type(value)}')
_KeyValueStoreModel.session.add(kv)
_KeyValueStoreModel.session.commit()
@staticmethod
def delete_value(key: KeyStoreKeys) -> None:
"""
Delete the value for the given key.
:param key: Key to delete the value for
"""
kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
_KeyValueStoreModel.key == key).first()
if kv is not None:
_KeyValueStoreModel.session.delete(kv)
_KeyValueStoreModel.session.commit()
@staticmethod
def get_value(key: KeyStoreKeys) -> Optional[ValueTypes]:
"""
Get the value for the given key.
:param key: Key to get the value for
"""
kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
_KeyValueStoreModel.key == key).first()
if kv is None:
return None
if kv.value_type == ValueTypesEnum.STRING:
return kv.string_value
if kv.value_type == ValueTypesEnum.DATETIME and kv.datetime_value is not None:
return kv.datetime_value.replace(tzinfo=timezone.utc)
if kv.value_type == ValueTypesEnum.FLOAT:
return kv.float_value
if kv.value_type == ValueTypesEnum.INT:
return kv.int_value
# This should never happen unless someone messed with the database manually
raise ValueError(f'Unknown value type {kv.value_type}') # pragma: no cover
@staticmethod
def get_string_value(key: KeyStoreKeys) -> Optional[str]:
"""
Get the value for the given key.
:param key: Key to get the value for
"""
kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
_KeyValueStoreModel.key == key,
_KeyValueStoreModel.value_type == ValueTypesEnum.STRING).first()
if kv is None:
return None
return kv.string_value
@staticmethod
def get_datetime_value(key: KeyStoreKeys) -> Optional[datetime]:
"""
Get the value for the given key.
:param key: Key to get the value for
"""
kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
_KeyValueStoreModel.key == key,
_KeyValueStoreModel.value_type == ValueTypesEnum.DATETIME).first()
if kv is None or kv.datetime_value is None:
return None
return kv.datetime_value.replace(tzinfo=timezone.utc)
@staticmethod
def get_float_value(key: KeyStoreKeys) -> Optional[float]:
"""
Get the value for the given key.
:param key: Key to get the value for
"""
kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
_KeyValueStoreModel.key == key,
_KeyValueStoreModel.value_type == ValueTypesEnum.FLOAT).first()
if kv is None:
return None
return kv.float_value
@staticmethod
def get_int_value(key: KeyStoreKeys) -> Optional[int]:
"""
Get the value for the given key.
:param key: Key to get the value for
"""
kv = _KeyValueStoreModel.session.query(_KeyValueStoreModel).filter(
_KeyValueStoreModel.key == key,
_KeyValueStoreModel.value_type == ValueTypesEnum.INT).first()
if kv is None:
return None
return kv.int_value
def set_startup_time():
"""
Sets bot_start_time to the first trade open date - or "now" on new databases.
Sets startup_time to "now".
"""
st = KeyValueStore.get_value('bot_start_time')
if st is None:
from freqtrade.persistence import Trade
t = Trade.session.query(Trade).order_by(Trade.open_date.asc()).first()
if t is not None:
KeyValueStore.store_value('bot_start_time', t.open_date_utc)
else:
KeyValueStore.store_value('bot_start_time', datetime.now(timezone.utc))
KeyValueStore.store_value('startup_time', datetime.now(timezone.utc))
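Typical usage of the store, assuming init_db() has already bound the session as shown in models.py below:

from datetime import datetime, timezone

KeyValueStore.store_value("bot_start_time", datetime.now(timezone.utc))
started = KeyValueStore.get_datetime_value("bot_start_time")  # tz-aware datetime
KeyValueStore.delete_value("bot_start_time")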

View File

@ -13,6 +13,7 @@ from sqlalchemy.pool import StaticPool
from freqtrade.exceptions import OperationalException
from freqtrade.persistence.base import ModelBase
from freqtrade.persistence.key_value_store import _KeyValueStoreModel
from freqtrade.persistence.migrations import check_migrate
from freqtrade.persistence.pairlock import PairLock
from freqtrade.persistence.trade_model import Order, Trade
@ -76,6 +77,7 @@ def init_db(db_url: str) -> None:
bind=engine, autoflush=False), scopefunc=get_request_or_thread_id)
Order.session = Trade.session
PairLock.session = Trade.session
_KeyValueStoreModel.session = Trade.session
previous_tables = inspect(engine).get_table_names()
ModelBase.metadata.create_all(engine)

View File

@ -9,13 +9,14 @@ from typing import Any, ClassVar, Dict, List, Optional, Sequence, cast
from sqlalchemy import (Enum, Float, ForeignKey, Integer, ScalarResult, Select, String,
UniqueConstraint, desc, func, select)
from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship
from sqlalchemy.orm import Mapped, lazyload, mapped_column, relationship, validates
from freqtrade.constants import (DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC, NON_OPEN_EXCHANGE_STATES,
BuySell, LongShort)
from freqtrade.constants import (CUSTOM_TAG_MAX_LENGTH, DATETIME_PRINT_FORMAT, MATH_CLOSE_PREC,
NON_OPEN_EXCHANGE_STATES, BuySell, LongShort)
from freqtrade.enums import ExitType, TradingMode
from freqtrade.exceptions import DependencyException, OperationalException
from freqtrade.exchange import amount_to_contract_precision, price_to_precision
from freqtrade.exchange import (ROUND_DOWN, ROUND_UP, amount_to_contract_precision,
price_to_precision)
from freqtrade.leverage import interest
from freqtrade.persistence.base import ModelBase, SessionType
from freqtrade.util import FtPrecise
@ -597,7 +598,8 @@ class LocalTrade():
"""
Method used internally to set self.stop_loss.
"""
stop_loss_norm = price_to_precision(stop_loss, self.price_precision, self.precision_mode)
stop_loss_norm = price_to_precision(stop_loss, self.price_precision, self.precision_mode,
rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
if not self.stop_loss:
self.initial_stop_loss = stop_loss_norm
self.stop_loss = stop_loss_norm
@ -628,7 +630,8 @@ class LocalTrade():
if self.initial_stop_loss_pct is None or refresh:
self.__set_stop_loss(new_loss, stoploss)
self.initial_stop_loss = price_to_precision(
new_loss, self.price_precision, self.precision_mode)
new_loss, self.price_precision, self.precision_mode,
rounding_mode=ROUND_DOWN if self.is_short else ROUND_UP)
self.initial_stop_loss_pct = -1 * abs(stoploss)
# evaluate if the stop loss needs to be updated
@ -692,21 +695,24 @@ class LocalTrade():
else:
logger.warning(
f'Got different open_order_id {self.open_order_id} != {order.order_id}')
elif order.ft_order_side == 'stoploss' and order.status not in ('open', ):
self.stoploss_order_id = None
self.close_rate_requested = self.stop_loss
self.exit_reason = ExitType.STOPLOSS_ON_EXCHANGE.value
if self.is_open:
logger.info(f'{order.order_type.upper()} is hit for {self}.')
else:
raise ValueError(f'Unknown order type: {order.order_type}')
if order.ft_order_side != self.entry_side:
amount_tr = amount_to_contract_precision(self.amount, self.amount_precision,
self.precision_mode, self.contract_size)
if isclose(order.safe_amount_after_fee, amount_tr, abs_tol=MATH_CLOSE_PREC):
self.close(order.safe_price)
else:
self.recalc_trade_from_orders()
elif order.ft_order_side == 'stoploss' and order.status not in ('canceled', 'open'):
self.stoploss_order_id = None
self.close_rate_requested = self.stop_loss
self.exit_reason = ExitType.STOPLOSS_ON_EXCHANGE.value
if self.is_open:
logger.info(f'{order.order_type.upper()} is hit for {self}.')
self.close(order.safe_price)
else:
raise ValueError(f'Unknown order type: {order.order_type}')
Trade.commit()
def close(self, rate: float, *, show_msg: bool = True) -> None:
@ -1253,11 +1259,13 @@ class Trade(ModelBase, LocalTrade):
Float(), nullable=True, default=0.0) # type: ignore
# Lowest price reached
min_rate: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
exit_reason: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # type: ignore
exit_reason: Mapped[Optional[str]] = mapped_column(
String(CUSTOM_TAG_MAX_LENGTH), nullable=True) # type: ignore
exit_order_status: Mapped[Optional[str]] = mapped_column(
String(100), nullable=True) # type: ignore
strategy: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # type: ignore
enter_tag: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # type: ignore
enter_tag: Mapped[Optional[str]] = mapped_column(
String(CUSTOM_TAG_MAX_LENGTH), nullable=True) # type: ignore
timeframe: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # type: ignore
trading_mode: Mapped[TradingMode] = mapped_column(
@ -1287,6 +1295,13 @@ class Trade(ModelBase, LocalTrade):
self.realized_profit = 0
self.recalc_open_trade_value()
@validates('enter_tag', 'exit_reason')
def validate_string_len(self, key, value):
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return value[:max_len]
return value
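The same @validates pattern in a self-contained form, showing that assignment (including constructor kwargs) is truncated to the mapped column's declared length; String(10) stands in for CUSTOM_TAG_MAX_LENGTH here.

from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, validates


class Base(DeclarativeBase):
    pass


class Tagged(Base):
    __tablename__ = "tagged"
    id: Mapped[int] = mapped_column(primary_key=True)
    tag: Mapped[str] = mapped_column(String(10))

    @validates("tag")
    def validate_string_len(self, key, value):
        # Read the declared column length and trim anything longer.
        max_len = getattr(self.__class__, key).prop.columns[0].type.length
        if value and len(value) > max_len:
            return value[:max_len]
        return value


assert Tagged(tag="this tag is far too long").tag == "this tag i"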
def delete(self) -> None:
for order in self.orders:

View File

@ -6,6 +6,7 @@ from typing import Any, Dict, Optional
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import ROUND_UP
from freqtrade.exchange.types import Ticker
from freqtrade.plugins.pairlist.IPairList import IPairList
@ -61,9 +62,10 @@ class PrecisionFilter(IPairList):
stop_price = ticker['last'] * self._stoploss
# Adjust stop-prices to precision
sp = self._exchange.price_to_precision(pair, stop_price)
sp = self._exchange.price_to_precision(pair, stop_price, rounding_mode=ROUND_UP)
stop_gap_price = self._exchange.price_to_precision(pair, stop_price * 0.99)
stop_gap_price = self._exchange.price_to_precision(pair, stop_price * 0.99,
rounding_mode=ROUND_UP)
logger.debug(f"{pair} - {sp} : {stop_gap_price}")
if sp <= stop_gap_price:

View File

@ -143,6 +143,9 @@ class RemotePairList(IPairList):
if self._init_done:
pairlist = self._pair_cache.get('pairlist')
if pairlist == [None]:
# Valid but empty pairlist.
return []
else:
pairlist = []
@ -181,7 +184,11 @@ class RemotePairList(IPairList):
pairlist = self._whitelist_for_active_markets(pairlist)
pairlist = pairlist[:self._number_pairs]
self._pair_cache['pairlist'] = pairlist.copy()
if pairlist:
self._pair_cache['pairlist'] = pairlist.copy()
else:
# If pairlist is empty, set a dummy value to avoid fetching again
self._pair_cache['pairlist'] = [None]
if time_elapsed != 0.0:
self.log_once(f'Pairlist Fetched in {time_elapsed} seconds.', logger.info)
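The sentinel idea above in isolation: a valid-but-empty result is cached as [None] so the TTL cache can distinguish "fetched, empty" from "expired or missing". A sketch using cachetools, which freqtrade's pairlist caches are built on:

from typing import Callable, List

from cachetools import TTLCache

_pair_cache: TTLCache = TTLCache(maxsize=1, ttl=1800)

def get_pairlist(fetch: Callable[[], List[str]]) -> List[str]:
    cached = _pair_cache.get("pairlist")
    if cached == [None]:
        return []  # valid but empty - don't refetch until the TTL expires
    if cached:
        return cached
    pairlist = fetch()
    _pair_cache["pairlist"] = pairlist.copy() if pairlist else [None]
    return pairlist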

View File

@ -108,6 +108,8 @@ class Profit(BaseModel):
max_drawdown: float
max_drawdown_abs: float
trading_volume: Optional[float]
bot_start_timestamp: int
bot_start_date: str
class SellReason(BaseModel):

View File

@ -303,11 +303,11 @@ def get_strategy(strategy: str, config=Depends(get_config)):
@router.get('/freqaimodels', response_model=FreqAIModelListResponse, tags=['freqai'])
def list_freqaimodels(config=Depends(get_config)):
from freqtrade.resolvers.freqaimodel_resolver import FreqaiModelResolver
strategies = FreqaiModelResolver.search_all_objects(
models = FreqaiModelResolver.search_all_objects(
config, False)
strategies = sorted(strategies, key=lambda x: x['name'])
models = sorted(models, key=lambda x: x['name'])
return {'freqaimodels': [x['name'] for x in strategies]}
return {'freqaimodels': [x['name'] for x in models]}
@router.get('/available_pairs', response_model=AvailablePairs, tags=['candle data'])

View File

@ -55,7 +55,7 @@ class UvicornServer(uvicorn.Server):
@contextlib.contextmanager
def run_in_thread(self):
self.thread = threading.Thread(target=self.run)
self.thread = threading.Thread(target=self.run, name='FTUvicorn')
self.thread.start()
while not self.started:
time.sleep(1e-3)

View File

@ -24,9 +24,10 @@ from freqtrade.enums import (CandleType, ExitCheckTuple, ExitType, MarketDirecti
State, TradingMode)
from freqtrade.exceptions import ExchangeError, PricingError
from freqtrade.exchange import timeframe_to_minutes, timeframe_to_msecs
from freqtrade.exchange.types import Tickers
from freqtrade.loggers import bufferHandler
from freqtrade.misc import decimals_per_coin, shorten_date
from freqtrade.persistence import Order, PairLocks, Trade
from freqtrade.persistence import KeyStoreKeys, KeyValueStore, Order, PairLocks, Trade
from freqtrade.persistence.models import PairLock
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
from freqtrade.rpc.fiat_convert import CryptoToFiatConverter
@ -543,6 +544,7 @@ class RPC:
first_date = trades[0].open_date if trades else None
last_date = trades[-1].open_date if trades else None
num = float(len(durations) or 1)
bot_start = KeyValueStore.get_datetime_value(KeyStoreKeys.BOT_START_TIME)
return {
'profit_closed_coin': profit_closed_coin_sum,
'profit_closed_percent_mean': round(profit_closed_ratio_mean * 100, 2),
@ -576,17 +578,44 @@ class RPC:
'max_drawdown': max_drawdown,
'max_drawdown_abs': max_drawdown_abs,
'trading_volume': trading_volume,
'bot_start_timestamp': int(bot_start.timestamp() * 1000) if bot_start else 0,
'bot_start_date': bot_start.strftime(DATETIME_PRINT_FORMAT) if bot_start else '',
}
def __balance_get_est_stake(
self, coin: str, stake_currency: str, balance: Wallet, tickers) -> float:
est_stake = 0.0
if coin == stake_currency:
est_stake = balance.total
if self._config.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
# in Futures, "total" includes the locked stake, and therefore all positions
est_stake = balance.free
else:
try:
pair = self._freqtrade.exchange.get_valid_pair_combination(coin, stake_currency)
rate: Optional[float] = tickers.get(pair, {}).get('last', None)
if rate:
if pair.startswith(stake_currency) and not pair.endswith(stake_currency):
rate = 1.0 / rate
est_stake = rate * balance.total
except (ExchangeError):
logger.warning(f"Could not get rate for pair {coin}.")
raise ValueError()
return est_stake
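A worked sketch of the inversion branch: if the only resolvable market quotes the stake currency as base (a hypothetical USDT/BTC pair with USDT stake), the ticker's last price is coin-per-stake, so it must be inverted before valuing the balance.

stake_currency = "USDT"
pair, rate, total = "USDT/BTC", 0.00004, 0.5  # hypothetical market; 0.5 BTC balance

if pair.startswith(stake_currency) and not pair.endswith(stake_currency):
    rate = 1.0 / rate  # BTC per USDT -> USDT per BTC

est_stake = rate * total
print(est_stake)  # 12500.0 USDT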
def _rpc_balance(self, stake_currency: str, fiat_display_currency: str) -> Dict:
""" Returns current account balance per crypto """
currencies: List[Dict] = []
total = 0.0
total_bot = 0.0
try:
tickers = self._freqtrade.exchange.get_tickers(cached=True)
tickers: Tickers = self._freqtrade.exchange.get_tickers(cached=True)
except (ExchangeError):
raise RPCException('Error getting current tickers.')
open_trades: List[Trade] = Trade.get_open_trades()
open_assets = [t.base_currency for t in open_trades]
self._freqtrade.wallets.update(require_update=False)
starting_capital = self._freqtrade.wallets.get_starting_balance()
starting_cap_fiat = self._fiat_converter.convert_amount(
@ -596,26 +625,14 @@ class RPC:
for coin, balance in self._freqtrade.wallets.get_all_balances().items():
if not balance.total:
continue
try:
est_stake = self.__balance_get_est_stake(coin, stake_currency, balance, tickers)
except ValueError:
continue
est_stake: float = 0
if coin == stake_currency:
rate = 1.0
est_stake = balance.total
if self._config.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
# in Futures, "total" includes the locked stake, and therefore all positions
est_stake = balance.free
else:
try:
pair = self._freqtrade.exchange.get_valid_pair_combination(coin, stake_currency)
rate = tickers.get(pair, {}).get('last')
if rate:
if pair.startswith(stake_currency) and not pair.endswith(stake_currency):
rate = 1.0 / rate
est_stake = rate * balance.total
except (ExchangeError):
logger.warning(f" Could not get rate for pair {coin}.")
continue
total = total + est_stake
total += est_stake
if coin == stake_currency or coin in open_assets:
total_bot += est_stake
currencies.append({
'currency': coin,
'free': balance.free,
@ -648,10 +665,12 @@ class RPC:
value = self._fiat_converter.convert_amount(
total, stake_currency, fiat_display_currency) if self._fiat_converter else 0
value_bot = self._fiat_converter.convert_amount(
total_bot, stake_currency, fiat_display_currency) if self._fiat_converter else 0
trade_count = len(Trade.get_trades_proxy())
starting_capital_ratio = (total / starting_capital) - 1 if starting_capital else 0.0
starting_cap_fiat_ratio = (value / starting_cap_fiat) - 1 if starting_cap_fiat else 0.0
starting_capital_ratio = (total_bot / starting_capital) - 1 if starting_capital else 0.0
starting_cap_fiat_ratio = (value_bot / starting_cap_fiat) - 1 if starting_cap_fiat else 0.0
return {
'currencies': currencies,
@ -1193,6 +1212,7 @@ class RPC:
from freqtrade.resolvers.strategy_resolver import StrategyResolver
strategy = StrategyResolver.load_strategy(config)
strategy.dp = DataProvider(config, exchange=exchange, pairlists=None)
strategy.ft_bot_start()
df_analyzed = strategy.analyze_ticker(_data[pair], {'pair': pair})

View File

@ -52,7 +52,7 @@ class __RPCBuyMsgBase(RPCSendMsgBase):
direction: str
limit: float
open_rate: float
order_type: Optional[str] # TODO: why optional??
order_type: str
stake_amount: float
stake_currency: str
fiat_currency: Optional[str]

View File

@ -66,10 +66,7 @@ def authorized_only(command_handler: Callable[..., None]) -> Callable[..., Any]:
chat_id = int(self._config['telegram']['chat_id'])
if cchat_id != chat_id:
logger.info(
'Rejected unauthorized message from: %s',
update.message.chat_id
)
logger.info(f'Rejected unauthorized message from: {update.message.chat_id}')
return wrapper
# Rollback session to avoid getting data stored in a transaction.
Trade.rollback()
@ -819,7 +816,7 @@ class Telegram(RPCHandler):
best_pair = stats['best_pair']
best_pair_profit_ratio = stats['best_pair_profit_ratio']
if stats['trade_count'] == 0:
markdown_msg = 'No trades yet.'
markdown_msg = f"No trades yet.\n*Bot started:* `{stats['bot_start_date']}`"
else:
# Message to display
if stats['closed_trade_count'] > 0:
@ -838,6 +835,7 @@ class Telegram(RPCHandler):
f"({profit_all_percent} \N{GREEK CAPITAL LETTER SIGMA}%)`\n"
f"∙ `{round_coin_value(profit_all_fiat, fiat_disp_cur)}`\n"
f"*Total Trade Count:* `{trade_count}`\n"
f"*Bot started:* `{stats['bot_start_date']}`\n"
f"*{'First Trade opened' if not timescale else 'Showing Profit since'}:* "
f"`{first_trade_date}`\n"
f"*Latest Trade opened:* `{latest_trade_date}`\n"
@ -1420,7 +1418,7 @@ class Telegram(RPCHandler):
def send_blacklist_msg(self, blacklist: Dict):
errmsgs = []
for pair, error in blacklist['errors'].items():
errmsgs.append(f"Error adding `{pair}` to blacklist: `{error['error_msg']}`")
errmsgs.append(f"Error: {error['error_msg']}")
if errmsgs:
self._send_msg('\n'.join(errmsgs))

View File

@ -8,7 +8,7 @@ from typing import Any, Dict, Iterator, List, Optional, Tuple, Type, Union
from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts, json_load
from freqtrade.misc import deep_merge_dicts
from freqtrade.optimize.hyperopt_tools import HyperoptTools
from freqtrade.strategy.parameters import BaseParameter
@ -124,8 +124,7 @@ class HyperStrategyMixin:
if filename.is_file():
logger.info(f"Loading parameters from file {filename}")
try:
with filename.open('r') as f:
params = json_load(f)
params = HyperoptTools.load_params(filename)
if params.get('strategy_name') != self.__class__.__name__:
raise OperationalException('Invalid parameter file provided.')
return params

View File

@ -10,7 +10,7 @@ from typing import Dict, List, Optional, Tuple, Union
import arrow
from pandas import DataFrame
from freqtrade.constants import Config, IntOrInf, ListPairsWithTimeframes
from freqtrade.constants import CUSTOM_TAG_MAX_LENGTH, Config, IntOrInf, ListPairsWithTimeframes
from freqtrade.data.dataprovider import DataProvider
from freqtrade.enums import (CandleType, ExitCheckTuple, ExitType, MarketDirection, RunMode,
SignalDirection, SignalTagType, SignalType, TradingMode)
@ -27,7 +27,6 @@ from freqtrade.wallets import Wallets
logger = logging.getLogger(__name__)
CUSTOM_EXIT_MAX_LENGTH = 64
class IStrategy(ABC, HyperStrategyMixin):
@ -619,7 +618,7 @@ class IStrategy(ABC, HyperStrategyMixin):
return df
def feature_engineering_expand_all(self, dataframe: DataFrame, period: int,
metadata: Dict, **kwargs):
metadata: Dict, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This function will automatically expand the defined features on the config defined
@ -645,7 +644,8 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
return dataframe
def feature_engineering_expand_basic(self, dataframe: DataFrame, metadata: Dict, **kwargs):
def feature_engineering_expand_basic(
self, dataframe: DataFrame, metadata: Dict, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This function will automatically expand the defined features on the config defined
@ -674,7 +674,8 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
return dataframe
def feature_engineering_standard(self, dataframe: DataFrame, metadata: Dict, **kwargs):
def feature_engineering_standard(
self, dataframe: DataFrame, metadata: Dict, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
This optional function will be called once with the dataframe of the base timeframe.
@ -698,7 +699,7 @@ class IStrategy(ABC, HyperStrategyMixin):
"""
return dataframe
def set_freqai_targets(self, dataframe: DataFrame, metadata: Dict, **kwargs):
def set_freqai_targets(self, dataframe: DataFrame, metadata: Dict, **kwargs) -> DataFrame:
"""
*Only functional with FreqAI enabled strategies*
Required function to set the targets for the model.
@ -1118,11 +1119,11 @@ class IStrategy(ABC, HyperStrategyMixin):
exit_signal = ExitType.CUSTOM_EXIT
if isinstance(reason_cust, str):
custom_reason = reason_cust
if len(reason_cust) > CUSTOM_EXIT_MAX_LENGTH:
if len(reason_cust) > CUSTOM_TAG_MAX_LENGTH:
logger.warning(f'Custom exit reason returned from '
f'custom_exit is too long and was trimmed '
f'to {CUSTOM_EXIT_MAX_LENGTH} characters.')
custom_reason = reason_cust[:CUSTOM_EXIT_MAX_LENGTH]
f'to {CUSTOM_TAG_MAX_LENGTH} characters.')
custom_reason = reason_cust[:CUSTOM_TAG_MAX_LENGTH]
else:
custom_reason = ''
if (

Some files were not shown because too many files have changed in this diff.