Mirror of https://github.com/freqtrade/freqtrade.git (synced 2024-11-10 10:21:59 +00:00)

Merge remote-tracking branch 'origin/develop' into feature/proceed-exit-while-open-order

Commit e9ba0d2ce8
.github/dependabot.yml (7 lines changed)

@@ -1,9 +1,14 @@
 version: 2
 updates:
   - package-ecosystem: docker
-    directory: "/"
+    directories:
+      - "/"
+      - "/docker"
    schedule:
      interval: daily
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major"]
    open-pull-requests-limit: 10

  - package-ecosystem: pip
.github/workflows/ci.yml (16 lines changed)

@@ -55,7 +55,7 @@ jobs:
    - name: Installation - *nix
      run: |
-       python -m pip install --upgrade "pip<=24.0" wheel
+       python -m pip install --upgrade pip wheel
        export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
        export TA_LIBRARY_PATH=${HOME}/dependencies/lib
        export TA_INCLUDE_PATH=${HOME}/dependencies/include

@@ -80,6 +80,11 @@ jobs:
        # Allow failure for coveralls
        coveralls || true

+   - name: Run json schema extract
+     # This should be kept before the repository check to ensure that the schema is up-to-date
+     run: |
+       python build_helpers/extract_config_json_schema.py
+
    - name: Check for repository changes
      run: |
        if [ -n "$(git status --porcelain)" ]; then

@@ -192,7 +197,7 @@ jobs:
    - name: Installation (python)
      run: |
-       python -m pip install --upgrade "pip<=24.0" wheel
+       python -m pip install --upgrade pip wheel
        export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
        export TA_LIBRARY_PATH=${HOME}/dependencies/lib
        export TA_INCLUDE_PATH=${HOME}/dependencies/include

@@ -379,7 +384,6 @@ jobs:
    - name: Documentation build
      run: |
        pip install -r docs/requirements-docs.txt
-       pip install mkdocs
        mkdocs build

    - name: Discord notification

@@ -422,7 +426,7 @@ jobs:
    - name: Installation - *nix
      run: |
-       python -m pip install --upgrade "pip<=24.0" wheel
+       python -m pip install --upgrade pip wheel
        export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
        export TA_LIBRARY_PATH=${HOME}/dependencies/lib
        export TA_INCLUDE_PATH=${HOME}/dependencies/include

@@ -533,12 +537,12 @@ jobs:
    - name: Publish to PyPI (Test)
-     uses: pypa/gh-action-pypi-publish@v1.9.0
+     uses: pypa/gh-action-pypi-publish@v1.10.0
      with:
        repository-url: https://test.pypi.org/legacy/

    - name: Publish to PyPI
-     uses: pypa/gh-action-pypi-publish@v1.9.0
+     uses: pypa/gh-action-pypi-publish@v1.10.0

  deploy-docker:
.github/workflows/deploy-docs.yml (new file, 55 lines)

name: Build Documentation

on:
  push:
    branches:
      - develop
  release:
    types: [published]


# disable permissions for all of the available permissions
permissions: {}


jobs:
  build-docs:
    permissions:
      contents: write  # for mike to push
    name: Deploy Docs through mike
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r docs/requirements-docs.txt

      - name: Fetch gh-pages branch
        run: |
          git fetch origin gh-pages --depth=1

      - name: Configure Git user
        run: |
          git config --local user.email "github-actions[bot]@users.noreply.github.com"
          git config --local user.name "github-actions[bot]"

      - name: Build and push Mike
        if: ${{ github.event_name == 'push' }}
        run: |
          mike deploy ${{ github.ref_name }} latest --push --update-aliases

      - name: Build and push Mike - Release
        if: ${{ github.event_name == 'release' }}
        run: |
          mike deploy ${{ github.ref_name }} stable --push --update-aliases

      - name: Show mike versions
        run: |
          mike list
.gitignore (2 lines added)

@@ -114,3 +114,5 @@ target/
 !config_examples/config_full.example.json
 !config_examples/config_kraken.example.json
 !config_examples/config_freqai.example.json
+
+docker-compose-*.yml
.pre-commit-config.yaml

@@ -2,24 +2,24 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
   - repo: https://github.com/pycqa/flake8
-    rev: "7.1.0"
+    rev: "7.1.1"
    hooks:
      - id: flake8
        additional_dependencies: [Flake8-pyproject]
        # stages: [push]

  - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: "v1.10.0"
+    rev: "v1.11.2"
    hooks:
      - id: mypy
        exclude: build_helpers
        additional_dependencies:
-         - types-cachetools==5.3.0.7
+         - types-cachetools==5.5.0.20240820
          - types-filelock==3.2.7
-         - types-requests==2.32.0.20240622
+         - types-requests==2.32.0.20240712
          - types-tabulate==0.9.0.20240106
-         - types-python-dateutil==2.9.0.20240316
+         - types-python-dateutil==2.9.0.20240821
-         - SQLAlchemy==2.0.31
+         - SQLAlchemy==2.0.32
        # stages: [push]

  - repo: https://github.com/pycqa/isort

@@ -31,7 +31,7 @@ repos:
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    # Ruff version.
-   rev: 'v0.4.10'
+   rev: 'v0.6.3'
    hooks:
      - id: ruff
Dockerfile

@@ -1,4 +1,4 @@
-FROM python:3.12.4-slim-bookworm as base
+FROM python:3.12.5-slim-bookworm as base

 # Setup env
 ENV LANG C.UTF-8

@@ -25,7 +25,7 @@ FROM base as python-deps
 RUN apt-get update \
    && apt-get -y install build-essential libssl-dev git libffi-dev libgfortran5 pkg-config cmake gcc \
    && apt-get clean \
-   && pip install --upgrade "pip<=24.0" wheel
+   && pip install --upgrade pip wheel

 # Install TA-lib
 COPY build_helpers/* /tmp/
README.md (18 lines changed)

@@ -30,6 +30,7 @@ Please read the [exchange specific notes](docs/exchanges.md) to learn about even
 - [X] [Binance](https://www.binance.com/)
 - [X] [Bitmart](https://bitmart.com/)
 - [X] [BingX](https://bingx.com/invite/0EM9RX)
+- [X] [Bybit](https://bybit.com/)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
 - [X] [HTX](https://www.htx.com/) (Former Huobi)
 - [X] [Kraken](https://kraken.com/)

@@ -86,41 +87,50 @@ For further (native) installation methods, please refer to the [Installation doc
 ```
 usage: freqtrade [-h] [-V]
-                 {trade,create-userdir,new-config,new-strategy,download-data,convert-data,convert-trade-data,list-data,backtesting,edge,hyperopt,hyperopt-list,hyperopt-show,list-exchanges,list-hyperopts,list-markets,list-pairs,list-strategies,list-timeframes,show-trades,test-pairlist,install-ui,plot-dataframe,plot-profit,webserver}
+                 {trade,create-userdir,new-config,show-config,new-strategy,download-data,convert-data,convert-trade-data,trades-to-ohlcv,list-data,backtesting,backtesting-show,backtesting-analysis,edge,hyperopt,hyperopt-list,hyperopt-show,list-exchanges,list-markets,list-pairs,list-strategies,list-freqaimodels,list-timeframes,show-trades,test-pairlist,convert-db,install-ui,plot-dataframe,plot-profit,webserver,strategy-updater,lookahead-analysis,recursive-analysis}
                  ...

 Free, open source crypto trading bot

 positional arguments:
-  {trade,create-userdir,new-config,new-strategy,download-data,convert-data,convert-trade-data,list-data,backtesting,edge,hyperopt,hyperopt-list,hyperopt-show,list-exchanges,list-hyperopts,list-markets,list-pairs,list-strategies,list-timeframes,show-trades,test-pairlist,install-ui,plot-dataframe,plot-profit,webserver}
+  {trade,create-userdir,new-config,show-config,new-strategy,download-data,convert-data,convert-trade-data,trades-to-ohlcv,list-data,backtesting,backtesting-show,backtesting-analysis,edge,hyperopt,hyperopt-list,hyperopt-show,list-exchanges,list-markets,list-pairs,list-strategies,list-freqaimodels,list-timeframes,show-trades,test-pairlist,convert-db,install-ui,plot-dataframe,plot-profit,webserver,strategy-updater,lookahead-analysis,recursive-analysis}
    trade               Trade module.
    create-userdir      Create user-data directory.
    new-config          Create new config
+   show-config         Show resolved config
    new-strategy        Create new strategy
    download-data       Download backtesting data.
    convert-data        Convert candle (OHLCV) data from one format to
                        another.
    convert-trade-data  Convert trade data from one format to another.
+   trades-to-ohlcv     Convert trade data to OHLCV data.
    list-data           List downloaded data.
    backtesting         Backtesting module.
+   backtesting-show    Show past Backtest results
+   backtesting-analysis
+                       Backtest Analysis module.
    edge                Edge module.
    hyperopt            Hyperopt module.
    hyperopt-list       List Hyperopt results
    hyperopt-show       Show details of Hyperopt results
    list-exchanges      Print available exchanges.
-   list-hyperopts      Print available hyperopt classes.
    list-markets        Print markets on exchange.
    list-pairs          Print pairs on exchange.
    list-strategies     Print available strategies.
+   list-freqaimodels   Print available freqAI models.
    list-timeframes     Print available timeframes for the exchange.
    show-trades         Show trades.
    test-pairlist       Test your pairlist configuration.
+   convert-db          Migrate database to different system
    install-ui          Install FreqUI
    plot-dataframe      Plot candles with indicators.
    plot-profit         Generate plot showing profits.
    webserver           Webserver module.
+   strategy-updater    updates outdated strategy files to the current version
+   lookahead-analysis  Check for potential look ahead bias.
+   recursive-analysis  Check for potential recursive formula issue.

-optional arguments:
+options:
   -h, --help show this help message and exit
   -V, --version show program's version number and exit
Binary files changed (contents not shown):

BIN  build_helpers/TA_Lib-0.4.32-cp310-cp310-win_amd64.whl (new file)
BIN  build_helpers/TA_Lib-0.4.32-cp311-cp311-linux_armv7l.whl (new file)
BIN  build_helpers/TA_Lib-0.4.32-cp311-cp311-win_amd64.whl (new file)
BIN  build_helpers/TA_Lib-0.4.32-cp312-cp312-win_amd64.whl (new file)
BIN  build_helpers/TA_Lib-0.4.32-cp39-cp39-win_amd64.whl (new file)

Five further binary files changed (names not shown in this view).
build_helpers/extract_config_json_schema.py (new file, 17 lines)

"""Script to extract the configuration json schema from config_schema.py file."""

from pathlib import Path

import rapidjson

from freqtrade.configuration.config_schema import CONF_SCHEMA


def extract_config_json_schema():
    schema_filename = Path(__file__).parent / "schema.json"
    with schema_filename.open("w") as f:
        rapidjson.dump(CONF_SCHEMA, f, indent=2)


if __name__ == "__main__":
    extract_config_json_schema()
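The schema emitted by this script is what the new CI step keeps in sync with the repository. It can also be used locally; below is a minimal sketch (not part of this commit) of validating a configuration file against the extracted schema, assuming the third-party `jsonschema` package is installed and the schema is draft-7 compatible. File paths are placeholders.

``` python
# Illustrative sketch only: validate a local config against build_helpers/schema.json.
# Assumes `pip install jsonschema`; paths below are placeholders.
import json
from pathlib import Path

from jsonschema import Draft7Validator

schema = json.loads(Path("build_helpers/schema.json").read_text())
config = json.loads(Path("config.json").read_text())

errors = sorted(Draft7Validator(schema).iter_errors(config), key=lambda e: list(e.path))
for error in errors:
    print(f"{'.'.join(str(p) for p in error.path)}: {error.message}")
if not errors:
    print("config.json matches the schema")
```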
build_helpers installation script (path not shown in this view):

@@ -1,6 +1,6 @@
 # vendored Wheels compiled via https://github.com/xmatthias/ta-lib-python/tree/ta_bundled_040

-python -m pip install --upgrade "pip<=24.0" wheel
+python -m pip install --upgrade pip wheel

 $pyv = python -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"
Two further binary files changed (contents not shown).

build_helpers/schema.json (new file, 1622 lines): file diff suppressed because it is too large.
Four configuration example files (paths not shown in this view) each gain a "$schema" reference:

@@ -1,4 +1,5 @@
 {
+    "$schema": "https://schema.freqtrade.io/schema.json",
     "max_open_trades": 3,
     "stake_currency": "USDT",
     "stake_amount": 0.05,

@@ -1,4 +1,5 @@
 {
+    "$schema": "https://schema.freqtrade.io/schema.json",
     "trading_mode": "futures",
     "margin_mode": "isolated",
     "max_open_trades": 5,

@@ -1,4 +1,5 @@
 {
+    "$schema": "https://schema.freqtrade.io/schema.json",
     "max_open_trades": 3,
     "stake_currency": "BTC",
     "stake_amount": 0.05,

@@ -1,4 +1,5 @@
 {
+    "$schema": "https://schema.freqtrade.io/schema.json",
     "max_open_trades": 5,
     "stake_currency": "EUR",
     "stake_amount": 10,
A docker-compose file (path not shown) drops the obsolete top-level `version` key:

@@ -1,5 +1,4 @@
 ---
-version: '3'
 services:
   freqtrade:
     image: freqtradeorg/freqtrade:stable
A second Dockerfile (Python 3.11 base; path not shown):

@@ -1,4 +1,4 @@
-FROM python:3.11.8-slim-bookworm as base
+FROM python:3.11.9-slim-bookworm as base

 # Setup env
 ENV LANG C.UTF-8

@@ -17,7 +17,7 @@ RUN mkdir /freqtrade \
    && chown ftuser:ftuser /freqtrade \
    # Allow sudoers
    && echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers \
-   && pip install --upgrade "pip<=24.0"
+   && pip install --upgrade pip

 WORKDIR /freqtrade
Two further docker-compose files (paths not shown) receive the same `version` key removal:

@@ -1,5 +1,4 @@
 ---
-version: '3'
 services:
   freqtrade:
     image: freqtradeorg/freqtrade:stable_freqaitorch

@@ -1,5 +1,4 @@
 ---
-version: '3'
 services:
   ft_jupyterlab:
     build:
Hyperopt loss example (likely docs/advanced-hyperopt.md); the loss-function signature becomes keyword-only:

@@ -30,11 +30,17 @@ class SuperDuperHyperOptLoss(IHyperOptLoss):
     """

     @staticmethod
-    def hyperopt_loss_function(results: DataFrame, trade_count: int,
-                               min_date: datetime, max_date: datetime,
-                               config: Config, processed: Dict[str, DataFrame],
+    def hyperopt_loss_function(
+        *,
+        results: DataFrame,
+        trade_count: int,
+        min_date: datetime,
+        max_date: datetime,
+        config: Config,
+        processed: Dict[str, DataFrame],
         backtest_stats: Dict[str, Any],
-        *args, **kwargs) -> float:
+        **kwargs,
+    ) -> float:
         """
         Objective function, returns smaller number for better results
         This is the legacy algorithm (used until now in freqtrade).
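With the keyword-only signature, a custom loss class only has to name the arguments it actually uses and absorb the rest with `**kwargs`. A minimal sketch (illustrative, not part of the commit) that simply scores runs by total profit:

``` python
# Illustrative sketch of a custom loss using the keyword-only signature shown above.
# Only `results` is used; every other argument is absorbed by **kwargs.
from pandas import DataFrame

from freqtrade.optimize.hyperopt import IHyperOptLoss


class ProfitOnlyHyperOptLoss(IHyperOptLoss):
    @staticmethod
    def hyperopt_loss_function(*, results: DataFrame, **kwargs) -> float:
        # Hyperopt minimizes the returned value, so negate the summed profit ratio.
        return -results["profit_ratio"].sum()
```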
docs/advanced-orderflow.md (new file, 152 lines)

# Orderflow data

This guide walks you through utilizing public trade data for advanced orderflow analysis in Freqtrade.

!!! Warning "Experimental Feature"
    The orderflow feature is currently in beta and may be subject to changes in future releases. Please report any issues or feedback on the [Freqtrade GitHub repository](https://github.com/freqtrade/freqtrade/issues).

!!! Warning "Performance"
    Orderflow requires raw trades data. This data is rather large and can cause a slow initial startup, when freqtrade needs to download the trades data for the last X candles. Additionally, enabling this feature will cause increased memory usage. Please ensure sufficient resources are available.

## Getting Started

### Enable Public Trades

In your `config.json` file, set the `use_public_trades` option to true under the `exchange` section.

```json
"exchange": {
   ...
   "use_public_trades": true,
}
```

### Configure Orderflow Processing

Define your desired settings for orderflow processing within the `orderflow` section of `config.json`. Here, you can adjust factors like:

- `cache_size`: How many previous orderflow candles are kept in cache instead of being recalculated on every new candle.
- `max_candles`: How many candles to fetch trades data for.
- `scale`: Controls the price bin size for the footprint chart.
- `stacked_imbalance_range`: Defines the minimum number of consecutive imbalanced price levels required for consideration.
- `imbalance_volume`: Filters out imbalances with volume below this threshold.
- `imbalance_ratio`: Filters out imbalances with a ratio (difference between ask and bid volume) lower than this value.

```json
"orderflow": {
    "cache_size": 1000,
    "max_candles": 1500,
    "scale": 0.5,
    "stacked_imbalance_range": 3, // needs at least this amount of imbalance next to each other
    "imbalance_volume": 1, // filters out below
    "imbalance_ratio": 3 // filters out ratio lower than
},
```

## Downloading Trade Data for Backtesting

To download historical trade data for backtesting, use the `--dl-trades` flag with the `freqtrade download-data` command.

```bash
freqtrade download-data -p BTC/USDT:USDT --timerange 20230101- --trading-mode futures --timeframes 5m --dl-trades
```

!!! Warning "Data availability"
    Not all exchanges provide public trade data. For supported exchanges, freqtrade will warn you if public trade data is not available when you start downloading data with the `--dl-trades` flag.

## Accessing Orderflow Data

Once activated, several new columns become available in your dataframe:

``` python
dataframe["trades"] # Contains information about each individual trade.
dataframe["orderflow"] # Represents a footprint chart dict (see below)
dataframe["imbalances"] # Contains information about imbalances in the order flow.
dataframe["bid"] # Total bid volume
dataframe["ask"] # Total ask volume
dataframe["delta"] # Difference between ask and bid volume.
dataframe["min_delta"] # Minimum delta within the candle
dataframe["max_delta"] # Maximum delta within the candle
dataframe["total_trades"] # Total number of trades
dataframe["stacked_imbalances_bid"] # Price level of stacked bid imbalance
dataframe["stacked_imbalances_ask"] # Price level of stacked ask imbalance
```

You can access these columns in your strategy code for further analysis. Here's an example:

``` python
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    # Calculating cumulative delta
    dataframe["cum_delta"] = cumulative_delta(dataframe["delta"])
    # Accessing total trades
    total_trades = dataframe["total_trades"]
    ...

def cumulative_delta(delta: Series):
    cumdelta = delta.cumsum()
    return cumdelta
```

### Footprint chart (`dataframe["orderflow"]`)

This column provides a detailed breakdown of buy and sell orders at different price levels, offering valuable insights into order flow dynamics. The `scale` parameter in your configuration determines the price bin size for this representation.

The `orderflow` column contains a dict with the following structure:

``` output
{
    "price": {
        "bid_amount": 0.0,
        "ask_amount": 0.0,
        "bid": 0,
        "ask": 0,
        "delta": 0.0,
        "total_volume": 0.0,
        "total_trades": 0
    }
}
```

#### Orderflow column explanation

- key: Price bin - binned at `scale` intervals
- `bid_amount`: Total volume bought at each price level.
- `ask_amount`: Total volume sold at each price level.
- `bid`: Number of buy orders at each price level.
- `ask`: Number of sell orders at each price level.
- `delta`: Difference between ask and bid volume at each price level.
- `total_volume`: Total volume (ask amount + bid amount) at each price level.
- `total_trades`: Total number of trades (ask + bid) at each price level.

By leveraging these features, you can gain valuable insights into market sentiment and potential trading opportunities based on order flow analysis.

### Raw trades data (`dataframe["trades"]`)

List with the individual trades that occurred during the candle. This data can be used for more granular analysis of order flow dynamics.

Each individual entry contains a dict with the following keys:

- `timestamp`: Timestamp of the trade.
- `date`: Date of the trade.
- `price`: Price of the trade.
- `amount`: Volume of the trade.
- `side`: Buy or sell.
- `id`: Unique identifier for the trade.
- `cost`: Total cost of the trade (price * amount).

### Imbalances (`dataframe["imbalances"]`)

This column provides a dict with information about imbalances in the order flow. An imbalance occurs when there is a significant difference between the ask and bid volume at a given price level.

Each row looks as follows - with price as index, and the corresponding bid and ask imbalance values as columns:

``` output
{
    "price": {
        "bid_imbalance": False,
        "ask_imbalance": False
    }
}
```
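As a rough illustration of how the columns documented above might be combined in a strategy, the sketch below flags candles with positive delta that also printed a stacked bid imbalance. It assumes `stacked_imbalances_bid` is empty/NaN when no stacked imbalance was found, which is an assumption for illustration rather than something stated in the commit.

``` python
# Illustrative sketch only; column semantics as described in the new orderflow document.
from pandas import DataFrame


def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    dataframe.loc[
        (dataframe["delta"] > 0)                        # more aggressive buying than selling
        & dataframe["stacked_imbalances_bid"].notna(),  # a stacked bid imbalance was detected
        "enter_long",
    ] = 1
    return dataframe
```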
Advanced setup documentation (likely docs/advanced-setup.md) gains a section on alternative database systems:

@@ -114,8 +114,46 @@ services:
       --strategy SampleStrategy
 ```

 You can use whatever naming convention you want, freqtrade1 and 2 are arbitrary. Note that you will need to use different database files, port mappings and telegram configurations for each instance, as mentioned above.

+## Use a different database system
+
+Freqtrade is using SQLAlchemy, which supports multiple different database systems. As such, a multitude of database systems should be supported.
+Freqtrade does not depend on or install any additional database driver. Please refer to the [SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html#database-urls) for installation instructions for the respective database systems.
+
+The following systems have been tested and are known to work with freqtrade:
+
+* sqlite (default)
+* PostgreSQL
+* MariaDB
+
+!!! Warning
+    By using one of the below database systems, you acknowledge that you know how to manage such a system. The freqtrade team will not provide any support with setup or maintenance (or backups) of the below database systems.
+
+### PostgreSQL
+
+Installation:
+`pip install psycopg2-binary`
+
+Usage:
+`... --db-url postgresql+psycopg2://<username>:<password>@localhost:5432/<database>`
+
+Freqtrade will automatically create the tables necessary upon startup.
+
+If you're running different instances of Freqtrade, you must either set up one database per instance or use different users / schemas for your connections.
+
+### MariaDB / MySQL
+
+Freqtrade supports MariaDB by using SQLAlchemy, which supports multiple different database systems.
+
+Installation:
+`pip install pymysql`
+
+Usage:
+`... --db-url mysql+pymysql://<username>:<password>@localhost:3306/<database>`
+
 ## Configure the bot running as a systemd service
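For either of the databases above, the connection string can also be assembled programmatically. A small sketch (not part of the commit) using SQLAlchemy's URL helper; all credentials and names below are placeholders:

``` python
# Illustrative sketch: build a PostgreSQL connection string for --db-url.
# SQLAlchemy is already a freqtrade dependency; credentials below are placeholders.
from sqlalchemy.engine import URL

db_url = URL.create(
    drivername="postgresql+psycopg2",
    username="freqtrade_user",
    password="super-secret",
    host="localhost",
    port=5432,
    database="freqtrade_live",
)
print(db_url.render_as_string(hide_password=False))
# postgresql+psycopg2://freqtrade_user:super-secret@localhost:5432/freqtrade_live
```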
Backtesting documentation (likely docs/backtesting.md), updated assumptions:

@@ -530,10 +530,10 @@ You can then load the trades to perform further analysis as shown in the [data a
 Since backtesting lacks some detailed information about what happens within a candle, it needs to take a few assumptions:

 - Exchange [trading limits](#trading-limits-in-backtesting) are respected
-- Entries happen at open-price
+- Entries happen at open-price unless a custom price logic has been specified
 - All orders are filled at the requested price (no slippage) as long as the price is within the candle's high/low range
 - Exit-signal exits happen at open-price of the consecutive candle
-- Exits don't free their trade slot for a new trade until the next candle
+- Exits free their trade slot for a new trade with a different pair
 - Exit-signal is favored over Stoploss, because exit-signals are assumed to trigger on candle's open
 - ROI
   - Exits are compared to high - but the ROI value is used (e.g. ROI = 2%, high=5% - so the exit will be at 2%)
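The ROI assumption in the last bullet can be made concrete with a couple of lines (illustrative numbers only):

``` python
# Illustrative: ROI exits use the ROI target price, not the candle high.
open_price = 100.0
candle_high = open_price * 1.05   # candle reached +5%
roi_target = 0.02                 # minimal_roi of 2%

exit_price = open_price * (1 + roi_target)   # 102.0: the backtest books the exit here
assert exit_price <= candle_high
```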
Bot usage documentation (likely docs/bot-usage.md): the `freqtrade --help` output shown on this page receives the same update as in README.md above (new sub-commands show-config, trades-to-ohlcv, backtesting-show, backtesting-analysis, list-freqaimodels, convert-db, strategy-updater, lookahead-analysis and recursive-analysis; list-hyperopts removed; the "optional arguments:" heading becomes "options:").
Configuration documentation (likely docs/configuration.md):

@@ -123,6 +123,19 @@ This is similar to using multiple `--config` parameters, but simpler in usage as
 If multiple files are in the `add_config_files` section, then they will be assumed to be at identical levels, having the last occurrence override the earlier config (unless a parent already defined such a key).

+## Editor autocomplete and validation
+
+If you are using an editor that supports JSON schema, you can use the schema provided by Freqtrade to get autocompletion and validation of your configuration file by adding the following line to the top of your configuration file:
+
+``` json
+{
+    "$schema": "https://schema.freqtrade.io/schema.json",
+}
+```
+
+??? Note "Develop version"
+    The develop schema is available as `https://schema.freqtrade.io/schema_dev.json` - though we recommend to stick to the stable version for the best experience.
+
 ## Configuration parameters

 The table below will list all configuration parameters available.
@@ -204,9 +217,10 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `exchange.uid` | API uid to use for the exchange. Only required when you are in production mode and for exchanges that use uid for API requests.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
 | `exchange.pair_whitelist` | List of pairs to use by the bot for trading and to check for potential trades during backtesting. Supports regex pairs as `.*/BTC`. Not used by VolumePairList. [More information](plugins.md#pairlists-and-pairlist-handlers). <br> **Datatype:** List
 | `exchange.pair_blacklist` | List of pairs the bot must absolutely avoid for trading and backtesting. [More information](plugins.md#pairlists-and-pairlist-handlers). <br> **Datatype:** List
-| `exchange.ccxt_config` | Additional CCXT parameters passed to both ccxt instances (sync and async). This is usually the correct place for additional ccxt configurations. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://ccxt.readthedocs.io/en/latest/manual.html#instantiation). Please avoid adding exchange secrets here (use the dedicated fields instead), as they may be contained in logs. <br> **Datatype:** Dict
-| `exchange.ccxt_sync_config` | Additional CCXT parameters passed to the regular (sync) ccxt instance. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://ccxt.readthedocs.io/en/latest/manual.html#instantiation) <br> **Datatype:** Dict
-| `exchange.ccxt_async_config` | Additional CCXT parameters passed to the async ccxt instance. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://ccxt.readthedocs.io/en/latest/manual.html#instantiation) <br> **Datatype:** Dict
+| `exchange.ccxt_config` | Additional CCXT parameters passed to both ccxt instances (sync and async). This is usually the correct place for additional ccxt configurations. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://docs.ccxt.com/#/README?id=overriding-exchange-properties-upon-instantiation). Please avoid adding exchange secrets here (use the dedicated fields instead), as they may be contained in logs. <br> **Datatype:** Dict
+| `exchange.ccxt_sync_config` | Additional CCXT parameters passed to the regular (sync) ccxt instance. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://docs.ccxt.com/#/README?id=overriding-exchange-properties-upon-instantiation) <br> **Datatype:** Dict
+| `exchange.ccxt_async_config` | Additional CCXT parameters passed to the async ccxt instance. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://docs.ccxt.com/#/README?id=overriding-exchange-properties-upon-instantiation) <br> **Datatype:** Dict
+| `exchange.enable_ws` | Enable the usage of Websockets for the exchange. <br>[More information](#consuming-exchange-websockets).<br>*Defaults to `true`.* <br> **Datatype:** Boolean
 | `exchange.markets_refresh_interval` | The interval in minutes in which markets are reloaded. <br>*Defaults to `60` minutes.* <br> **Datatype:** Positive Integer
 | `exchange.skip_pair_validation` | Skip pairlist validation on startup.<br>*Defaults to `false`*<br> **Datatype:** Boolean
 | `exchange.skip_open_order_update` | Skips open order updates on startup should the exchange cause problems. Only relevant in live conditions.<br>*Defaults to `false`*<br> **Datatype:** Boolean
@@ -409,6 +423,8 @@ Or another example if your position adjustment assumes it can do 1 additional bu
 --8<-- "includes/pricing.md"

+## Further Configuration details
+
 ### Understand minimal_roi

 The `minimal_roi` configuration parameter is a JSON object where the key is a duration
@@ -614,6 +630,30 @@ Freqtrade supports both Demo and Pro coingecko API keys.
 The Coingecko API key is NOT required for the bot to function correctly.
 It is only used for the conversion of coin to fiat in the Telegram reports, which usually also work without API key.

+## Consuming exchange Websockets
+
+Freqtrade can consume websockets through ccxt.pro.
+
+Freqtrade aims to ensure data is available at all times.
+Should the websocket connection fail (or be disabled), the bot will fall back to REST API calls.
+
+Should you experience problems you suspect are caused by websockets, you can disable these via the setting `exchange.enable_ws`, which defaults to true.
+
+```jsonc
+"exchange": {
+    // ...
+    "enable_ws": false,
+    // ...
+}
+```
+
+Should you be required to use a proxy, please refer to the [proxy section](#using-proxy-with-freqtrade) for more information.
+
+!!! Info "Rollout"
+    We're rolling this out slowly, ensuring stability of your bots.
+    Currently, usage is limited to ohlcv data streams.
+    It's also limited to a few exchanges, with new exchanges being added on an ongoing basis.
+
 ## Using Dry-run mode

 We recommend starting the bot in the Dry-run mode to see how your bot will
@@ -650,9 +690,9 @@ Once you will be happy with your bot performance running in the Dry-run mode, yo
 * API-keys may or may not be provided. Only Read-Only operations (i.e. operations that do not alter account state) on the exchange are performed in dry-run mode.
 * Wallets (`/balance`) are simulated based on `dry_run_wallet`.
 * Orders are simulated, and will not be posted to the exchange.
-* Market orders fill based on orderbook volume the moment the order is placed.
+* Market orders fill based on orderbook volume the moment the order is placed, with a maximum slippage of 5%.
 * Limit orders fill once the price reaches the defined level - or time out based on `unfilledtimeout` settings.
-* Limit orders will be converted to market orders if they cross the price by more than 1%.
+* Limit orders will be converted to market orders if they cross the price by more than 1%, and will be filled immediately based on regular market order rules (see point about Market orders above).
 * In combination with `stoploss_on_exchange`, the stop_loss price is assumed to be filled.
 * Open orders (not trades, which are stored in the database) are kept open after bot restarts, with the assumption that they were not filled while being offline.
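The two updated bullets describe fills in dry-run mode; a simplified sketch of what a 5%-capped market-order fill could look like is shown below. The function name and book structure are assumptions for illustration, not freqtrade's actual implementation.

``` python
# Illustrative sketch: average fill price of a dry-run market buy, capped at 5% slippage.
from typing import List, Tuple

MAX_SLIPPAGE = 0.05  # maximum slippage described above


def simulated_market_buy_price(amount: float, asks: List[Tuple[float, float]]) -> float:
    """Walk the ask side of the book; cap the result at best_ask * (1 + MAX_SLIPPAGE)."""
    best_ask = asks[0][0]
    capped_price = best_ask * (1 + MAX_SLIPPAGE)
    remaining, cost = amount, 0.0
    for price, volume in asks:
        take = min(remaining, volume)
        cost += take * price
        remaining -= take
        if remaining <= 0:
            break
    if remaining > 0:            # book too thin: assume the rest fills at the capped price
        cost += remaining * capped_price
    return min(cost / amount, capped_price)


print(simulated_market_buy_price(3.0, [(100.0, 1.0), (101.0, 1.0), (102.0, 1.0)]))  # 101.0
```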
@@ -702,7 +742,7 @@ You should also make sure to read the [Exchanges](exchanges.md) section of the d
 **NEVER** share your private configuration file or your exchange keys with anyone!

-### Using proxy with Freqtrade
+## Using a proxy with Freqtrade

 To use a proxy with freqtrade, export your proxy settings using the variables `"HTTP_PROXY"` and `"HTTPS_PROXY"` set to the appropriate values.
 This will have the proxy settings applied to everything (telegram, coingecko, ...) **except** for exchange requests.

@@ -713,7 +753,7 @@ export HTTPS_PROXY="http://addr:port"
 freqtrade
 ```

-#### Proxy exchange requests
+### Proxy exchange requests

 To use a proxy for exchange connections - you will have to define the proxies as part of the ccxt configuration.

@@ -722,6 +762,7 @@ To use a proxy for exchange connections - you will have to define the proxies as
 "exchange": {
     "ccxt_config": {
         "httpsProxy": "http://addr:port",
+        "wsProxy": "http://addr:port",
     }
   }
 }
Data download documentation (likely docs/data-download.md): `list-data` gains trades-data options and rich-table output:

@@ -423,7 +423,8 @@ You can get a list of downloaded data using the `list-data` sub-command.
 usage: freqtrade list-data [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH]
                            [--userdir PATH] [--exchange EXCHANGE]
                            [--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
-                           [-p PAIRS [PAIRS ...]]
+                           [--data-format-trades {json,jsongz,hdf5,feather,parquet}]
+                           [--trades] [-p PAIRS [PAIRS ...]]
                            [--trading-mode {spot,margin,futures}]
                            [--show-timerange]

@@ -433,6 +434,10 @@ options:
   --data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
                         Storage format for downloaded candle (OHLCV) data.
                         (default: `feather`).
+  --data-format-trades {json,jsongz,hdf5,feather,parquet}
+                        Storage format for downloaded trades data. (default:
+                        `feather`).
+  --trades              Work on trades data instead of OHLCV data.
   -p PAIRS [PAIRS ...], --pairs PAIRS [PAIRS ...]
                         Limit command to these pairs. Pairs are space-
                         separated.

@@ -465,13 +470,29 @@ Common arguments:
 ```bash
 > freqtrade list-data --userdir ~/.freqtrade/user_data/

 Found 33 pair / timeframe combinations.
-pairs       timeframe
----------- -----------------------------------------
-ADA/BTC     5m, 15m, 30m, 1h, 2h, 4h, 6h, 12h, 1d
-ADA/ETH     5m, 15m, 30m, 1h, 2h, 4h, 6h, 12h, 1d
-ETH/BTC     5m, 15m, 30m, 1h, 2h, 4h, 6h, 12h, 1d
-ETH/USDT    5m, 15m, 30m, 1h, 2h, 4h
+┏━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━┓
+┃ Pair          ┃ Timeframe                                 ┃ Type ┃
+┡━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━┩
+│ ADA/BTC       │ 5m, 15m, 30m, 1h, 2h, 4h, 6h, 12h, 1d     │ spot │
+│ ADA/ETH       │ 5m, 15m, 30m, 1h, 2h, 4h, 6h, 12h, 1d     │ spot │
+│ ETH/BTC       │ 5m, 15m, 30m, 1h, 2h, 4h, 6h, 12h, 1d     │ spot │
+│ ETH/USDT      │ 5m, 15m, 30m, 1h, 2h, 4h                  │ spot │
+└───────────────┴───────────────────────────────────────────┴──────┘
+
+```
+
+Show all trades data including from/to timerange
+
+``` bash
+> freqtrade list-data --show --trades
+Found trades data for 1 pair.
+┏━━━━━━━━━┳━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━┓
+┃ Pair    ┃ Type ┃ From                ┃ To                  ┃ Trades ┃
+┡━━━━━━━━━╇━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━┩
+│ XRP/ETH │ spot │ 2019-10-11 00:00:11 │ 2019-10-13 11:19:28 │ 12477  │
+└─────────┴──────┴─────────────────────┴─────────────────────┴────────┘
 ```

 ## Trades (tick) data
Developer documentation (likely docs/developer.md):

@@ -22,7 +22,7 @@ This will spin up a local server (usually on port 8000) so you can see if everyt
 ## Developer setup

 To configure a development environment, you can either use the provided [DevContainer](#devcontainer-setup), or use the `setup.sh` script and answer "y" when asked "Do you want to install dependencies for dev [y/N]? ".
-Alternatively (e.g. if your system is not supported by the setup.sh script), follow the manual installation process and run `pip3 install -e .[all]`.
+Alternatively (e.g. if your system is not supported by the setup.sh script), follow the manual installation process and run `pip3 install -r requirements-dev.txt` - followed by `pip3 install -e .[all]`.

 This will install all required tools for development, including `pytest`, `ruff`, `mypy`, and `coveralls`.

@@ -481,21 +481,24 @@ Once the PR against stable is merged (best right after merging):
 ### pypi

-!!! Note
-    This process is now automated as part of Github Actions.
+!!! Warning "Manual Releases"
+    This process is automated as part of Github Actions.
+    Manual pypi pushes should not be necessary.

-To create a pypi release, please run the following commands:
+??? example "Manual release"
+    To manually create a pypi release, please run the following commands:

     Additional requirement: `wheel`, `twine` (for uploading), account on pypi with proper permissions.

     ``` bash
-    python setup.py sdist bdist_wheel
+    pip install -U build
+    python -m build --sdist --wheel

     # For pypi test (to check if some change to the installation did work)
     twine upload --repository-url https://test.pypi.org/legacy/ dist/*

     # For production:
     twine upload dist/*
     ```

     Please don't push non-releases to the productive / real pypi instance.
FreqUI settings documentation (file path not shown); a blank line is removed:

@@ -58,7 +58,6 @@ The plot configuration can be accessed via the "Plot Configurator" (Cog icon) bu
 ### Settings

-
 Several UI related settings can be changed by accessing the settings page.

 Things you can change (among others):
@ -73,23 +73,26 @@ Backtesting mode requires [downloading the necessary data](#downloading-data-to-
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
### Saving prediction data
|
### Saving backtesting prediction data
|
||||||
|
|
||||||
To allow for tweaking your strategy (**not** the features!), FreqAI will automatically save the predictions during backtesting so that they can be reused for future backtests and live runs using the same `identifier` model. This provides a performance enhancement geared towards enabling **high-level hyperopting** of entry/exit criteria.
|
To allow for tweaking your strategy (**not** the features!), FreqAI will automatically save the predictions during backtesting so that they can be reused for future backtests and live runs using the same `identifier` model. This provides a performance enhancement geared towards enabling **high-level hyperopting** of entry/exit criteria.
|
||||||
|
|
||||||
An additional directory called `backtesting_predictions`, which contains all the predictions stored in `hdf` format, will be created in the `unique-id` folder.
|
An additional directory called `backtesting_predictions`, which contains all the predictions stored in `feather` format, will be created in the `unique-id` folder.
|
||||||
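Should you want to inspect these cached predictions outside of FreqAI, the feather files can be read back with pandas. The directory layout and file name below are only an assumed example - check your own `unique-id` folder for the actual names:

``` python
import pandas as pd

# Assumed example path - the actual identifier and file name depend on your config.
predictions_file = (
    "user_data/models/unique-id/backtesting_predictions/"
    "example_predictions.feather"
)

# Load the cached predictions into a DataFrame for inspection.
predictions = pd.read_feather(predictions_file)
print(predictions.head())
```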
|
|
||||||
To change your **features**, you **must** set a new `identifier` in the config to signal to FreqAI to train new models.
|
To change your **features**, you **must** set a new `identifier` in the config to signal to FreqAI to train new models.
|
||||||
|
|
||||||
To save the models generated during a particular backtest so that you can start a live deployment from one of them instead of training a new model, you must set `save_backtest_models` to `True` in the config.
|
To save the models generated during a particular backtest so that you can start a live deployment from one of them instead of training a new model, you must set `save_backtest_models` to `True` in the config.
|
||||||
|
|
||||||
|
!!! Note
|
||||||
|
To ensure that the model can be reused, FreqAI will call your strategy with a dataframe of length 1.
|
||||||
|
If your strategy requires more data than this to generate the same features, you can't reuse backtest predictions for live deployment and need to update your `identifier` for each new backtest.
|
||||||
|
|
||||||
### Backtest live collected predictions
|
### Backtest live collected predictions
|
||||||
|
|
||||||
FreqAI allows you to reuse live historic predictions through the backtest parameter `--freqai-backtest-live-models`. This can be useful when you want to reuse predictions generated in dry/live runs for comparison or further analysis.
|
FreqAI allows you to reuse live historic predictions through the backtest parameter `--freqai-backtest-live-models`. This can be useful when you want to reuse predictions generated in dry/live runs for comparison or further analysis.
|
||||||
|
|
||||||
The `--timerange` parameter must not be provided, as it will be automatically calculated from the data in the historic predictions file.
|
The `--timerange` parameter must not be provided, as it will be automatically calculated from the data in the historic predictions file.
|
||||||
|
|
||||||
|
|
||||||
### Downloading data to cover the full backtest period
|
### Downloading data to cover the full backtest period
|
||||||
|
|
||||||
For live/dry deployments, FreqAI will download the necessary data automatically. However, to use backtesting functionality, you need to download the necessary data using `download-data` (details [here](data-download.md#data-downloading)). You need to pay careful attention to understanding how much *additional* data needs to be downloaded to ensure that there is a sufficient amount of training data *before* the start of the backtesting time range. The amount of additional data can be roughly estimated by moving the start date of the time range backwards by `train_period_days` and the `startup_candle_count` (see the [parameter table](freqai-parameter-table.md) for detailed descriptions of these parameters) from the beginning of the desired backtesting time range.
|
For live/dry deployments, FreqAI will download the necessary data automatically. However, to use backtesting functionality, you need to download the necessary data using `download-data` (details [here](data-download.md#data-downloading)). You need to pay careful attention to understanding how much *additional* data needs to be downloaded to ensure that there is a sufficient amount of training data *before* the start of the backtesting time range. The amount of additional data can be roughly estimated by moving the start date of the time range backwards by `train_period_days` and the `startup_candle_count` (see the [parameter table](freqai-parameter-table.md) for detailed descriptions of these parameters) from the beginning of the desired backtesting time range.
|
||||||
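As a rough sketch of that estimate (the timeframe and parameter values below are purely illustrative - use the ones from your own config):

``` python
from datetime import datetime, timedelta

backtest_start = datetime(2024, 1, 1)   # start of the desired backtesting time range
train_period_days = 30                  # from the FreqAI config
startup_candle_count = 400              # from the strategy
timeframe_minutes = 60                  # e.g. 1h candles

# Move the download start back by the training window plus the startup candles.
download_start = backtest_start - (
    timedelta(days=train_period_days)
    + timedelta(minutes=startup_candle_count * timeframe_minutes)
)
print(f"Downloaded data should start no later than {download_start}")
```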
|
|
|
@ -2,11 +2,11 @@
|
||||||
|
|
||||||
Pairlist Handlers define the list of pairs (pairlist) that the bot should trade. They are configured in the `pairlists` section of the configuration settings.
|
Pairlist Handlers define the list of pairs (pairlist) that the bot should trade. They are configured in the `pairlists` section of the configuration settings.
|
||||||
|
|
||||||
In your configuration, you can use Static Pairlist (defined by the [`StaticPairList`](#static-pair-list) Pairlist Handler) and Dynamic Pairlist (defined by the [`VolumePairList`](#volume-pair-list) Pairlist Handler).
|
In your configuration, you can use Static Pairlist (defined by the [`StaticPairList`](#static-pair-list) Pairlist Handler) and Dynamic Pairlist (defined by the [`VolumePairList`](#volume-pair-list) and [`PercentChangePairList`](#percent-change-pair-list) Pairlist Handlers).
|
||||||
|
|
||||||
Additionally, [`AgeFilter`](#agefilter), [`PrecisionFilter`](#precisionfilter), [`PriceFilter`](#pricefilter), [`ShuffleFilter`](#shufflefilter), [`SpreadFilter`](#spreadfilter) and [`VolatilityFilter`](#volatilityfilter) act as Pairlist Filters, removing certain pairs and/or moving their positions in the pairlist.
|
Additionally, [`AgeFilter`](#agefilter), [`PrecisionFilter`](#precisionfilter), [`PriceFilter`](#pricefilter), [`ShuffleFilter`](#shufflefilter), [`SpreadFilter`](#spreadfilter) and [`VolatilityFilter`](#volatilityfilter) act as Pairlist Filters, removing certain pairs and/or moving their positions in the pairlist.
|
||||||
|
|
||||||
If multiple Pairlist Handlers are used, they are chained and a combination of all Pairlist Handlers forms the resulting pairlist the bot uses for trading and backtesting. Pairlist Handlers are executed in the sequence they are configured. You can define either `StaticPairList`, `VolumePairList`, `ProducerPairList`, `RemotePairList` or `MarketCapPairList` as the starting Pairlist Handler.
|
If multiple Pairlist Handlers are used, they are chained and a combination of all Pairlist Handlers forms the resulting pairlist the bot uses for trading and backtesting. Pairlist Handlers are executed in the sequence they are configured. You can define either `StaticPairList`, `VolumePairList`, `ProducerPairList`, `RemotePairList`, `MarketCapPairList` or `PercentChangePairList` as the starting Pairlist Handler.
|
||||||
|
|
||||||
Inactive markets are always removed from the resulting pairlist. Explicitly blacklisted pairs (those in the `pair_blacklist` configuration setting) are also always removed from the resulting pairlist.
|
Inactive markets are always removed from the resulting pairlist. Explicitly blacklisted pairs (those in the `pair_blacklist` configuration setting) are also always removed from the resulting pairlist.
|
||||||
|
|
||||||
|
@ -22,6 +22,7 @@ You may also use something like `.*DOWN/BTC` or `.*UP/BTC` to exclude leveraged
|
||||||
|
|
||||||
* [`StaticPairList`](#static-pair-list) (default, if not configured differently)
|
* [`StaticPairList`](#static-pair-list) (default, if not configured differently)
|
||||||
* [`VolumePairList`](#volume-pair-list)
|
* [`VolumePairList`](#volume-pair-list)
|
||||||
|
* [`PercentChangePairList`](#percent-change-pair-list)
|
||||||
* [`ProducerPairList`](#producerpairlist)
|
* [`ProducerPairList`](#producerpairlist)
|
||||||
* [`RemotePairList`](#remotepairlist)
|
* [`RemotePairList`](#remotepairlist)
|
||||||
* [`MarketCapPairList`](#marketcappairlist)
|
* [`MarketCapPairList`](#marketcappairlist)
|
||||||
|
@ -152,6 +153,89 @@ More sophisticated approach can be used, by using `lookback_timeframe` for candl
|
||||||
!!! Note
|
!!! Note
|
||||||
`VolumePairList` does not support backtesting mode.
|
`VolumePairList` does not support backtesting mode.
|
||||||
|
|
||||||
|
#### Percent Change Pair List
|
||||||
|
|
||||||
|
`PercentChangePairList` filters and sorts pairs based on the percentage change in their price over the last 24 hours, or over a custom lookback window defined via the advanced options below. This allows traders to focus on assets that have experienced significant price movements, either positive or negative.
|
||||||
|
|
||||||
|
**Configuration Options**
|
||||||
|
|
||||||
|
* `number_assets`: Specifies the number of top pairs to select based on the 24-hour percentage change.
|
||||||
|
* `min_value`: Sets a minimum percentage change threshold. Pairs with a percentage change below this value will be filtered out.
|
||||||
|
* `max_value`: Sets a maximum percentage change threshold. Pairs with a percentage change above this value will be filtered out.
|
||||||
|
* `sort_direction`: Specifies the order in which pairs are sorted based on their percentage change. Accepts two values: `asc` for ascending order and `desc` for descending order.
|
||||||
|
* `refresh_period`: Defines the interval (in seconds) at which the pairlist will be refreshed. The default is 1800 seconds (30 minutes).
|
||||||
|
* `lookback_days`: Number of days to look back. When `lookback_days` is selected, the `lookback_timeframe` defaults to 1 day.
|
||||||
|
* `lookback_timeframe`: Timeframe to use for the lookback period.
|
||||||
|
* `lookback_period`: Number of periods to look back at.
|
||||||
|
|
||||||
|
When `PercentChangePairList` is used after other Pairlist Handlers, it will operate on the outputs of those handlers. If it is the leading Pairlist Handler, it will select pairs from all available markets with the specified stake currency.
|
||||||
|
|
||||||
|
`PercentChangePairList` uses ticker data from the exchange, provided via the ccxt library:
|
||||||
|
The percentage change is calculated as the change in price over the last 24 hours.
|
||||||
|
|
||||||
|
??? Note "Unsupported exchanges"
|
||||||
|
On some exchanges (like HTX), the regular `PercentChangePairList` mode does not work, as the API does not natively provide the 24h percent change in price. This can be worked around by using candle data to calculate the percentage change. To roughly simulate the 24h percent change, you can use the following configuration. Please note that these pairlists will only refresh once per day.
|
||||||
|
```json
|
||||||
|
"pairlists": [
|
||||||
|
{
|
||||||
|
"method": "PercentChangePairList",
|
||||||
|
"number_assets": 20,
|
||||||
|
"min_value": 0,
|
||||||
|
"refresh_period": 86400,
|
||||||
|
"lookback_days": 1
|
||||||
|
}
|
||||||
|
],
|
||||||
|
```
|
||||||
|
|
||||||
|
**Example Configuration to Read from Ticker**
|
||||||
|
|
||||||
|
```json
|
||||||
|
"pairlists": [
|
||||||
|
{
|
||||||
|
"method": "PercentChangePairList",
|
||||||
|
"number_assets": 15,
|
||||||
|
"min_value": -10,
|
||||||
|
"max_value": 50
|
||||||
|
}
|
||||||
|
],
|
||||||
|
```
|
||||||
|
|
||||||
|
In this configuration:
|
||||||
|
|
||||||
|
1. The top 15 pairs are selected based on the highest percentage change in price over the last 24 hours.
|
||||||
|
2. Only pairs with a percentage change between -10% and 50% are considered.
|
||||||
|
|
||||||
|
**Example Configuration to Read from Candles**
|
||||||
|
|
||||||
|
```json
|
||||||
|
"pairlists": [
|
||||||
|
{
|
||||||
|
"method": "PercentChangePairList",
|
||||||
|
"number_assets": 15,
|
||||||
|
"sort_key": "percentage",
|
||||||
|
"min_value": 0,
|
||||||
|
"refresh_period": 3600,
|
||||||
|
"lookback_timeframe": "1h",
|
||||||
|
"lookback_period": 72
|
||||||
|
}
|
||||||
|
],
|
||||||
|
```
|
||||||
|
|
||||||
|
This example builds the percent change pairs based on a rolling period of 3 days of 1-hour candles by using `lookback_timeframe` for candle size and `lookback_period` which specifies the number of candles.
|
||||||
|
|
||||||
|
The percent change in price is calculated using the following formula, which expresses the percentage difference between the current candle's close price and the previous candle's close price, as defined by the specified timeframe and lookback period:
|
||||||
|
|
||||||
|
$$ \text{Percent Change} = \frac{\text{Current Close} - \text{Previous Close}}{\text{Previous Close}} \times 100 $$
|
||||||
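As a minimal illustration of this formula in plain Python (not part of the pairlist implementation itself):

``` python
def percent_change(previous_close: float, current_close: float) -> float:
    """Percentage difference between the previous and the current close."""
    return (current_close - previous_close) / previous_close * 100

# A move from 100 to 103 over the lookback window yields 3.0 (%).
print(percent_change(100.0, 103.0))
```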
|
|
||||||
|
!!! Warning "Range look back and refresh period"
|
||||||
|
When used in conjunction with `lookback_days` and `lookback_timeframe`, the `refresh_period` cannot be smaller than the candle size in seconds, as this would result in unnecessary requests to the exchange's API.
|
||||||
|
|
||||||
|
!!! Warning "Performance implications when using lookback range"
|
||||||
|
If used in the first position in combination with lookback, the computation of the range-based percent change can be time- and resource-consuming, as it downloads candles for all tradable pairs. Hence it's highly advised to first narrow the pairlist down using the standard (ticker-based) `PercentChangePairList` approach before applying the range-based percent-change calculation.
|
||||||
|
|
||||||
|
!!! Note "Backtesting"
|
||||||
|
`PercentChangePairList` does not support backtesting mode.
|
||||||
|
|
||||||
#### ProducerPairList
|
#### ProducerPairList
|
||||||
|
|
||||||
With `ProducerPairList`, you can reuse the pairlist from a [Producer](producer-consumer.md) without explicitly defining the pairlist on each consumer.
|
With `ProducerPairList`, you can reuse the pairlist from a [Producer](producer-consumer.md) without explicitly defining the pairlist on each consumer.
|
||||||
|
|
|
@ -36,6 +36,7 @@ All protection end times are rounded up to the next candle to avoid sudden, unex
|
||||||
| `lookback_period_candles` | Only trades that completed within the last `lookback_period_candles` candles will be considered. This setting may be ignored by some Protections. <br> **Datatype:** Positive integer (in candles).
|
| `lookback_period_candles` | Only trades that completed within the last `lookback_period_candles` candles will be considered. This setting may be ignored by some Protections. <br> **Datatype:** Positive integer (in candles).
|
||||||
| `lookback_period` | Only trades that completed after `current_time - lookback_period` will be considered. <br>Cannot be used together with `lookback_period_candles`. <br>This setting may be ignored by some Protections. <br> **Datatype:** Float (in minutes)
|
| `lookback_period` | Only trades that completed after `current_time - lookback_period` will be considered. <br>Cannot be used together with `lookback_period_candles`. <br>This setting may be ignored by some Protections. <br> **Datatype:** Float (in minutes)
|
||||||
| `trade_limit` | Number of trades required at minimum (not used by all Protections). <br> **Datatype:** Positive integer
|
| `trade_limit` | Number of trades required at minimum (not used by all Protections). <br> **Datatype:** Positive integer
|
||||||
|
| `unlock_at` | Time when trading will be unlocked regularly (not used by all Protections). <br> **Datatype:** string <br>**Input Format:** "HH:MM" (24-hours)
|
||||||
|
|
||||||
!!! Note "Durations"
|
!!! Note "Durations"
|
||||||
Durations (`stop_duration*` and `lookback_period*`) can be defined in either minutes or candles.
|
Durations (`stop_duration*` and `lookback_period*`) can be defined in either minutes or candles.
|
||||||
|
@ -44,7 +45,7 @@ All protection end times are rounded up to the next candle to avoid sudden, unex
|
||||||
#### Stoploss Guard
|
#### Stoploss Guard
|
||||||
|
|
||||||
`StoplossGuard` selects all trades within `lookback_period` in minutes (or in candles when using `lookback_period_candles`).
|
`StoplossGuard` selects all trades within `lookback_period` in minutes (or in candles when using `lookback_period_candles`).
|
||||||
If `trade_limit` or more trades resulted in stoploss, trading will stop for `stop_duration` in minutes (or in candles when using `stop_duration_candles`).
|
If `trade_limit` or more trades resulted in stoploss, trading will stop for `stop_duration` in minutes (or in candles when using `stop_duration_candles`, or until the set time when using `unlock_at`).
|
||||||
|
|
||||||
This applies across all pairs, unless `only_per_pair` is set to true, which will then only look at one pair at a time.
|
This applies across all pairs, unless `only_per_pair` is set to true, which will then only look at one pair at a time.
|
||||||
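A sketch of how such a guard could be defined using the `protections` property pattern, here unlocking trading again at a fixed time of day via `unlock_at` instead of a fixed duration (all values are illustrative only):

``` python
class AwesomeStrategy(IStrategy):

    @property
    def protections(self):
        return [
            {
                "method": "StoplossGuard",
                "lookback_period_candles": 24,
                "trade_limit": 4,
                # Unlock trading again at 03:00 instead of after a fixed stop_duration.
                "unlock_at": "03:00",
                "only_per_pair": False
            }
        ]
```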
|
|
||||||
|
@ -97,7 +98,7 @@ def protections(self):
|
||||||
#### Low Profit Pairs
|
#### Low Profit Pairs
|
||||||
|
|
||||||
`LowProfitPairs` uses all trades for a pair within `lookback_period` in minutes (or in candles when using `lookback_period_candles`) to determine the overall profit ratio.
|
`LowProfitPairs` uses all trades for a pair within `lookback_period` in minutes (or in candles when using `lookback_period_candles`) to determine the overall profit ratio.
|
||||||
If that ratio is below `required_profit`, that pair will be locked for `stop_duration` in minutes (or in candles when using `stop_duration_candles`).
|
If that ratio is below `required_profit`, that pair will be locked for `stop_duration` in minutes (or in candles when using `stop_duration_candles`, or until the set time when using `unlock_at`).
|
||||||
|
|
||||||
For futures bots, setting `only_per_side` will make the bot only consider one side, and will then only lock this one side, allowing for example shorts to continue after a series of long losses.
|
For futures bots, setting `only_per_side` will make the bot only consider one side, and will then only lock this one side, allowing for example shorts to continue after a series of long losses.
|
||||||
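For futures, a sketch using `only_per_side` could look like this (values are illustrative, not a recommendation):

``` python
class AwesomeStrategy(IStrategy):

    @property
    def protections(self):
        return [
            {
                "method": "LowProfitPairs",
                "lookback_period_candles": 60,
                "trade_limit": 2,
                "stop_duration_candles": 60,
                "required_profit": 0.02,
                # Evaluate and lock longs and shorts independently.
                "only_per_side": True
            }
        ]
```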
|
|
||||||
|
@ -120,7 +121,7 @@ def protections(self):
|
||||||
|
|
||||||
#### Cooldown Period
|
#### Cooldown Period
|
||||||
|
|
||||||
`CooldownPeriod` locks a pair for `stop_duration` in minutes (or in candles when using `stop_duration_candles`) after selling, avoiding a re-entry for this pair for `stop_duration` minutes.
|
`CooldownPeriod` locks a pair for `stop_duration` in minutes (or in candles when using `stop_duration_candles`, or until the set time when using `unlock_at`) after exiting, avoiding a re-entry for this pair for `stop_duration` minutes.
|
||||||
|
|
||||||
The below example will stop trading a pair for 2 candles after closing a trade, allowing this pair to "cool down".
|
The below example will stop trading a pair for 2 candles after closing a trade, allowing this pair to "cool down".
|
||||||
|
|
||||||
|
|
45
docs/includes/strategy-imports.md
Normal file
45
docs/includes/strategy-imports.md
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
## Imports necessary for a strategy
|
||||||
|
|
||||||
|
When creating a strategy, you will need to import the necessary modules and classes. The following imports are required for a strategy:
|
||||||
|
|
||||||
|
By default, we recommend the following imports as a baseline for your strategy:
|
||||||
|
This will cover all imports necessary for freqtrade functions to work.
|
||||||
|
Obviously you can add more imports as needed for your strategy.
|
||||||
|
|
||||||
|
``` python
|
||||||
|
# flake8: noqa: F401
|
||||||
|
# isort: skip_file
|
||||||
|
# --- Do not remove these imports ---
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from pandas import DataFrame
|
||||||
|
from typing import Dict, Optional, Union, Tuple
|
||||||
|
|
||||||
|
from freqtrade.strategy import (
|
||||||
|
IStrategy,
|
||||||
|
Trade,
|
||||||
|
Order,
|
||||||
|
PairLocks,
|
||||||
|
informative, # @informative decorator
|
||||||
|
# Hyperopt Parameters
|
||||||
|
BooleanParameter,
|
||||||
|
CategoricalParameter,
|
||||||
|
DecimalParameter,
|
||||||
|
IntParameter,
|
||||||
|
RealParameter,
|
||||||
|
# timeframe helpers
|
||||||
|
timeframe_to_minutes,
|
||||||
|
timeframe_to_next_date,
|
||||||
|
timeframe_to_prev_date,
|
||||||
|
# Strategy helper functions
|
||||||
|
merge_informative_pair,
|
||||||
|
stoploss_from_absolute,
|
||||||
|
stoploss_from_open,
|
||||||
|
)
|
||||||
|
|
||||||
|
# --------------------------------
|
||||||
|
# Add your lib to import here
|
||||||
|
import talib.abstract as ta
|
||||||
|
from technical import qtpylib
|
||||||
|
```
|
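As a usage sketch, a minimal strategy file would combine the import block above with a small `IStrategy` subclass. The class name, timeframe and RSI thresholds below are placeholders, not a recommendation:

``` python
class SampleStrategy(IStrategy):
    timeframe = "5m"
    minimal_roi = {"0": 0.05}
    stoploss = -0.10

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # ta and DataFrame come from the import block above.
        dataframe["rsi"] = ta.RSI(dataframe, timeperiod=14)
        return dataframe

    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe.loc[dataframe["rsi"] < 30, "enter_long"] = 1
        return dataframe

    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe.loc[dataframe["rsi"] > 70, "exit_long"] = 1
        return dataframe
```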
|
@ -42,6 +42,7 @@ Please read the [exchange specific notes](exchanges.md) to learn about eventual,
|
||||||
- [X] [Binance](https://www.binance.com/)
|
- [X] [Binance](https://www.binance.com/)
|
||||||
- [X] [Bitmart](https://bitmart.com/)
|
- [X] [Bitmart](https://bitmart.com/)
|
||||||
- [X] [BingX](https://bingx.com/invite/0EM9RX)
|
- [X] [BingX](https://bingx.com/invite/0EM9RX)
|
||||||
|
- [X] [Bybit](https://bybit.com/)
|
||||||
- [X] [Gate.io](https://www.gate.io/ref/6266643)
|
- [X] [Gate.io](https://www.gate.io/ref/6266643)
|
||||||
- [X] [HTX](https://www.htx.com/) (Former Huobi)
|
- [X] [HTX](https://www.htx.com/) (Former Huobi)
|
||||||
- [X] [Kraken](https://kraken.com/)
|
- [X] [Kraken](https://kraken.com/)
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
markdown==3.6
|
markdown==3.7
|
||||||
mkdocs==1.6.0
|
mkdocs==1.6.1
|
||||||
mkdocs-material==9.5.27
|
mkdocs-material==9.5.34
|
||||||
mdx_truly_sane_lists==1.3
|
mdx_truly_sane_lists==1.3
|
||||||
pymdown-extensions==10.8.1
|
pymdown-extensions==10.9
|
||||||
jinja2==3.1.4
|
jinja2==3.1.4
|
||||||
|
mike==2.1.3
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
## FreqUI
|
## FreqUI
|
||||||
|
|
||||||
FreqUI now has its own dedicated [documentation section](frequi.md) - please refer to that section for all information regarding FreqUI.
|
FreqUI now has its own dedicated [documentation section](freq-ui.md) - please refer to that section for all information regarding FreqUI.
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,13 @@
|
||||||
# SQL Helper
|
# SQL Helper
|
||||||
|
|
||||||
This page contains some help if you want to edit your sqlite db.
|
This page contains some help if you want to query your sqlite db.
|
||||||
|
|
||||||
|
!!! Tip "Other Database systems"
|
||||||
|
To use other database systems like PostgreSQL or MariaDB, you can use the same queries, but you need to use the respective client for the database system. [Click here](advanced-setup.md#use-a-different-database-system) to learn how to set up a different database system with freqtrade.
|
||||||
|
|
||||||
|
!!! Warning
|
||||||
|
If you are not familiar with SQL, you should be very careful when running queries on your database.
|
||||||
|
Always make sure to have a backup of your database before running any queries.
|
||||||
|
|
||||||
## Install sqlite3
|
## Install sqlite3
|
||||||
|
|
||||||
|
@ -43,13 +50,25 @@ sqlite3
|
||||||
.schema <table_name>
|
.schema <table_name>
|
||||||
```
|
```
|
||||||
|
|
||||||
## Get all trades in the table
|
### Get all trades in the table
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
SELECT * FROM trades;
|
SELECT * FROM trades;
|
||||||
```
|
```
|
||||||
|
|
||||||
## Fix trade still open after a manual exit on the exchange
|
## Destructive queries
|
||||||
|
|
||||||
|
Queries that write to the database.
|
||||||
|
These queries should usually not be necessary as freqtrade tries to handle all database operations itself - or exposes them via API or telegram commands.
|
||||||
|
|
||||||
|
!!! Warning
|
||||||
|
Please make sure you have a backup of your database before running any of the below queries.
|
||||||
|
|
||||||
|
!!! Danger
|
||||||
|
You should also **never** run any writing query (`update`, `insert`, `delete`) while a bot is connected to the database.
|
||||||
|
This can and will lead to data corruption - most likely, without the possibility of recovery.
|
||||||
|
|
||||||
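One simple way to take such a backup (with the bot stopped, as per the warning above) is Python's built-in `sqlite3` backup API - the file name below is only an assumed default, adjust it to your own database:

``` python
import sqlite3

# Assumed default database file - adjust to your setup.
source = sqlite3.connect("tradesv3.sqlite")
backup = sqlite3.connect("tradesv3.backup.sqlite")

# Copies the complete database, including trades and orders.
with backup:
    source.backup(backup)

source.close()
backup.close()
```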
|
### Fix trade still open after a manual exit on the exchange
|
||||||
|
|
||||||
!!! Warning
|
!!! Warning
|
||||||
Manually selling a pair on the exchange will not be detected by the bot and it will try to sell anyway. Whenever possible, `/forceexit <tradeid>` should be used to accomplish the same thing.
|
Manually selling a pair on the exchange will not be detected by the bot and it will try to sell anyway. Whenever possible, `/forceexit <tradeid>` should be used to accomplish the same thing.
|
||||||
|
@ -69,7 +88,7 @@ SET is_open=0,
|
||||||
WHERE id=<trade_ID_to_update>;
|
WHERE id=<trade_ID_to_update>;
|
||||||
```
|
```
|
||||||
|
|
||||||
### Example
|
#### Example
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
UPDATE trades
|
UPDATE trades
|
||||||
|
@ -82,7 +101,7 @@ SET is_open=0,
|
||||||
WHERE id=31;
|
WHERE id=31;
|
||||||
```
|
```
|
||||||
|
|
||||||
## Remove trade from the database
|
### Remove trade from the database
|
||||||
|
|
||||||
!!! Tip "Use RPC Methods to delete trades"
|
!!! Tip "Use RPC Methods to delete trades"
|
||||||
Consider using `/delete <tradeid>` via Telegram or REST API. That's the recommended way to delete trades.
|
Consider using `/delete <tradeid>` via Telegram or REST API. That's the recommended way to delete trades.
|
||||||
|
@ -100,39 +119,3 @@ DELETE FROM trades WHERE id = 31;
|
||||||
|
|
||||||
!!! Warning
|
!!! Warning
|
||||||
This will remove this trade from the database. Please make sure you got the correct id and **NEVER** run this query without the `where` clause.
|
This will remove this trade from the database. Please make sure you got the correct id and **NEVER** run this query without the `where` clause.
|
||||||
|
|
||||||
## Use a different database system
|
|
||||||
|
|
||||||
Freqtrade is using SQLAlchemy, which supports multiple different database systems. As such, a multitude of database systems should be supported.
|
|
||||||
Freqtrade does not depend or install any additional database driver. Please refer to the [SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html#database-urls) on installation instructions for the respective database systems.
|
|
||||||
|
|
||||||
The following systems have been tested and are known to work with freqtrade:
|
|
||||||
|
|
||||||
* sqlite (default)
|
|
||||||
* PostgreSQL
|
|
||||||
* MariaDB
|
|
||||||
|
|
||||||
!!! Warning
|
|
||||||
By using one of the below database systems, you acknowledge that you know how to manage such a system. The freqtrade team will not provide any support with setup or maintenance (or backups) of the below database systems.
|
|
||||||
|
|
||||||
### PostgreSQL
|
|
||||||
|
|
||||||
Installation:
|
|
||||||
`pip install psycopg2-binary`
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
`... --db-url postgresql+psycopg2://<username>:<password>@localhost:5432/<database>`
|
|
||||||
|
|
||||||
Freqtrade will automatically create the tables necessary upon startup.
|
|
||||||
|
|
||||||
If you're running different instances of Freqtrade, you must either setup one database per Instance or use different users / schemas for your connections.
|
|
||||||
|
|
||||||
### MariaDB / MySQL
|
|
||||||
|
|
||||||
Freqtrade supports MariaDB by using SQLAlchemy, which supports multiple different database systems.
|
|
||||||
|
|
||||||
Installation:
|
|
||||||
`pip install pymysql`
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
`... --db-url mysql+pymysql://<username>:<password>@localhost:3306/<database>`
|
|
||||||
|
|
|
@ -24,6 +24,8 @@ Currently available callbacks:
|
||||||
!!! Tip "Callback calling sequence"
|
!!! Tip "Callback calling sequence"
|
||||||
You can find the callback calling sequence in [bot-basics](bot-basics.md#bot-execution-logic)
|
You can find the callback calling sequence in [bot-basics](bot-basics.md#bot-execution-logic)
|
||||||
|
|
||||||
|
--8<-- "includes/strategy-imports.md"
|
||||||
|
|
||||||
## Bot start
|
## Bot start
|
||||||
|
|
||||||
A simple callback which is called once when the strategy is loaded.
|
A simple callback which is called once when the strategy is loaded.
|
||||||
|
@ -41,10 +43,10 @@ class AwesomeStrategy(IStrategy):
|
||||||
Called only once after bot instantiation.
|
Called only once after bot instantiation.
|
||||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||||
"""
|
"""
|
||||||
if self.config['runmode'].value in ('live', 'dry_run'):
|
if self.config["runmode"].value in ("live", "dry_run"):
|
||||||
# Assign this to the class by using self.*
|
# Assign this to the class by using self.*
|
||||||
# can then be used by populate_* methods
|
# can then be used by populate_* methods
|
||||||
self.custom_remote_data = requests.get('https://some_remote_source.example.com')
|
self.custom_remote_data = requests.get("https://some_remote_source.example.com")
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -57,6 +59,7 @@ seconds, unless configured differently) or once per candle in backtest/hyperopt
|
||||||
This can be used to perform calculations which are pair independent (apply to all pairs), loading of external data, etc.
|
This can be used to perform calculations which are pair independent (apply to all pairs), loading of external data, etc.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
|
# Default imports
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
@ -71,10 +74,10 @@ class AwesomeStrategy(IStrategy):
|
||||||
:param current_time: datetime object, containing the current datetime
|
:param current_time: datetime object, containing the current datetime
|
||||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||||
"""
|
"""
|
||||||
if self.config['runmode'].value in ('live', 'dry_run'):
|
if self.config["runmode"].value in ("live", "dry_run"):
|
||||||
# Assign this to the class by using self.*
|
# Assign this to the class by using self.*
|
||||||
# can then be used by populate_* methods
|
# can then be used by populate_* methods
|
||||||
self.remote_data = requests.get('https://some_remote_source.example.com')
|
self.remote_data = requests.get("https://some_remote_source.example.com")
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -83,6 +86,8 @@ class AwesomeStrategy(IStrategy):
|
||||||
Called before entering a trade, makes it possible to manage your position size when placing a new trade.
|
Called before entering a trade, makes it possible to manage your position size when placing a new trade.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
|
# Default imports
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
def custom_stake_amount(self, pair: str, current_time: datetime, current_rate: float,
|
def custom_stake_amount(self, pair: str, current_time: datetime, current_rate: float,
|
||||||
proposed_stake: float, min_stake: Optional[float], max_stake: float,
|
proposed_stake: float, min_stake: Optional[float], max_stake: float,
|
||||||
|
@ -92,13 +97,13 @@ class AwesomeStrategy(IStrategy):
|
||||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair=pair, timeframe=self.timeframe)
|
dataframe, _ = self.dp.get_analyzed_dataframe(pair=pair, timeframe=self.timeframe)
|
||||||
current_candle = dataframe.iloc[-1].squeeze()
|
current_candle = dataframe.iloc[-1].squeeze()
|
||||||
|
|
||||||
if current_candle['fastk_rsi_1h'] > current_candle['fastd_rsi_1h']:
|
if current_candle["fastk_rsi_1h"] > current_candle["fastd_rsi_1h"]:
|
||||||
if self.config['stake_amount'] == 'unlimited':
|
if self.config["stake_amount"] == "unlimited":
|
||||||
# Use entire available wallet during favorable conditions when in compounding mode.
|
# Use entire available wallet during favorable conditions when in compounding mode.
|
||||||
return max_stake
|
return max_stake
|
||||||
else:
|
else:
|
||||||
# Compound profits during favorable conditions instead of using a static stake.
|
# Compound profits during favorable conditions instead of using a static stake.
|
||||||
return self.wallets.get_total_stake_amount() / self.config['max_open_trades']
|
return self.wallets.get_total_stake_amount() / self.config["max_open_trades"]
|
||||||
|
|
||||||
# Use default stake amount.
|
# Use default stake amount.
|
||||||
return proposed_stake
|
return proposed_stake
|
||||||
|
@ -129,25 +134,27 @@ Using `custom_exit()` signals in place of stoploss though *is not recommended*.
|
||||||
An example of how we can use different indicators depending on the current profit and also exit trades that were open longer than one day:
|
An example of how we can use different indicators depending on the current profit and also exit trades that were open longer than one day:
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
|
# Default imports
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
def custom_exit(self, pair: str, trade: 'Trade', current_time: 'datetime', current_rate: float,
|
def custom_exit(self, pair: str, trade: Trade, current_time: datetime, current_rate: float,
|
||||||
current_profit: float, **kwargs):
|
current_profit: float, **kwargs):
|
||||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
|
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
|
||||||
last_candle = dataframe.iloc[-1].squeeze()
|
last_candle = dataframe.iloc[-1].squeeze()
|
||||||
|
|
||||||
# Above 20% profit, sell when rsi < 80
|
# Above 20% profit, sell when rsi < 80
|
||||||
if current_profit > 0.2:
|
if current_profit > 0.2:
|
||||||
if last_candle['rsi'] < 80:
|
if last_candle["rsi"] < 80:
|
||||||
return 'rsi_below_80'
|
return "rsi_below_80"
|
||||||
|
|
||||||
# Between 2% and 10%, sell if EMA-long above EMA-short
|
# Between 2% and 10%, sell if EMA-long above EMA-short
|
||||||
if 0.02 < current_profit < 0.1:
|
if 0.02 < current_profit < 0.1:
|
||||||
if last_candle['emalong'] > last_candle['emashort']:
|
if last_candle["emalong"] > last_candle["emashort"]:
|
||||||
return 'ema_long_below_80'
|
return "ema_long_below_80"
|
||||||
|
|
||||||
# Sell any positions at a loss if they are held for more than one day.
|
# Sell any positions at a loss if they are held for more than one day.
|
||||||
if current_profit < 0.0 and (current_time - trade.open_date_utc).days >= 1:
|
if current_profit < 0.0 and (current_time - trade.open_date_utc).days >= 1:
|
||||||
return 'unclog'
|
return "unclog"
|
||||||
```
|
```
|
||||||
|
|
||||||
See [Dataframe access](strategy-advanced.md#dataframe-access) for more information about dataframe use in strategy callbacks.
|
See [Dataframe access](strategy-advanced.md#dataframe-access) for more information about dataframe use in strategy callbacks.
|
||||||
|
@ -168,7 +175,6 @@ The absolute value of the return value is used (the sign is ignored), so returni
|
||||||
Returning `None` will be interpreted as "no desire to change", and is the only safe way to return when you'd like to not modify the stoploss.
|
Returning `None` will be interpreted as "no desire to change", and is the only safe way to return when you'd like to not modify the stoploss.
|
||||||
`NaN` and `inf` values are considered invalid and will be ignored (identical to `None`).
|
`NaN` and `inf` values are considered invalid and will be ignored (identical to `None`).
|
||||||
|
|
||||||
|
|
||||||
Stoploss on exchange works similar to `trailing_stop`, and the stoploss on exchange is updated as configured in `stoploss_on_exchange_interval` ([More details about stoploss on exchange](stoploss.md#stop-loss-on-exchangefreqtrade)).
|
Stoploss on exchange works similar to `trailing_stop`, and the stoploss on exchange is updated as configured in `stoploss_on_exchange_interval` ([More details about stoploss on exchange](stoploss.md#stop-loss-on-exchangefreqtrade)).
|
||||||
|
|
||||||
!!! Note "Use of dates"
|
!!! Note "Use of dates"
|
||||||
|
@ -196,9 +202,7 @@ Of course, many more things are possible, and all examples can be combined at wi
|
||||||
To simulate a regular trailing stoploss of 4% (trailing 4% behind the maximum reached price) you would use the following very simple method:
|
To simulate a regular trailing stoploss of 4% (trailing 4% behind the maximum reached price) you would use the following very simple method:
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
# additional imports required
|
# Default imports
|
||||||
from datetime import datetime
|
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -206,7 +210,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
"""
|
"""
|
||||||
|
@ -236,8 +240,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
Use the initial stoploss for the first 60 minutes, after this change to 10% trailing stoploss, and after 2 hours (120 minutes) we use a 5% trailing stoploss.
|
Use the initial stoploss for the first 60 minutes, after this change to 10% trailing stoploss, and after 2 hours (120 minutes) we use a 5% trailing stoploss.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from datetime import datetime, timedelta
|
# Default imports
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -245,7 +248,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
|
|
||||||
|
@ -263,8 +266,7 @@ Use the initial stoploss for the first 60 minutes, after this change to 10% trai
|
||||||
If an additional order fills, set stoploss to -10% below the new `open_rate` ([Averaged across all entries](#position-adjust-calculations)).
|
If an additional order fills, set stoploss to -10% below the new `open_rate` ([Averaged across all entries](#position-adjust-calculations)).
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from datetime import datetime, timedelta
|
# Default imports
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -272,7 +274,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
|
|
||||||
|
@ -293,8 +295,7 @@ Use a different stoploss depending on the pair.
|
||||||
In this example, we'll trail the highest price with 10% trailing stoploss for `ETH/BTC` and `XRP/BTC`, with 5% trailing stoploss for `LTC/BTC` and with 15% for all other pairs.
|
In this example, we'll trail the highest price with 10% trailing stoploss for `ETH/BTC` and `XRP/BTC`, with 5% trailing stoploss for `LTC/BTC` and with 15% for all other pairs.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from datetime import datetime
|
# Default imports
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -302,13 +303,13 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
|
|
||||||
if pair in ('ETH/BTC', 'XRP/BTC'):
|
if pair in ("ETH/BTC", "XRP/BTC"):
|
||||||
return -0.10
|
return -0.10
|
||||||
elif pair in ('LTC/BTC'):
|
elif pair in ("LTC/BTC"):
|
||||||
return -0.05
|
return -0.05
|
||||||
return -0.15
|
return -0.15
|
||||||
```
|
```
|
||||||
|
@ -320,8 +321,7 @@ Use the initial stoploss until the profit is above 4%, then use a trailing stopl
|
||||||
Please note that the stoploss can only increase, values lower than the current stoploss are ignored.
|
Please note that the stoploss can only increase, values lower than the current stoploss are ignored.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from datetime import datetime, timedelta
|
# Default imports
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -329,7 +329,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
|
|
||||||
|
@ -353,9 +353,7 @@ Instead of continuously trailing behind the current price, this example sets fix
|
||||||
* Once profit is > 40% - set stoploss to 25% above open price.
|
* Once profit is > 40% - set stoploss to 25% above open price.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from datetime import datetime
|
# Default imports
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
from freqtrade.strategy import stoploss_from_open
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -363,7 +361,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
|
|
||||||
|
@ -384,15 +382,17 @@ class AwesomeStrategy(IStrategy):
|
||||||
Absolute stoploss value may be derived from indicators stored in dataframe. Example uses parabolic SAR below the price as stoploss.
|
Absolute stoploss value may be derived from indicators stored in dataframe. Example uses parabolic SAR below the price as stoploss.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
|
# Default imports
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
||||||
# <...>
|
# <...>
|
||||||
dataframe['sar'] = ta.SAR(dataframe)
|
dataframe["sar"] = ta.SAR(dataframe)
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
|
|
||||||
|
@ -400,7 +400,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
last_candle = dataframe.iloc[-1].squeeze()
|
last_candle = dataframe.iloc[-1].squeeze()
|
||||||
|
|
||||||
# Use parabolic sar as absolute stoploss price
|
# Use parabolic sar as absolute stoploss price
|
||||||
stoploss_price = last_candle['sar']
|
stoploss_price = last_candle["sar"]
|
||||||
|
|
||||||
# Convert absolute price to percentage relative to current_rate
|
# Convert absolute price to percentage relative to current_rate
|
||||||
if stoploss_price < current_rate:
|
if stoploss_price < current_rate:
|
||||||
|
@ -429,10 +429,7 @@ Stoploss values returned from `custom_stoploss()` must specify a percentage rela
|
||||||
|
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
|
# Default imports
|
||||||
from datetime import datetime
|
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
from freqtrade.strategy import IStrategy, stoploss_from_open
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -440,7 +437,7 @@ Stoploss values returned from `custom_stoploss()` must specify a percentage rela
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
|
|
||||||
|
@ -469,38 +466,34 @@ The helper function `stoploss_from_absolute()` can be used to convert from an ab
|
||||||
|
|
||||||
??? Example "Returning a stoploss using absolute price from the custom stoploss function"
|
??? Example "Returning a stoploss using absolute price from the custom stoploss function"
|
||||||
|
|
||||||
If we want to trail a stop price at 2xATR below current price we can call `stoploss_from_absolute(current_rate + (side * candle['atr'] * 2), current_rate=current_rate, is_short=trade.is_short, leverage=trade.leverage)`.
|
If we want to trail a stop price at 2xATR below current price we can call `stoploss_from_absolute(current_rate + (side * candle["atr"] * 2), current_rate=current_rate, is_short=trade.is_short, leverage=trade.leverage)`.
|
||||||
For futures, we need to adjust the direction (up or down), as well as adjust for leverage, since the [`custom_stoploss`](strategy-callbacks.md#custom-stoploss) callback returns the ["risk for this trade"](stoploss.md#stoploss-and-leverage) - not the relative price movement.
|
For futures, we need to adjust the direction (up or down), as well as adjust for leverage, since the [`custom_stoploss`](strategy-callbacks.md#custom-stoploss) callback returns the ["risk for this trade"](stoploss.md#stoploss-and-leverage) - not the relative price movement.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
|
# Default imports
|
||||||
from datetime import datetime
|
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
from freqtrade.strategy import IStrategy, stoploss_from_absolute, timeframe_to_prev_date
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
use_custom_stoploss = True
|
use_custom_stoploss = True
|
||||||
|
|
||||||
def populate_indicators_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
||||||
dataframe['atr'] = ta.ATR(dataframe, timeperiod=14)
|
dataframe["atr"] = ta.ATR(dataframe, timeperiod=14)
|
||||||
return dataframe
|
return dataframe
|
||||||
|
|
||||||
def custom_stoploss(self, pair: str, trade: 'Trade', current_time: datetime,
|
def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
|
||||||
current_rate: float, current_profit: float, after_fill: bool,
|
current_rate: float, current_profit: float, after_fill: bool,
|
||||||
**kwargs) -> Optional[float]:
|
**kwargs) -> Optional[float]:
|
||||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
|
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
|
||||||
trade_date = timeframe_to_prev_date(self.timeframe, trade.open_date_utc)
|
trade_date = timeframe_to_prev_date(self.timeframe, trade.open_date_utc)
|
||||||
candle = dataframe.iloc[-1].squeeze()
|
candle = dataframe.iloc[-1].squeeze()
|
||||||
side = 1 if trade.is_short else -1
|
side = 1 if trade.is_short else -1
|
||||||
return stoploss_from_absolute(current_rate + (side * candle['atr'] * 2),
|
return stoploss_from_absolute(current_rate + (side * candle["atr"] * 2),
|
||||||
current_rate=current_rate,
|
current_rate=current_rate,
|
||||||
is_short=trade.is_short,
|
is_short=trade.is_short,
|
||||||
leverage=trade.leverage)
|
leverage=trade.leverage)
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Custom order price rules
|
## Custom order price rules
|
||||||
|
@ -520,19 +513,18 @@ Each of these methods are called right before placing an order on the exchange.
|
||||||
### Custom order entry and exit price example
|
### Custom order entry and exit price example
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from datetime import datetime, timedelta, timezone
|
# Default imports
|
||||||
from freqtrade.persistence import Trade
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
# ... populate_* methods
|
# ... populate_* methods
|
||||||
|
|
||||||
def custom_entry_price(self, pair: str, trade: Optional['Trade'], current_time: datetime, proposed_rate: float,
|
def custom_entry_price(self, pair: str, trade: Optional[Trade], current_time: datetime, proposed_rate: float,
|
||||||
entry_tag: Optional[str], side: str, **kwargs) -> float:
|
entry_tag: Optional[str], side: str, **kwargs) -> float:
|
||||||
|
|
||||||
dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=pair,
|
dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=pair,
|
||||||
timeframe=self.timeframe)
|
timeframe=self.timeframe)
|
||||||
new_entryprice = dataframe['bollinger_10_lowerband'].iat[-1]
|
new_entryprice = dataframe["bollinger_10_lowerband"].iat[-1]
|
||||||
|
|
||||||
return new_entryprice
|
return new_entryprice
|
||||||
|
|
||||||
|
@ -542,7 +534,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=pair,
|
dataframe, last_updated = self.dp.get_analyzed_dataframe(pair=pair,
|
||||||
timeframe=self.timeframe)
|
timeframe=self.timeframe)
|
||||||
new_exitprice = dataframe['bollinger_10_upperband'].iat[-1]
|
new_exitprice = dataframe["bollinger_10_upperband"].iat[-1]
|
||||||
|
|
||||||
return new_exitprice
|
return new_exitprice
|
||||||
|
|
||||||
|
@ -579,8 +571,7 @@ It applies a tight timeout for higher priced assets, while allowing more time to
|
||||||
The function must return either `True` (cancel order) or `False` (keep order alive).
|
The function must return either `True` (cancel order) or `False` (keep order alive).
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from datetime import datetime, timedelta
|
# Default imports
|
||||||
from freqtrade.persistence import Trade, Order
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -588,11 +579,11 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
# Set unfilledtimeout to 25 hours, since the maximum timeout from below is 24 hours.
|
# Set unfilledtimeout to 25 hours, since the maximum timeout from below is 24 hours.
|
||||||
unfilledtimeout = {
|
unfilledtimeout = {
|
||||||
'entry': 60 * 25,
|
"entry": 60 * 25,
|
||||||
'exit': 60 * 25
|
"exit": 60 * 25
|
||||||
}
|
}
|
||||||
|
|
||||||
def check_entry_timeout(self, pair: str, trade: 'Trade', order: 'Order',
|
def check_entry_timeout(self, pair: str, trade: Trade, order: Order,
|
||||||
current_time: datetime, **kwargs) -> bool:
|
current_time: datetime, **kwargs) -> bool:
|
||||||
if trade.open_rate > 100 and trade.open_date_utc < current_time - timedelta(minutes=5):
|
if trade.open_rate > 100 and trade.open_date_utc < current_time - timedelta(minutes=5):
|
||||||
return True
|
return True
|
||||||
|
@ -603,7 +594,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def check_exit_timeout(self, pair: str, trade: Trade, order: 'Order',
|
def check_exit_timeout(self, pair: str, trade: Trade, order: Order,
|
||||||
current_time: datetime, **kwargs) -> bool:
|
current_time: datetime, **kwargs) -> bool:
|
||||||
if trade.open_rate > 100 and trade.open_date_utc < current_time - timedelta(minutes=5):
|
if trade.open_rate > 100 and trade.open_date_utc < current_time - timedelta(minutes=5):
|
||||||
return True
|
return True
|
||||||
|
@ -620,8 +611,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
### Custom order timeout example (using additional data)
|
### Custom order timeout example (using additional data)
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from datetime import datetime
|
# Default imports
|
||||||
from freqtrade.persistence import Trade, Order
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -629,24 +619,24 @@ class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
# Set unfilledtimeout to 25 hours, since the maximum timeout from below is 24 hours.
|
# Set unfilledtimeout to 25 hours, since the maximum timeout from below is 24 hours.
|
||||||
unfilledtimeout = {
|
unfilledtimeout = {
|
||||||
'entry': 60 * 25,
|
"entry": 60 * 25,
|
||||||
'exit': 60 * 25
|
"exit": 60 * 25
|
||||||
}
|
}
|
||||||
|
|
||||||
def check_entry_timeout(self, pair: str, trade: 'Trade', order: 'Order',
|
def check_entry_timeout(self, pair: str, trade: Trade, order: Order,
|
||||||
current_time: datetime, **kwargs) -> bool:
|
current_time: datetime, **kwargs) -> bool:
|
||||||
ob = self.dp.orderbook(pair, 1)
|
ob = self.dp.orderbook(pair, 1)
|
||||||
current_price = ob['bids'][0][0]
|
current_price = ob["bids"][0][0]
|
||||||
# Cancel buy order if price is more than 2% above the order.
|
# Cancel buy order if price is more than 2% above the order.
|
||||||
if current_price > order.price * 1.02:
|
if current_price > order.price * 1.02:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def check_exit_timeout(self, pair: str, trade: 'Trade', order: 'Order',
|
def check_exit_timeout(self, pair: str, trade: Trade, order: Order,
|
||||||
current_time: datetime, **kwargs) -> bool:
|
current_time: datetime, **kwargs) -> bool:
|
||||||
ob = self.dp.orderbook(pair, 1)
|
ob = self.dp.orderbook(pair, 1)
|
||||||
current_price = ob['asks'][0][0]
|
current_price = ob["asks"][0][0]
|
||||||
# Cancel sell order if price is more than 2% below the order.
|
# Cancel sell order if price is more than 2% below the order.
|
||||||
if current_price < order.price * 0.98:
|
if current_price < order.price * 0.98:
|
||||||
return True
|
return True
|
||||||
|
@ -665,6 +655,8 @@ This are the last methods that will be called before an order is placed.
|
||||||
`confirm_trade_entry()` can be used to abort a trade entry at the latest second (maybe because the price is not what we expect).
|
`confirm_trade_entry()` can be used to abort a trade entry at the latest second (maybe because the price is not what we expect).
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
|
# Default imports
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
# ... populate_* methods
|
# ... populate_* methods
|
||||||
|
@ -689,7 +681,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
:param time_in_force: Time in force. Defaults to GTC (Good-til-cancelled).
|
:param time_in_force: Time in force. Defaults to GTC (Good-til-cancelled).
|
||||||
:param current_time: datetime object, containing the current datetime
|
:param current_time: datetime object, containing the current datetime
|
||||||
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
||||||
:param side: 'long' or 'short' - indicating the direction of the proposed trade
|
:param side: "long" or "short" - indicating the direction of the proposed trade
|
||||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||||
:return bool: When True is returned, then the buy-order is placed on the exchange.
|
:return bool: When True is returned, then the buy-order is placed on the exchange.
|
||||||
False aborts the process
|
False aborts the process
|
||||||
|
@ -711,8 +703,7 @@ The exit-reasons (if applicable) will be in the following sequence:
|
||||||
* `trailing_stop_loss`
|
* `trailing_stop_loss`
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from freqtrade.persistence import Trade
|
# Default imports
|
||||||
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -738,14 +729,14 @@ class AwesomeStrategy(IStrategy):
|
||||||
or current rate for market orders.
|
or current rate for market orders.
|
||||||
:param time_in_force: Time in force. Defaults to GTC (Good-til-cancelled).
|
:param time_in_force: Time in force. Defaults to GTC (Good-til-cancelled).
|
||||||
:param exit_reason: Exit reason.
|
:param exit_reason: Exit reason.
|
||||||
Can be any of ['roi', 'stop_loss', 'stoploss_on_exchange', 'trailing_stop_loss',
|
Can be any of ["roi", "stop_loss", "stoploss_on_exchange", "trailing_stop_loss",
|
||||||
'exit_signal', 'force_exit', 'emergency_exit']
|
"exit_signal", "force_exit", "emergency_exit"]
|
||||||
:param current_time: datetime object, containing the current datetime
|
:param current_time: datetime object, containing the current datetime
|
||||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||||
:return bool: When True, then the exit-order is placed on the exchange.
|
:return bool: When True, then the exit-order is placed on the exchange.
|
||||||
False aborts the process
|
False aborts the process
|
||||||
"""
|
"""
|
||||||
if exit_reason == 'force_exit' and trade.calc_profit_ratio(rate) < 0:
|
if exit_reason == "force_exit" and trade.calc_profit_ratio(rate) < 0:
|
||||||
# Reject force-sells with negative profit
|
# Reject force-sells with negative profit
|
||||||
# This is just a sample, please adjust to your needs
|
# This is just a sample, please adjust to your needs
|
||||||
# (this does not necessarily make sense, assuming you know when you're force-selling)
|
# (this does not necessarily make sense, assuming you know when you're force-selling)
|
||||||
|
@ -771,7 +762,7 @@ This callback is **not** called when there is an open order (either buy or sell)
|
||||||
`adjust_trade_position()` is called very frequently for the duration of a trade, so you must keep your implementation as performant as possible.
|
`adjust_trade_position()` is called very frequently for the duration of a trade, so you must keep your implementation as performant as possible.
|
||||||
|
|
||||||
Position adjustments will always be applied in the direction of the trade, so a positive value will always increase your position (negative values will decrease your position), no matter if it's a long or short trade.
|
Position adjustments will always be applied in the direction of the trade, so a positive value will always increase your position (negative values will decrease your position), no matter if it's a long or short trade.
|
||||||
Adjustment orders can be assigned with a tag by returning a 2 element Tuple, with the first element being the adjustment amount, and the 2nd element the tag (e.g. `return 250, 'increase_favorable_conditions'`).
|
Adjustment orders can be assigned with a tag by returning a 2 element Tuple, with the first element being the adjustment amount, and the 2nd element the tag (e.g. `return 250, "increase_favorable_conditions"`).
|
||||||
|
|
||||||
Modifications to leverage are not possible, and the stake-amount returned is assumed to be before applying leverage.
|
Modifications to leverage are not possible, and the stake-amount returned is assumed to be before applying leverage.
|
||||||
|
|
||||||
|
@ -793,7 +784,7 @@ Returning a value more than the above (so remaining stake_amount would become ne
|
||||||
!!! Note "About stake size"
|
!!! Note "About stake size"
|
||||||
Using fixed stake size means it will be the amount used for the first order, just like without position adjustment.
|
Using fixed stake size means it will be the amount used for the first order, just like without position adjustment.
|
||||||
If you wish to buy additional orders with DCA, then make sure to leave enough funds in the wallet for that.
|
If you wish to buy additional orders with DCA, then make sure to leave enough funds in the wallet for that.
|
||||||
Using 'unlimited' stake amount with DCA orders requires you to also implement the `custom_stake_amount()` callback to avoid allocating all funds to the initial order.
|
Using `"unlimited"` stake amount with DCA orders requires you to also implement the `custom_stake_amount()` callback to avoid allocating all funds to the initial order.
|
||||||
|
|
||||||
!!! Warning "Stoploss calculation"
|
!!! Warning "Stoploss calculation"
|
||||||
Stoploss is still calculated from the initial opening price, not averaged price.
|
Stoploss is still calculated from the initial opening price, not averaged price.
|
||||||
|
@ -811,9 +802,7 @@ Returning a value more than the above (so remaining stake_amount would become ne
|
||||||
Trades with long duration and 10s or even 100s of position adjustments are therefore not recommended, and should be closed at regular intervals to not affect performance.
|
Trades with long duration and 10s or even 100s of position adjustments are therefore not recommended, and should be closed at regular intervals to not affect performance.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
from freqtrade.persistence import Trade
|
# Default imports
|
||||||
from typing import Optional, Tuple, Union
|
|
||||||
|
|
||||||
|
|
||||||
class DigDeeperStrategy(IStrategy):
|
class DigDeeperStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -876,7 +865,7 @@ class DigDeeperStrategy(IStrategy):
|
||||||
|
|
||||||
if current_profit > 0.05 and trade.nr_of_successful_exits == 0:
|
if current_profit > 0.05 and trade.nr_of_successful_exits == 0:
|
||||||
# Take half of the profit at +5%
|
# Take half of the profit at +5%
|
||||||
return -(trade.stake_amount / 2), 'half_profit_5%'
|
return -(trade.stake_amount / 2), "half_profit_5%"
|
||||||
|
|
||||||
if current_profit > -0.05:
|
if current_profit > -0.05:
|
||||||
return None
|
return None
|
||||||
|
@ -886,7 +875,7 @@ class DigDeeperStrategy(IStrategy):
|
||||||
# Only buy when not actively falling price.
|
# Only buy when not actively falling price.
|
||||||
last_candle = dataframe.iloc[-1].squeeze()
|
last_candle = dataframe.iloc[-1].squeeze()
|
||||||
previous_candle = dataframe.iloc[-2].squeeze()
|
previous_candle = dataframe.iloc[-2].squeeze()
|
||||||
if last_candle['close'] < previous_candle['close']:
|
if last_candle["close"] < previous_candle["close"]:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
filled_entries = trade.select_filled_orders(trade.entry_side)
|
filled_entries = trade.select_filled_orders(trade.entry_side)
|
||||||
|
@ -904,7 +893,7 @@ class DigDeeperStrategy(IStrategy):
|
||||||
stake_amount = filled_entries[0].stake_amount
|
stake_amount = filled_entries[0].stake_amount
|
||||||
# This then calculates current safety order size
|
# This then calculates current safety order size
|
||||||
stake_amount = stake_amount * (1 + (count_of_entries * 0.25))
|
stake_amount = stake_amount * (1 + (count_of_entries * 0.25))
|
||||||
return stake_amount, '1/3rd_increase'
|
return stake_amount, "1/3rd_increase"
|
||||||
except Exception as exception:
|
except Exception as exception:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -951,8 +940,7 @@ If the cancellation of the original order fails, then the order will not be repl
|
||||||
Entry Orders that are cancelled via the above methods will not have this callback called. Be sure to update timeout values to match your expectations.
|
Entry Orders that are cancelled via the above methods will not have this callback called. Be sure to update timeout values to match your expectations.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from freqtrade.persistence import Trade
|
# Default imports
|
||||||
from datetime import timedelta, datetime
|
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
|
|
||||||
|
@ -977,13 +965,18 @@ class AwesomeStrategy(IStrategy):
|
||||||
:param proposed_rate: Rate, calculated based on pricing settings in entry_pricing.
|
:param proposed_rate: Rate, calculated based on pricing settings in entry_pricing.
|
||||||
:param current_order_rate: Rate of the existing order in place.
|
:param current_order_rate: Rate of the existing order in place.
|
||||||
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
||||||
:param side: 'long' or 'short' - indicating the direction of the proposed trade
|
:param side: "long" or "short" - indicating the direction of the proposed trade
|
||||||
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
:param **kwargs: Ensure to keep this here so updates to this won't break your strategy.
|
||||||
:return float: New entry price value if provided
|
:return float: New entry price value if provided
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# Limit orders to use and follow SMA200 as price target for the first 10 minutes since entry trigger for BTC/USDT pair.
|
# Limit orders to use and follow SMA200 as price target for the first 10 minutes since entry trigger for BTC/USDT pair.
|
||||||
if pair == 'BTC/USDT' and entry_tag == 'long_sma200' and side == 'long' and (current_time - timedelta(minutes=10)) > trade.open_date_utc:
|
if (
|
||||||
|
pair == "BTC/USDT"
|
||||||
|
and entry_tag == "long_sma200"
|
||||||
|
and side == "long"
|
||||||
|
and (current_time - timedelta(minutes=10)) > trade.open_date_utc
|
||||||
|
):
|
||||||
# just cancel the order if it has been filled more than half of the amount
|
# just cancel the order if it has been filled more than half of the amount
|
||||||
if order.filled > order.remaining:
|
if order.filled > order.remaining:
|
||||||
return None
|
return None
|
||||||
|
@ -991,7 +984,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
dataframe, _ = self.dp.get_analyzed_dataframe(pair=pair, timeframe=self.timeframe)
|
dataframe, _ = self.dp.get_analyzed_dataframe(pair=pair, timeframe=self.timeframe)
|
||||||
current_candle = dataframe.iloc[-1].squeeze()
|
current_candle = dataframe.iloc[-1].squeeze()
|
||||||
# desired price
|
# desired price
|
||||||
return current_candle['sma_200']
|
return current_candle["sma_200"]
|
||||||
# default: maintain existing order
|
# default: maintain existing order
|
||||||
return current_order_rate
|
return current_order_rate
|
||||||
```
|
```
|
||||||
|
@ -1006,6 +999,8 @@ Values that are above `max_leverage` will be adjusted to `max_leverage`.
|
||||||
For markets / exchanges that don't support leverage, this method is ignored.
|
For markets / exchanges that don't support leverage, this method is ignored.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
|
# Default imports
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
def leverage(self, pair: str, current_time: datetime, current_rate: float,
|
def leverage(self, pair: str, current_time: datetime, current_rate: float,
|
||||||
proposed_leverage: float, max_leverage: float, entry_tag: Optional[str], side: str,
|
proposed_leverage: float, max_leverage: float, entry_tag: Optional[str], side: str,
|
||||||
|
@ -1019,7 +1014,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
:param proposed_leverage: A leverage proposed by the bot.
|
:param proposed_leverage: A leverage proposed by the bot.
|
||||||
:param max_leverage: Max leverage allowed on this pair
|
:param max_leverage: Max leverage allowed on this pair
|
||||||
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
:param entry_tag: Optional entry_tag (buy_tag) if provided with the buy signal.
|
||||||
:param side: 'long' or 'short' - indicating the direction of the proposed trade
|
:param side: "long" or "short" - indicating the direction of the proposed trade
|
||||||
:return: A leverage amount, which is between 1.0 and max_leverage.
|
:return: A leverage amount, which is between 1.0 and max_leverage.
|
||||||
"""
|
"""
|
||||||
return 1.0
|
return 1.0
|
||||||
|
@ -1036,6 +1031,8 @@ It will be called independent of the order type (entry, exit, stoploss or positi
|
||||||
Assuming that your strategy needs to store the high value of the candle at trade entry, this is possible with this callback as the following example shows.
|
Assuming that your strategy needs to store the high value of the candle at trade entry, this is possible with this callback as the following example shows.
|
||||||
|
|
||||||
``` python
|
``` python
|
||||||
|
# Default imports
|
||||||
|
|
||||||
class AwesomeStrategy(IStrategy):
|
class AwesomeStrategy(IStrategy):
|
||||||
def order_filled(self, pair: str, trade: Trade, order: Order, current_time: datetime, **kwargs) -> None:
|
def order_filled(self, pair: str, trade: Trade, order: Order, current_time: datetime, **kwargs) -> None:
|
||||||
"""
|
"""
|
||||||
|
@ -1052,7 +1049,7 @@ class AwesomeStrategy(IStrategy):
|
||||||
last_candle = dataframe.iloc[-1].squeeze()
|
last_candle = dataframe.iloc[-1].squeeze()
|
||||||
|
|
||||||
if (trade.nr_of_successful_entries == 1) and (order.ft_order_side == trade.entry_side):
|
if (trade.nr_of_successful_entries == 1) and (order.ft_order_side == trade.entry_side):
|
||||||
trade.set_custom_data(key='entry_candle_high', value=last_candle['high'])
|
trade.set_custom_data(key="entry_candle_high", value=last_candle["high"])
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
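As a complement to the storage example above, here is a minimal, hedged sketch (not part of this commit) of reading the stored value back later, for example inside `custom_exit()`, using `trade.get_custom_data()`:

```python
# Hedged sketch: retrieving the value stored via trade.set_custom_data() above.
# Assumes the "entry_candle_high" key from the order_filled() example.
from datetime import datetime
from typing import Optional

from freqtrade.persistence import Trade
from freqtrade.strategy import IStrategy


class AwesomeStrategy(IStrategy):

    def custom_exit(self, pair: str, trade: Trade, current_time: datetime,
                    current_rate: float, current_profit: float, **kwargs) -> Optional[str]:
        entry_high = trade.get_custom_data(key="entry_candle_high")
        if entry_high is not None and current_rate > entry_high * 1.05:
            # Exit once the price clears the entry candle's high by 5%.
            return "above_entry_candle_high"
        return None
```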
|
|
|
@ -158,7 +158,7 @@ Out of the box, freqtrade installs the following technical libraries:
|
||||||
|
|
||||||
- [ta-lib](https://ta-lib.github.io/ta-lib-python/)
|
- [ta-lib](https://ta-lib.github.io/ta-lib-python/)
|
||||||
- [pandas-ta](https://twopirllc.github.io/pandas-ta/)
|
- [pandas-ta](https://twopirllc.github.io/pandas-ta/)
|
||||||
- [technical](https://github.com/freqtrade/technical/)
|
- [technical](https://technical.freqtrade.io)
|
||||||
|
|
||||||
Additional technical libraries can be installed as necessary, or custom indicators may be written / invented by the strategy author.
|
Additional technical libraries can be installed as necessary, or custom indicators may be written / invented by the strategy author.
|
||||||
|
|
||||||
|
@ -407,6 +407,8 @@ Currently this is `pair`, which can be accessed using `metadata['pair']` - and w
|
||||||
The Metadata-dict should not be modified and does not persist information across multiple calls.
|
The Metadata-dict should not be modified and does not persist information across multiple calls.
|
||||||
Instead, have a look at the [Storing information](strategy-advanced.md#storing-information-persistent) section.
|
Instead, have a look at the [Storing information](strategy-advanced.md#storing-information-persistent) section.
|
||||||
|
|
||||||
|
--8<-- "includes/strategy-imports.md"
|
||||||
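The included snippet itself is not shown in this diff; as a rough, assumed sketch only, the `# Default imports` marker used throughout the callback examples stands for a shared import block along these lines:

```python
# Assumed sketch - the actual content of includes/strategy-imports.md is not
# part of this diff; these are typical imports the examples rely on.
from datetime import datetime, timedelta
from typing import Optional, Union

from pandas import DataFrame

from freqtrade.persistence import Order, Trade
from freqtrade.strategy import IStrategy
```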
|
|
||||||
## Strategy file loading
|
## Strategy file loading
|
||||||
|
|
||||||
By default, freqtrade will attempt to load strategies from all `.py` files within `user_data/strategies`.
|
By default, freqtrade will attempt to load strategies from all `.py` files within `user_data/strategies`.
|
||||||
|
@ -715,6 +717,7 @@ This is where calling `self.dp.current_whitelist()` comes in handy.
|
||||||
|
|
||||||
??? Note "Plotting with current_whitelist"
|
??? Note "Plotting with current_whitelist"
|
||||||
Current whitelist is not supported for `plot-dataframe`, as this command is usually used by providing an explicit pairlist - and would therefore make the return values of this method misleading.
|
Current whitelist is not supported for `plot-dataframe`, as this command is usually used by providing an explicit pairlist - and would therefore make the return values of this method misleading.
|
||||||
|
It's also not supported for freqUI visualization in [webserver mode](utils.md#webserver-mode) - as the configuration for webserver mode doesn't require a pairlist to be set.
|
||||||
|
|
||||||
### *get_pair_dataframe(pair, timeframe)*
|
### *get_pair_dataframe(pair, timeframe)*
|
||||||
|
|
||||||
|
|
|
@ -13,19 +13,22 @@ Please follow the [documentation](https://www.freqtrade.io/en/stable/data-downlo
|
||||||
import os
|
import os
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
# Change directory
|
# Change directory
|
||||||
# Modify this cell to ensure that the output shows the correct path.
|
# Modify this cell to ensure that the output shows the correct path.
|
||||||
# Define all paths relative to the project root shown in the cell output
|
# Define all paths relative to the project root shown in the cell output
|
||||||
project_root = "somedir/freqtrade"
|
project_root = "somedir/freqtrade"
|
||||||
i=0
|
i = 0
|
||||||
try:
|
try:
|
||||||
os.chdir(project_root)
|
os.chdir(project_root)
|
||||||
assert Path('LICENSE').is_file()
|
if not Path("LICENSE").is_file():
|
||||||
except:
|
i = 0
|
||||||
while i<4 and (not Path('LICENSE').is_file()):
|
while i < 4 and (not Path("LICENSE").is_file()):
|
||||||
os.chdir(Path(Path.cwd(), '../'))
|
os.chdir(Path(Path.cwd(), "../"))
|
||||||
i+=1
|
i += 1
|
||||||
project_root = Path.cwd()
|
project_root = Path.cwd()
|
||||||
|
except FileNotFoundError:
|
||||||
|
print("Please define the project root relative to the current directory")
|
||||||
print(Path.cwd())
|
print(Path.cwd())
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -35,6 +38,7 @@ print(Path.cwd())
|
||||||
```python
|
```python
|
||||||
from freqtrade.configuration import Configuration
|
from freqtrade.configuration import Configuration
|
||||||
|
|
||||||
|
|
||||||
# Customize these according to your needs.
|
# Customize these according to your needs.
|
||||||
|
|
||||||
# Initialize empty configuration object
|
# Initialize empty configuration object
|
||||||
|
@ -58,12 +62,14 @@ pair = "BTC/USDT"
|
||||||
from freqtrade.data.history import load_pair_history
|
from freqtrade.data.history import load_pair_history
|
||||||
from freqtrade.enums import CandleType
|
from freqtrade.enums import CandleType
|
||||||
|
|
||||||
candles = load_pair_history(datadir=data_location,
|
|
||||||
|
candles = load_pair_history(
|
||||||
|
datadir=data_location,
|
||||||
timeframe=config["timeframe"],
|
timeframe=config["timeframe"],
|
||||||
pair=pair,
|
pair=pair,
|
||||||
data_format = "json", # Make sure to update this to your data
|
data_format="json", # Make sure to update this to your data
|
||||||
candle_type=CandleType.SPOT,
|
candle_type=CandleType.SPOT,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Confirm success
|
# Confirm success
|
||||||
print(f"Loaded {len(candles)} rows of data for {pair} from {data_location}")
|
print(f"Loaded {len(candles)} rows of data for {pair} from {data_location}")
|
||||||
|
@ -76,14 +82,16 @@ candles.head()
|
||||||
|
|
||||||
```python
|
```python
|
||||||
# Load strategy using values set above
|
# Load strategy using values set above
|
||||||
from freqtrade.resolvers import StrategyResolver
|
|
||||||
from freqtrade.data.dataprovider import DataProvider
|
from freqtrade.data.dataprovider import DataProvider
|
||||||
|
from freqtrade.resolvers import StrategyResolver
|
||||||
|
|
||||||
|
|
||||||
strategy = StrategyResolver.load_strategy(config)
|
strategy = StrategyResolver.load_strategy(config)
|
||||||
strategy.dp = DataProvider(config, None, None)
|
strategy.dp = DataProvider(config, None, None)
|
||||||
strategy.ft_bot_start()
|
strategy.ft_bot_start()
|
||||||
|
|
||||||
# Generate buy/sell signals using strategy
|
# Generate buy/sell signals using strategy
|
||||||
df = strategy.analyze_ticker(candles, {'pair': pair})
|
df = strategy.analyze_ticker(candles, {"pair": pair})
|
||||||
df.tail()
|
df.tail()
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -102,7 +110,7 @@ df.tail()
|
||||||
```python
|
```python
|
||||||
# Report results
|
# Report results
|
||||||
print(f"Generated {df['enter_long'].sum()} entry signals")
|
print(f"Generated {df['enter_long'].sum()} entry signals")
|
||||||
data = df.set_index('date', drop=False)
|
data = df.set_index("date", drop=False)
|
||||||
data.tail()
|
data.tail()
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -119,10 +127,13 @@ Analyze a trades dataframe (also used below for plotting)
|
||||||
```python
|
```python
|
||||||
from freqtrade.data.btanalysis import load_backtest_data, load_backtest_stats
|
from freqtrade.data.btanalysis import load_backtest_data, load_backtest_stats
|
||||||
|
|
||||||
|
|
||||||
# if backtest_dir points to a directory, it'll automatically load the last backtest file.
|
# if backtest_dir points to a directory, it'll automatically load the last backtest file.
|
||||||
backtest_dir = config["user_data_dir"] / "backtest_results"
|
backtest_dir = config["user_data_dir"] / "backtest_results"
|
||||||
# backtest_dir can also point to a specific file
|
# backtest_dir can also point to a specific file
|
||||||
# backtest_dir = config["user_data_dir"] / "backtest_results/backtest-result-2020-07-01_20-04-22.json"
|
# backtest_dir = (
|
||||||
|
# config["user_data_dir"] / "backtest_results/backtest-result-2020-07-01_20-04-22.json"
|
||||||
|
# )
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -131,24 +142,24 @@ backtest_dir = config["user_data_dir"] / "backtest_results"
|
||||||
# This contains all information used to generate the backtest result.
|
# This contains all information used to generate the backtest result.
|
||||||
stats = load_backtest_stats(backtest_dir)
|
stats = load_backtest_stats(backtest_dir)
|
||||||
|
|
||||||
strategy = 'SampleStrategy'
|
strategy = "SampleStrategy"
|
||||||
# All statistics are available per strategy, so if `--strategy-list` was used during backtest, this will be reflected here as well.
|
# All statistics are available per strategy, so if `--strategy-list` was used during backtest,
|
||||||
|
# this will be reflected here as well.
|
||||||
# Example usages:
|
# Example usages:
|
||||||
print(stats['strategy'][strategy]['results_per_pair'])
|
print(stats["strategy"][strategy]["results_per_pair"])
|
||||||
# Get pairlist used for this backtest
|
# Get pairlist used for this backtest
|
||||||
print(stats['strategy'][strategy]['pairlist'])
|
print(stats["strategy"][strategy]["pairlist"])
|
||||||
# Get market change (average change of all pairs from start to end of the backtest period)
|
# Get market change (average change of all pairs from start to end of the backtest period)
|
||||||
print(stats['strategy'][strategy]['market_change'])
|
print(stats["strategy"][strategy]["market_change"])
|
||||||
# Maximum drawdown ()
|
# Maximum drawdown ()
|
||||||
print(stats['strategy'][strategy]['max_drawdown'])
|
print(stats["strategy"][strategy]["max_drawdown"])
|
||||||
# Maximum drawdown start and end
|
# Maximum drawdown start and end
|
||||||
print(stats['strategy'][strategy]['drawdown_start'])
|
print(stats["strategy"][strategy]["drawdown_start"])
|
||||||
print(stats['strategy'][strategy]['drawdown_end'])
|
print(stats["strategy"][strategy]["drawdown_end"])
|
||||||
|
|
||||||
|
|
||||||
# Get strategy comparison (only relevant if multiple strategies were compared)
|
# Get strategy comparison (only relevant if multiple strategies were compared)
|
||||||
print(stats['strategy_comparison'])
|
print(stats["strategy_comparison"])
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -166,24 +177,25 @@ trades.groupby("pair")["exit_reason"].value_counts()
|
||||||
```python
|
```python
|
||||||
# Plotting equity line (starting with 0 on day 1 and adding daily profit for each backtested day)
|
# Plotting equity line (starting with 0 on day 1 and adding daily profit for each backtested day)
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
import plotly.express as px
|
||||||
|
|
||||||
from freqtrade.configuration import Configuration
|
from freqtrade.configuration import Configuration
|
||||||
from freqtrade.data.btanalysis import load_backtest_stats
|
from freqtrade.data.btanalysis import load_backtest_stats
|
||||||
import plotly.express as px
|
|
||||||
import pandas as pd
|
|
||||||
|
|
||||||
# strategy = 'SampleStrategy'
|
# strategy = 'SampleStrategy'
|
||||||
# config = Configuration.from_files(["user_data/config.json"])
|
# config = Configuration.from_files(["user_data/config.json"])
|
||||||
# backtest_dir = config["user_data_dir"] / "backtest_results"
|
# backtest_dir = config["user_data_dir"] / "backtest_results"
|
||||||
|
|
||||||
stats = load_backtest_stats(backtest_dir)
|
stats = load_backtest_stats(backtest_dir)
|
||||||
strategy_stats = stats['strategy'][strategy]
|
strategy_stats = stats["strategy"][strategy]
|
||||||
|
|
||||||
df = pd.DataFrame(columns=['dates','equity'], data=strategy_stats['daily_profit'])
|
df = pd.DataFrame(columns=["dates", "equity"], data=strategy_stats["daily_profit"])
|
||||||
df['equity_daily'] = df['equity'].cumsum()
|
df["equity_daily"] = df["equity"].cumsum()
|
||||||
|
|
||||||
fig = px.line(df, x="dates", y="equity_daily")
|
fig = px.line(df, x="dates", y="equity_daily")
|
||||||
fig.show()
|
fig.show()
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Load live trading results into a pandas dataframe
|
### Load live trading results into a pandas dataframe
|
||||||
|
@ -194,6 +206,7 @@ In case you did already some trading and want to analyze your performance
|
||||||
```python
|
```python
|
||||||
from freqtrade.data.btanalysis import load_trades_from_db
|
from freqtrade.data.btanalysis import load_trades_from_db
|
||||||
|
|
||||||
|
|
||||||
# Fetch trades from database
|
# Fetch trades from database
|
||||||
trades = load_trades_from_db("sqlite:///tradesv3.sqlite")
|
trades = load_trades_from_db("sqlite:///tradesv3.sqlite")
|
||||||
|
|
||||||
|
@ -210,8 +223,9 @@ This can be useful to find the best `max_open_trades` parameter, when used with
|
||||||
```python
|
```python
|
||||||
from freqtrade.data.btanalysis import analyze_trade_parallelism
|
from freqtrade.data.btanalysis import analyze_trade_parallelism
|
||||||
|
|
||||||
|
|
||||||
# Analyze the above
|
# Analyze the above
|
||||||
parallel_trades = analyze_trade_parallelism(trades, '5m')
|
parallel_trades = analyze_trade_parallelism(trades, "5m")
|
||||||
|
|
||||||
parallel_trades.plot()
|
parallel_trades.plot()
|
||||||
```
|
```
|
||||||
|
@ -223,22 +237,22 @@ Freqtrade offers interactive plotting capabilities based on plotly.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from freqtrade.plot.plotting import generate_candlestick_graph
|
from freqtrade.plot.plotting import generate_candlestick_graph
|
||||||
|
|
||||||
|
|
||||||
# Limit graph period to keep plotly quick and reactive
|
# Limit graph period to keep plotly quick and reactive
|
||||||
|
|
||||||
# Filter trades to one pair
|
# Filter trades to one pair
|
||||||
trades_red = trades.loc[trades['pair'] == pair]
|
trades_red = trades.loc[trades["pair"] == pair]
|
||||||
|
|
||||||
data_red = data['2019-06-01':'2019-06-10']
|
data_red = data["2019-06-01":"2019-06-10"]
|
||||||
# Generate candlestick graph
|
# Generate candlestick graph
|
||||||
graph = generate_candlestick_graph(pair=pair,
|
graph = generate_candlestick_graph(
|
||||||
|
pair=pair,
|
||||||
data=data_red,
|
data=data_red,
|
||||||
trades=trades_red,
|
trades=trades_red,
|
||||||
indicators1=['sma20', 'ema50', 'ema55'],
|
indicators1=["sma20", "ema50", "ema55"],
|
||||||
indicators2=['rsi', 'macd', 'macdsignal', 'macdhist']
|
indicators2=["rsi", "macd", "macdsignal", "macdhist"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -248,7 +262,6 @@ graph = generate_candlestick_graph(pair=pair,
|
||||||
|
|
||||||
# Render graph in a separate window
|
# Render graph in a separate window
|
||||||
graph.show(renderer="browser")
|
graph.show(renderer="browser")
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Plot average profit per trade as distribution graph
|
## Plot average profit per trade as distribution graph
|
||||||
|
@ -257,12 +270,12 @@ graph.show(renderer="browser")
|
||||||
```python
|
```python
|
||||||
import plotly.figure_factory as ff
|
import plotly.figure_factory as ff
|
||||||
|
|
||||||
|
|
||||||
hist_data = [trades.profit_ratio]
|
hist_data = [trades.profit_ratio]
|
||||||
group_labels = ['profit_ratio'] # name of the dataset
|
group_labels = ["profit_ratio"] # name of the dataset
|
||||||
|
|
||||||
fig = ff.create_distplot(hist_data, group_labels, bin_size=0.01)
|
fig = ff.create_distplot(hist_data, group_labels, bin_size=0.01)
|
||||||
fig.show()
|
fig.show()
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Feel free to submit an issue or Pull Request enhancing this document if you would like to share ideas on how to best analyze the data.
|
Feel free to submit an issue or Pull Request enhancing this document if you would like to share ideas on how to best analyze the data.
|
||||||
|
|
|
@ -11,3 +11,7 @@
|
||||||
.rst-versions .rst-other-versions {
|
.rst-versions .rst-other-versions {
|
||||||
color: white;
|
color: white;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.md-version__list {
|
||||||
|
font-weight: 500 !important;
|
||||||
|
}
|
||||||
|
|
|
@ -418,8 +418,9 @@ Common arguments:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
By default, only active pairs/markets are shown. Active pairs/markets are those that can currently be traded
|
By default, only active pairs/markets are shown. Active pairs/markets are those that can currently be traded on the exchange.
|
||||||
on the exchange. The see the list of all pairs/markets (not only the active ones), use the `-a`/`-all` option.
|
You can use the `-a`/`--all` option to see the list of all pairs/markets, including the inactive ones.
|
||||||
|
Pairs may be listed as untradeable if the smallest tradeable price for the market is very small, i.e. less than `1e-11` (`0.00000000001`)
|
||||||
|
|
||||||
Pairs/markets are sorted by their symbol string in the printed output.
|
Pairs/markets are sorted by their symbol string in the printed output.
|
||||||
|
|
||||||
|
@ -488,7 +489,7 @@ freqtrade test-pairlist --config config.json --quote USDT BTC
|
||||||
|
|
||||||
`freqtrade convert-db` can be used to convert your database from one system to another (sqlite -> postgres, postgres -> other postgres), migrating all trades, orders and Pairlocks.
|
`freqtrade convert-db` can be used to convert your database from one system to another (sqlite -> postgres, postgres -> other postgres), migrating all trades, orders and Pairlocks.
|
||||||
|
|
||||||
Please refer to the [SQL cheatsheet](sql_cheatsheet.md#use-a-different-database-system) to learn about requirements for different database systems.
|
Please refer to the [corresponding documentation](advanced-setup.md#use-a-different-database-system) to learn about requirements for different database systems.
|
||||||
|
|
||||||
```
|
```
|
||||||
usage: freqtrade convert-db [-h] [--db-url PATH] [--db-url-from PATH]
|
usage: freqtrade convert-db [-h] [--db-url PATH] [--db-url-from PATH]
|
||||||
|
|
|
@ -1,12 +1,12 @@
|
||||||
"""Freqtrade bot"""
|
"""Freqtrade bot"""
|
||||||
|
|
||||||
__version__ = "2024.7-dev"
|
__version__ = "2024.9-dev"
|
||||||
|
|
||||||
if "dev" in __version__:
|
if "dev" in __version__:
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import subprocess
|
import subprocess # noqa: S404
|
||||||
|
|
||||||
freqtrade_basedir = Path(__file__).parent
|
freqtrade_basedir = Path(__file__).parent
|
||||||
|
|
||||||
|
|
|
@ -15,6 +15,7 @@ from freqtrade.commands.data_commands import (
|
||||||
start_convert_trades,
|
start_convert_trades,
|
||||||
start_download_data,
|
start_download_data,
|
||||||
start_list_data,
|
start_list_data,
|
||||||
|
start_list_trades_data,
|
||||||
)
|
)
|
||||||
from freqtrade.commands.db_commands import start_convert_db
|
from freqtrade.commands.db_commands import start_convert_db
|
||||||
from freqtrade.commands.deploy_commands import (
|
from freqtrade.commands.deploy_commands import (
|
||||||
|
|
|
@ -2,10 +2,10 @@
|
||||||
This module contains the argument manager class
|
This module contains the argument manager class
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import argparse
|
from argparse import ArgumentParser, Namespace, _ArgumentGroup
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, Dict, List, Optional
|
from typing import Any, Dict, List, Optional, Union
|
||||||
|
|
||||||
from freqtrade.commands.cli_options import AVAILABLE_CLI_OPTIONS
|
from freqtrade.commands.cli_options import AVAILABLE_CLI_OPTIONS
|
||||||
from freqtrade.constants import DEFAULT_CONFIG
|
from freqtrade.constants import DEFAULT_CONFIG
|
||||||
|
@ -132,7 +132,15 @@ ARGS_CONVERT_TRADES = [
|
||||||
"trading_mode",
|
"trading_mode",
|
||||||
]
|
]
|
||||||
|
|
||||||
ARGS_LIST_DATA = ["exchange", "dataformat_ohlcv", "pairs", "trading_mode", "show_timerange"]
|
ARGS_LIST_DATA = [
|
||||||
|
"exchange",
|
||||||
|
"dataformat_ohlcv",
|
||||||
|
"dataformat_trades",
|
||||||
|
"trades",
|
||||||
|
"pairs",
|
||||||
|
"trading_mode",
|
||||||
|
"show_timerange",
|
||||||
|
]
|
||||||
|
|
||||||
ARGS_DOWNLOAD_DATA = [
|
ARGS_DOWNLOAD_DATA = [
|
||||||
"pairs",
|
"pairs",
|
||||||
|
@ -226,6 +234,19 @@ ARGS_ANALYZE_ENTRIES_EXITS = [
|
||||||
"analysis_csv_path",
|
"analysis_csv_path",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
ARGS_STRATEGY_UPDATER = ["strategy_list", "strategy_path", "recursive_strategy_search"]
|
||||||
|
|
||||||
|
ARGS_LOOKAHEAD_ANALYSIS = [
|
||||||
|
a
|
||||||
|
for a in ARGS_BACKTEST
|
||||||
|
if a
|
||||||
|
not in ("position_stacking", "use_max_market_positions", "backtest_cache", "backtest_breakdown")
|
||||||
|
] + ["minimum_trade_amount", "targeted_trade_amount", "lookahead_analysis_exportfilename"]
|
||||||
|
|
||||||
|
ARGS_RECURSIVE_ANALYSIS = ["timeframe", "timerange", "dataformat_ohlcv", "pairs", "startup_candle"]
|
||||||
|
|
||||||
|
# Command level configs - keep at the bottom of the above definitions
|
||||||
NO_CONF_REQURIED = [
|
NO_CONF_REQURIED = [
|
||||||
"convert-data",
|
"convert-data",
|
||||||
"convert-trade-data",
|
"convert-trade-data",
|
||||||
|
@ -248,14 +269,6 @@ NO_CONF_REQURIED = [
|
||||||
|
|
||||||
NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-strategy"]
|
NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-strategy"]
|
||||||
|
|
||||||
ARGS_STRATEGY_UPDATER = ["strategy_list", "strategy_path", "recursive_strategy_search"]
|
|
||||||
|
|
||||||
ARGS_LOOKAHEAD_ANALYSIS = [
|
|
||||||
a for a in ARGS_BACKTEST if a not in ("position_stacking", "use_max_market_positions", "cache")
|
|
||||||
] + ["minimum_trade_amount", "targeted_trade_amount", "lookahead_analysis_exportfilename"]
|
|
||||||
|
|
||||||
ARGS_RECURSIVE_ANALYSIS = ["timeframe", "timerange", "dataformat_ohlcv", "pairs", "startup_candle"]
|
|
||||||
|
|
||||||
|
|
||||||
class Arguments:
|
class Arguments:
|
||||||
"""
|
"""
|
||||||
|
@ -264,7 +277,7 @@ class Arguments:
|
||||||
|
|
||||||
def __init__(self, args: Optional[List[str]]) -> None:
|
def __init__(self, args: Optional[List[str]]) -> None:
|
||||||
self.args = args
|
self.args = args
|
||||||
self._parsed_arg: Optional[argparse.Namespace] = None
|
self._parsed_arg: Optional[Namespace] = None
|
||||||
|
|
||||||
def get_parsed_arg(self) -> Dict[str, Any]:
|
def get_parsed_arg(self) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
|
@ -277,7 +290,7 @@ class Arguments:
|
||||||
|
|
||||||
return vars(self._parsed_arg)
|
return vars(self._parsed_arg)
|
||||||
|
|
||||||
def _parse_args(self) -> argparse.Namespace:
|
def _parse_args(self) -> Namespace:
|
||||||
"""
|
"""
|
||||||
Parses given arguments and returns an argparse Namespace instance.
|
Parses given arguments and returns an argparse Namespace instance.
|
||||||
"""
|
"""
|
||||||
|
@ -306,7 +319,9 @@ class Arguments:
|
||||||
|
|
||||||
return parsed_arg
|
return parsed_arg
|
||||||
|
|
||||||
def _build_args(self, optionlist, parser):
|
def _build_args(
|
||||||
|
self, optionlist: List[str], parser: Union[ArgumentParser, _ArgumentGroup]
|
||||||
|
) -> None:
|
||||||
for val in optionlist:
|
for val in optionlist:
|
||||||
opt = AVAILABLE_CLI_OPTIONS[val]
|
opt = AVAILABLE_CLI_OPTIONS[val]
|
||||||
parser.add_argument(*opt.cli, dest=val, **opt.kwargs)
|
parser.add_argument(*opt.cli, dest=val, **opt.kwargs)
|
||||||
|
@ -317,16 +332,16 @@ class Arguments:
|
||||||
:return: None
|
:return: None
|
||||||
"""
|
"""
|
||||||
# Build shared arguments (as group Common Options)
|
# Build shared arguments (as group Common Options)
|
||||||
_common_parser = argparse.ArgumentParser(add_help=False)
|
_common_parser = ArgumentParser(add_help=False)
|
||||||
group = _common_parser.add_argument_group("Common arguments")
|
group = _common_parser.add_argument_group("Common arguments")
|
||||||
self._build_args(optionlist=ARGS_COMMON, parser=group)
|
self._build_args(optionlist=ARGS_COMMON, parser=group)
|
||||||
|
|
||||||
_strategy_parser = argparse.ArgumentParser(add_help=False)
|
_strategy_parser = ArgumentParser(add_help=False)
|
||||||
strategy_group = _strategy_parser.add_argument_group("Strategy arguments")
|
strategy_group = _strategy_parser.add_argument_group("Strategy arguments")
|
||||||
self._build_args(optionlist=ARGS_STRATEGY, parser=strategy_group)
|
self._build_args(optionlist=ARGS_STRATEGY, parser=strategy_group)
|
||||||
|
|
||||||
# Build main command
|
# Build main command
|
||||||
self.parser = argparse.ArgumentParser(
|
self.parser = ArgumentParser(
|
||||||
prog="freqtrade", description="Free, open source crypto trading bot"
|
prog="freqtrade", description="Free, open source crypto trading bot"
|
||||||
)
|
)
|
||||||
self._build_args(optionlist=["version"], parser=self.parser)
|
self._build_args(optionlist=["version"], parser=self.parser)
|
||||||
|
|
|
@ -446,8 +446,12 @@ AVAILABLE_CLI_OPTIONS = {
|
||||||
),
|
),
|
||||||
"download_trades": Arg(
|
"download_trades": Arg(
|
||||||
"--dl-trades",
|
"--dl-trades",
|
||||||
help="Download trades instead of OHLCV data. The bot will resample trades to the "
|
help="Download trades instead of OHLCV data.",
|
||||||
"desired timeframe as specified as --timeframes/-t.",
|
action="store_true",
|
||||||
|
),
|
||||||
|
"trades": Arg(
|
||||||
|
"--trades",
|
||||||
|
help="Work on trades data instead of OHLCV data.",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
),
|
),
|
||||||
"convert_trades": Arg(
|
"convert_trades": Arg(
|
||||||
|
|
|
@ -14,8 +14,10 @@ from freqtrade.data.history import download_data_main
|
||||||
from freqtrade.enums import CandleType, RunMode, TradingMode
|
from freqtrade.enums import CandleType, RunMode, TradingMode
|
||||||
from freqtrade.exceptions import ConfigurationError
|
from freqtrade.exceptions import ConfigurationError
|
||||||
from freqtrade.exchange import timeframe_to_minutes
|
from freqtrade.exchange import timeframe_to_minutes
|
||||||
|
from freqtrade.misc import plural
|
||||||
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
|
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
|
||||||
from freqtrade.resolvers import ExchangeResolver
|
from freqtrade.resolvers import ExchangeResolver
|
||||||
|
from freqtrade.util import print_rich_table
|
||||||
from freqtrade.util.migrations import migrate_data
|
from freqtrade.util.migrations import migrate_data
|
||||||
|
|
||||||
|
|
||||||
|
@ -114,12 +116,14 @@ def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
|
||||||
|
|
||||||
def start_list_data(args: Dict[str, Any]) -> None:
|
def start_list_data(args: Dict[str, Any]) -> None:
|
||||||
"""
|
"""
|
||||||
List available backtest data
|
List available OHLCV data
|
||||||
"""
|
"""
|
||||||
|
|
||||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
if args["trades"]:
|
||||||
|
start_list_trades_data(args)
|
||||||
|
return
|
||||||
|
|
||||||
from tabulate import tabulate
|
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||||
|
|
||||||
from freqtrade.data.history import get_datahandler
|
from freqtrade.data.history import get_datahandler
|
||||||
|
|
||||||
|
@ -128,11 +132,9 @@ def start_list_data(args: Dict[str, Any]) -> None:
|
||||||
paircombs = dhc.ohlcv_get_available_data(
|
paircombs = dhc.ohlcv_get_available_data(
|
||||||
config["datadir"], config.get("trading_mode", TradingMode.SPOT)
|
config["datadir"], config.get("trading_mode", TradingMode.SPOT)
|
||||||
)
|
)
|
||||||
|
|
||||||
if args["pairs"]:
|
if args["pairs"]:
|
||||||
paircombs = [comb for comb in paircombs if comb[0] in args["pairs"]]
|
paircombs = [comb for comb in paircombs if comb[0] in args["pairs"]]
|
||||||
|
title = f"Found {len(paircombs)} pair / timeframe combinations."
|
||||||
print(f"Found {len(paircombs)} pair / timeframe combinations.")
|
|
||||||
if not config.get("show_timerange"):
|
if not config.get("show_timerange"):
|
||||||
groupedpair = defaultdict(list)
|
groupedpair = defaultdict(list)
|
||||||
for pair, timeframe, candle_type in sorted(
|
for pair, timeframe, candle_type in sorted(
|
||||||
|
@ -141,25 +143,21 @@ def start_list_data(args: Dict[str, Any]) -> None:
|
||||||
groupedpair[(pair, candle_type)].append(timeframe)
|
groupedpair[(pair, candle_type)].append(timeframe)
|
||||||
|
|
||||||
if groupedpair:
|
if groupedpair:
|
||||||
print(
|
print_rich_table(
|
||||||
tabulate(
|
|
||||||
[
|
[
|
||||||
(pair, ", ".join(timeframes), candle_type)
|
(pair, ", ".join(timeframes), candle_type)
|
||||||
for (pair, candle_type), timeframes in groupedpair.items()
|
for (pair, candle_type), timeframes in groupedpair.items()
|
||||||
],
|
],
|
||||||
headers=("Pair", "Timeframe", "Type"),
|
("Pair", "Timeframe", "Type"),
|
||||||
tablefmt="psql",
|
title,
|
||||||
stralign="right",
|
table_kwargs={"min_width": 50},
|
||||||
)
|
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
paircombs1 = [
|
paircombs1 = [
|
||||||
(pair, timeframe, candle_type, *dhc.ohlcv_data_min_max(pair, timeframe, candle_type))
|
(pair, timeframe, candle_type, *dhc.ohlcv_data_min_max(pair, timeframe, candle_type))
|
||||||
for pair, timeframe, candle_type in paircombs
|
for pair, timeframe, candle_type in paircombs
|
||||||
]
|
]
|
||||||
|
print_rich_table(
|
||||||
print(
|
|
||||||
tabulate(
|
|
||||||
[
|
[
|
||||||
(
|
(
|
||||||
pair,
|
pair,
|
||||||
|
@ -167,14 +165,61 @@ def start_list_data(args: Dict[str, Any]) -> None:
|
||||||
candle_type,
|
candle_type,
|
||||||
start.strftime(DATETIME_PRINT_FORMAT),
|
start.strftime(DATETIME_PRINT_FORMAT),
|
||||||
end.strftime(DATETIME_PRINT_FORMAT),
|
end.strftime(DATETIME_PRINT_FORMAT),
|
||||||
length,
|
str(length),
|
||||||
)
|
)
|
||||||
for pair, timeframe, candle_type, start, end, length in sorted(
|
for pair, timeframe, candle_type, start, end, length in sorted(
|
||||||
paircombs1, key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
|
paircombs1, key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
|
||||||
)
|
)
|
||||||
],
|
],
|
||||||
headers=("Pair", "Timeframe", "Type", "From", "To", "Candles"),
|
("Pair", "Timeframe", "Type", "From", "To", "Candles"),
|
||||||
tablefmt="psql",
|
summary=title,
|
||||||
stralign="right",
|
table_kwargs={"min_width": 50},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def start_list_trades_data(args: Dict[str, Any]) -> None:
|
||||||
|
"""
|
||||||
|
List available Trades data
|
||||||
|
"""
|
||||||
|
|
||||||
|
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||||
|
|
||||||
|
from freqtrade.data.history import get_datahandler
|
||||||
|
|
||||||
|
dhc = get_datahandler(config["datadir"], config["dataformat_trades"])
|
||||||
|
|
||||||
|
paircombs = dhc.trades_get_available_data(
|
||||||
|
config["datadir"], config.get("trading_mode", TradingMode.SPOT)
|
||||||
|
)
|
||||||
|
|
||||||
|
if args["pairs"]:
|
||||||
|
paircombs = [comb for comb in paircombs if comb in args["pairs"]]
|
||||||
|
|
||||||
|
title = f"Found trades data for {len(paircombs)} {plural(len(paircombs), 'pair')}."
|
||||||
|
if not config.get("show_timerange"):
|
||||||
|
print_rich_table(
|
||||||
|
[(pair, config.get("candle_type_def", CandleType.SPOT)) for pair in sorted(paircombs)],
|
||||||
|
("Pair", "Type"),
|
||||||
|
title,
|
||||||
|
table_kwargs={"min_width": 50},
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
paircombs1 = [
|
||||||
|
(pair, *dhc.trades_data_min_max(pair, config.get("trading_mode", TradingMode.SPOT)))
|
||||||
|
for pair in paircombs
|
||||||
|
]
|
||||||
|
print_rich_table(
|
||||||
|
[
|
||||||
|
(
|
||||||
|
pair,
|
||||||
|
config.get("candle_type_def", CandleType.SPOT),
|
||||||
|
start.strftime(DATETIME_PRINT_FORMAT),
|
||||||
|
end.strftime(DATETIME_PRINT_FORMAT),
|
||||||
|
str(length),
|
||||||
|
)
|
||||||
|
for pair, start, end, length in sorted(paircombs1, key=lambda x: (x[0]))
|
||||||
|
],
|
||||||
|
("Pair", "Type", "From", "To", "Trades"),
|
||||||
|
summary=title,
|
||||||
|
table_kwargs={"min_width": 50},
|
||||||
)
|
)
|
||||||
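A small, hedged sketch of the new `print_rich_table` helper as it is invoked above (the call signature is inferred from the usages in this commit, not from the helper's own definition):

```python
# Hedged sketch: rendering a summary table the same way start_list_data() does.
# Signature inferred from the calls above: rows, headers, then a summary/title.
from freqtrade.util import print_rich_table

rows = [
    ("BTC/USDT", "5m, 1h", "spot"),
    ("ETH/USDT", "5m", "spot"),
]
print_rich_table(
    rows,
    ("Pair", "Timeframe", "Type"),
    "Found 2 pair / timeframe combinations.",
    table_kwargs={"min_width": 50},
)
```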
|
|
|
@ -2,8 +2,6 @@ import logging
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
from typing import Any, Dict
|
from typing import Any, Dict
|
||||||
|
|
||||||
from colorama import init as colorama_init
|
|
||||||
|
|
||||||
from freqtrade.configuration import setup_utils_configuration
|
from freqtrade.configuration import setup_utils_configuration
|
||||||
from freqtrade.data.btanalysis import get_latest_hyperopt_file
|
from freqtrade.data.btanalysis import get_latest_hyperopt_file
|
||||||
from freqtrade.enums import RunMode
|
from freqtrade.enums import RunMode
|
||||||
|
@ -18,6 +16,7 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:
|
||||||
"""
|
"""
|
||||||
List hyperopt epochs previously evaluated
|
List hyperopt epochs previously evaluated
|
||||||
"""
|
"""
|
||||||
|
from freqtrade.optimize.hyperopt_output import HyperoptOutput
|
||||||
from freqtrade.optimize.hyperopt_tools import HyperoptTools
|
from freqtrade.optimize.hyperopt_tools import HyperoptTools
|
||||||
|
|
||||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||||
|
@ -35,21 +34,17 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:
|
||||||
# Previous evaluations
|
# Previous evaluations
|
||||||
epochs, total_epochs = HyperoptTools.load_filtered_results(results_file, config)
|
epochs, total_epochs = HyperoptTools.load_filtered_results(results_file, config)
|
||||||
|
|
||||||
if print_colorized:
|
|
||||||
colorama_init(autoreset=True)
|
|
||||||
|
|
||||||
if not export_csv:
|
if not export_csv:
|
||||||
try:
|
try:
|
||||||
print(
|
h_out = HyperoptOutput()
|
||||||
HyperoptTools.get_result_table(
|
h_out.add_data(
|
||||||
config,
|
config,
|
||||||
epochs,
|
epochs,
|
||||||
total_epochs,
|
total_epochs,
|
||||||
not config.get("hyperopt_list_best", False),
|
not config.get("hyperopt_list_best", False),
|
||||||
print_colorized,
|
|
||||||
0,
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
h_out.print(print_colorized=print_colorized)
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
print("User interrupted..")
|
print("User interrupted..")
|
||||||
|
|
||||||
|
|
|
@ -4,17 +4,18 @@ import sys
|
||||||
from typing import Any, Dict, List, Union
|
from typing import Any, Dict, List, Union
|
||||||
|
|
||||||
import rapidjson
|
import rapidjson
|
||||||
from colorama import Fore, Style
|
from rich.console import Console
|
||||||
from colorama import init as colorama_init
|
from rich.table import Table
|
||||||
from tabulate import tabulate
|
from rich.text import Text
|
||||||
|
|
||||||
from freqtrade.configuration import setup_utils_configuration
|
from freqtrade.configuration import setup_utils_configuration
|
||||||
from freqtrade.enums import RunMode
|
from freqtrade.enums import RunMode
|
||||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||||
from freqtrade.exchange import list_available_exchanges, market_is_active
|
from freqtrade.exchange import list_available_exchanges, market_is_active
|
||||||
|
from freqtrade.ft_types import ValidExchangesType
|
||||||
from freqtrade.misc import parse_db_uri_for_logging, plural
|
from freqtrade.misc import parse_db_uri_for_logging, plural
|
||||||
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
||||||
from freqtrade.types import ValidExchangesType
|
from freqtrade.util import print_rich_table
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@ -26,71 +27,76 @@ def start_list_exchanges(args: Dict[str, Any]) -> None:
|
||||||
:param args: Cli args from Arguments()
|
:param args: Cli args from Arguments()
|
||||||
:return: None
|
:return: None
|
||||||
"""
|
"""
|
||||||
exchanges = list_available_exchanges(args["list_exchanges_all"])
|
available_exchanges: List[ValidExchangesType] = list_available_exchanges(
|
||||||
|
args["list_exchanges_all"]
|
||||||
|
)
|
||||||
|
|
||||||
if args["print_one_column"]:
|
if args["print_one_column"]:
|
||||||
print("\n".join([e["name"] for e in exchanges]))
|
print("\n".join([e["classname"] for e in available_exchanges]))
|
||||||
else:
|
else:
|
||||||
headers = {
|
if args["list_exchanges_all"]:
|
||||||
"name": "Exchange name",
|
title = (
|
||||||
"supported": "Supported",
|
f"All exchanges supported by the ccxt library "
|
||||||
"trade_modes": "Markets",
|
f"({len(available_exchanges)} exchanges):"
|
||||||
"comment": "Reason",
|
)
|
||||||
}
|
else:
|
||||||
headers.update({"valid": "Valid"} if args["list_exchanges_all"] else {})
|
available_exchanges = [e for e in available_exchanges if e["valid"] is not False]
|
||||||
|
title = f"Exchanges available for Freqtrade ({len(available_exchanges)} exchanges):"
|
||||||
|
|
||||||
def build_entry(exchange: ValidExchangesType, valid: bool):
|
table = Table(title=title)
|
||||||
valid_entry = {"valid": exchange["valid"]} if valid else {}
|
|
||||||
result: Dict[str, Union[str, bool]] = {
|
table.add_column("Exchange Name")
|
||||||
"name": exchange["name"],
|
table.add_column("Class Name")
|
||||||
**valid_entry,
|
table.add_column("Markets")
|
||||||
"supported": "Official" if exchange["supported"] else "",
|
table.add_column("Reason")
|
||||||
"trade_modes": ", ".join(
|
|
||||||
(f"{a['margin_mode']} " if a["margin_mode"] else "") + a["trading_mode"]
|
for exchange in available_exchanges:
|
||||||
|
name = Text(exchange["name"])
|
||||||
|
if exchange["supported"]:
|
||||||
|
name.append(" (Supported)", style="italic")
|
||||||
|
name.stylize("green bold")
|
||||||
|
classname = Text(exchange["classname"])
|
||||||
|
if exchange["is_alias"]:
|
||||||
|
name.stylize("strike")
|
||||||
|
classname.stylize("strike")
|
||||||
|
classname.append(f" (use {exchange['alias_for']})", style="italic")
|
||||||
|
|
||||||
|
trade_modes = Text(
|
||||||
|
", ".join(
|
||||||
|
(f"{a.get('margin_mode', '')} {a['trading_mode']}").lstrip()
|
||||||
for a in exchange["trade_modes"]
|
for a in exchange["trade_modes"]
|
||||||
),
|
),
|
||||||
"comment": exchange["comment"],
|
style="",
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
if args["list_exchanges_all"]:
|
|
||||||
print("All exchanges supported by the ccxt library:")
|
|
||||||
exchanges = [build_entry(e, True) for e in exchanges]
|
|
||||||
else:
|
|
||||||
print("Exchanges available for Freqtrade:")
|
|
||||||
exchanges = [build_entry(e, False) for e in exchanges if e["valid"] is not False]
|
|
||||||
|
|
||||||
print(
|
|
||||||
tabulate(
|
|
||||||
exchanges,
|
|
||||||
headers=headers,
|
|
||||||
)
|
)
|
||||||
|
if exchange["dex"]:
|
||||||
|
trade_modes = Text("DEX: ") + trade_modes
|
||||||
|
trade_modes.stylize("bold", 0, 3)
|
||||||
|
|
||||||
|
table.add_row(
|
||||||
|
name,
|
||||||
|
classname,
|
||||||
|
trade_modes,
|
||||||
|
exchange["comment"],
|
||||||
|
style=None if exchange["valid"] else "red",
|
||||||
)
|
)
|
||||||
|
# table.add_row(*[exchange[header] for header in headers])
|
||||||
|
|
||||||
|
console = Console()
|
||||||
|
console.print(table)
|
||||||
|
|
||||||
|
|
||||||
def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
|
def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
|
||||||
if print_colorized:
|
|
||||||
colorama_init(autoreset=True)
|
|
||||||
red = Fore.RED
|
|
||||||
yellow = Fore.YELLOW
|
|
||||||
reset = Style.RESET_ALL
|
|
||||||
else:
|
|
||||||
red = ""
|
|
||||||
yellow = ""
|
|
||||||
reset = ""
|
|
||||||
|
|
||||||
names = [s["name"] for s in objs]
|
names = [s["name"] for s in objs]
|
||||||
objs_to_print = [
|
objs_to_print: List[Dict[str, Union[Text, str]]] = [
|
||||||
{
|
{
|
||||||
"name": s["name"] if s["name"] else "--",
|
"name": Text(s["name"] if s["name"] else "--"),
|
||||||
"location": s["location_rel"],
|
"location": s["location_rel"],
|
||||||
"status": (
|
"status": (
|
||||||
red + "LOAD FAILED" + reset
|
Text("LOAD FAILED", style="bold red")
|
||||||
if s["class"] is None
|
if s["class"] is None
|
||||||
else "OK"
|
else Text("OK", style="bold green")
|
||||||
if names.count(s["name"]) == 1
|
if names.count(s["name"]) == 1
|
||||||
else yellow + "DUPLICATE NAME" + reset
|
else Text("DUPLICATE NAME", style="bold yellow")
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
for s in objs
|
for s in objs
|
||||||
|
@ -100,11 +106,23 @@ def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
|
||||||
objs_to_print[idx].update(
|
objs_to_print[idx].update(
|
||||||
{
|
{
|
||||||
"hyperoptable": "Yes" if s["hyperoptable"]["count"] > 0 else "No",
|
"hyperoptable": "Yes" if s["hyperoptable"]["count"] > 0 else "No",
|
||||||
"buy-Params": len(s["hyperoptable"].get("buy", [])),
|
"buy-Params": str(len(s["hyperoptable"].get("buy", []))),
|
||||||
"sell-Params": len(s["hyperoptable"].get("sell", [])),
|
"sell-Params": str(len(s["hyperoptable"].get("sell", []))),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
print(tabulate(objs_to_print, headers="keys", tablefmt="psql", stralign="right"))
|
table = Table()
|
||||||
|
|
||||||
|
for header in objs_to_print[0].keys():
|
||||||
|
table.add_column(header.capitalize(), justify="right")
|
||||||
|
|
||||||
|
for row in objs_to_print:
|
||||||
|
table.add_row(*[row[header] for header in objs_to_print[0].keys()])
|
||||||
|
|
||||||
|
console = Console(
|
||||||
|
color_system="auto" if print_colorized else None,
|
||||||
|
width=200 if "pytest" in sys.modules else None,
|
||||||
|
)
|
||||||
|
console.print(table)
|
||||||
|
|
||||||
|
|
||||||
def start_list_strategies(args: Dict[str, Any]) -> None:
|
def start_list_strategies(args: Dict[str, Any]) -> None:
|
||||||
|
@ -269,9 +287,7 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
|
||||||
writer.writeheader()
|
writer.writeheader()
|
||||||
writer.writerows(tabular_data)
|
writer.writerows(tabular_data)
|
||||||
else:
|
else:
|
||||||
# print data as a table, with the human-readable summary
|
print_rich_table(tabular_data, headers, summary_str)
|
||||||
print(f"{summary_str}:")
|
|
||||||
print(tabulate(tabular_data, headers="keys", tablefmt="psql", stralign="right"))
|
|
||||||
elif not (
|
elif not (
|
||||||
args.get("print_one_column", False)
|
args.get("print_one_column", False)
|
||||||
or args.get("list_pairs_print_json", False)
|
or args.get("list_pairs_print_json", False)
|
||||||
|
|
|
@ -1,5 +1,6 @@
# flake8: noqa: F401

from freqtrade.configuration.asyncio_config import asyncio_setup
from freqtrade.configuration.config_secrets import sanitize_config
from freqtrade.configuration.config_setup import setup_utils_configuration
from freqtrade.configuration.config_validation import validate_config_consistency
10 freqtrade/configuration/asyncio_config.py Normal file
@ -0,0 +1,10 @@
import sys


def asyncio_setup() -> None:  # pragma: no cover
    # Set eventloop for win32 setups

    if sys.platform == "win32":
        import asyncio

        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

1313 freqtrade/configuration/config_schema.py Normal file
File diff suppressed because it is too large
@ -14,9 +14,17 @@ def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:
        return config
    keys_to_remove = [
        "exchange.key",
        "exchange.api_key",
        "exchange.apiKey",
        "exchange.secret",
        "exchange.password",
        "exchange.uid",
        "exchange.account_id",
        "exchange.accountId",
        "exchange.wallet_address",
        "exchange.walletAddress",
        "exchange.private_key",
        "exchange.privateKey",
        "telegram.token",
        "telegram.chat_id",
        "discord.webhook_url",
@ -29,8 +37,10 @@ def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:
            nested_config = config
            for nested_key in nested_keys[:-1]:
                nested_config = nested_config.get(nested_key, {})
            if nested_keys[-1] in nested_config:
                nested_config[nested_keys[-1]] = "REDACTED"
        else:
            if key in config:
                config[key] = "REDACTED"

    return config
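A minimal sketch of how sanitize_config behaves after the change above; the config values are placeholders and the import path follows the re-export added in freqtrade/configuration/__init__.py:

from freqtrade.configuration import sanitize_config

config = {
    "exchange": {"name": "binance", "key": "my-api-key", "secret": "my-api-secret"},
    "telegram": {"token": "123:abc", "chat_id": "42"},
}
safe = sanitize_config(config)
# Every dotted key listed in keys_to_remove that exists in the config is replaced
# with the literal string "REDACTED", e.g. safe["exchange"]["key"] == "REDACTED".
# Pass show_sensitive=True to return the config unchanged.
print(safe)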
@ -1,13 +1,22 @@
import logging
from collections import Counter
from copy import deepcopy
from datetime import datetime
from typing import Any, Dict

from jsonschema import Draft4Validator, validators
from jsonschema.exceptions import ValidationError, best_match

from freqtrade import constants
from freqtrade.configuration.config_schema import (
    CONF_SCHEMA,
    SCHEMA_BACKTEST_REQUIRED,
    SCHEMA_BACKTEST_REQUIRED_FINAL,
    SCHEMA_MINIMAL_REQUIRED,
    SCHEMA_MINIMAL_WEBSERVER,
    SCHEMA_TRADE_REQUIRED,
)
from freqtrade.configuration.deprecated_settings import process_deprecated_setting
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT
from freqtrade.enums import RunMode, TradingMode
from freqtrade.exceptions import ConfigurationError

@ -41,18 +50,18 @@ def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> D
    :param conf: Config in JSON format
    :return: Returns the config if valid, otherwise throw an exception
    """
    conf_schema = deepcopy(constants.CONF_SCHEMA)
    conf_schema = deepcopy(CONF_SCHEMA)
    if conf.get("runmode", RunMode.OTHER) in (RunMode.DRY_RUN, RunMode.LIVE):
        conf_schema["required"] = constants.SCHEMA_TRADE_REQUIRED
        conf_schema["required"] = SCHEMA_TRADE_REQUIRED
    elif conf.get("runmode", RunMode.OTHER) in (RunMode.BACKTEST, RunMode.HYPEROPT):
        if preliminary:
            conf_schema["required"] = constants.SCHEMA_BACKTEST_REQUIRED
            conf_schema["required"] = SCHEMA_BACKTEST_REQUIRED
        else:
            conf_schema["required"] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL
            conf_schema["required"] = SCHEMA_BACKTEST_REQUIRED_FINAL
    elif conf.get("runmode", RunMode.OTHER) == RunMode.WEBSERVER:
        conf_schema["required"] = constants.SCHEMA_MINIMAL_WEBSERVER
        conf_schema["required"] = SCHEMA_MINIMAL_WEBSERVER
    else:
        conf_schema["required"] = constants.SCHEMA_MINIMAL_REQUIRED
        conf_schema["required"] = SCHEMA_MINIMAL_REQUIRED
    try:
        FreqtradeValidator(conf_schema).validate(conf)
        return conf
@ -83,6 +92,7 @@ def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = Fal
    _validate_freqai_include_timeframes(conf, preliminary=preliminary)
    _validate_consumers(conf)
    validate_migrated_strategy_settings(conf)
    _validate_orderflow(conf)

    # validate configuration before returning
    logger.info("Validating configuration ...")
@ -97,7 +107,7 @@ def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
    if (
        not conf.get("edge", {}).get("enabled")
        and conf.get("max_open_trades") == float("inf")
        and conf.get("stake_amount") == constants.UNLIMITED_STAKE_AMOUNT
        and conf.get("stake_amount") == UNLIMITED_STAKE_AMOUNT
    ):
        raise ConfigurationError("`max_open_trades` and `stake_amount` cannot both be unlimited.")

@ -192,16 +202,32 @@ def _validate_protections(conf: Dict[str, Any]) -> None:
    """

    for prot in conf.get("protections", []):
        parsed_unlock_at = None
        if (config_unlock_at := prot.get("unlock_at")) is not None:
            try:
                parsed_unlock_at = datetime.strptime(config_unlock_at, "%H:%M")
            except ValueError:
                raise ConfigurationError(f"Invalid date format for unlock_at: {config_unlock_at}.")

        if "stop_duration" in prot and "stop_duration_candles" in prot:
            raise ConfigurationError(
                "Protections must specify either `stop_duration` or `stop_duration_candles`.\n"
                f"Please fix the protection {prot.get('method')}"
                f"Please fix the protection {prot.get('method')}."
            )

        if "lookback_period" in prot and "lookback_period_candles" in prot:
            raise ConfigurationError(
                "Protections must specify either `lookback_period` or `lookback_period_candles`.\n"
                f"Please fix the protection {prot.get('method')}"
                f"Please fix the protection {prot.get('method')}."
            )

        if parsed_unlock_at is not None and (
            "stop_duration" in prot or "stop_duration_candles" in prot
        ):
            raise ConfigurationError(
                "Protections must specify either `unlock_at`, `stop_duration` or "
                "`stop_duration_candles`.\n"
                f"Please fix the protection {prot.get('method')}."
            )

@ -421,6 +447,14 @@ def _validate_consumers(conf: Dict[str, Any]) -> None:
        )


def _validate_orderflow(conf: Dict[str, Any]) -> None:
    if conf.get("exchange", {}).get("use_public_trades"):
        if "orderflow" not in conf:
            raise ConfigurationError(
                "Orderflow is a required configuration key when using public trades."
            )


def _strategy_settings(conf: Dict[str, Any]) -> None:
    process_deprecated_setting(conf, None, "use_sell_signal", None, "use_exit_signal")
    process_deprecated_setting(conf, None, "sell_profit_only", None, "exit_profit_only")
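A config fragment (written as a Python dict, values illustrative) that passes the new checks introduced above: `orderflow` becomes mandatory once `exchange.use_public_trades` is enabled, and a protection's `unlock_at` must parse as "%H:%M" and may not be combined with `stop_duration` / `stop_duration_candles`:

conf = {
    "exchange": {"name": "binance", "use_public_trades": True},
    # Required whenever exchange.use_public_trades is set (_validate_orderflow).
    "orderflow": {
        "cache_size": 1000,
        "max_candles": 1500,
        "scale": 0.5,
        "stacked_imbalance_range": 3,
        "imbalance_volume": 10,
        "imbalance_ratio": 3,
    },
    "protections": [
        # unlock_at alone is fine; adding stop_duration(_candles) to the same
        # protection would raise a ConfigurationError.
        {"method": "CooldownPeriod", "unlock_at": "18:30"},
    ],
}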
@ -468,7 +468,7 @@ class Configuration:
        else:
            logger.info(logstring.format(config[argname]))
        if deprecated_msg:
            warnings.warn(f"DEPRECATED: {deprecated_msg}", DeprecationWarning)
            warnings.warn(f"DEPRECATED: {deprecated_msg}", DeprecationWarning, stacklevel=1)

    def _resolve_pairs_list(self, config: Config) -> None:
        """
@ -38,7 +38,7 @@ def chown_user_directory(directory: Path) -> None:
    """
    if running_in_docker():
        try:
            import subprocess
            import subprocess  # noqa: S404

            subprocess.check_output(["sudo", "chown", "-R", "ftuser:", str(directory.resolve())])
        except Exception:
@ -4,9 +4,9 @@
bot constants
"""

from typing import Any, Dict, List, Literal, Tuple
from typing import Any, Dict, List, Literal, Optional, Tuple

from freqtrade.enums import CandleType, PriceType, RPCMessageType
from freqtrade.enums import CandleType, PriceType


DOCS_LINK = "https://www.freqtrade.io/en/stable"
@ -42,6 +42,7 @@ HYPEROPT_LOSS_BUILTIN = [
AVAILABLE_PAIRLISTS = [
    "StaticPairList",
    "VolumePairList",
    "PercentChangePairList",
    "ProducerPairList",
    "RemotePairList",
    "MarketCapPairList",
@ -68,6 +69,7 @@ DEFAULT_DATAFRAME_COLUMNS = ["date", "open", "high", "low", "close", "volume"]
# Don't modify sequence of DEFAULT_TRADES_COLUMNS
# it has wide consequences for stored trades files
DEFAULT_TRADES_COLUMNS = ["timestamp", "id", "type", "side", "price", "amount", "cost"]
DEFAULT_ORDERFLOW_COLUMNS = ["level", "bid", "ask", "delta"]
TRADES_DTYPES = {
    "timestamp": "int64",
    "id": "str",
@ -171,585 +173,6 @@ MINIMAL_CONFIG = {
    },
}
|
|
||||||
__MESSAGE_TYPE_DICT: Dict[str, Dict[str, str]] = {x: {"type": "object"} for x in RPCMessageType}
|
|
||||||
|
|
||||||
# Required json-schema for user specified config
|
|
||||||
CONF_SCHEMA = {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"max_open_trades": {"type": ["integer", "number"], "minimum": -1},
|
|
||||||
"new_pairs_days": {"type": "integer", "default": 30},
|
|
||||||
"timeframe": {"type": "string"},
|
|
||||||
"stake_currency": {"type": "string"},
|
|
||||||
"stake_amount": {
|
|
||||||
"type": ["number", "string"],
|
|
||||||
"minimum": 0.0001,
|
|
||||||
"pattern": UNLIMITED_STAKE_AMOUNT,
|
|
||||||
},
|
|
||||||
"tradable_balance_ratio": {"type": "number", "minimum": 0.0, "maximum": 1, "default": 0.99},
|
|
||||||
"available_capital": {
|
|
||||||
"type": "number",
|
|
||||||
"minimum": 0,
|
|
||||||
},
|
|
||||||
"amend_last_stake_amount": {"type": "boolean", "default": False},
|
|
||||||
"last_stake_amount_min_ratio": {
|
|
||||||
"type": "number",
|
|
||||||
"minimum": 0.0,
|
|
||||||
"maximum": 1.0,
|
|
||||||
"default": 0.5,
|
|
||||||
},
|
|
||||||
"fiat_display_currency": {"type": "string", "enum": SUPPORTED_FIAT},
|
|
||||||
"dry_run": {"type": "boolean"},
|
|
||||||
"dry_run_wallet": {"type": "number", "default": DRY_RUN_WALLET},
|
|
||||||
"cancel_open_orders_on_exit": {"type": "boolean", "default": False},
|
|
||||||
"process_only_new_candles": {"type": "boolean"},
|
|
||||||
"minimal_roi": {
|
|
||||||
"type": "object",
|
|
||||||
"patternProperties": {"^[0-9.]+$": {"type": "number"}},
|
|
||||||
},
|
|
||||||
"amount_reserve_percent": {"type": "number", "minimum": 0.0, "maximum": 0.5},
|
|
||||||
"stoploss": {"type": "number", "maximum": 0, "exclusiveMaximum": True},
|
|
||||||
"trailing_stop": {"type": "boolean"},
|
|
||||||
"trailing_stop_positive": {"type": "number", "minimum": 0, "maximum": 1},
|
|
||||||
"trailing_stop_positive_offset": {"type": "number", "minimum": 0, "maximum": 1},
|
|
||||||
"trailing_only_offset_is_reached": {"type": "boolean"},
|
|
||||||
"use_exit_signal": {"type": "boolean"},
|
|
||||||
"exit_profit_only": {"type": "boolean"},
|
|
||||||
"exit_profit_offset": {"type": "number"},
|
|
||||||
"fee": {"type": "number", "minimum": 0, "maximum": 0.1},
|
|
||||||
"ignore_roi_if_entry_signal": {"type": "boolean"},
|
|
||||||
"ignore_buying_expired_candle_after": {"type": "number"},
|
|
||||||
"trading_mode": {"type": "string", "enum": TRADING_MODES},
|
|
||||||
"margin_mode": {"type": "string", "enum": MARGIN_MODES},
|
|
||||||
"reduce_df_footprint": {"type": "boolean", "default": False},
|
|
||||||
"minimum_trade_amount": {"type": "number", "default": 10},
|
|
||||||
"targeted_trade_amount": {"type": "number", "default": 20},
|
|
||||||
"lookahead_analysis_exportfilename": {"type": "string"},
|
|
||||||
"startup_candle": {
|
|
||||||
"type": "array",
|
|
||||||
"uniqueItems": True,
|
|
||||||
"default": [199, 399, 499, 999, 1999],
|
|
||||||
},
|
|
||||||
"liquidation_buffer": {"type": "number", "minimum": 0.0, "maximum": 0.99},
|
|
||||||
"backtest_breakdown": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {"type": "string", "enum": BACKTEST_BREAKDOWNS},
|
|
||||||
},
|
|
||||||
"bot_name": {"type": "string"},
|
|
||||||
"unfilledtimeout": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"entry": {"type": "number", "minimum": 1},
|
|
||||||
"exit": {"type": "number", "minimum": 1},
|
|
||||||
"exit_timeout_count": {"type": "number", "minimum": 0, "default": 0},
|
|
||||||
"unit": {"type": "string", "enum": TIMEOUT_UNITS, "default": "minutes"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"entry_pricing": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"price_last_balance": {
|
|
||||||
"type": "number",
|
|
||||||
"minimum": 0,
|
|
||||||
"maximum": 1,
|
|
||||||
"exclusiveMaximum": False,
|
|
||||||
},
|
|
||||||
"price_side": {"type": "string", "enum": PRICING_SIDES, "default": "same"},
|
|
||||||
"use_order_book": {"type": "boolean"},
|
|
||||||
"order_book_top": {
|
|
||||||
"type": "integer",
|
|
||||||
"minimum": 1,
|
|
||||||
"maximum": 50,
|
|
||||||
},
|
|
||||||
"check_depth_of_market": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enabled": {"type": "boolean"},
|
|
||||||
"bids_to_ask_delta": {"type": "number", "minimum": 0},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": ["price_side"],
|
|
||||||
},
|
|
||||||
"exit_pricing": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"price_side": {"type": "string", "enum": PRICING_SIDES, "default": "same"},
|
|
||||||
"price_last_balance": {
|
|
||||||
"type": "number",
|
|
||||||
"minimum": 0,
|
|
||||||
"maximum": 1,
|
|
||||||
"exclusiveMaximum": False,
|
|
||||||
},
|
|
||||||
"use_order_book": {"type": "boolean"},
|
|
||||||
"order_book_top": {
|
|
||||||
"type": "integer",
|
|
||||||
"minimum": 1,
|
|
||||||
"maximum": 50,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": ["price_side"],
|
|
||||||
},
|
|
||||||
"custom_price_max_distance_ratio": {"type": "number", "minimum": 0.0},
|
|
||||||
"order_types": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"entry": {"type": "string", "enum": ORDERTYPE_POSSIBILITIES},
|
|
||||||
"exit": {"type": "string", "enum": ORDERTYPE_POSSIBILITIES},
|
|
||||||
"force_exit": {"type": "string", "enum": ORDERTYPE_POSSIBILITIES},
|
|
||||||
"force_entry": {"type": "string", "enum": ORDERTYPE_POSSIBILITIES},
|
|
||||||
"emergency_exit": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": ORDERTYPE_POSSIBILITIES,
|
|
||||||
"default": "market",
|
|
||||||
},
|
|
||||||
"stoploss": {"type": "string", "enum": ORDERTYPE_POSSIBILITIES},
|
|
||||||
"stoploss_on_exchange": {"type": "boolean"},
|
|
||||||
"stoploss_price_type": {"type": "string", "enum": STOPLOSS_PRICE_TYPES},
|
|
||||||
"stoploss_on_exchange_interval": {"type": "number"},
|
|
||||||
"stoploss_on_exchange_limit_ratio": {
|
|
||||||
"type": "number",
|
|
||||||
"minimum": 0.0,
|
|
||||||
"maximum": 1.0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": ["entry", "exit", "stoploss", "stoploss_on_exchange"],
|
|
||||||
},
|
|
||||||
"order_time_in_force": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"entry": {"type": "string", "enum": ORDERTIF_POSSIBILITIES},
|
|
||||||
"exit": {"type": "string", "enum": ORDERTIF_POSSIBILITIES},
|
|
||||||
},
|
|
||||||
"required": REQUIRED_ORDERTIF,
|
|
||||||
},
|
|
||||||
"coingecko": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"is_demo": {"type": "boolean", "default": True},
|
|
||||||
"api_key": {"type": "string"},
|
|
||||||
},
|
|
||||||
"required": ["is_demo", "api_key"],
|
|
||||||
},
|
|
||||||
"exchange": {"$ref": "#/definitions/exchange"},
|
|
||||||
"edge": {"$ref": "#/definitions/edge"},
|
|
||||||
"freqai": {"$ref": "#/definitions/freqai"},
|
|
||||||
"external_message_consumer": {"$ref": "#/definitions/external_message_consumer"},
|
|
||||||
"experimental": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {"block_bad_exchanges": {"type": "boolean"}},
|
|
||||||
},
|
|
||||||
"pairlists": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"method": {"type": "string", "enum": AVAILABLE_PAIRLISTS},
|
|
||||||
},
|
|
||||||
"required": ["method"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"protections": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"method": {"type": "string", "enum": AVAILABLE_PROTECTIONS},
|
|
||||||
"stop_duration": {"type": "number", "minimum": 0.0},
|
|
||||||
"stop_duration_candles": {"type": "number", "minimum": 0},
|
|
||||||
"trade_limit": {"type": "number", "minimum": 1},
|
|
||||||
"lookback_period": {"type": "number", "minimum": 1},
|
|
||||||
"lookback_period_candles": {"type": "number", "minimum": 1},
|
|
||||||
},
|
|
||||||
"required": ["method"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"telegram": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enabled": {"type": "boolean"},
|
|
||||||
"token": {"type": "string"},
|
|
||||||
"chat_id": {"type": "string"},
|
|
||||||
"allow_custom_messages": {"type": "boolean", "default": True},
|
|
||||||
"balance_dust_level": {"type": "number", "minimum": 0.0},
|
|
||||||
"notification_settings": {
|
|
||||||
"type": "object",
|
|
||||||
"default": {},
|
|
||||||
"properties": {
|
|
||||||
"status": {"type": "string", "enum": TELEGRAM_SETTING_OPTIONS},
|
|
||||||
"warning": {"type": "string", "enum": TELEGRAM_SETTING_OPTIONS},
|
|
||||||
"startup": {"type": "string", "enum": TELEGRAM_SETTING_OPTIONS},
|
|
||||||
"entry": {"type": "string", "enum": TELEGRAM_SETTING_OPTIONS},
|
|
||||||
"entry_fill": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": TELEGRAM_SETTING_OPTIONS,
|
|
||||||
"default": "off",
|
|
||||||
},
|
|
||||||
"entry_cancel": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": TELEGRAM_SETTING_OPTIONS,
|
|
||||||
},
|
|
||||||
"exit": {
|
|
||||||
"type": ["string", "object"],
|
|
||||||
"additionalProperties": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": TELEGRAM_SETTING_OPTIONS,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"exit_fill": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": TELEGRAM_SETTING_OPTIONS,
|
|
||||||
"default": "on",
|
|
||||||
},
|
|
||||||
"exit_cancel": {"type": "string", "enum": TELEGRAM_SETTING_OPTIONS},
|
|
||||||
"protection_trigger": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": TELEGRAM_SETTING_OPTIONS,
|
|
||||||
"default": "on",
|
|
||||||
},
|
|
||||||
"protection_trigger_global": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": TELEGRAM_SETTING_OPTIONS,
|
|
||||||
"default": "on",
|
|
||||||
},
|
|
||||||
"show_candle": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": ["off", "ohlc"],
|
|
||||||
"default": "off",
|
|
||||||
},
|
|
||||||
"strategy_msg": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": TELEGRAM_SETTING_OPTIONS,
|
|
||||||
"default": "on",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"reload": {"type": "boolean"},
|
|
||||||
},
|
|
||||||
"required": ["enabled", "token", "chat_id"],
|
|
||||||
},
|
|
||||||
"webhook": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enabled": {"type": "boolean"},
|
|
||||||
"url": {"type": "string"},
|
|
||||||
"format": {"type": "string", "enum": WEBHOOK_FORMAT_OPTIONS, "default": "form"},
|
|
||||||
"retries": {"type": "integer", "minimum": 0},
|
|
||||||
"retry_delay": {"type": "number", "minimum": 0},
|
|
||||||
**__MESSAGE_TYPE_DICT,
|
|
||||||
# **{x: {'type': 'object'} for x in RPCMessageType},
|
|
||||||
# Below -> Deprecated
|
|
||||||
"webhookentry": {"type": "object"},
|
|
||||||
"webhookentrycancel": {"type": "object"},
|
|
||||||
"webhookentryfill": {"type": "object"},
|
|
||||||
"webhookexit": {"type": "object"},
|
|
||||||
"webhookexitcancel": {"type": "object"},
|
|
||||||
"webhookexitfill": {"type": "object"},
|
|
||||||
"webhookstatus": {"type": "object"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"discord": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enabled": {"type": "boolean"},
|
|
||||||
"webhook_url": {"type": "string"},
|
|
||||||
"exit_fill": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {"type": "object"},
|
|
||||||
"default": [
|
|
||||||
{"Trade ID": "{trade_id}"},
|
|
||||||
{"Exchange": "{exchange}"},
|
|
||||||
{"Pair": "{pair}"},
|
|
||||||
{"Direction": "{direction}"},
|
|
||||||
{"Open rate": "{open_rate}"},
|
|
||||||
{"Close rate": "{close_rate}"},
|
|
||||||
{"Amount": "{amount}"},
|
|
||||||
{"Open date": "{open_date:%Y-%m-%d %H:%M:%S}"},
|
|
||||||
{"Close date": "{close_date:%Y-%m-%d %H:%M:%S}"},
|
|
||||||
{"Profit": "{profit_amount} {stake_currency}"},
|
|
||||||
{"Profitability": "{profit_ratio:.2%}"},
|
|
||||||
{"Enter tag": "{enter_tag}"},
|
|
||||||
{"Exit Reason": "{exit_reason}"},
|
|
||||||
{"Strategy": "{strategy}"},
|
|
||||||
{"Timeframe": "{timeframe}"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
"entry_fill": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {"type": "object"},
|
|
||||||
"default": [
|
|
||||||
{"Trade ID": "{trade_id}"},
|
|
||||||
{"Exchange": "{exchange}"},
|
|
||||||
{"Pair": "{pair}"},
|
|
||||||
{"Direction": "{direction}"},
|
|
||||||
{"Open rate": "{open_rate}"},
|
|
||||||
{"Amount": "{amount}"},
|
|
||||||
{"Open date": "{open_date:%Y-%m-%d %H:%M:%S}"},
|
|
||||||
{"Enter tag": "{enter_tag}"},
|
|
||||||
{"Strategy": "{strategy} {timeframe}"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"api_server": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enabled": {"type": "boolean"},
|
|
||||||
"listen_ip_address": {"format": "ipv4"},
|
|
||||||
"listen_port": {"type": "integer", "minimum": 1024, "maximum": 65535},
|
|
||||||
"username": {"type": "string"},
|
|
||||||
"password": {"type": "string"},
|
|
||||||
"ws_token": {"type": ["string", "array"], "items": {"type": "string"}},
|
|
||||||
"jwt_secret_key": {"type": "string"},
|
|
||||||
"CORS_origins": {"type": "array", "items": {"type": "string"}},
|
|
||||||
"verbosity": {"type": "string", "enum": ["error", "info"]},
|
|
||||||
},
|
|
||||||
"required": ["enabled", "listen_ip_address", "listen_port", "username", "password"],
|
|
||||||
},
|
|
||||||
"db_url": {"type": "string"},
|
|
||||||
"export": {"type": "string", "enum": EXPORT_OPTIONS, "default": "trades"},
|
|
||||||
"disableparamexport": {"type": "boolean"},
|
|
||||||
"initial_state": {"type": "string", "enum": ["running", "stopped"]},
|
|
||||||
"force_entry_enable": {"type": "boolean"},
|
|
||||||
"disable_dataframe_checks": {"type": "boolean"},
|
|
||||||
"internals": {
|
|
||||||
"type": "object",
|
|
||||||
"default": {},
|
|
||||||
"properties": {
|
|
||||||
"process_throttle_secs": {"type": "integer"},
|
|
||||||
"interval": {"type": "integer"},
|
|
||||||
"sd_notify": {"type": "boolean"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"dataformat_ohlcv": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": AVAILABLE_DATAHANDLERS,
|
|
||||||
"default": "feather",
|
|
||||||
},
|
|
||||||
"dataformat_trades": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": AVAILABLE_DATAHANDLERS,
|
|
||||||
"default": "feather",
|
|
||||||
},
|
|
||||||
"position_adjustment_enable": {"type": "boolean"},
|
|
||||||
"max_entry_position_adjustment": {"type": ["integer", "number"], "minimum": -1},
|
|
||||||
},
|
|
||||||
"definitions": {
|
|
||||||
"exchange": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"name": {"type": "string"},
|
|
||||||
"key": {"type": "string", "default": ""},
|
|
||||||
"secret": {"type": "string", "default": ""},
|
|
||||||
"password": {"type": "string", "default": ""},
|
|
||||||
"uid": {"type": "string"},
|
|
||||||
"pair_whitelist": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string",
|
|
||||||
},
|
|
||||||
"uniqueItems": True,
|
|
||||||
},
|
|
||||||
"pair_blacklist": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string",
|
|
||||||
},
|
|
||||||
"uniqueItems": True,
|
|
||||||
},
|
|
||||||
"unknown_fee_rate": {"type": "number"},
|
|
||||||
"outdated_offset": {"type": "integer", "minimum": 1},
|
|
||||||
"markets_refresh_interval": {"type": "integer"},
|
|
||||||
"ccxt_config": {"type": "object"},
|
|
||||||
"ccxt_async_config": {"type": "object"},
|
|
||||||
},
|
|
||||||
"required": ["name"],
|
|
||||||
},
|
|
||||||
"edge": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enabled": {"type": "boolean"},
|
|
||||||
"process_throttle_secs": {"type": "integer", "minimum": 600},
|
|
||||||
"calculate_since_number_of_days": {"type": "integer"},
|
|
||||||
"allowed_risk": {"type": "number"},
|
|
||||||
"stoploss_range_min": {"type": "number"},
|
|
||||||
"stoploss_range_max": {"type": "number"},
|
|
||||||
"stoploss_range_step": {"type": "number"},
|
|
||||||
"minimum_winrate": {"type": "number"},
|
|
||||||
"minimum_expectancy": {"type": "number"},
|
|
||||||
"min_trade_number": {"type": "number"},
|
|
||||||
"max_trade_duration_minute": {"type": "integer"},
|
|
||||||
"remove_pumps": {"type": "boolean"},
|
|
||||||
},
|
|
||||||
"required": ["process_throttle_secs", "allowed_risk"],
|
|
||||||
},
|
|
||||||
"external_message_consumer": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enabled": {"type": "boolean", "default": False},
|
|
||||||
"producers": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"name": {"type": "string"},
|
|
||||||
"host": {"type": "string"},
|
|
||||||
"port": {
|
|
||||||
"type": "integer",
|
|
||||||
"default": 8080,
|
|
||||||
"minimum": 0,
|
|
||||||
"maximum": 65535,
|
|
||||||
},
|
|
||||||
"secure": {"type": "boolean", "default": False},
|
|
||||||
"ws_token": {"type": "string"},
|
|
||||||
},
|
|
||||||
"required": ["name", "host", "ws_token"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"wait_timeout": {"type": "integer", "minimum": 0},
|
|
||||||
"sleep_time": {"type": "integer", "minimum": 0},
|
|
||||||
"ping_timeout": {"type": "integer", "minimum": 0},
|
|
||||||
"remove_entry_exit_signals": {"type": "boolean", "default": False},
|
|
||||||
"initial_candle_limit": {
|
|
||||||
"type": "integer",
|
|
||||||
"minimum": 0,
|
|
||||||
"maximum": 1500,
|
|
||||||
"default": 1500,
|
|
||||||
},
|
|
||||||
"message_size_limit": { # In megabytes
|
|
||||||
"type": "integer",
|
|
||||||
"minimum": 1,
|
|
||||||
"maximum": 20,
|
|
||||||
"default": 8,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": ["producers"],
|
|
||||||
},
|
|
||||||
"freqai": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"enabled": {"type": "boolean", "default": False},
|
|
||||||
"keras": {"type": "boolean", "default": False},
|
|
||||||
"write_metrics_to_disk": {"type": "boolean", "default": False},
|
|
||||||
"purge_old_models": {"type": ["boolean", "number"], "default": 2},
|
|
||||||
"conv_width": {"type": "integer", "default": 1},
|
|
||||||
"train_period_days": {"type": "integer", "default": 0},
|
|
||||||
"backtest_period_days": {"type": "number", "default": 7},
|
|
||||||
"identifier": {"type": "string", "default": "example"},
|
|
||||||
"feature_parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"include_corr_pairlist": {"type": "array"},
|
|
||||||
"include_timeframes": {"type": "array"},
|
|
||||||
"label_period_candles": {"type": "integer"},
|
|
||||||
"include_shifted_candles": {"type": "integer", "default": 0},
|
|
||||||
"DI_threshold": {"type": "number", "default": 0},
|
|
||||||
"weight_factor": {"type": "number", "default": 0},
|
|
||||||
"principal_component_analysis": {"type": "boolean", "default": False},
|
|
||||||
"use_SVM_to_remove_outliers": {"type": "boolean", "default": False},
|
|
||||||
"plot_feature_importances": {"type": "integer", "default": 0},
|
|
||||||
"svm_params": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"shuffle": {"type": "boolean", "default": False},
|
|
||||||
"nu": {"type": "number", "default": 0.1},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"shuffle_after_split": {"type": "boolean", "default": False},
|
|
||||||
"buffer_train_data_candles": {"type": "integer", "default": 0},
|
|
||||||
},
|
|
||||||
"required": [
|
|
||||||
"include_timeframes",
|
|
||||||
"include_corr_pairlist",
|
|
||||||
],
|
|
||||||
},
|
|
||||||
"data_split_parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"test_size": {"type": "number"},
|
|
||||||
"random_state": {"type": "integer"},
|
|
||||||
"shuffle": {"type": "boolean", "default": False},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"model_training_parameters": {"type": "object"},
|
|
||||||
"rl_config": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"drop_ohlc_from_features": {"type": "boolean", "default": False},
|
|
||||||
"train_cycles": {"type": "integer"},
|
|
||||||
"max_trade_duration_candles": {"type": "integer"},
|
|
||||||
"add_state_info": {"type": "boolean", "default": False},
|
|
||||||
"max_training_drawdown_pct": {"type": "number", "default": 0.02},
|
|
||||||
"cpu_count": {"type": "integer", "default": 1},
|
|
||||||
"model_type": {"type": "string", "default": "PPO"},
|
|
||||||
"policy_type": {"type": "string", "default": "MlpPolicy"},
|
|
||||||
"net_arch": {"type": "array", "default": [128, 128]},
|
|
||||||
"randomize_starting_position": {"type": "boolean", "default": False},
|
|
||||||
"progress_bar": {"type": "boolean", "default": True},
|
|
||||||
"model_reward_parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"rr": {"type": "number", "default": 1},
|
|
||||||
"profit_aim": {"type": "number", "default": 0.025},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": [
|
|
||||||
"enabled",
|
|
||||||
"train_period_days",
|
|
||||||
"backtest_period_days",
|
|
||||||
"identifier",
|
|
||||||
"feature_parameters",
|
|
||||||
"data_split_parameters",
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
SCHEMA_TRADE_REQUIRED = [
|
|
||||||
"exchange",
|
|
||||||
"timeframe",
|
|
||||||
"max_open_trades",
|
|
||||||
"stake_currency",
|
|
||||||
"stake_amount",
|
|
||||||
"tradable_balance_ratio",
|
|
||||||
"last_stake_amount_min_ratio",
|
|
||||||
"dry_run",
|
|
||||||
"dry_run_wallet",
|
|
||||||
"exit_pricing",
|
|
||||||
"entry_pricing",
|
|
||||||
"stoploss",
|
|
||||||
"minimal_roi",
|
|
||||||
"internals",
|
|
||||||
"dataformat_ohlcv",
|
|
||||||
"dataformat_trades",
|
|
||||||
]
|
|
||||||
|
|
||||||
SCHEMA_BACKTEST_REQUIRED = [
|
|
||||||
"exchange",
|
|
||||||
"stake_currency",
|
|
||||||
"stake_amount",
|
|
||||||
"dry_run_wallet",
|
|
||||||
"dataformat_ohlcv",
|
|
||||||
"dataformat_trades",
|
|
||||||
]
|
|
||||||
SCHEMA_BACKTEST_REQUIRED_FINAL = SCHEMA_BACKTEST_REQUIRED + [
|
|
||||||
"stoploss",
|
|
||||||
"minimal_roi",
|
|
||||||
"max_open_trades",
|
|
||||||
]
|
|
||||||
|
|
||||||
SCHEMA_MINIMAL_REQUIRED = [
|
|
||||||
"exchange",
|
|
||||||
"dry_run",
|
|
||||||
"dataformat_ohlcv",
|
|
||||||
"dataformat_trades",
|
|
||||||
]
|
|
||||||
SCHEMA_MINIMAL_WEBSERVER = SCHEMA_MINIMAL_REQUIRED + [
|
|
||||||
"api_server",
|
|
||||||
]
|
|
||||||
|
|
||||||
CANCEL_REASON = {
    "TIMEOUT": "cancelled due to timeout",
@ -770,6 +193,9 @@ ListPairsWithTimeframes = List[PairWithTimeframe]

# Type for trades list
TradeList = List[List]
# ticks, pair, timeframe, CandleType
TickWithTimeframe = Tuple[str, str, CandleType, Optional[int], Optional[int]]
ListTicksWithTimeframes = List[TickWithTimeframe]

LongShort = Literal["long", "short"]
EntryExit = Literal["entry", "exit"]
@ -2,5 +2,8 @@
Module to handle data operations for freqtrade
"""

from freqtrade.data import converter


# limit what's imported when using `from freqtrade.data import *`
__all__ = ["converter"]
@ -13,10 +13,10 @@ import pandas as pd

from freqtrade.constants import LAST_BT_RESULT_FN, IntOrInf
from freqtrade.exceptions import ConfigurationError, OperationalException
from freqtrade.ft_types import BacktestHistoryEntryType, BacktestResultType
from freqtrade.misc import file_dump_json, json_load
from freqtrade.optimize.backtest_caching import get_backtest_metadata_filename
from freqtrade.persistence import LocalTrade, Trade, init_db
from freqtrade.types import BacktestHistoryEntryType, BacktestResultType


logger = logging.getLogger(__name__)
@ -185,7 +185,7 @@ def load_and_merge_backtest_result(strategy_name: str, filename: Path, results:
    """
    bt_data = load_backtest_stats(filename)
    k: Literal["metadata", "strategy"]
    for k in ("metadata", "strategy"):  # type: ignore
    for k in ("metadata", "strategy"):
        results[k][strategy_name] = bt_data[k][strategy_name]
        results["metadata"][strategy_name]["filename"] = filename.stem
    comparison = bt_data["strategy_comparison"]
@ -401,7 +401,15 @@ def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataF

    timeframe_freq = timeframe_to_resample_freq(timeframe)
    dates = [
        pd.Series(pd.date_range(row[1]["open_date"], row[1]["close_date"], freq=timeframe_freq))
        pd.Series(
            pd.date_range(
                row[1]["open_date"],
                row[1]["close_date"],
                freq=timeframe_freq,
                # Exclude right boundary - the date is the candle open date.
                inclusive="left",
            )
        )
        for row in results[["open_date", "close_date"]].iterrows()
    ]
    deltas = [len(x) for x in dates]
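The switch to inclusive="left" above drops the candle that opens at the trade's close date from each per-trade range. A quick standalone check of that pandas behaviour:

import pandas as pd

open_date = pd.Timestamp("2024-01-01 00:00")
close_date = pd.Timestamp("2024-01-01 03:00")

# Default (both-inclusive): 00:00, 01:00, 02:00, 03:00 -> 4 candles
both = pd.date_range(open_date, close_date, freq="1h")
# Left-inclusive: 00:00, 01:00, 02:00 -> 3 candles, because the close date
# is itself a candle *open* time and should not count as occupied.
left = pd.date_range(open_date, close_date, freq="1h", inclusive="left")

assert len(both) == 4 and len(left) == 3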
@ -8,6 +8,7 @@ from freqtrade.data.converter.converter import (
    trim_dataframe,
    trim_dataframes,
)
from freqtrade.data.converter.orderflow import populate_dataframe_with_trades
from freqtrade.data.converter.trade_converter import (
    convert_trades_format,
    convert_trades_to_ohlcv,
@ -30,6 +31,7 @@ __all__ = [
    "trim_dataframes",
    "convert_trades_format",
    "convert_trades_to_ohlcv",
    "populate_dataframe_with_trades",
    "trades_convert_types",
    "trades_df_remove_duplicates",
    "trades_dict_to_list",
297 freqtrade/data/converter/orderflow.py Normal file
@ -0,0 +1,297 @@
|
||||||
|
"""
|
||||||
|
Functions to convert orderflow data from public_trades
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import typing
|
||||||
|
from collections import OrderedDict
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Tuple
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
from freqtrade.constants import DEFAULT_ORDERFLOW_COLUMNS
|
||||||
|
from freqtrade.enums import RunMode
|
||||||
|
from freqtrade.exceptions import DependencyException
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _init_dataframe_with_trades_columns(dataframe: pd.DataFrame):
|
||||||
|
"""
|
||||||
|
Populates a dataframe with trades columns
|
||||||
|
:param dataframe: Dataframe to populate
|
||||||
|
"""
|
||||||
|
# Initialize columns with appropriate dtypes
|
||||||
|
dataframe["trades"] = np.nan
|
||||||
|
dataframe["orderflow"] = np.nan
|
||||||
|
dataframe["imbalances"] = np.nan
|
||||||
|
dataframe["stacked_imbalances_bid"] = np.nan
|
||||||
|
dataframe["stacked_imbalances_ask"] = np.nan
|
||||||
|
dataframe["max_delta"] = np.nan
|
||||||
|
dataframe["min_delta"] = np.nan
|
||||||
|
dataframe["bid"] = np.nan
|
||||||
|
dataframe["ask"] = np.nan
|
||||||
|
dataframe["delta"] = np.nan
|
||||||
|
dataframe["total_trades"] = np.nan
|
||||||
|
|
||||||
|
# Ensure the 'trades' column is of object type
|
||||||
|
dataframe["trades"] = dataframe["trades"].astype(object)
|
||||||
|
dataframe["orderflow"] = dataframe["orderflow"].astype(object)
|
||||||
|
dataframe["imbalances"] = dataframe["imbalances"].astype(object)
|
||||||
|
dataframe["stacked_imbalances_bid"] = dataframe["stacked_imbalances_bid"].astype(object)
|
||||||
|
dataframe["stacked_imbalances_ask"] = dataframe["stacked_imbalances_ask"].astype(object)
|
||||||
|
|
||||||
|
|
||||||
|
def _calculate_ohlcv_candle_start_and_end(df: pd.DataFrame, timeframe: str):
|
||||||
|
from freqtrade.exchange import timeframe_to_next_date, timeframe_to_resample_freq
|
||||||
|
|
||||||
|
timeframe_frequency = timeframe_to_resample_freq(timeframe)
|
||||||
|
# calculate ohlcv candle start and end
|
||||||
|
if df is not None and not df.empty:
|
||||||
|
df["datetime"] = pd.to_datetime(df["date"], unit="ms")
|
||||||
|
df["candle_start"] = df["datetime"].dt.floor(timeframe_frequency)
|
||||||
|
# used in _now_is_time_to_refresh_trades
|
||||||
|
df["candle_end"] = df["candle_start"].apply(
|
||||||
|
lambda candle_start: timeframe_to_next_date(timeframe, candle_start)
|
||||||
|
)
|
||||||
|
df.drop(columns=["datetime"], inplace=True)
|
||||||
|
|
||||||
|
|
||||||
|
def populate_dataframe_with_trades(
|
||||||
|
cached_grouped_trades: OrderedDict[Tuple[datetime, datetime], pd.DataFrame],
|
||||||
|
config,
|
||||||
|
dataframe: pd.DataFrame,
|
||||||
|
trades: pd.DataFrame,
|
||||||
|
) -> Tuple[pd.DataFrame, OrderedDict[Tuple[datetime, datetime], pd.DataFrame]]:
|
||||||
|
"""
|
||||||
|
Populates a dataframe with trades
|
||||||
|
:param dataframe: Dataframe to populate
|
||||||
|
:param trades: Trades to populate with
|
||||||
|
:return: Dataframe with trades populated
|
||||||
|
"""
|
||||||
|
timeframe = config["timeframe"]
|
||||||
|
config_orderflow = config["orderflow"]
|
||||||
|
|
||||||
|
# create columns for trades
|
||||||
|
_init_dataframe_with_trades_columns(dataframe)
|
||||||
|
if trades is None or trades.empty:
|
||||||
|
return dataframe, cached_grouped_trades
|
||||||
|
|
||||||
|
try:
|
||||||
|
start_time = time.time()
|
||||||
|
# calculate ohlcv candle start and end
|
||||||
|
_calculate_ohlcv_candle_start_and_end(trades, timeframe)
|
||||||
|
|
||||||
|
# get date of earliest max_candles candle
|
||||||
|
max_candles = config_orderflow["max_candles"]
|
||||||
|
start_date = dataframe.tail(max_candles).date.iat[0]
|
||||||
|
# slice of trades that are before current ohlcv candles to make groupby faster
|
||||||
|
trades = trades.loc[trades["candle_start"] >= start_date]
|
||||||
|
trades.reset_index(inplace=True, drop=True)
|
||||||
|
|
||||||
|
# group trades by candle start
|
||||||
|
trades_grouped_by_candle_start = trades.groupby("candle_start", group_keys=False)
|
||||||
|
# Create Series to hold complex data
|
||||||
|
trades_series = pd.Series(index=dataframe.index, dtype=object)
|
||||||
|
orderflow_series = pd.Series(index=dataframe.index, dtype=object)
|
||||||
|
imbalances_series = pd.Series(index=dataframe.index, dtype=object)
|
||||||
|
stacked_imbalances_bid_series = pd.Series(index=dataframe.index, dtype=object)
|
||||||
|
stacked_imbalances_ask_series = pd.Series(index=dataframe.index, dtype=object)
|
||||||
|
|
||||||
|
trades_grouped_by_candle_start = trades.groupby("candle_start", group_keys=False)
|
||||||
|
for candle_start, trades_grouped_df in trades_grouped_by_candle_start:
|
||||||
|
is_between = candle_start == dataframe["date"]
|
||||||
|
if is_between.any():
|
||||||
|
from freqtrade.exchange import timeframe_to_next_date
|
||||||
|
|
||||||
|
candle_next = timeframe_to_next_date(timeframe, typing.cast(datetime, candle_start))
|
||||||
|
if candle_next not in trades_grouped_by_candle_start.groups:
|
||||||
|
logger.warning(
|
||||||
|
f"candle at {candle_start} with {len(trades_grouped_df)} trades "
|
||||||
|
f"might be unfinished, because no finished trades at {candle_next}"
|
||||||
|
)
|
||||||
|
|
||||||
|
indices = dataframe.index[is_between].tolist()
|
||||||
|
# Add trades to each candle
|
||||||
|
trades_series.loc[indices] = [
|
||||||
|
trades_grouped_df.drop(columns=["candle_start", "candle_end"]).to_dict(
|
||||||
|
orient="records"
|
||||||
|
)
|
||||||
|
]
|
||||||
|
# Use caching mechanism
|
||||||
|
if (candle_start, candle_next) in cached_grouped_trades:
|
||||||
|
cache_entry = cached_grouped_trades[
|
||||||
|
(typing.cast(datetime, candle_start), candle_next)
|
||||||
|
]
|
||||||
|
# dataframe.loc[is_between] = cache_entry # doesn't take, so we need workaround:
|
||||||
|
# Create a dictionary of the column values to be assigned
|
||||||
|
update_dict = {c: cache_entry[c].iat[0] for c in cache_entry.columns}
|
||||||
|
# Assign the values using the update_dict
|
||||||
|
dataframe.loc[is_between, update_dict.keys()] = pd.DataFrame(
|
||||||
|
[update_dict], index=dataframe.loc[is_between].index
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Calculate orderflow for each candle
|
||||||
|
orderflow = trades_to_volumeprofile_with_total_delta_bid_ask(
|
||||||
|
trades_grouped_df, scale=config_orderflow["scale"]
|
||||||
|
)
|
||||||
|
orderflow_series.loc[indices] = [orderflow.to_dict(orient="index")]
|
||||||
|
# Calculate imbalances for each candle's orderflow
|
||||||
|
imbalances = trades_orderflow_to_imbalances(
|
||||||
|
orderflow,
|
||||||
|
imbalance_ratio=config_orderflow["imbalance_ratio"],
|
||||||
|
imbalance_volume=config_orderflow["imbalance_volume"],
|
||||||
|
)
|
||||||
|
imbalances_series.loc[indices] = [imbalances.to_dict(orient="index")]
|
||||||
|
|
||||||
|
stacked_imbalance_range = config_orderflow["stacked_imbalance_range"]
|
||||||
|
stacked_imbalances_bid_series.loc[indices] = [
|
||||||
|
stacked_imbalance_bid(
|
||||||
|
imbalances, stacked_imbalance_range=stacked_imbalance_range
|
||||||
|
)
|
||||||
|
]
|
||||||
|
stacked_imbalances_ask_series.loc[indices] = [
|
||||||
|
stacked_imbalance_ask(
|
||||||
|
imbalances, stacked_imbalance_range=stacked_imbalance_range
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
bid = np.where(
|
||||||
|
trades_grouped_df["side"].str.contains("sell"), trades_grouped_df["amount"], 0
|
||||||
|
)
|
||||||
|
|
||||||
|
ask = np.where(
|
||||||
|
trades_grouped_df["side"].str.contains("buy"), trades_grouped_df["amount"], 0
|
||||||
|
)
|
||||||
|
deltas_per_trade = ask - bid
|
||||||
|
min_delta = deltas_per_trade.cumsum().min()
|
||||||
|
max_delta = deltas_per_trade.cumsum().max()
|
||||||
|
dataframe.loc[indices, "max_delta"] = max_delta
|
||||||
|
dataframe.loc[indices, "min_delta"] = min_delta
|
||||||
|
|
||||||
|
dataframe.loc[indices, "bid"] = bid.sum()
|
||||||
|
dataframe.loc[indices, "ask"] = ask.sum()
|
||||||
|
dataframe.loc[indices, "delta"] = (
|
||||||
|
dataframe.loc[indices, "ask"] - dataframe.loc[indices, "bid"]
|
||||||
|
)
|
||||||
|
dataframe.loc[indices, "total_trades"] = len(trades_grouped_df)
|
||||||
|
|
||||||
|
# Cache the result
|
||||||
|
cached_grouped_trades[(typing.cast(datetime, candle_start), candle_next)] = (
|
||||||
|
dataframe.loc[is_between].copy()
|
||||||
|
)
|
||||||
|
|
||||||
|
# Maintain cache size
|
||||||
|
if (
|
||||||
|
config.get("runmode") in (RunMode.DRY_RUN, RunMode.LIVE)
|
||||||
|
and len(cached_grouped_trades) > config_orderflow["cache_size"]
|
||||||
|
):
|
||||||
|
cached_grouped_trades.popitem(last=False)
|
||||||
|
else:
|
||||||
|
logger.debug(f"Found NO candles for trades starting with {candle_start}")
|
||||||
|
logger.debug(f"trades.groups_keys in {time.time() - start_time} seconds")
|
||||||
|
|
||||||
|
# Merge the complex data Series back into the DataFrame
|
||||||
|
dataframe["trades"] = trades_series
|
||||||
|
dataframe["orderflow"] = orderflow_series
|
||||||
|
dataframe["imbalances"] = imbalances_series
|
||||||
|
dataframe["stacked_imbalances_bid"] = stacked_imbalances_bid_series
|
||||||
|
dataframe["stacked_imbalances_ask"] = stacked_imbalances_ask_series
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception("Error populating dataframe with trades")
|
||||||
|
raise DependencyException(e)
|
||||||
|
|
||||||
|
return dataframe, cached_grouped_trades
|
||||||
|
|
||||||
|
|
||||||
|
def trades_to_volumeprofile_with_total_delta_bid_ask(
|
||||||
|
trades: pd.DataFrame, scale: float
|
||||||
|
) -> pd.DataFrame:
|
||||||
|
"""
|
||||||
|
:param trades: dataframe
|
||||||
|
:param scale: scale aka bin size e.g. 0.5
|
||||||
|
:return: trades binned to levels according to scale aka orderflow
|
||||||
|
"""
|
||||||
|
df = pd.DataFrame([], columns=DEFAULT_ORDERFLOW_COLUMNS)
|
||||||
|
# create bid, ask where side is sell or buy
|
||||||
|
df["bid_amount"] = np.where(trades["side"].str.contains("sell"), trades["amount"], 0)
|
||||||
|
df["ask_amount"] = np.where(trades["side"].str.contains("buy"), trades["amount"], 0)
|
||||||
|
df["bid"] = np.where(trades["side"].str.contains("sell"), 1, 0)
|
||||||
|
df["ask"] = np.where(trades["side"].str.contains("buy"), 1, 0)
|
||||||
|
# round the prices to the nearest multiple of the scale
|
||||||
|
df["price"] = ((trades["price"] / scale).round() * scale).astype("float64").values
|
||||||
|
if df.empty:
|
||||||
|
df["total"] = np.nan
|
||||||
|
df["delta"] = np.nan
|
||||||
|
return df
|
||||||
|
|
||||||
|
df["delta"] = df["ask_amount"] - df["bid_amount"]
|
||||||
|
df["total_volume"] = df["ask_amount"] + df["bid_amount"]
|
||||||
|
df["total_trades"] = df["ask"] + df["bid"]
|
||||||
|
|
||||||
|
# group to bins aka apply scale
|
||||||
|
df = df.groupby("price").sum(numeric_only=True)
|
||||||
|
return df
|
||||||
|
|
||||||
|
|
||||||
|
def trades_orderflow_to_imbalances(df: pd.DataFrame, imbalance_ratio: int, imbalance_volume: int):
|
||||||
|
"""
|
||||||
|
:param df: dataframes with bid and ask
|
||||||
|
:param imbalance_ratio: imbalance_ratio e.g. 3
|
||||||
|
:param imbalance_volume: imbalance volume e.g. 10
|
||||||
|
:return: dataframe with bid and ask imbalance
|
||||||
|
"""
|
||||||
|
bid = df.bid
|
||||||
|
# compares bid and ask diagonally
|
||||||
|
ask = df.ask.shift(-1)
|
||||||
|
bid_imbalance = (bid / ask) > (imbalance_ratio)
|
||||||
|
# overwrite bid_imbalance with False if volume is not big enough
|
||||||
|
bid_imbalance_filtered = np.where(df.total_volume < imbalance_volume, False, bid_imbalance)
|
||||||
|
ask_imbalance = (ask / bid) > (imbalance_ratio)
|
||||||
|
# overwrite ask_imbalance with False if volume is not big enough
|
||||||
|
ask_imbalance_filtered = np.where(df.total_volume < imbalance_volume, False, ask_imbalance)
|
||||||
|
dataframe = pd.DataFrame(
|
||||||
|
{"bid_imbalance": bid_imbalance_filtered, "ask_imbalance": ask_imbalance_filtered},
|
||||||
|
index=df.index,
|
||||||
|
)
|
||||||
|
|
||||||
|
return dataframe
|
||||||
|
|
||||||
|
|
||||||
|
def stacked_imbalance(
|
||||||
|
df: pd.DataFrame, label: str, stacked_imbalance_range: int, should_reverse: bool
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
y * (y.groupby((y != y.shift()).cumsum()).cumcount() + 1)
|
||||||
|
https://stackoverflow.com/questions/27626542/counting-consecutive-positive-values-in-python-pandas-array
|
||||||
|
"""
|
||||||
|
imbalance = df[f"{label}_imbalance"]
|
||||||
|
int_series = pd.Series(np.where(imbalance, 1, 0))
|
||||||
|
stacked = int_series * (
|
||||||
|
int_series.groupby((int_series != int_series.shift()).cumsum()).cumcount() + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
max_stacked_imbalance_idx = stacked.index[stacked >= stacked_imbalance_range]
|
||||||
|
stacked_imbalance_price = np.nan
|
||||||
|
if not max_stacked_imbalance_idx.empty:
|
||||||
|
idx = (
|
||||||
|
max_stacked_imbalance_idx[0]
|
||||||
|
if not should_reverse
|
||||||
|
else np.flipud(max_stacked_imbalance_idx)[0]
|
||||||
|
)
|
||||||
|
stacked_imbalance_price = imbalance.index[idx]
|
||||||
|
return stacked_imbalance_price
|
||||||
|
|
||||||
|
|
||||||
|
def stacked_imbalance_ask(df: pd.DataFrame, stacked_imbalance_range: int):
|
||||||
|
return stacked_imbalance(df, "ask", stacked_imbalance_range, should_reverse=True)
|
||||||
|
|
||||||
|
|
||||||
|
def stacked_imbalance_bid(df: pd.DataFrame, stacked_imbalance_range: int):
|
||||||
|
return stacked_imbalance(df, "bid", stacked_imbalance_range, should_reverse=False)
|
|
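The new orderflow module above bins public trades into price levels (a volume profile), keyed by a scale/bin size, and derives a per-level delta from taker buy vs. sell volume. A simplified standalone sketch of that binning idea (sample trades and values are illustrative, and the output columns are not the module's exact ones):

import numpy as np
import pandas as pd

# Illustrative public-trades data: price, amount and taker side per trade.
trades = pd.DataFrame(
    {
        "price": [100.02, 100.04, 100.26, 100.31, 99.97],
        "amount": [0.5, 0.2, 1.0, 0.3, 0.7],
        "side": ["buy", "sell", "buy", "buy", "sell"],
    }
)

scale = 0.5  # bin size, as in the module's docstring example
df = pd.DataFrame()
df["bid_amount"] = np.where(trades["side"] == "sell", trades["amount"], 0)
df["ask_amount"] = np.where(trades["side"] == "buy", trades["amount"], 0)
# Round each price to the nearest multiple of the scale, then aggregate per level.
df["level"] = (trades["price"] / scale).round() * scale
profile = df.groupby("level").sum(numeric_only=True)
profile["delta"] = profile["ask_amount"] - profile["bid_amount"]
print(profile)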
@ -19,11 +19,11 @@ from freqtrade.constants import (
|
||||||
ListPairsWithTimeframes,
|
ListPairsWithTimeframes,
|
||||||
PairWithTimeframe,
|
PairWithTimeframe,
|
||||||
)
|
)
|
||||||
from freqtrade.data.history import load_pair_history
|
from freqtrade.data.history import get_datahandler, load_pair_history
|
||||||
from freqtrade.enums import CandleType, RPCMessageType, RunMode
|
from freqtrade.enums import CandleType, RPCMessageType, RunMode, TradingMode
|
||||||
from freqtrade.exceptions import ExchangeError, OperationalException
|
from freqtrade.exceptions import ExchangeError, OperationalException
|
||||||
from freqtrade.exchange import Exchange, timeframe_to_prev_date, timeframe_to_seconds
|
from freqtrade.exchange import Exchange, timeframe_to_prev_date, timeframe_to_seconds
|
||||||
from freqtrade.exchange.types import OrderBook
|
from freqtrade.exchange.exchange_types import OrderBook
|
||||||
from freqtrade.misc import append_candles_to_dataframe
|
from freqtrade.misc import append_candles_to_dataframe
|
||||||
from freqtrade.rpc import RPCManager
|
from freqtrade.rpc import RPCManager
|
||||||
from freqtrade.rpc.rpc_types import RPCAnalyzedDFMsg
|
from freqtrade.rpc.rpc_types import RPCAnalyzedDFMsg
|
||||||
|
@ -445,7 +445,20 @@ class DataProvider:
|
||||||
if self._exchange is None:
|
if self._exchange is None:
|
||||||
raise OperationalException(NO_EXCHANGE_EXCEPTION)
|
raise OperationalException(NO_EXCHANGE_EXCEPTION)
|
||||||
final_pairs = (pairlist + helping_pairs) if helping_pairs else pairlist
|
final_pairs = (pairlist + helping_pairs) if helping_pairs else pairlist
|
||||||
|
# refresh latest ohlcv data
|
||||||
self._exchange.refresh_latest_ohlcv(final_pairs)
|
self._exchange.refresh_latest_ohlcv(final_pairs)
|
||||||
|
# refresh latest trades data
|
||||||
|
self.refresh_latest_trades(pairlist)
|
||||||
|
|
||||||
|
def refresh_latest_trades(self, pairlist: ListPairsWithTimeframes) -> None:
|
||||||
|
"""
|
||||||
|
Refresh latest trades data (if enabled in config)
|
||||||
|
"""
|
||||||
|
|
||||||
|
use_public_trades = self._config.get("exchange", {}).get("use_public_trades", False)
|
||||||
|
if use_public_trades:
|
||||||
|
if self._exchange:
|
||||||
|
self._exchange.refresh_latest_trades(pairlist)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def available_pairs(self) -> ListPairsWithTimeframes:
|
def available_pairs(self) -> ListPairsWithTimeframes:
|
||||||
|
@@ -483,6 +496,42 @@ class DataProvider:
         else:
             return DataFrame()

+    def trades(
+        self, pair: str, timeframe: Optional[str] = None, copy: bool = True, candle_type: str = ""
+    ) -> DataFrame:
+        """
+        Get candle (TRADES) data for the given pair as DataFrame
+        Please use the `available_pairs` method to verify which pairs are currently cached.
+        This is not meant to be used in callbacks because of lookahead bias.
+        :param pair: pair to get the data for
+        :param timeframe: Timeframe to get data for
+        :param candle_type: '', mark, index, premiumIndex, or funding_rate
+        :param copy: copy dataframe before returning if True.
+                     Use False only for read-only operations (where the dataframe is not modified)
+        """
+        if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
+            if self._exchange is None:
+                raise OperationalException(NO_EXCHANGE_EXCEPTION)
+            _candle_type = (
+                CandleType.from_string(candle_type)
+                if candle_type != ""
+                else self._config["candle_type_def"]
+            )
+            return self._exchange.trades(
+                (pair, timeframe or self._config["timeframe"], _candle_type), copy=copy
+            )
+        elif self.runmode in (RunMode.BACKTEST, RunMode.HYPEROPT):
+            data_handler = get_datahandler(
+                self._config["datadir"], data_format=self._config["dataformat_trades"]
+            )
+            trades_df = data_handler.trades_load(
+                pair, self._config.get("trading_mode", TradingMode.SPOT)
+            )
+            return trades_df
+
+        else:
+            return DataFrame()
+
     def market(self, pair: str) -> Optional[Dict[str, Any]]:
         """
         Return market data for the pair
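
A minimal usage sketch for the new DataProvider.trades() accessor, assuming a configured DataProvider instance named dp and cached public trades for the pair; note the docstring above warns against calling it from strategy callbacks because of lookahead bias:

trades = dp.trades("BTC/USDT", timeframe="5m", copy=False)  # read-only access, so copy=False is fine
print(trades.columns.tolist(), len(trades))
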
@@ -4,7 +4,6 @@ from typing import List

 import joblib
 import pandas as pd
-from tabulate import tabulate

 from freqtrade.configuration import TimeRange
 from freqtrade.constants import Config
@@ -14,6 +13,7 @@ from freqtrade.data.btanalysis import (
     load_backtest_stats,
 )
 from freqtrade.exceptions import OperationalException
+from freqtrade.util import print_df_rich_table


 logger = logging.getLogger(__name__)
@@ -307,7 +307,7 @@ def _print_table(
     if name is not None:
         print(name)

-    print(tabulate(data, headers="keys", tablefmt="psql", showindex=show_index))
+    print_df_rich_table(data, data.keys(), show_index=show_index)


 def process_entry_exit_reasons(config: Config):
@@ -12,7 +12,7 @@ from datetime import datetime, timezone
 from pathlib import Path
 from typing import List, Optional, Tuple, Type

-from pandas import DataFrame
+from pandas import DataFrame, to_datetime

 from freqtrade import misc
 from freqtrade.configuration import TimeRange
@@ -32,6 +32,7 @@ logger = logging.getLogger(__name__)

 class IDataHandler(ABC):
     _OHLCV_REGEX = r"^([a-zA-Z_\d-]+)\-(\d+[a-zA-Z]{1,2})\-?([a-zA-Z_]*)?(?=\.)"
+    _TRADES_REGEX = r"^([a-zA-Z_\d-]+)\-(trades)?(?=\.)"

     def __init__(self, datadir: Path) -> None:
         self._datadir = datadir
@@ -166,6 +167,50 @@ class IDataHandler(ABC):
         :param candle_type: Any of the enum CandleType (must match trading mode!)
         """

+    @classmethod
+    def trades_get_available_data(cls, datadir: Path, trading_mode: TradingMode) -> List[str]:
+        """
+        Returns a list of all pairs with ohlcv data available in this datadir
+        :param datadir: Directory to search for ohlcv files
+        :param trading_mode: trading-mode to be used
+        :return: List of Tuples of (pair, timeframe, CandleType)
+        """
+        if trading_mode == TradingMode.FUTURES:
+            datadir = datadir.joinpath("futures")
+        _tmp = [
+            re.search(cls._TRADES_REGEX, p.name)
+            for p in datadir.glob(f"*.{cls._get_file_extension()}")
+        ]
+        return [
+            cls.rebuild_pair_from_filename(match[1])
+            for match in _tmp
+            if match and len(match.groups()) > 1
+        ]
+
+    def trades_data_min_max(
+        self,
+        pair: str,
+        trading_mode: TradingMode,
+    ) -> Tuple[datetime, datetime, int]:
+        """
+        Returns the min and max timestamp for the given pair's trades data.
+        :param pair: Pair to get min/max for
+        :param trading_mode: Trading mode to use (used to determine the filename)
+        :return: (min, max, len)
+        """
+        df = self._trades_load(pair, trading_mode)
+        if df.empty:
+            return (
+                datetime.fromtimestamp(0, tz=timezone.utc),
+                datetime.fromtimestamp(0, tz=timezone.utc),
+                0,
+            )
+        return (
+            to_datetime(df.iloc[0]["timestamp"], unit="ms", utc=True).to_pydatetime(),
+            to_datetime(df.iloc[-1]["timestamp"], unit="ms", utc=True).to_pydatetime(),
+            len(df),
+        )
+
     @classmethod
     def trades_get_pairs(cls, datadir: Path) -> List[str]:
         """
@@ -247,9 +292,13 @@ class IDataHandler(ABC):
         :param timerange: Timerange to load trades for - currently not implemented
         :return: List of trades
         """
-        trades = trades_df_remove_duplicates(
-            self._trades_load(pair, trading_mode, timerange=timerange)
-        )
+        try:
+            trades = self._trades_load(pair, trading_mode, timerange=timerange)
+        except Exception:
+            logger.exception(f"Error loading trades for {pair}")
+            return DataFrame(columns=DEFAULT_TRADES_COLUMNS)
+
+        trades = trades_df_remove_duplicates(trades)
+
         trades = trades_convert_types(trades)
         return trades
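
A hedged sketch of reading back the stored trades range with the new helper; the datadir path and the "feather" data format below are illustrative assumptions that depend on the local setup, not values taken from this diff:

from pathlib import Path

from freqtrade.data.history import get_datahandler
from freqtrade.enums import TradingMode

dh = get_datahandler(Path("user_data/data/binance"), data_format="feather")
first, last, count = dh.trades_data_min_max("BTC/USDT", TradingMode.SPOT)
print(f"{count} trades between {first} and {last}")
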
@@ -26,8 +26,7 @@ from freqtrade.enums import CandleType, TradingMode
 from freqtrade.exceptions import OperationalException
 from freqtrade.exchange import Exchange
 from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
-from freqtrade.util import dt_ts, format_ms_time
-from freqtrade.util.datetime_helpers import dt_now
+from freqtrade.util import dt_now, dt_ts, format_ms_time, get_progress_tracker
 from freqtrade.util.migrations import migrate_data


@@ -155,11 +154,9 @@ def refresh_data(
     :param candle_type: Any of the enum CandleType (must match trading mode!)
     """
     data_handler = get_datahandler(datadir, data_format)
-    for idx, pair in enumerate(pairs):
-        process = f"{idx}/{len(pairs)}"
+    for pair in pairs:
         _download_pair_history(
             pair=pair,
-            process=process,
             timeframe=timeframe,
             datadir=datadir,
             timerange=timerange,
@@ -223,7 +220,6 @@ def _download_pair_history(
     datadir: Path,
     exchange: Exchange,
     timeframe: str = "5m",
-    process: str = "",
     new_pairs_days: int = 30,
     data_handler: Optional[IDataHandler] = None,
     timerange: Optional[TimeRange] = None,
@@ -261,7 +257,7 @@ def _download_pair_history(
         )

         logger.info(
-            f'({process}) - Download history data for "{pair}", {timeframe}, '
+            f'Download history data for "{pair}", {timeframe}, '
             f"{candle_type} and store in {datadir}. "
             f'From {format_ms_time(since_ms) if since_ms else "start"} to '
             f'{format_ms_time(until_ms) if until_ms else "now"}'
@@ -345,18 +341,24 @@ def refresh_backtest_ohlcv_data(
     pairs_not_available = []
     data_handler = get_datahandler(datadir, data_format)
     candle_type = CandleType.get_default(trading_mode)
-    process = ""
-    for idx, pair in enumerate(pairs, start=1):
+    with get_progress_tracker() as progress:
+        tf_length = len(timeframes) if trading_mode != "futures" else len(timeframes) + 2
+        timeframe_task = progress.add_task("Timeframe", total=tf_length)
+        pair_task = progress.add_task("Downloading data...", total=len(pairs))
+
+        for pair in pairs:
+            progress.update(pair_task, description=f"Downloading {pair}")
+            progress.update(timeframe_task, completed=0)
+
             if pair not in exchange.markets:
                 pairs_not_available.append(pair)
                 logger.info(f"Skipping pair {pair}...")
                 continue
             for timeframe in timeframes:
+                progress.update(timeframe_task, description=f"Timeframe {timeframe}")
                 logger.debug(f"Downloading pair {pair}, {candle_type}, interval {timeframe}.")
-                process = f"{idx}/{len(pairs)}"
                 _download_pair_history(
                     pair=pair,
-                    process=process,
                     datadir=datadir,
                     exchange=exchange,
                     timerange=timerange,
@@ -367,6 +369,7 @@ def refresh_backtest_ohlcv_data(
                     erase=erase,
                     prepend=prepend,
                 )
+                progress.update(timeframe_task, advance=1)
             if trading_mode == "futures":
                 # Predefined candletype (and timeframe) depending on exchange
                 # Downloads what is necessary to backtest based on futures data.
@@ -381,7 +384,6 @@ def refresh_backtest_ohlcv_data(
                     logger.debug(f"Downloading pair {pair}, {candle_type_f}, interval {tf}.")
                     _download_pair_history(
                         pair=pair,
-                        process=process,
                         datadir=datadir,
                         exchange=exchange,
                         timerange=timerange,
@@ -392,6 +394,12 @@ def refresh_backtest_ohlcv_data(
                         erase=erase,
                         prepend=prepend,
                     )
+                    progress.update(
+                        timeframe_task, advance=1, description=f"Timeframe {candle_type_f}, {tf}"
+                    )
+
+            progress.update(pair_task, advance=1)
+            progress.update(timeframe_task, description="Timeframe")

     return pairs_not_available


@@ -480,7 +488,7 @@ def _download_trades_history(
         return True

     except Exception:
-        logger.exception(f'Failed to download historic trades for pair: "{pair}". ')
+        logger.exception(f'Failed to download and store historic trades for pair: "{pair}". ')
         return False


@@ -501,7 +509,10 @@ def refresh_backtest_trades_data(
     """
     pairs_not_available = []
     data_handler = get_datahandler(datadir, data_format=data_format)
+    with get_progress_tracker() as progress:
+        pair_task = progress.add_task("Downloading data...", total=len(pairs))
         for pair in pairs:
+            progress.update(pair_task, description=f"Downloading trades [{pair}]")
             if pair not in exchange.markets:
                 pairs_not_available.append(pair)
                 logger.info(f"Skipping pair {pair}...")
@@ -520,6 +531,8 @@ def refresh_backtest_trades_data(
                 data_handler=data_handler,
                 trading_mode=trading_mode,
             )
+            progress.update(pair_task, advance=1)
+
     return pairs_not_available
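
The download loops above follow the usual rich progress pattern. get_progress_tracker() presumably returns a pre-configured rich.progress.Progress instance; a plain Progress shows the same add_task/update calls (the pairs list here is illustrative):

from rich.progress import Progress

pairs = ["BTC/USDT", "ETH/USDT"]
with Progress() as progress:
    pair_task = progress.add_task("Downloading data...", total=len(pairs))
    for pair in pairs:
        progress.update(pair_task, description=f"Downloading {pair}")
        # ... download work for the pair ...
        progress.update(pair_task, advance=1)
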
@@ -11,3 +11,6 @@ class MarginMode(str, Enum):
     CROSS = "cross"
     ISOLATED = "isolated"
     NONE = ""
+
+    def __str__(self):
+        return f"{self.name.lower()}"
@@ -10,3 +10,6 @@ class TradingMode(str, Enum):
     SPOT = "spot"
     MARGIN = "margin"
     FUTURES = "futures"
+
+    def __str__(self):
+        return f"{self.name.lower()}"
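
With the explicit __str__, the enum members stringify as their lowercase names, which appears to be why several f-strings elsewhere in this diff drop the trailing .value. A small check of the behaviour:

from freqtrade.enums import TradingMode

assert str(TradingMode.FUTURES) == "futures"
print(f"mode: {TradingMode.FUTURES!s}")  # -> mode: futures
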
@@ -11,6 +11,7 @@ from freqtrade.exchange.bitpanda import Bitpanda
 from freqtrade.exchange.bitvavo import Bitvavo
 from freqtrade.exchange.bybit import Bybit
 from freqtrade.exchange.coinbasepro import Coinbasepro
+from freqtrade.exchange.cryptocom import Cryptocom
 from freqtrade.exchange.exchange_utils import (
     ROUND_DOWN,
     ROUND_UP,
@@ -38,6 +39,7 @@ from freqtrade.exchange.exchange_utils_timeframe import (
 from freqtrade.exchange.gate import Gate
 from freqtrade.exchange.hitbtc import Hitbtc
 from freqtrade.exchange.htx import Htx
+from freqtrade.exchange.hyperliquid import Hyperliquid
 from freqtrade.exchange.idex import Idex
 from freqtrade.exchange.kraken import Kraken
 from freqtrade.exchange.kucoin import Kucoin
@@ -11,7 +11,7 @@ from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
 from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
 from freqtrade.exchange.common import retrier
-from freqtrade.exchange.types import OHLCVResponse, Tickers
+from freqtrade.exchange.exchange_types import FtHas, OHLCVResponse, Tickers
 from freqtrade.misc import deep_merge_dicts, json_load


@@ -19,7 +19,7 @@ logger = logging.getLogger(__name__)


 class Binance(Exchange):
-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "stoploss_on_exchange": True,
         "stop_price_param": "stopPrice",
         "stop_price_prop": "stopPrice",
@@ -30,8 +30,9 @@ class Binance(Exchange):
         "trades_pagination_arg": "fromId",
         "trades_has_history": True,
         "l2_limit_range": [5, 10, 20, 50, 100, 500, 1000],
+        "ws_enabled": True,
     }
-    _ft_has_futures: Dict = {
+    _ft_has_futures: FtHas = {
         "stoploss_order_types": {"limit": "stop", "market": "stop_market"},
         "order_time_in_force": ["GTC", "FOK", "IOC"],
         "tickers_have_price": False,
@@ -42,6 +43,7 @@ class Binance(Exchange):
             PriceType.LAST: "CONTRACT_PRICE",
             PriceType.MARK: "MARK_PRICE",
         },
+        "ws_enabled": False,
     }

     _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
@@ -190,7 +192,7 @@ class Binance(Exchange):
         if maintenance_amt is None:
             raise OperationalException(
                 "Parameter maintenance_amt is required by Binance.liquidation_price"
-                f"for {self.trading_mode.value}"
+                f"for {self.trading_mode}"
             )

         if self.trading_mode == TradingMode.FUTURES:

(File diff suppressed because it is too large.)
@@ -1,9 +1,9 @@
 """Bingx exchange subclass"""

 import logging
-from typing import Dict

 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas


 logger = logging.getLogger(__name__)
@@ -15,7 +15,7 @@ class Bingx(Exchange):
     with this exchange.
     """

-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "ohlcv_candle_limit": 1000,
         "stoploss_on_exchange": True,
         "stoploss_order_types": {"limit": "limit", "market": "market"},
@@ -1,9 +1,9 @@
 """Bitmart exchange subclass"""

 import logging
-from typing import Dict

 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas


 logger = logging.getLogger(__name__)
@@ -15,7 +15,7 @@ class Bitmart(Exchange):
     with this exchange.
     """

-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "stoploss_on_exchange": False,  # Bitmart API does not support stoploss orders
         "ohlcv_candle_limit": 200,
         "trades_has_history": False,  # Endpoint doesn't seem to support pagination
@@ -1,9 +1,11 @@
-"""Kucoin exchange subclass."""
+"""Bitvavo exchange subclass."""

 import logging
-from typing import Dict
+
+from ccxt import DECIMAL_PLACES

 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas


 logger = logging.getLogger(__name__)
@@ -19,6 +21,14 @@ class Bitvavo(Exchange):
     may still not work as expected.
     """

-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "ohlcv_candle_limit": 1440,
     }
+
+    @property
+    def precisionMode(self) -> int:
+        """
+        Exchange ccxt precisionMode
+        Override due to https://github.com/ccxt/ccxt/issues/20408
+        """
+        return DECIMAL_PLACES
@@ -11,6 +11,7 @@ from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
 from freqtrade.exceptions import DDosProtection, ExchangeError, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
 from freqtrade.exchange.common import retrier
+from freqtrade.exchange.exchange_types import FtHas
 from freqtrade.util.datetime_helpers import dt_now, dt_ts


@@ -29,13 +30,14 @@ class Bybit(Exchange):

     unified_account = False

-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "ohlcv_candle_limit": 1000,
         "ohlcv_has_history": True,
         "order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
+        "ws_enabled": True,
         "trades_has_history": False,  # Endpoint doesn't support pagination
     }
-    _ft_has_futures: Dict = {
+    _ft_has_futures: FtHas = {
         "ohlcv_has_history": True,
         "mark_ohlcv_timeframe": "4h",
         "funding_fee_timeframe": "8h",
@@ -47,7 +47,7 @@ def check_exchange(config: Config, check_for_bad: bool = True) -> bool:
             f'{", ".join(available_exchanges())}'
         )

-    valid, reason = validate_exchange(exchange)
+    valid, reason, _ = validate_exchange(exchange)
     if not valid:
         if check_for_bad:
             raise OperationalException(
@@ -1,9 +1,9 @@
 """CoinbasePro exchange subclass"""

 import logging
-from typing import Dict

 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas


 logger = logging.getLogger(__name__)
@@ -19,6 +19,6 @@ class Coinbasepro(Exchange):
     may still not work as expected.
     """

-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "ohlcv_candle_limit": 300,
     }
@@ -37,7 +37,6 @@ API_FETCH_ORDER_RETRY_COUNT = 5

 BAD_EXCHANGES = {
     "bitmex": "Various reasons.",
-    "phemex": "Does not provide history.",
     "probit": "Requires additional, regular calls to `signIn()`.",
     "poloniex": "Does not provide fetch_order endpoint to fetch both open and closed orders.",
 }
@@ -55,6 +54,7 @@ SUPPORTED_EXCHANGES = [
     "binance",
     "bingx",
     "bitmart",
+    "bybit",
     "gate",
     "htx",
     "kraken",
@@ -65,6 +65,7 @@ SUPPORTED_EXCHANGES = [
 EXCHANGE_HAS_REQUIRED: Dict[str, List[str]] = {
     # Required / private
     "fetchOrder": ["fetchOpenOrder", "fetchClosedOrder"],
+    "fetchL2OrderBook": ["fetchTicker"],
     "cancelOrder": [],
     "createOrder": [],
     "fetchBalance": [],
@@ -92,6 +93,8 @@ EXCHANGE_HAS_OPTIONAL = [
     # 'fetchMarketLeverageTiers',  # Futures initialization
     # 'fetchOpenOrder', 'fetchClosedOrder',  # replacement for fetchOrder
     # 'fetchOpenOrders', 'fetchClosedOrders',  # 'fetchOrders',  # Refinding balance...
+    # ccxt.pro
+    "watchOHLCV",
 ]
freqtrade/exchange/cryptocom.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+"""Crypto.com exchange subclass"""
+
+import logging
+
+from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas
+
+
+logger = logging.getLogger(__name__)
+
+
+class Cryptocom(Exchange):
+    """Crypto.com exchange class.
+    Contains adjustments needed for Freqtrade to work with this exchange.
+    """
+
+    _ft_has: FtHas = {
+        "ohlcv_candle_limit": 300,
+    }
@@ -14,7 +14,7 @@ from threading import Lock
 from typing import Any, Coroutine, Dict, List, Literal, Optional, Tuple, Union

 import ccxt
-import ccxt.async_support as ccxt_async
+import ccxt.pro as ccxt_pro
 from cachetools import TTLCache
 from ccxt import TICK_SIZE
 from dateutil import parser
@@ -22,6 +22,7 @@ from pandas import DataFrame, concat

 from freqtrade.constants import (
     DEFAULT_AMOUNT_RESERVE_PERCENT,
+    DEFAULT_TRADES_COLUMNS,
     NON_OPEN_EXCHANGE_STATES,
     BidAsk,
     BuySell,
@@ -33,8 +34,22 @@ from freqtrade.constants import (
     OBLiteral,
     PairWithTimeframe,
 )
-from freqtrade.data.converter import clean_ohlcv_dataframe, ohlcv_to_dataframe, trades_dict_to_list
-from freqtrade.enums import OPTIMIZE_MODES, CandleType, MarginMode, PriceType, RunMode, TradingMode
+from freqtrade.data.converter import (
+    clean_ohlcv_dataframe,
+    ohlcv_to_dataframe,
+    trades_df_remove_duplicates,
+    trades_dict_to_list,
+    trades_list_to_df,
+)
+from freqtrade.enums import (
+    OPTIMIZE_MODES,
+    TRADE_MODES,
+    CandleType,
+    MarginMode,
+    PriceType,
+    RunMode,
+    TradingMode,
+)
 from freqtrade.exceptions import (
     ConfigurationError,
     DDosProtection,
@@ -52,11 +67,19 @@ from freqtrade.exchange.common import (
     retrier,
     retrier_async,
 )
+from freqtrade.exchange.exchange_types import (
+    CcxtBalances,
+    CcxtPosition,
+    FtHas,
+    OHLCVResponse,
+    OrderBook,
+    Ticker,
+    Tickers,
+)
 from freqtrade.exchange.exchange_utils import (
     ROUND,
     ROUND_DOWN,
     ROUND_UP,
-    CcxtModuleType,
     amount_to_contract_precision,
     amount_to_contracts,
     amount_to_precision,
@@ -73,7 +96,7 @@ from freqtrade.exchange.exchange_utils_timeframe import (
     timeframe_to_prev_date,
     timeframe_to_seconds,
 )
-from freqtrade.exchange.types import OHLCVResponse, OrderBook, Ticker, Tickers
+from freqtrade.exchange.exchange_ws import ExchangeWS
 from freqtrade.misc import (
     chunks,
     deep_merge_dicts,
@@ -83,7 +106,7 @@ from freqtrade.misc import (
 )
 from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
 from freqtrade.util import dt_from_ts, dt_now
-from freqtrade.util.datetime_helpers import dt_humanize_delta, dt_ts
+from freqtrade.util.datetime_helpers import dt_humanize_delta, dt_ts, format_ms_time
 from freqtrade.util.periodic_cache import PeriodicCache
@@ -100,10 +123,11 @@ class Exchange:
     # Dict to specify which options each exchange implements
     # This defines defaults, which can be selectively overridden by subclasses using _ft_has
     # or by specifying them in the configuration.
-    _ft_has_default: Dict = {
+    _ft_has_default: FtHas = {
         "stoploss_on_exchange": False,
         "stop_price_param": "stopLossPrice",  # Used for stoploss_on_exchange request
         "stop_price_prop": "stopLossPrice",  # Used for stoploss_on_exchange response parsing
+        "stoploss_order_types": {},
         "order_time_in_force": ["GTC"],
         "ohlcv_params": {},
         "ohlcv_candle_limit": 500,
@@ -113,8 +137,10 @@ class Exchange:
         # Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
         "ohlcv_volume_currency": "base",  # "base" or "quote"
         "tickers_have_quoteVolume": True,
+        "tickers_have_percentage": True,
         "tickers_have_bid_ask": True,  # bid / ask empty for fetch_tickers
         "tickers_have_price": True,
+        "trades_limit": 1000,  # Limit for 1 call to fetch_trades
         "trades_pagination": "time",  # Possible are "time" or "id"
         "trades_pagination_arg": "since",
         "trades_has_history": False,
@@ -130,9 +156,10 @@ class Exchange:
         "marketOrderRequiresPrice": False,
         "exchange_has_overrides": {},  # Dictionary overriding ccxt's "has".
         # Expected to be in the format {"fetchOHLCV": True} or {"fetchOHLCV": False}
+        "ws_enabled": False,  # Set to true for exchanges with tested websocket support
     }
-    _ft_has: Dict = {}
-    _ft_has_futures: Dict = {}
+    _ft_has: FtHas = {}
+    _ft_has_futures: FtHas = {}

     _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
         # TradingMode.SPOT always supported and not required in this list
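
The plain Dict annotations on the capability dictionaries are replaced by FtHas throughout this diff. Its real definition lives in the new freqtrade/exchange/exchange_types.py module and is not shown here; a hypothetical sketch of the shape it presumably has (a TypedDict with optional keys so each subclass only overrides what it needs, field list abridged):

from typing import Dict, List, TypedDict


class FtHas(TypedDict, total=False):  # hypothetical sketch, not the actual definition
    stoploss_on_exchange: bool
    stoploss_order_types: Dict[str, str]
    order_time_in_force: List[str]
    ohlcv_candle_limit: int
    trades_limit: int
    ws_enabled: bool
    exchange_has_overrides: Dict[str, bool]
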
@@ -152,7 +179,9 @@ class Exchange:
         :return: None
         """
         self._api: ccxt.Exchange
-        self._api_async: ccxt_async.Exchange
+        self._api_async: ccxt_pro.Exchange
+        self._ws_async: ccxt_pro.Exchange = None
+        self._exchange_ws: Optional[ExchangeWS] = None
         self._markets: Dict = {}
         self._trading_fees: Dict[str, Any] = {}
         self._leverage_tiers: Dict[str, List[Dict]] = {}
@@ -183,6 +212,9 @@ class Exchange:
         self._klines: Dict[PairWithTimeframe, DataFrame] = {}
         self._expiring_candle_cache: Dict[Tuple[str, int], PeriodicCache] = {}

+        # Holds public_trades
+        self._trades: Dict[PairWithTimeframe, DataFrame] = {}
+
         # Holds all open sell orders for dry_run
         self._dry_run_open_orders: Dict[str, Any] = {}

@@ -211,6 +243,8 @@ class Exchange:
         # Assign this directly for easy access
         self._ohlcv_partial_candle = self._ft_has["ohlcv_partial_candle"]

+        self._max_trades_limit = self._ft_has["trades_limit"]
+
         self._trades_pagination = self._ft_has["trades_pagination"]
         self._trades_pagination_arg = self._ft_has["trades_pagination_arg"]
@@ -219,7 +253,7 @@ class Exchange:
         ccxt_config = deep_merge_dicts(exchange_conf.get("ccxt_config", {}), ccxt_config)
         ccxt_config = deep_merge_dicts(exchange_conf.get("ccxt_sync_config", {}), ccxt_config)

-        self._api = self._init_ccxt(exchange_conf, ccxt_kwargs=ccxt_config)
+        self._api = self._init_ccxt(exchange_conf, True, ccxt_config)

         ccxt_async_config = self._ccxt_config
         ccxt_async_config = deep_merge_dicts(
@@ -228,7 +262,15 @@ class Exchange:
         ccxt_async_config = deep_merge_dicts(
             exchange_conf.get("ccxt_async_config", {}), ccxt_async_config
         )
-        self._api_async = self._init_ccxt(exchange_conf, ccxt_async, ccxt_kwargs=ccxt_async_config)
+        self._api_async = self._init_ccxt(exchange_conf, False, ccxt_async_config)
+        self._has_watch_ohlcv = self.exchange_has("watchOHLCV") and self._ft_has["ws_enabled"]
+        if (
+            self._config["runmode"] in TRADE_MODES
+            and exchange_conf.get("enable_ws", True)
+            and self._has_watch_ohlcv
+        ):
+            self._ws_async = self._init_ccxt(exchange_conf, False, ccxt_async_config)
+            self._exchange_ws = ExchangeWS(self._config, self._ws_async)

         logger.info(f'Using Exchange "{self.name}"')
         self.required_candle_call_count = 1
@@ -257,6 +299,8 @@ class Exchange:
         self.close()

     def close(self):
+        if self._exchange_ws:
+            self._exchange_ws.cleanup()
         logger.debug("Exchange object destroyed, closing async loop")
         if (
             getattr(self, "_api_async", None)
@@ -265,6 +309,14 @@ class Exchange:
         ):
             logger.debug("Closing async ccxt session.")
             self.loop.run_until_complete(self._api_async.close())
+        if (
+            self._ws_async
+            and inspect.iscoroutinefunction(self._ws_async.close)
+            and self._ws_async.session
+        ):
+            logger.debug("Closing ws ccxt session.")
+            self.loop.run_until_complete(self._ws_async.close())
+
         if self.loop and not self.loop.is_closed():
             self.loop.close()
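
The dedicated websocket client is only created when all three gates above hold: the bot runs in one of the trade run modes (TRADE_MODES), the exchange subclass declares "ws_enabled" and ccxt supports watchOHLCV for it, and the exchange section of the configuration does not disable it. A sketch of the relevant configuration key, with the surrounding values as illustrative assumptions:

exchange_conf = {
    "name": "binance",
    "enable_ws": True,  # defaults to True; set to False to force REST-only candle refresh
    # ... credentials and other exchange settings ...
}
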
@@ -273,7 +325,7 @@ class Exchange:
         asyncio.set_event_loop(loop)
         return loop

-    def validate_config(self, config):
+    def validate_config(self, config: Config) -> None:
         # Check if timeframe is available
         self.validate_timeframes(config.get("timeframe"))

@@ -286,29 +338,43 @@ class Exchange:
         self.validate_trading_mode_and_margin_mode(self.trading_mode, self.margin_mode)
         self.validate_pricing(config["exit_pricing"])
         self.validate_pricing(config["entry_pricing"])
+        self.validate_orderflow(config["exchange"])
+        self.validate_freqai(config)

     def _init_ccxt(
-        self,
-        exchange_config: Dict[str, Any],
-        ccxt_module: CcxtModuleType = ccxt,
-        *,
-        ccxt_kwargs: Dict,
+        self, exchange_config: Dict[str, Any], sync: bool, ccxt_kwargs: Dict[str, Any]
     ) -> ccxt.Exchange:
         """
-        Initialize ccxt with given config and return valid
-        ccxt instance.
+        Initialize ccxt with given config and return valid ccxt instance.
         """
         # Find matching class for the given exchange name
         name = exchange_config["name"]
+        if sync:
+            ccxt_module = ccxt
+        else:
+            ccxt_module = ccxt_pro
+            if not is_exchange_known_ccxt(name, ccxt_module):
+                # Fall back to async if pro doesn't support this exchange
+                import ccxt.async_support as ccxt_async
+
+                ccxt_module = ccxt_async
+
         if not is_exchange_known_ccxt(name, ccxt_module):
             raise OperationalException(f"Exchange {name} is not supported by ccxt")

         ex_config = {
-            "apiKey": exchange_config.get("key"),
+            "apiKey": exchange_config.get(
+                "api_key", exchange_config.get("apiKey", exchange_config.get("key"))
+            ),
             "secret": exchange_config.get("secret"),
             "password": exchange_config.get("password"),
             "uid": exchange_config.get("uid", ""),
+            "accountId": exchange_config.get("account_id", exchange_config.get("accountId", "")),
+            # DEX attributes:
+            "walletAddress": exchange_config.get(
+                "wallet_address", exchange_config.get("walletAddress")
+            ),
+            "privateKey": exchange_config.get("private_key", exchange_config.get("privateKey")),
         }
         if ccxt_kwargs:
             logger.info("Applying additional ccxt config: %s", ccxt_kwargs)
@@ -360,7 +426,17 @@ class Exchange:

     @property
     def precisionMode(self) -> int:
-        """exchange ccxt precisionMode"""
+        """Exchange ccxt precisionMode"""
+        return self._api.precisionMode
+
+    @property
+    def precision_mode_price(self) -> int:
+        """
+        Exchange ccxt precisionMode used for price
+        Workaround for ccxt limitation to not have precisionMode for price
+        if it differs for an exchange
+        Might need to be updated if https://github.com/ccxt/ccxt/issues/20408 is fixed.
+        """
         return self._api.precisionMode

     def additional_exchange_init(self) -> None:
@@ -392,7 +468,7 @@ class Exchange:
         """
         return int(
             self._ft_has.get("ohlcv_candle_limit_per_timeframe", {}).get(
-                timeframe, self._ft_has.get("ohlcv_candle_limit")
+                timeframe, str(self._ft_has.get("ohlcv_candle_limit"))
             )
         )
@@ -483,6 +559,15 @@ class Exchange:
         else:
             return DataFrame()

+    def trades(self, pair_interval: PairWithTimeframe, copy: bool = True) -> DataFrame:
+        if pair_interval in self._trades:
+            if copy:
+                return self._trades[pair_interval].copy()
+            else:
+                return self._trades[pair_interval]
+        else:
+            return DataFrame(columns=DEFAULT_TRADES_COLUMNS)
+
     def get_contract_size(self, pair: str) -> Optional[float]:
         if self.trading_mode == TradingMode.FUTURES:
             market = self.markets.get(pair, {})
@@ -531,6 +616,13 @@ class Exchange:
             amount, self.get_precision_amount(pair), self.precisionMode, contract_size
         )

+    def ws_connection_reset(self):
+        """
+        called at regular intervals to reset the websocket connection
+        """
+        if self._exchange_ws:
+            self._exchange_ws.reset_connections()
+
     def _load_async_markets(self, reload: bool = False) -> Dict[str, Any]:
         try:
             markets = self.loop.run_until_complete(
@@ -562,6 +654,12 @@ class Exchange:
             # Reload async markets, then assign them to sync api
             self._markets = self._load_async_markets(reload=True)
             self._api.set_markets(self._api_async.markets, self._api_async.currencies)
+            # Assign options array, as it contains some temporary information from the exchange.
+            self._api.options = self._api_async.options
+            if self._exchange_ws:
+                # Set markets to avoid reloading on websocket api
+                self._ws_async.set_markets(self._api.markets, self._api.currencies)
+                self._ws_async.options = self._api.options
             self._last_markets_refresh = dt_ts()

             if is_initial and self._ft_has["needs_trading_fees"]:
@@ -609,7 +707,7 @@ class Exchange:
         # Note: ccxt has BaseCurrency/QuoteCurrency format for pairs
         if self.markets and pair not in self.markets:
             raise OperationalException(
-                f"Pair {pair} is not available on {self.name} {self.trading_mode.value}. "
+                f"Pair {pair} is not available on {self.name} {self.trading_mode}. "
                 f"Please remove {pair} from your whitelist."
             )

@@ -723,6 +821,21 @@ class Exchange:
                 f"Time in force policies are not supported for {self.name} yet."
             )

+    def validate_orderflow(self, exchange: Dict) -> None:
+        if exchange.get("use_public_trades", False) and (
+            not self.exchange_has("fetchTrades") or not self._ft_has["trades_has_history"]
+        ):
+            raise ConfigurationError(
+                f"Trade data not available for {self.name}. Can't use orderflow feature."
+            )
+
+    def validate_freqai(self, config: Config) -> None:
+        freqai_enabled = config.get("freqai", {}).get("enabled", False)
+        if freqai_enabled and not self._ft_has["ohlcv_has_history"]:
+            raise ConfigurationError(
+                f"Historic OHLCV data not available for {self.name}. Can't use freqAI."
+            )
+
     def validate_required_startup_candles(self, startup_candles: int, timeframe: str) -> int:
         """
         Checks if required startup_candles is more than ohlcv_candle_limit().
@@ -777,7 +890,7 @@ class Exchange:
         ):
             mm_value = margin_mode and margin_mode.value
             raise OperationalException(
-                f"Freqtrade does not support {mm_value} {trading_mode.value} on {self.name}"
+                f"Freqtrade does not support {mm_value} {trading_mode} on {self.name}"
             )

     def get_option(self, param: str, default: Optional[Any] = None) -> Any:
@@ -795,7 +908,7 @@ class Exchange:
         """
         if endpoint in self._ft_has.get("exchange_has_overrides", {}):
             return self._ft_has["exchange_has_overrides"][endpoint]
-        return endpoint in self._api.has and self._api.has[endpoint]
+        return endpoint in self._api_async.has and self._api_async.has[endpoint]

     def get_precision_amount(self, pair: str) -> Optional[float]:
         """
@@ -827,7 +940,10 @@ class Exchange:
         For stoploss calculations, must use ROUND_UP for longs, and ROUND_DOWN for shorts.
         """
         return price_to_precision(
-            price, self.get_precision_price(pair), self.precisionMode, rounding_mode=rounding_mode
+            price,
+            self.get_precision_price(pair),
+            self.precision_mode_price,
+            rounding_mode=rounding_mode,
         )

     def price_get_one_pip(self, pair: str, price: float) -> float:
@@ -1564,7 +1680,7 @@ class Exchange:
         return order

     @retrier
-    def get_balances(self) -> dict:
+    def get_balances(self) -> CcxtBalances:
         try:
             balances = self._api.fetch_balance()
             # Remove additional info from ccxt results
@@ -1584,7 +1700,7 @@ class Exchange:
             raise OperationalException(e) from e

     @retrier
-    def fetch_positions(self, pair: Optional[str] = None) -> List[Dict]:
+    def fetch_positions(self, pair: Optional[str] = None) -> List[CcxtPosition]:
         """
         Fetch positions from the exchange.
         If no pair is given, all positions are returned.
@@ -1596,7 +1712,7 @@ class Exchange:
             symbols = []
             if pair:
                 symbols.append(pair)
-            positions: List[Dict] = self._api.fetch_positions(symbols)
+            positions: List[CcxtPosition] = self._api.fetch_positions(symbols)
             self._log_exchange_response("fetch_positions", positions)
             return positions
         except ccxt.DDoSProtection as e:
@@ -2019,7 +2135,7 @@ class Exchange:
     def get_fee(
         self,
         symbol: str,
-        type: str = "",
+        order_type: str = "",
         side: str = "",
         amount: float = 1,
         price: float = 1,
@@ -2028,13 +2144,13 @@ class Exchange:
         """
         Retrieve fee from exchange
         :param symbol: Pair
-        :param type: Type of order (market, limit, ...)
+        :param order_type: Type of order (market, limit, ...)
         :param side: Side of order (buy, sell)
         :param amount: Amount of order
         :param price: Price of order
         :param taker_or_maker: 'maker' or 'taker' (ignored if "type" is provided)
         """
-        if type and type == "market":
+        if order_type and order_type == "market":
             taker_or_maker = "taker"
         try:
             if self._config["dry_run"] and self._config.get("fee", None) is not None:
@@ -2045,7 +2161,7 @@ class Exchange:

             return self._api.calculate_fee(
                 symbol=symbol,
-                type=type,
+                type=order_type,
                 side=side,
                 amount=amount,
                 price=price,
@@ -2228,9 +2344,40 @@ class Exchange:
         cache: bool,
     ) -> Coroutine[Any, Any, OHLCVResponse]:
         not_all_data = cache and self.required_candle_call_count > 1
+        if cache and candle_type in (CandleType.SPOT, CandleType.FUTURES):
+            if self._has_watch_ohlcv and self._exchange_ws:
+                # Subscribe to websocket
+                self._exchange_ws.schedule_ohlcv(pair, timeframe, candle_type)
+
         if cache and (pair, timeframe, candle_type) in self._klines:
             candle_limit = self.ohlcv_candle_limit(timeframe, candle_type)
-            min_date = date_minus_candles(timeframe, candle_limit - 5).timestamp()
+            min_date = int(date_minus_candles(timeframe, candle_limit - 5).timestamp())
+
+            if self._exchange_ws:
+                candle_date = int(timeframe_to_prev_date(timeframe).timestamp() * 1000)
+                prev_candle_date = int(date_minus_candles(timeframe, 1).timestamp() * 1000)
+                candles = self._exchange_ws.ccxt_object.ohlcvs.get(pair, {}).get(timeframe)
+                half_candle = int(candle_date - (candle_date - prev_candle_date) * 0.5)
+                last_refresh_time = int(
+                    self._exchange_ws.klines_last_refresh.get((pair, timeframe, candle_type), 0)
+                )
+
+                if (
+                    candles
+                    and candles[-1][0] >= prev_candle_date
+                    and last_refresh_time >= half_candle
+                ):
+                    # Usable result, candle contains the previous candle.
+                    # Also, we check if the last refresh time is no more than half the candle ago.
+                    logger.debug(f"reuse watch result for {pair}, {timeframe}, {last_refresh_time}")
+
+                    return self._exchange_ws.get_ohlcv(pair, timeframe, candle_type, candle_date)
+                logger.info(
+                    f"Failed to reuse watch {pair}, {timeframe}, {candle_date < last_refresh_time},"
+                    f" {candle_date}, {last_refresh_time}, "
+                    f"{format_ms_time(candle_date)}, {format_ms_time(last_refresh_time)} "
+                )
+
             # Check if 1 call can get us updated candles without hole in the data.
             if min_date < self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0):
                 # Cache can be used - do one-off call.
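
A worked example of the freshness check above for a 5m timeframe (all values in milliseconds): the cached watch result is only reused if its last candle is at least the previous candle and the last refresh happened at or after the midpoint of that previous candle.

candle_date = 1_700_000_400_000           # open of the current 5m candle
prev_candle_date = candle_date - 300_000  # one 5m candle earlier
half_candle = int(candle_date - (candle_date - prev_candle_date) * 0.5)
# half_candle == candle_date - 150_000, i.e. the midpoint of the previous candle
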
@@ -2263,7 +2410,7 @@ class Exchange:
 
     def _build_ohlcv_dl_jobs(
         self, pair_list: ListPairsWithTimeframes, since_ms: Optional[int], cache: bool
-    ) -> Tuple[List[Coroutine], List[Tuple[str, str, CandleType]]]:
+    ) -> Tuple[List[Coroutine], List[PairWithTimeframe]]:
         """
         Build Coroutines to execute as part of refresh_latest_ohlcv
         """
@@ -2357,17 +2504,17 @@ class Exchange:
         logger.debug("Refreshing candle (OHLCV) data for %d pairs", len(pair_list))
 
         # Gather coroutines to run
-        input_coroutines, cached_pairs = self._build_ohlcv_dl_jobs(pair_list, since_ms, cache)
+        ohlcv_dl_jobs, cached_pairs = self._build_ohlcv_dl_jobs(pair_list, since_ms, cache)
 
         results_df = {}
         # Chunk requests into batches of 100 to avoid overwhelming ccxt Throttling
-        for input_coro in chunks(input_coroutines, 100):
+        for dl_jobs_batch in chunks(ohlcv_dl_jobs, 100):
 
-            async def gather_stuff(coro):
+            async def gather_coroutines(coro):
                 return await asyncio.gather(*coro, return_exceptions=True)
 
             with self._loop_lock:
-                results = self.loop.run_until_complete(gather_stuff(input_coro))
+                results = self.loop.run_until_complete(gather_coroutines(dl_jobs_batch))
 
             for res in results:
                 if isinstance(res, Exception):
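The refresh loop above batches the download coroutines into chunks of 100 and awaits each chunk with asyncio.gather(return_exceptions=True), so a single failing pair surfaces as an exception object instead of aborting the whole refresh. A self-contained sketch of that pattern follows; the chunks() helper here is a local stand-in, not freqtrade.misc.chunks.

import asyncio
from typing import Coroutine, Iterator, List


def chunks(items: List[Coroutine], n: int) -> Iterator[List[Coroutine]]:
    # Local stand-in for a chunking helper: yield successive n-sized batches.
    for i in range(0, len(items), n):
        yield items[i : i + n]


async def download(pair: str) -> str:
    # Placeholder for an exchange call that may raise.
    if pair == "BAD/USDT":
        raise ValueError("simulated exchange error")
    await asyncio.sleep(0)
    return f"{pair}: ok"


async def gather_batch(coros: List[Coroutine]):
    # return_exceptions=True keeps one failure from cancelling the whole batch.
    return await asyncio.gather(*coros, return_exceptions=True)


loop = asyncio.new_event_loop()
try:
    jobs = [download(p) for p in ["BTC/USDT", "BAD/USDT", "ETH/USDT"]]
    for batch in chunks(jobs, 100):
        results = loop.run_until_complete(gather_batch(batch))
        for res in results:
            if isinstance(res, Exception):
                print(f"Async code raised an exception: {res!r}")
                continue
            print(res)
finally:
    loop.close()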
@@ -2495,12 +2642,13 @@ class Exchange:
         except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
             raise TemporaryError(
                 f"Could not fetch historical candle (OHLCV) data "
-                f"for pair {pair} due to {e.__class__.__name__}. "
+                f"for {pair}, {timeframe}, {candle_type} due to {e.__class__.__name__}. "
                 f"Message: {e}"
             ) from e
         except ccxt.BaseError as e:
             raise OperationalException(
-                f"Could not fetch historical candle (OHLCV) data for pair {pair}. Message: {e}"
+                f"Could not fetch historical candle (OHLCV) data for "
+                f"{pair}, {timeframe}, {candle_type}. Message: {e}"
             ) from e
 
     async def _fetch_funding_rate_history(
@@ -2519,6 +2667,194 @@ class Exchange:
         data = [[x["timestamp"], x["fundingRate"], 0, 0, 0, 0] for x in data]
         return data
 
+    # fetch Trade data stuff
+
+    def needed_candle_for_trades_ms(self, timeframe: str, candle_type: CandleType) -> int:
+        candle_limit = self.ohlcv_candle_limit(timeframe, candle_type)
+        tf_s = timeframe_to_seconds(timeframe)
+        candles_fetched = candle_limit * self.required_candle_call_count
+
+        max_candles = self._config["orderflow"]["max_candles"]
+
+        required_candles = min(max_candles, candles_fetched)
+        move_to = (
+            tf_s * candle_limit * required_candles
+            if required_candles > candle_limit
+            else (max_candles + 1) * tf_s
+        )
+
+        now = timeframe_to_next_date(timeframe)
+        return int((now - timedelta(seconds=move_to)).timestamp() * 1000)
+
+    def _process_trades_df(
+        self,
+        pair: str,
+        timeframe: str,
+        c_type: CandleType,
+        ticks: List[List],
+        cache: bool,
+        first_required_candle_date: int,
+    ) -> DataFrame:
+        # keeping parsed dataframe in cache
+        trades_df = trades_list_to_df(ticks, True)
+
+        if cache:
+            if (pair, timeframe, c_type) in self._trades:
+                old = self._trades[(pair, timeframe, c_type)]
+                # Reassign so we return the updated, combined df
+                combined_df = concat([old, trades_df], axis=0)
+                logger.debug(f"Clean duplicated ticks from Trades data {pair}")
+                trades_df = DataFrame(
+                    trades_df_remove_duplicates(combined_df), columns=combined_df.columns
+                )
+                # Age out old candles
+                trades_df = trades_df[first_required_candle_date < trades_df["timestamp"]]
+                trades_df = trades_df.reset_index(drop=True)
+            self._trades[(pair, timeframe, c_type)] = trades_df
+        return trades_df
+
+    async def _build_trades_dl_jobs(
+        self, pairwt: PairWithTimeframe, data_handler, cache: bool
+    ) -> Tuple[PairWithTimeframe, Optional[DataFrame]]:
+        """
+        Build coroutines to refresh trades for (they're then called through async.gather)
+        """
+        pair, timeframe, candle_type = pairwt
+        since_ms = None
+        new_ticks: List = []
+        all_stored_ticks_df = DataFrame(columns=DEFAULT_TRADES_COLUMNS + ["date"])
+        first_candle_ms = self.needed_candle_for_trades_ms(timeframe, candle_type)
+        # refresh, if
+        # a. not in _trades
+        # b. no cache used
+        # c. need new data
+        is_in_cache = (pair, timeframe, candle_type) in self._trades
+        if (
+            not is_in_cache
+            or not cache
+            or self._now_is_time_to_refresh_trades(pair, timeframe, candle_type)
+        ):
+            logger.debug(f"Refreshing TRADES data for {pair}")
+            # fetch trades since latest _trades and
+            # store together with existing trades
+            try:
+                until = None
+                from_id = None
+                if is_in_cache:
+                    from_id = self._trades[(pair, timeframe, candle_type)].iloc[-1]["id"]
+                    until = dt_ts()  # now
+
+                else:
+                    until = int(timeframe_to_prev_date(timeframe).timestamp()) * 1000
+                    all_stored_ticks_df = data_handler.trades_load(
+                        f"{pair}-cached", self.trading_mode
+                    )
+
+                    if not all_stored_ticks_df.empty:
+                        if (
+                            all_stored_ticks_df.iloc[-1]["timestamp"] > first_candle_ms
+                            and all_stored_ticks_df.iloc[0]["timestamp"] <= first_candle_ms
+                        ):
+                            # Use cache and populate further
+                            last_cached_ms = all_stored_ticks_df.iloc[-1]["timestamp"]
+                            from_id = all_stored_ticks_df.iloc[-1]["id"]
+                            # only use cached if it's closer than first_candle_ms
+                            since_ms = (
+                                last_cached_ms
+                                if last_cached_ms > first_candle_ms
+                                else first_candle_ms
+                            )
+                        else:
+                            # Skip cache, it's too old
+                            all_stored_ticks_df = DataFrame(
+                                columns=DEFAULT_TRADES_COLUMNS + ["date"]
+                            )
+
+                # from_id overrules with exchange set to id paginate
+                [_, new_ticks] = await self._async_get_trade_history(
+                    pair,
+                    since=since_ms if since_ms else first_candle_ms,
+                    until=until,
+                    from_id=from_id,
+                )
+
+            except Exception:
+                logger.exception(f"Refreshing TRADES data for {pair} failed")
+                return pairwt, None
+
+            if new_ticks:
+                all_stored_ticks_list = all_stored_ticks_df[DEFAULT_TRADES_COLUMNS].values.tolist()
+                all_stored_ticks_list.extend(new_ticks)
+                trades_df = self._process_trades_df(
+                    pair,
+                    timeframe,
+                    candle_type,
+                    all_stored_ticks_list,
+                    cache,
+                    first_required_candle_date=first_candle_ms,
+                )
+                data_handler.trades_store(
+                    f"{pair}-cached", trades_df[DEFAULT_TRADES_COLUMNS], self.trading_mode
+                )
+                return pairwt, trades_df
+            else:
+                logger.error(f"No new ticks for {pair}")
+        return pairwt, None
+
+    def refresh_latest_trades(
+        self,
+        pair_list: ListPairsWithTimeframes,
+        *,
+        cache: bool = True,
+    ) -> Dict[PairWithTimeframe, DataFrame]:
+        """
+        Refresh in-memory TRADES asynchronously and set `_trades` with the result
+        Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
+        Only used in the dataprovider.refresh() method.
+        :param pair_list: List of 3 element tuples containing (pair, timeframe, candle_type)
+        :param cache: Assign result to _trades. Useful for one-off downloads like for pairlists
+        :return: Dict of [{(pair, timeframe): Dataframe}]
+        """
+        from freqtrade.data.history import get_datahandler
+
+        data_handler = get_datahandler(
+            self._config["datadir"], data_format=self._config["dataformat_trades"]
+        )
+        logger.debug("Refreshing TRADES data for %d pairs", len(pair_list))
+        results_df = {}
+        trades_dl_jobs = []
+        for pair_wt in set(pair_list):
+            trades_dl_jobs.append(self._build_trades_dl_jobs(pair_wt, data_handler, cache))
+
+        async def gather_coroutines(coro):
+            return await asyncio.gather(*coro, return_exceptions=True)
+
+        for dl_job_chunk in chunks(trades_dl_jobs, 100):
+            with self._loop_lock:
+                results = self.loop.run_until_complete(gather_coroutines(dl_job_chunk))
+
+            for res in results:
+                if isinstance(res, Exception):
+                    logger.warning(f"Async code raised an exception: {repr(res)}")
+                    continue
+                pairwt, trades_df = res
+                if trades_df is not None:
+                    results_df[pairwt] = trades_df
+
+        return results_df
+
+    def _now_is_time_to_refresh_trades(
+        self, pair: str, timeframe: str, candle_type: CandleType
+    ) -> bool:  # Timeframe in seconds
+        trades = self.trades((pair, timeframe, candle_type), False)
+        pair_last_refreshed = int(trades.iloc[-1]["timestamp"])
+        full_candle = (
+            int(timeframe_to_next_date(timeframe, dt_from_ts(pair_last_refreshed)).timestamp())
+            * 1000
+        )
+        now = dt_ts()
+        return full_candle <= now
+
     # Fetch historic trades
 
     @retrier_async
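needed_candle_for_trades_ms() above works out how far back trades must be downloaded to cover the configured orderflow window. A worked example with assumed numbers (5m timeframe, candle limit 1000, a single candle call, max_candles 1500); only the arithmetic from the hunk is reproduced here.

from datetime import datetime, timedelta, timezone

# Assumed inputs (illustrative, not from a real config):
tf_s = 300                       # 5m timeframe in seconds
candle_limit = 1000              # ohlcv_candle_limit for the exchange
required_candle_call_count = 1
max_candles = 1500               # config["orderflow"]["max_candles"]

candles_fetched = candle_limit * required_candle_call_count   # 1000
required_candles = min(max_candles, candles_fetched)          # 1000
move_to = (
    tf_s * candle_limit * required_candles
    if required_candles > candle_limit
    else (max_candles + 1) * tf_s                              # 1501 * 300 = 450_300 s
)

now = datetime.now(timezone.utc)   # stand-in for timeframe_to_next_date(timeframe)
first_trade_ms = int((now - timedelta(seconds=move_to)).timestamp() * 1000)
print(first_trade_ms)  # roughly 5.2 days before "now" for these numbers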
@@ -2533,10 +2869,11 @@ class Exchange:
         returns: List of dicts containing trades, the next iteration value (new "since" or trade_id)
         """
         try:
+            trades_limit = self._max_trades_limit
             # fetch trades asynchronously
             if params:
                 logger.debug("Fetching trades for pair %s, params: %s ", pair, params)
-                trades = await self._api_async.fetch_trades(pair, params=params, limit=1000)
+                trades = await self._api_async.fetch_trades(pair, params=params, limit=trades_limit)
             else:
                 logger.debug(
                     "Fetching trades for pair %s, since %s %s...",
@@ -2544,7 +2881,7 @@ class Exchange:
                     since,
                     "(" + dt_from_ts(since).isoformat() + ") " if since is not None else "",
                 )
-                trades = await self._api_async.fetch_trades(pair, since=since, limit=1000)
+                trades = await self._api_async.fetch_trades(pair, since=since, limit=trades_limit)
             trades = self._trades_contracts_to_amount(trades)
             pagination_value = self._get_trade_pagination_next_value(trades)
             return trades_dict_to_list(trades), pagination_value
@@ -3339,13 +3676,12 @@ class Exchange:
     def get_maintenance_ratio_and_amt(
         self,
         pair: str,
-        nominal_value: float,
+        notional_value: float,
     ) -> Tuple[float, Optional[float]]:
         """
         Important: Must be fetching data from cached values as this is used by backtesting!
         :param pair: Market symbol
-        :param nominal_value: The total trade amount in quote currency including leverage
-        maintenance amount only on Binance
+        :param notional_value: The total trade amount in quote currency
         :return: (maintenance margin ratio, maintenance amount)
         """
 
@@ -3362,7 +3698,7 @@ class Exchange:
             pair_tiers = self._leverage_tiers[pair]
 
             for tier in reversed(pair_tiers):
-                if nominal_value >= tier["minNotional"]:
+                if notional_value >= tier["minNotional"]:
                     return (tier["maintenanceMarginRate"], tier["maintAmt"])
 
             raise ExchangeError("nominal value can not be lower than 0")
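get_maintenance_ratio_and_amt() walks the cached leverage tiers from the largest minNotional downwards and returns the first tier whose floor the notional value reaches. A small standalone sketch of that selection with made-up tier data in the same shape:

from typing import Optional, Tuple

# Made-up tiers, ordered from smallest to largest minNotional as in the cache.
pair_tiers = [
    {"minNotional": 0, "maintenanceMarginRate": 0.01, "maintAmt": 0.0},
    {"minNotional": 50_000, "maintenanceMarginRate": 0.02, "maintAmt": 500.0},
    {"minNotional": 250_000, "maintenanceMarginRate": 0.05, "maintAmt": 8_000.0},
]


def maintenance_for(notional_value: float) -> Tuple[float, Optional[float]]:
    # Highest tier first, so the first match is the tightest applicable tier.
    for tier in reversed(pair_tiers):
        if notional_value >= tier["minNotional"]:
            return (tier["maintenanceMarginRate"], tier["maintAmt"])
    raise ValueError("notional value can not be lower than 0")


print(maintenance_for(75_000))   # (0.02, 500.0), i.e. the second tier applies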
@@ -3370,4 +3706,3 @@ class Exchange:
             # describes the min amt for a tier, and the lowest tier will always go down to 0
         else:
             raise ExchangeError(f"Cannot get maintenance ratio using {self.name}")
-            raise ExchangeError(f"Cannot get maintenance ratio using {self.name}")

98  freqtrade/exchange/exchange_types.py  Normal file
@@ -0,0 +1,98 @@
+from typing import Dict, List, Optional, Tuple, TypedDict
+
+from freqtrade.enums import CandleType
+
+
+class FtHas(TypedDict, total=False):
+    order_time_in_force: List[str]
+    exchange_has_overrides: Dict[str, bool]
+    marketOrderRequiresPrice: bool
+
+    # Stoploss on exchange
+    stoploss_on_exchange: bool
+    stop_price_param: str
+    stop_price_prop: str
+    stop_price_type_field: str
+    stop_price_type_value_mapping: Dict
+    stoploss_order_types: Dict[str, str]
+    # ohlcv
+    ohlcv_params: Dict
+    ohlcv_candle_limit: int
+    ohlcv_has_history: bool
+    ohlcv_partial_candle: bool
+    ohlcv_require_since: bool
+    ohlcv_volume_currency: str
+    ohlcv_candle_limit_per_timeframe: Dict[str, int]
+    # Tickers
+    tickers_have_quoteVolume: bool
+    tickers_have_percentage: bool
+    tickers_have_bid_ask: bool
+    tickers_have_price: bool
+    # Trades
+    trades_limit: int
+    trades_pagination: str
+    trades_pagination_arg: str
+    trades_has_history: bool
+    trades_pagination_overlap: bool
+    # Orderbook
+    l2_limit_range: Optional[List[int]]
+    l2_limit_range_required: bool
+    # Futures
+    ccxt_futures_name: str  # usually swap
+    mark_ohlcv_price: str
+    mark_ohlcv_timeframe: str
+    funding_fee_timeframe: str
+    floor_leverage: bool
+    needs_trading_fees: bool
+    order_props_in_contracts: List[str]
+
+    # Websocket control
+    ws_enabled: bool
+
+
+class Ticker(TypedDict):
+    symbol: str
+    ask: Optional[float]
+    askVolume: Optional[float]
+    bid: Optional[float]
+    bidVolume: Optional[float]
+    last: Optional[float]
+    quoteVolume: Optional[float]
+    baseVolume: Optional[float]
+    percentage: Optional[float]
+    # Several more - only listing required.
+
+
+Tickers = Dict[str, Ticker]
+
+
+class OrderBook(TypedDict):
+    symbol: str
+    bids: List[Tuple[float, float]]
+    asks: List[Tuple[float, float]]
+    timestamp: Optional[int]
+    datetime: Optional[str]
+    nonce: Optional[int]
+
+
+class CcxtBalance(TypedDict):
+    free: float
+    used: float
+    total: float
+
+
+CcxtBalances = Dict[str, CcxtBalance]
+
+
+class CcxtPosition(TypedDict):
+    symbol: str
+    side: str
+    contracts: float
+    leverage: float
+    collateral: Optional[float]
+    initialMargin: Optional[float]
+    liquidationPrice: Optional[float]
+
+
+# pair, timeframe, candleType, OHLCV, drop last?,
+OHLCVResponse = Tuple[str, str, CandleType, List, bool]
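Because FtHas is declared with total=False, an exchange subclass only lists the capability flags it overrides, while a type checker still validates key names and value types, which a plain Dict annotation cannot do. A trimmed-down sketch of that effect (only a few keys copied here):

from typing import Dict, List, TypedDict


class FtHas(TypedDict, total=False):
    # Trimmed-down copy of the keys used in this sketch.
    ohlcv_candle_limit: int
    order_time_in_force: List[str]
    exchange_has_overrides: Dict[str, bool]


# total=False: only the overridden flags need to be present.
_ft_has: FtHas = {
    "ohlcv_candle_limit": 1000,
    "order_time_in_force": ["GTC", "IOC"],
}

# A typo such as "ohlcv_candle_limt" or a wrong value type would be flagged
# by mypy, which a plain Dict annotation could not catch.
print(_ft_has.get("ohlcv_candle_limit", 500))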
freqtrade/exchange/exchange_utils.py
@@ -2,6 +2,7 @@
 Exchange support utils
 """
 
+import inspect
 from datetime import datetime, timedelta, timezone
 from math import ceil, floor
 from typing import Any, Dict, List, Optional, Tuple
@@ -25,7 +26,7 @@ from freqtrade.exchange.common import (
     SUPPORTED_EXCHANGES,
 )
 from freqtrade.exchange.exchange_utils_timeframe import timeframe_to_minutes, timeframe_to_prev_date
-from freqtrade.types import ValidExchangesType
+from freqtrade.ft_types import ValidExchangesType
 from freqtrade.util import FtPrecise
 
 
@@ -53,16 +54,21 @@ def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[str]:
     return [x for x in exchanges if validate_exchange(x)[0]]
 
 
-def validate_exchange(exchange: str) -> Tuple[bool, str]:
+def validate_exchange(exchange: str) -> Tuple[bool, str, Optional[ccxt.Exchange]]:
     """
-    returns: can_use, reason
+    returns: can_use, reason, exchange_object
     with Reason including both missing and missing_opt
     """
-    ex_mod = getattr(ccxt, exchange.lower())()
+    try:
+        ex_mod = getattr(ccxt.pro, exchange.lower())()
+    except AttributeError:
+        ex_mod = getattr(ccxt.async_support, exchange.lower())()
+
+    if not ex_mod or not ex_mod.has:
+        return False, "", None
+
     result = True
     reason = ""
-    if not ex_mod or not ex_mod.has:
-        return False, ""
     missing = [
         k
         for k, v in EXCHANGE_HAS_REQUIRED.items()
@@ -81,18 +87,24 @@ def validate_exchange(exchange: str) -> Tuple[bool, str]:
     if missing_opt:
         reason += f"{'. ' if reason else ''}missing opt: {', '.join(missing_opt)}. "
 
-    return result, reason
+    return result, reason, ex_mod
 
 
 def _build_exchange_list_entry(
     exchange_name: str, exchangeClasses: Dict[str, Any]
 ) -> ValidExchangesType:
-    valid, comment = validate_exchange(exchange_name)
+    valid, comment, ex_mod = validate_exchange(exchange_name)
     result: ValidExchangesType = {
-        "name": exchange_name,
+        "name": getattr(ex_mod, "name", exchange_name),
+        "classname": exchange_name,
         "valid": valid,
         "supported": exchange_name.lower() in SUPPORTED_EXCHANGES,
         "comment": comment,
+        "dex": getattr(ex_mod, "dex", False),
+        "is_alias": getattr(ex_mod, "alias", False),
+        "alias_for": inspect.getmro(ex_mod.__class__)[1]().id
+        if getattr(ex_mod, "alias", False)
+        else None,
         "trade_modes": [{"trading_mode": "spot", "margin_mode": ""}],
     }
     if resolved := exchangeClasses.get(exchange_name.lower()):
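validate_exchange() now also returns the instantiated ccxt object, which _build_exchange_list_entry() uses for the display name and the new dex/alias fields. A hedged usage sketch of unpacking that triple; the import path assumes the function lives in freqtrade.exchange.exchange_utils, as in current freqtrade layouts.

# Sketch only: assumes freqtrade is installed and that validate_exchange is
# importable from freqtrade.exchange.exchange_utils (the module changed above).
from freqtrade.exchange.exchange_utils import validate_exchange

valid, reason, ex_mod = validate_exchange("binance")
print(f"valid={valid}, reason={reason!r}")
if ex_mod is not None:
    # The ccxt object exposes display name and dex/alias metadata used above.
    print(ex_mod.name, getattr(ex_mod, "dex", False), getattr(ex_mod, "alias", False))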
195  freqtrade/exchange/exchange_ws.py  Normal file
@@ -0,0 +1,195 @@
+import asyncio
+import logging
+import time
+from copy import deepcopy
+from functools import partial
+from threading import Thread
+from typing import Dict, Set
+
+import ccxt
+
+from freqtrade.constants import Config, PairWithTimeframe
+from freqtrade.enums.candletype import CandleType
+from freqtrade.exchange.exchange import timeframe_to_seconds
+from freqtrade.exchange.exchange_types import OHLCVResponse
+from freqtrade.util import dt_ts, format_ms_time
+
+
+logger = logging.getLogger(__name__)
+
+
+class ExchangeWS:
+    def __init__(self, config: Config, ccxt_object: ccxt.Exchange) -> None:
+        self.config = config
+        self.ccxt_object = ccxt_object
+        self._background_tasks: Set[asyncio.Task] = set()
+
+        self._klines_watching: Set[PairWithTimeframe] = set()
+        self._klines_scheduled: Set[PairWithTimeframe] = set()
+        self.klines_last_refresh: Dict[PairWithTimeframe, float] = {}
+        self.klines_last_request: Dict[PairWithTimeframe, float] = {}
+        self._thread = Thread(name="ccxt_ws", target=self._start_forever)
+        self._thread.start()
+        self.__cleanup_called = False
+
+    def _start_forever(self) -> None:
+        self._loop = asyncio.new_event_loop()
+        try:
+            self._loop.run_forever()
+        finally:
+            if self._loop.is_running():
+                self._loop.stop()
+
+    def cleanup(self) -> None:
+        logger.debug("Cleanup called - stopping")
+        self._klines_watching.clear()
+        for task in self._background_tasks:
+            task.cancel()
+        if hasattr(self, "_loop") and not self._loop.is_closed():
+            self.reset_connections()
+
+            self._loop.call_soon_threadsafe(self._loop.stop)
+            time.sleep(0.1)
+            if not self._loop.is_closed():
+                self._loop.close()
+
+        self._thread.join()
+        logger.debug("Stopped")
+
+    def reset_connections(self) -> None:
+        """
+        Reset all connections - avoids "connection-reset" errors that happen after ~9 days
+        """
+        if hasattr(self, "_loop") and not self._loop.is_closed():
+            logger.info("Resetting WS connections.")
+            asyncio.run_coroutine_threadsafe(self._cleanup_async(), loop=self._loop)
+            while not self.__cleanup_called:
+                time.sleep(0.1)
+        self.__cleanup_called = False
+
+    async def _cleanup_async(self) -> None:
+        try:
+            await self.ccxt_object.close()
+            # Clear the cache.
+            # Not doing this will cause problems on startup with dynamic pairlists
+            self.ccxt_object.ohlcvs.clear()
+        except Exception:
+            logger.exception("Exception in _cleanup_async")
+        finally:
+            self.__cleanup_called = True
+
+    def _pop_history(self, paircomb: PairWithTimeframe) -> None:
+        """
+        Remove history for a pair/timeframe combination from ccxt cache
+        """
+        self.ccxt_object.ohlcvs.get(paircomb[0], {}).pop(paircomb[1], None)
+
+    def cleanup_expired(self) -> None:
+        """
+        Remove pairs from watchlist if they've not been requested within
+        the last timeframe (+ offset)
+        """
+        changed = False
+        for p in list(self._klines_watching):
+            _, timeframe, _ = p
+            timeframe_s = timeframe_to_seconds(timeframe)
+            last_refresh = self.klines_last_request.get(p, 0)
+            if last_refresh > 0 and (dt_ts() - last_refresh) > ((timeframe_s + 20) * 1000):
+                logger.info(f"Removing {p} from websocket watchlist.")
+                self._klines_watching.discard(p)
+                # Pop history to avoid getting stale data
+                self._pop_history(p)
+                changed = True
+        if changed:
+            logger.info(f"Removal done: new watch list ({len(self._klines_watching)})")
+
+    async def _schedule_while_true(self) -> None:
+        # For the ones we should be watching
+        for p in self._klines_watching:
+            # Check if they're already scheduled
+            if p not in self._klines_scheduled:
+                self._klines_scheduled.add(p)
+                pair, timeframe, candle_type = p
+                task = asyncio.create_task(
+                    self._continuously_async_watch_ohlcv(pair, timeframe, candle_type)
+                )
+                self._background_tasks.add(task)
+                task.add_done_callback(
+                    partial(
+                        self._continuous_stopped,
+                        pair=pair,
+                        timeframe=timeframe,
+                        candle_type=candle_type,
+                    )
+                )
+
+    def _continuous_stopped(
+        self, task: asyncio.Task, pair: str, timeframe: str, candle_type: CandleType
+    ):
+        self._background_tasks.discard(task)
+        result = "done"
+        if task.cancelled():
+            result = "cancelled"
+        else:
+            if (result1 := task.result()) is not None:
+                result = str(result1)
+
+        logger.info(f"{pair}, {timeframe}, {candle_type} - Task finished - {result}")
+        self._klines_scheduled.discard((pair, timeframe, candle_type))
+        self._pop_history((pair, timeframe, candle_type))
+
+    async def _continuously_async_watch_ohlcv(
+        self, pair: str, timeframe: str, candle_type: CandleType
+    ) -> None:
+        try:
+            while (pair, timeframe, candle_type) in self._klines_watching:
+                start = dt_ts()
+                data = await self.ccxt_object.watch_ohlcv(pair, timeframe)
+                self.klines_last_refresh[(pair, timeframe, candle_type)] = dt_ts()
+                logger.debug(
+                    f"watch done {pair}, {timeframe}, data {len(data)} "
+                    f"in {dt_ts() - start:.2f}s"
+                )
+        except ccxt.ExchangeClosedByUser:
+            logger.debug("Exchange connection closed by user")
+        except ccxt.BaseError:
+            logger.exception(f"Exception in continuously_async_watch_ohlcv for {pair}, {timeframe}")
+        finally:
+            self._klines_watching.discard((pair, timeframe, candle_type))
+
+    def schedule_ohlcv(self, pair: str, timeframe: str, candle_type: CandleType) -> None:
+        """
+        Schedule a pair/timeframe combination to be watched
+        """
+        self._klines_watching.add((pair, timeframe, candle_type))
+        self.klines_last_request[(pair, timeframe, candle_type)] = dt_ts()
+        # asyncio.run_coroutine_threadsafe(self.schedule_schedule(), loop=self._loop)
+        asyncio.run_coroutine_threadsafe(self._schedule_while_true(), loop=self._loop)
+        self.cleanup_expired()
+
+    async def get_ohlcv(
+        self,
+        pair: str,
+        timeframe: str,
+        candle_type: CandleType,
+        candle_date: int,
+    ) -> OHLCVResponse:
+        """
+        Returns cached klines from ccxt's "watch" cache.
+        :param candle_date: timestamp of the end-time of the candle.
+        """
+        # Deepcopy the response - as it might be modified in the background as new messages arrive
+        candles = deepcopy(self.ccxt_object.ohlcvs.get(pair, {}).get(timeframe))
+        refresh_date = self.klines_last_refresh[(pair, timeframe, candle_type)]
+        drop_hint = False
+        if refresh_date > candle_date:
+            # Refreshed after candle was complete.
+            # logger.info(f"{candles[-1][0]} >= {candle_date}")
+            drop_hint = candles[-1][0] >= candle_date
+        logger.debug(
+            f"watch result for {pair}, {timeframe} with length {len(candles)}, "
+            f"{format_ms_time(candles[-1][0])}, "
+            f"lref={format_ms_time(refresh_date)}, "
+            f"candle_date={format_ms_time(candle_date)}, {drop_hint=}"
+        )
+        return pair, timeframe, candle_type, candles, drop_hint
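exchange_ws.py runs its own asyncio event loop on a dedicated thread and feeds it work via run_coroutine_threadsafe, so the websocket watch tasks never block the bot's synchronous loop. A minimal stand-in for that thread-plus-loop plumbing follows; the sleeping coroutine is a placeholder for ccxt's watch_ohlcv(), not freqtrade code.

import asyncio
import time
from threading import Thread


class BackgroundLoop:
    """Minimal stand-in for the thread/loop plumbing used by ExchangeWS."""

    def __init__(self) -> None:
        self._loop = asyncio.new_event_loop()
        self._thread = Thread(name="bg_loop", target=self._start_forever, daemon=True)
        self._thread.start()

    def _start_forever(self) -> None:
        asyncio.set_event_loop(self._loop)
        self._loop.run_forever()

    def schedule(self, coro) -> None:
        # Called from the main thread: hand the coroutine to the background loop.
        asyncio.run_coroutine_threadsafe(coro, loop=self._loop)

    def stop(self) -> None:
        self._loop.call_soon_threadsafe(self._loop.stop)
        self._thread.join()


async def fake_watch(pair: str) -> None:
    # Placeholder for ccxt's watch_ohlcv(): just log a few "updates".
    for i in range(3):
        await asyncio.sleep(0.1)
        print(f"{pair}: update {i}")


bg = BackgroundLoop()
bg.schedule(fake_watch("BTC/USDT"))
time.sleep(0.5)  # let the background task run
bg.stop()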
freqtrade/exchange/gate.py
@@ -7,6 +7,7 @@ from typing import Any, Dict, List, Optional, Tuple
 from freqtrade.constants import BuySell
 from freqtrade.enums import MarginMode, PriceType, TradingMode
 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas
 from freqtrade.misc import safe_value_fallback2
 
 
@@ -23,7 +24,7 @@ class Gate(Exchange):
     may still not work as expected.
     """
 
-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "ohlcv_candle_limit": 1000,
         "order_time_in_force": ["GTC", "IOC"],
         "stoploss_on_exchange": True,
@@ -34,7 +35,7 @@ class Gate(Exchange):
         "trades_has_history": False,  # Endpoint would support this - but ccxt doesn't.
     }
 
-    _ft_has_futures: Dict = {
+    _ft_has_futures: FtHas = {
         "needs_trading_fees": True,
         "marketOrderRequiresPrice": False,
         "stop_price_type_field": "price_type",
freqtrade/exchange/hitbtc.py
@@ -1,7 +1,7 @@
 import logging
-from typing import Dict
 
 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas
 
 
 logger = logging.getLogger(__name__)
@@ -17,6 +17,6 @@ class Hitbtc(Exchange):
     may still not work as expected.
     """
 
-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "ohlcv_candle_limit": 1000,
     }
freqtrade/exchange/htx.py
@@ -5,6 +5,7 @@ from typing import Dict
 
 from freqtrade.constants import BuySell
 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas
 
 
 logger = logging.getLogger(__name__)
@@ -16,7 +17,7 @@ class Htx(Exchange):
     with this exchange.
     """
 
-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "stoploss_on_exchange": True,
         "stop_price_param": "stopPrice",
         "stop_price_prop": "stopPrice",
45  freqtrade/exchange/hyperliquid.py  Normal file
@@ -0,0 +1,45 @@
+"""Hyperliquid exchange subclass"""
+
+import logging
+from typing import Dict
+
+from ccxt import SIGNIFICANT_DIGITS
+
+from freqtrade.enums import TradingMode
+from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas
+
+
+logger = logging.getLogger(__name__)
+
+
+class Hyperliquid(Exchange):
+    """Hyperliquid exchange class.
+    Contains adjustments needed for Freqtrade to work with this exchange.
+    """
+
+    _ft_has: FtHas = {
+        # Only the most recent 5000 candles are available according to the
+        # exchange's API documentation.
+        "ohlcv_has_history": False,
+        "ohlcv_candle_limit": 5000,
+        "trades_has_history": False,  # Trades endpoint doesn't seem available.
+        "exchange_has_overrides": {"fetchTrades": False},
+    }
+
+    @property
+    def _ccxt_config(self) -> Dict:
+        # Parameters to add directly to ccxt sync/async initialization.
+        # ccxt defaults to swap mode.
+        config = {}
+        if self.trading_mode == TradingMode.SPOT:
+            config.update({"options": {"defaultType": "spot"}})
+        config.update(super()._ccxt_config)
+        return config
+
+    @property
+    def precision_mode_price(self) -> int:
+        """
+        Override the default precision mode for price.
+        """
+        return SIGNIFICANT_DIGITS
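precision_mode_price returning ccxt's SIGNIFICANT_DIGITS constant means Hyperliquid prices are rounded to a fixed number of significant figures rather than a fixed number of decimal places. A plain-Python illustration of the difference (not ccxt's own rounding helpers):

import math
from decimal import Decimal


def round_decimal_places(value: float, places: int) -> float:
    # DECIMAL_PLACES-style rounding: fixed digits after the decimal point.
    return float(round(Decimal(str(value)), places))


def round_significant_digits(value: float, digits: int) -> float:
    # SIGNIFICANT_DIGITS-style rounding: fixed number of leading significant figures.
    if value == 0:
        return 0.0
    exponent = math.floor(math.log10(abs(value)))
    return round(value, digits - 1 - exponent)


price = 0.00012345678
print(round_decimal_places(price, 5))       # 0.00012    - loses most of the precision
print(round_significant_digits(price, 5))   # 0.00012346 - keeps 5 significant figures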
freqtrade/exchange/idex.py
@@ -1,9 +1,9 @@
 """Idex exchange subclass"""
 
 import logging
-from typing import Dict
 
 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas
 
 
 logger = logging.getLogger(__name__)
@@ -15,6 +15,6 @@ class Idex(Exchange):
     with this exchange.
     """
 
-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "ohlcv_candle_limit": 1000,
     }
freqtrade/exchange/kraken.py
@@ -12,7 +12,7 @@ from freqtrade.enums import MarginMode, TradingMode
 from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
 from freqtrade.exchange import Exchange
 from freqtrade.exchange.common import retrier
-from freqtrade.exchange.types import Tickers
+from freqtrade.exchange.exchange_types import CcxtBalances, FtHas, Tickers
 
 
 logger = logging.getLogger(__name__)
@@ -20,7 +20,7 @@ logger = logging.getLogger(__name__)
 
 class Kraken(Exchange):
     _params: Dict = {"trading_agreement": "agree"}
-    _ft_has: Dict = {
+    _ft_has: FtHas = {
         "stoploss_on_exchange": True,
         "stop_price_param": "stopLossPrice",
         "stop_price_prop": "stopLossPrice",
@@ -57,7 +57,7 @@ class Kraken(Exchange):
         return super().get_tickers(symbols=symbols, cached=cached)
 
     @retrier
-    def get_balances(self) -> dict:
+    def get_balances(self) -> CcxtBalances:
         if self._config["dry_run"]:
             return {}
 
freqtrade/exchange/kucoin.py
@@ -5,6 +5,7 @@ from typing import Dict
 
 from freqtrade.constants import BuySell
 from freqtrade.exchange import Exchange
+from freqtrade.exchange.exchange_types import FtHas
 
 
 logger = logging.getLogger(__name__)
@@ -20,7 +21,7 @@ class Kucoin(Exchange):
     may still not work as expected.
     """
 
-    _ft_has: Dict = {
+    _ft_has: FtHas = {
        "stoploss_on_exchange": True,
        "stop_price_param": "stopPrice",
        "stop_price_prop": "stopPrice",
Some files were not shown because too many files have changed in this diff.