Merge branch 'develop' into Error-Skipping
@@ -1,22 +0,0 @@ (deleted file)
-FROM freqtradeorg/freqtrade:develop_freqairl
-
-USER root
-# Install dependencies
-COPY requirements-dev.txt /freqtrade/
-
-RUN apt-get update \
-    && apt-get -y install --no-install-recommends apt-utils dialog \
-    && apt-get -y install --no-install-recommends git sudo vim build-essential \
-    && apt-get clean \
-    && mkdir -p /home/ftuser/.vscode-server /home/ftuser/.vscode-server-insiders /home/ftuser/commandhistory \
-    && echo "export PROMPT_COMMAND='history -a'" >> /home/ftuser/.bashrc \
-    && echo "export HISTFILE=~/commandhistory/.bash_history" >> /home/ftuser/.bashrc \
-    && chown ftuser:ftuser -R /home/ftuser/.local/ \
-    && chown ftuser: -R /home/ftuser/
-
-USER ftuser
-
-RUN pip install --user autopep8 -r docs/requirements-docs.txt -r requirements-dev.txt --no-cache-dir
-
-# Empty the ENTRYPOINT to allow all commands
-ENTRYPOINT []
@@ -1,42 +1,44 @@
 {
     "name": "freqtrade Develop",
-    "build": {
-        "dockerfile": "Dockerfile",
-        "context": ".."
-    },
+    "image": "ghcr.io/freqtrade/freqtrade-devcontainer:latest",
     // Use 'forwardPorts' to make a list of ports inside the container available locally.
     "forwardPorts": [
         8080
     ],
     "mounts": [
         "source=freqtrade-bashhistory,target=/home/ftuser/commandhistory,type=volume"
     ],
     "workspaceMount": "source=${localWorkspaceFolder},target=/workspaces/freqtrade,type=bind,consistency=cached",
     // Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
     "remoteUser": "ftuser",

+    "onCreateCommand": "pip install --user -e .",
     "postCreateCommand": "freqtrade create-userdir --userdir user_data/",

     "workspaceFolder": "/workspaces/freqtrade",
     "customizations": {
-        "settings": {
-            "terminal.integrated.shell.linux": "/bin/bash",
-            "editor.insertSpaces": true,
-            "files.trimTrailingWhitespace": true,
-            "[markdown]": {
-                "files.trimTrailingWhitespace": false,
+        "vscode": {
+            "settings": {
+                "terminal.integrated.shell.linux": "/bin/bash",
+                "editor.insertSpaces": true,
+                "files.trimTrailingWhitespace": true,
+                "[markdown]": {
+                    "files.trimTrailingWhitespace": false
+                },
+                "python.pythonPath": "/usr/local/bin/python",
+                "[python]": {
+                    "editor.codeActionsOnSave": {
+                        "source.organizeImports": "explicit"
+                    },
+                    "editor.formatOnSave": true,
+                    "editor.defaultFormatter": "charliermarsh.ruff"
+                }
             },
-            "python.pythonPath": "/usr/local/bin/python",
-        },
-        // Add the IDs of extensions you want installed when the container is created.
-        "extensions": [
-            "ms-python.python",
-            "ms-python.vscode-pylance",
-            "davidanson.vscode-markdownlint",
-            "ms-azuretools.vscode-docker",
-            "vscode-icons-team.vscode-icons",
-        ],
+            // Add the IDs of extensions you want installed when the container is created.
+            "extensions": [
+                "ms-python.python",
+                "ms-python.vscode-pylance",
+                "charliermarsh.ruff",
+                "davidanson.vscode-markdownlint",
+                "ms-azuretools.vscode-docker",
+                "vscode-icons-team.vscode-icons",
+                "github.vscode-github-actions",
+            ]
+        }
     }
 }
.github/.devcontainer/Dockerfile (vendored, new file, 21 lines)

@@ -0,0 +1,21 @@
+FROM freqtradeorg/freqtrade:develop_freqairl
+
+USER root
+# Install dependencies
+COPY requirements-dev.txt /freqtrade/
+
+ARG USERNAME=ftuser
+
+RUN apt-get update \
+    && apt-get -y install --no-install-recommends apt-utils dialog git ssh vim build-essential zsh \
+    && apt-get clean \
+    && mkdir -p /home/${USERNAME}/.vscode-server /home/${USERNAME}/.vscode-server-insiders /home/${USERNAME}/commandhistory \
+    && chown ${USERNAME}:${USERNAME} -R /home/${USERNAME}/.local/ \
+    && chown ${USERNAME}: -R /home/${USERNAME}/
+
+USER ftuser
+
+RUN pip install --user autopep8 -r docs/requirements-docs.txt -r requirements-dev.txt --no-cache-dir
+
+# Empty the ENTRYPOINT to allow all commands
+ENTRYPOINT []
.github/.devcontainer/devcontainer.json (vendored, new file, 12 lines)

@@ -0,0 +1,12 @@
+{
+    "name": "freqtrade Dev container image builder",
+    "build": {
+        "dockerfile": "Dockerfile",
+        "context": "../../"
+    },
+    "features": {
+        "ghcr.io/devcontainers/features/common-utils:2": {
+        },
+        "ghcr.io/stuartleeks/dev-container-features/shell-history:0.0.3": {}
+    }
+}
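With the two new files above, CI can publish a ready-to-use devcontainer image, which the modified `.devcontainer/devcontainer.json` earlier in this diff consumes. A minimal sketch of pulling that image manually (image name copied from the `"image"` line above; the tag is published by the workflow added later in this diff):

```bash
# Pull the pre-built devcontainer image instead of building it locally.
docker pull ghcr.io/freqtrade/freqtrade-devcontainer:latest
```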
.github/dependabot.yml (vendored, 3 changes)

@@ -21,6 +21,9 @@ updates:
       pytest:
         patterns:
           - "pytest*"
+      mkdocs:
+        patterns:
+          - "mkdocs*"

   - package-ecosystem: "github-actions"
     directory: "/"
@@ -19,7 +19,7 @@ jobs:

       - uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"

       - name: Install ccxt
         run: pip install ccxt
.github/workflows/ci.yml (vendored, 38 changes)

@@ -111,7 +111,11 @@ jobs:

       - name: Run Ruff
         run: |
-          ruff check --output-format=github .
+          ruff check --output-format=github
+
+      - name: Run Ruff format check
+        run: |
+          ruff format --check

       - name: Mypy
         run: |
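The same two checks can be reproduced locally; a minimal sketch using the commands from the hunk above (`--output-format=github` only changes how violations are annotated in CI, so it is dropped here):

```bash
ruff check .           # lint step, mirrors "Run Ruff"
ruff format --check .  # formatting step, mirrors "Run Ruff format check"
```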
@@ -230,7 +234,11 @@ jobs:

       - name: Run Ruff
         run: |
-          ruff check --output-format=github .
+          ruff check --output-format=github
+
+      - name: Run Ruff format check
+        run: |
+          ruff format --check

       - name: Mypy
         run: |

@@ -300,7 +308,11 @@ jobs:

       - name: Run Ruff
         run: |
-          ruff check --output-format=github .
+          ruff check --output-format=github
+
+      - name: Run Ruff format check
+        run: |
+          ruff format --check

       - name: Mypy
         run: |

@@ -322,7 +334,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.12"

       - name: pre-commit dependencies
         run: |

@@ -336,7 +348,7 @@ jobs:

       - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.12"
       - uses: pre-commit/action@v3.0.1

   docs-check:

@@ -351,7 +363,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"

       - name: Documentation build
         run: |

@@ -377,7 +389,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"

       - name: Cache_dependencies
         uses: actions/cache@v4

@@ -459,7 +471,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"

       - name: Build distribution
         run: |

@@ -530,7 +542,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"

       - name: Extract branch name
         id: extract-branch

@@ -553,12 +565,12 @@ jobs:
           sudo systemctl restart docker
           docker version -f '{{.Server.Experimental}}'

+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+
       - name: Set up Docker Buildx
         id: buildx
-        uses: crazy-max/ghaction-docker-buildx@v3.3.1
-        with:
-          buildx-version: latest
-          qemu-version: latest
+        uses: docker/setup-buildx-action@v1

       - name: Available platforms
         run: echo ${{ steps.buildx.outputs.platforms }}
.github/workflows/devcontainer-build.yml (vendored, new file, 45 lines)

@@ -0,0 +1,45 @@
+name: Devcontainer Pre-Build
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "0 3 * * 0"
+  # push:
+  #   branches:
+  #     - "master"
+  #   tags:
+  #     - "v*.*.*"
+  # pull_requests:
+  #   branches:
+  #     - "master"
+
+concurrency:
+  group: "${{ github.workflow }}"
+  cancel-in-progress: true
+
+permissions:
+  packages: write
+
+jobs:
+  build-and-push:
+    runs-on: ubuntu-latest
+    steps:
+      -
+        name: Checkout
+        id: checkout
+        uses: actions/checkout@v4
+      -
+        name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      -
+        name: Pre-build dev container image
+        uses: devcontainers/ci@v0.3
+        with:
+          subFolder: .github
+          imageName: ghcr.io/${{ github.repository }}-devcontainer
+          cacheFrom: ghcr.io/${{ github.repository }}-devcontainer
+          push: always
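For local experimentation, roughly the same pre-build can be reproduced with the devcontainers CLI; a hedged sketch (the CLI and its flags come from `@devcontainers/cli`, not from this PR, and the image name mirrors the `imageName` input above):

```bash
npm install -g @devcontainers/cli
# Build from the .github devcontainer definition, mirroring "subFolder" above:
devcontainer build --workspace-folder .github \
    --image-name ghcr.io/freqtrade/freqtrade-devcontainer
```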
.github/workflows/pre-commit-update.yml (vendored, 5 changes)

@@ -17,7 +17,7 @@ jobs:

       - uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.12"


       - name: Install pre-commit

@@ -26,9 +26,6 @@ jobs:
       - name: Run auto-update
         run: pre-commit autoupdate

-      - name: Run pre-commit
-        run: pre-commit run --all-files
-
       - uses: peter-evans/create-pull-request@v6
         with:
           token: ${{ secrets.REPO_SCOPED_TOKEN }}
@@ -9,17 +9,17 @@ repos:
     # stages: [push]

   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: "v1.9.0"
+    rev: "v1.10.0"
     hooks:
       - id: mypy
         exclude: build_helpers
         additional_dependencies:
          - types-cachetools==5.3.0.7
          - types-filelock==3.2.7
-         - types-requests==2.31.0.20240406
+         - types-requests==2.32.0.20240523
          - types-tabulate==0.9.0.20240106
          - types-python-dateutil==2.9.0.20240316
-         - SQLAlchemy==2.0.29
+         - SQLAlchemy==2.0.30
     # stages: [push]

   - repo: https://github.com/pycqa/isort

@@ -31,7 +31,7 @@ repos:

   - repo: https://github.com/charliermarsh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.4.1'
+    rev: 'v0.4.5'
     hooks:
       - id: ruff


@@ -56,7 +56,7 @@ repos:
         )$

   - repo: https://github.com/codespell-project/codespell
-    rev: v2.2.6
+    rev: v2.3.0
     hooks:
       - id: codespell
         additional_dependencies:
.vscode/extensions.json (vendored, new file, 11 lines)

@@ -0,0 +1,11 @@
+{
+    "recommendations": [
+        "ms-python.python",
+        "ms-python.vscode-pylance",
+        "charliermarsh.ruff",
+        "davidanson.vscode-markdownlint",
+        "ms-azuretools.vscode-docker",
+        "vscode-icons-team.vscode-icons",
+        "github.vscode-github-actions",
+    ]
+}
@@ -72,12 +72,12 @@ you can manually run pre-commit with `pre-commit run -a`.
 mypy freqtrade
 ```

-### 4. Ensure all imports are correct
+### 4. Ensure formatting is correct

-#### Run isort
+#### Run ruff

 ``` bash
-isort .
+ruff format .
 ```

 ## (Core)-Committer Guide
@@ -29,6 +29,7 @@ Please read the [exchange specific notes](docs/exchanges.md) to learn about even

 - [X] [Binance](https://www.binance.com/)
+- [X] [Bitmart](https://bitmart.com/)
 - [X] [BingX](https://bingx.com/invite/0EM9RX)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
 - [X] [HTX](https://www.htx.com/) (Former Huobi)
 - [X] [Kraken](https://kraken.com/)
BIN build_helpers/TA_Lib-0.4.29-cp310-cp310-win_amd64.whl (new file)
BIN build_helpers/TA_Lib-0.4.29-cp311-cp311-linux_armv7l.whl (new file)
BIN build_helpers/TA_Lib-0.4.29-cp311-cp311-win_amd64.whl (new file)
BIN build_helpers/TA_Lib-0.4.29-cp312-cp312-win_amd64.whl (new file)
BIN build_helpers/TA_Lib-0.4.29-cp39-cp39-linux_armv7l.whl (new file)
BIN build_helpers/TA_Lib-0.4.29-cp39-cp39-win_amd64.whl (new file)
@@ -6,21 +6,18 @@ from pathlib import Path
 import ccxt


-key = os.environ.get('FREQTRADE__EXCHANGE__KEY')
-secret = os.environ.get('FREQTRADE__EXCHANGE__SECRET')
+key = os.environ.get("FREQTRADE__EXCHANGE__KEY")
+secret = os.environ.get("FREQTRADE__EXCHANGE__SECRET")

-proxy = os.environ.get('CI_WEB_PROXY')
+proxy = os.environ.get("CI_WEB_PROXY")

-exchange = ccxt.binance({
-    'apiKey': key,
-    'secret': secret,
-    'httpsProxy': proxy,
-    'options': {'defaultType': 'swap'}
-})
+exchange = ccxt.binance(
+    {"apiKey": key, "secret": secret, "httpsProxy": proxy, "options": {"defaultType": "swap"}}
+)
 _ = exchange.load_markets()

 lev_tiers = exchange.fetch_leverage_tiers()

 # Assumes this is running in the root of the repository.
-file = Path('freqtrade/exchange/binance_leverage_tiers.json')
-json.dump(dict(sorted(lev_tiers.items())), file.open('w'), indent=2)
+file = Path("freqtrade/exchange/binance_leverage_tiers.json")
+json.dump(dict(sorted(lev_tiers.items())), file.open("w"), indent=2)
@@ -1,18 +1,15 @@
 #!/usr/bin/env python3
-from freqtrade_client import __version__ as client_version
-
 from freqtrade import __version__ as ft_version
+from freqtrade_client import __version__ as client_version


 def main():
     if ft_version != client_version:
-        print(f"Versions do not match: \n"
-              f"ft: {ft_version} \n"
-              f"client: {client_version}")
+        print(f"Versions do not match: \nft: {ft_version} \nclient: {client_version}")
         exit(1)
     print(f"Versions match: ft: {ft_version}, client: {client_version}")
     exit(0)


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
@@ -6,28 +6,30 @@ from pathlib import Path
 import yaml


-pre_commit_file = Path('.pre-commit-config.yaml')
-require_dev = Path('requirements-dev.txt')
-require = Path('requirements.txt')
+pre_commit_file = Path(".pre-commit-config.yaml")
+require_dev = Path("requirements-dev.txt")
+require = Path("requirements.txt")

-with require_dev.open('r') as rfile:
+with require_dev.open("r") as rfile:
     requirements = rfile.readlines()

-with require.open('r') as rfile:
+with require.open("r") as rfile:
     requirements.extend(rfile.readlines())

 # Extract types only
-type_reqs = [r.strip('\n') for r in requirements if r.startswith(
-    'types-') or r.startswith('SQLAlchemy')]
+type_reqs = [
+    r.strip("\n") for r in requirements if r.startswith("types-") or r.startswith("SQLAlchemy")
+]

-with pre_commit_file.open('r') as file:
-    f = yaml.load(file, Loader=yaml.FullLoader)
+with pre_commit_file.open("r") as file:
+    f = yaml.load(file, Loader=yaml.SafeLoader)


-mypy_repo = [repo for repo in f['repos'] if repo['repo']
-             == 'https://github.com/pre-commit/mirrors-mypy']
+mypy_repo = [
+    repo for repo in f["repos"] if repo["repo"] == "https://github.com/pre-commit/mirrors-mypy"
+]

-hooks = mypy_repo[0]['hooks'][0]['additional_dependencies']
+hooks = mypy_repo[0]["hooks"][0]["additional_dependencies"]

 errors = []
 for hook in hooks:
@@ -35,7 +35,7 @@ COPY build_helpers/* /tmp/
 COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
 USER ftuser
 RUN pip install --user --no-cache-dir numpy \
-    && pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib==0.4.28 \
+    && pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib \
     && pip install --user --no-cache-dir -r requirements.txt

 # Copy dependencies to runtime-image
BIN docs/assets/freqUI-backtesting-dark.png (new file, 94 KiB)
BIN docs/assets/freqUI-backtesting-light.png (new file, 91 KiB)
BIN docs/assets/freqUI-plot-configurator-dark.png (new file, 133 KiB)
BIN docs/assets/freqUI-plot-configurator-light.png (new file, 135 KiB)
BIN docs/assets/freqUI-trade-pane-dark.png (new file, 242 KiB)
BIN docs/assets/freqUI-trade-pane-light.png (new file, 241 KiB)
BIN docs/assets/freqUI-trade-pane.png (new file, 209 KiB)
BIN docs/assets/frequi-login-CORS-light.png (new file, 53 KiB)
BIN docs/assets/frequi-login-CORS.png (new file, 50 KiB)
BIN docs/assets/frequi-settings-dark.png (new file, 80 KiB)
BIN docs/assets/frequi-settings-light.png (new file, 79 KiB)
@@ -522,8 +522,8 @@ To save time, by default backtest will reuse a cached result from within the las

 ### Further backtest-result analysis

-To further analyze your backtest results, you can [export the trades](#exporting-trades-to-file).
-You can then load the trades to perform further analysis as shown in the [data analysis](data-analysis.md#backtesting) backtesting section.
+To further analyze your backtest results, freqtrade will export the trades to file by default.
+You can then load the trades to perform further analysis as shown in the [data analysis](strategy_analysis_example.md#load-backtest-results-to-pandas-dataframe) backtesting section.

 ## Assumptions made by backtesting

@@ -531,12 +531,13 @@ Since backtesting lacks some detailed information about what happens within a ca

 - Exchange [trading limits](#trading-limits-in-backtesting) are respected
 - Entries happen at open-price
-- All orders are filled at the requested price (no slippage, no unfilled orders)
+- All orders are filled at the requested price (no slippage) as long as the price is within the candle's high/low range
 - Exit-signal exits happen at open-price of the consecutive candle
+- Exits don't free their trade slot for a new trade until the next candle
 - Exit-signal is favored over Stoploss, because exit-signals are assumed to trigger on candle's open
 - ROI
-  - exits are compared to high - but the ROI value is used (e.g. ROI = 2%, high=5% - so the exit will be at 2%)
-  - exits are never "below the candle", so a ROI of 2% may result in a exit at 2.4% if low was at 2.4% profit
+  - Exits are compared to high - but the ROI value is used (e.g. ROI = 2%, high=5% - so the exit will be at 2%)
+  - Exits are never "below the candle", so a ROI of 2% may result in an exit at 2.4% if low was at 2.4% profit
   - ROI entries which came into effect on the triggering candle (e.g. `120: 0.02` for 1h candles, from `60: 0.05`) will use the candle's open as exit rate
   - Force-exits caused by `<N>=-1` ROI entries use low as exit value, unless N falls on the candle open (e.g. `120: -1` for 1h candles)
 - Stoploss exits happen exactly at stoploss price, even if low was lower, but the loss will be `2 * fees` higher than the stoploss price
@@ -197,7 +197,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `position_adjustment_enable` | Enables the strategy to use position adjustments (additional buys or sells). [More information here](strategy-callbacks.md#adjust-trade-position). <br> [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `false`.*<br> **Datatype:** Boolean
 | `max_entry_position_adjustment` | Maximum additional order(s) for each open trade on top of the first entry Order. Set it to `-1` for unlimited additional orders. [More information here](strategy-callbacks.md#adjust-trade-position). <br> [Strategy Override](#parameters-in-the-strategy). <br>*Defaults to `-1`.*<br> **Datatype:** Positive Integer or -1
 | | **Exchange**
-| `exchange.name` | **Required.** Name of the exchange class to use. [List below](#user-content-what-values-for-exchangename). <br> **Datatype:** String
+| `exchange.name` | **Required.** Name of the exchange class to use. <br> **Datatype:** String
 | `exchange.key` | API key to use for the exchange. Only required when you are in production mode.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
 | `exchange.secret` | API secret to use for the exchange. Only required when you are in production mode.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
 | `exchange.password` | API password to use for the exchange. Only required when you are in production mode and for exchanges that use password for API requests.<br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String

@@ -252,7 +252,7 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | `disable_dataframe_checks` | Disable checking the OHLCV dataframe returned from the strategy methods for correctness. Only use when intentionally changing the dataframe and understand what you are doing. [Strategy Override](#parameters-in-the-strategy).<br> *Defaults to `False`*. <br> **Datatype:** Boolean
 | `internals.process_throttle_secs` | Set the process throttle, or minimum loop duration for one bot iteration loop. Value in second. <br>*Defaults to `5` seconds.* <br> **Datatype:** Positive Integer
 | `internals.heartbeat_interval` | Print heartbeat message every N seconds. Set to 0 to disable heartbeat messages. <br>*Defaults to `60` seconds.* <br> **Datatype:** Positive Integer or 0
-| `internals.sd_notify` | Enables use of the sd_notify protocol to tell systemd service manager about changes in the bot state and issue keep-alive pings. See [here](installation.md#7-optional-configure-freqtrade-as-a-systemd-service) for more details. <br> **Datatype:** Boolean
+| `internals.sd_notify` | Enables use of the sd_notify protocol to tell systemd service manager about changes in the bot state and issue keep-alive pings. See [here](advanced-setup.md#configure-the-bot-running-as-a-systemd-service) for more details. <br> **Datatype:** Boolean
 | `strategy` | **Required** Defines Strategy class to use. Recommended to be set via `--strategy NAME`. <br> **Datatype:** ClassName
 | `strategy_path` | Adds an additional strategy lookup path (must be a directory). <br> **Datatype:** String
 | `recursive_strategy_search` | Set to `true` to recursively search sub-directories inside `user_data/strategies` for a strategy. <br> **Datatype:** Boolean

@@ -370,7 +370,7 @@ This setting works in combination with `max_open_trades`. The maximum capital en
 For example, the bot will at most use (0.05 BTC x 3) = 0.15 BTC, assuming a configuration of `max_open_trades=3` and `stake_amount=0.05`.

 !!! Note
-    This setting respects the [available balance configuration](#available-balance).
+    This setting respects the [available balance configuration](#tradable-balance).

 #### Dynamic stake amount
@@ -568,7 +568,14 @@ The possible values are: `GTC` (default), `FOK` or `IOC`.
 This is ongoing work. For now, it is supported only for binance, gate and kucoin.
 Please don't change the default value unless you know what you are doing and have researched the impact of using different values for your particular exchange.

-### What values can be used for fiat_display_currency?
+### Fiat conversion
+
+Freqtrade uses the Coingecko API to convert the coin value to its corresponding fiat value for the Telegram reports.
+The FIAT currency can be set in the configuration file as `fiat_display_currency`.
+
+Removing `fiat_display_currency` completely from the configuration will skip initializing coingecko, and will not show any FIAT currency conversion. This has no importance for the correct functioning of the bot.
+
+#### What values can be used for fiat_display_currency?

 The `fiat_display_currency` configuration parameter sets the base currency to use for the
 conversion from coin to fiat in the bot Telegram reports.

@@ -587,7 +594,25 @@ The valid values are:
 "BTC", "ETH", "XRP", "LTC", "BCH", "BNB"
 ```

-Removing `fiat_display_currency` completely from the configuration will skip initializing coingecko, and will not show any FIAT currency conversion. This has no importance for the correct functioning of the bot.
+#### Coingecko Rate limit problems
+
+On some IP ranges, coingecko is heavily rate-limiting.
+In such cases, you may want to add your coingecko API key to the configuration.
+
+``` json
+{
+    "fiat_display_currency": "USD",
+    "coingecko": {
+        "api_key": "your-api",
+        "is_demo": true
+    }
+}
+```
+
+Freqtrade supports both Demo and Pro coingecko API keys.
+
+The Coingecko API key is NOT required for the bot to function correctly.
+It is only used for the conversion of coin to fiat in the Telegram reports, which usually also works without an API key.

 ## Using Dry-run mode
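The `coingecko` block added above can be sanity-checked outside the bot; a hedged sketch (the `/ping` endpoint and the `x-cg-demo-api-key` header are assumptions based on Coingecko's public API documentation, not part of this PR):

```bash
# Expect a small JSON greeting if the demo key is accepted.
curl -s -H "x-cg-demo-api-key: your-api" \
    "https://api.coingecko.com/api/v3/ping"
```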
@@ -24,10 +24,10 @@ usage: freqtrade download-data [-h] [-v] [--logfile FILE] [-V] [-c PATH]
                                [--days INT] [--new-pairs-days INT]
                                [--include-inactive-pairs]
                                [--timerange TIMERANGE] [--dl-trades]
-                               [--exchange EXCHANGE]
+                               [--convert] [--exchange EXCHANGE]
                                [-t TIMEFRAMES [TIMEFRAMES ...]] [--erase]
                                [--data-format-ohlcv {json,jsongz,hdf5,feather,parquet}]
-                               [--data-format-trades {json,jsongz,hdf5,feather}]
+                               [--data-format-trades {json,jsongz,hdf5,feather,parquet}]
                                [--trading-mode {spot,margin,futures}]
                                [--prepend]

@@ -48,6 +48,11 @@ options:
   --dl-trades           Download trades instead of OHLCV data. The bot will
                         resample trades to the desired timeframe as specified
                         as --timeframes/-t.
+  --convert             Convert downloaded trades to OHLCV data. Only
+                        applicable in combination with `--dl-trades`. Will be
+                        automatic for exchanges which don't have historic
+                        OHLCV (e.g. Kraken). If not provided, use `trades-to-
+                        ohlcv` to convert trades data to OHLCV data.
   --exchange EXCHANGE   Exchange name. Only valid if no config is provided.
   -t TIMEFRAMES [TIMEFRAMES ...], --timeframes TIMEFRAMES [TIMEFRAMES ...]
                         Specify which tickers to download. Space-separated

@@ -57,7 +62,7 @@ options:
   --data-format-ohlcv {json,jsongz,hdf5,feather,parquet}
                         Storage format for downloaded candle (OHLCV) data.
                         (default: `feather`).
-  --data-format-trades {json,jsongz,hdf5,feather}
+  --data-format-trades {json,jsongz,hdf5,feather,parquet}
                         Storage format for downloaded trades data. (default:
                         `feather`).
   --trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}

@@ -471,15 +476,20 @@ ETH/USDT 5m, 15m, 30m, 1h, 2h, 4h

 ## Trades (tick) data

-By default, `download-data` sub-command downloads Candles (OHLCV) data. Some exchanges also provide historic trade-data via their API.
+By default, `download-data` sub-command downloads Candles (OHLCV) data. Most exchanges also provide historic trade-data via their API.
 This data can be useful if you need many different timeframes, since it is only downloaded once, and then resampled locally to the desired timeframes.

-Since this data is large by default, the files use the feather fileformat by default. They are stored in your data-directory with the naming convention of `<pair>-trades.feather` (`ETH_BTC-trades.feather`). Incremental mode is also supported, as for historic OHLCV data, so downloading the data once per week with `--days 8` will create an incremental data-repository.
+Since this data is large by default, the files use the feather file format by default. They are stored in your data-directory with the naming convention of `<pair>-trades.feather` (`ETH_BTC-trades.feather`). Incremental mode is also supported, as for historic OHLCV data, so downloading the data once per week with `--days 8` will create an incremental data-repository.

-To use this mode, simply add `--dl-trades` to your call. This will swap the download method to download trades, and resamples the data locally.
+To use this mode, simply add `--dl-trades` to your call. This will swap the download method to download trades.
+If `--convert` is also provided, the resample step will happen automatically and overwrite eventually existing OHLCV data for the given pair/timeframe combinations.

-!!! Warning "do not use"
-    You should not use this unless you're a kraken user. Most other exchanges provide OHLCV data with sufficient history.
+!!! Warning "Do not use"
+    You should not use this unless you're a kraken user (Kraken does not provide historic OHLCV data).
+    Most other exchanges provide OHLCV data with sufficient history, so downloading multiple timeframes through that method will still prove to be a lot faster than downloading trades data.
+
+!!! Note "Kraken user"
+    Kraken users should read [this](exchanges.md#historic-kraken-data) before starting to download data.

 Example call:
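The elided example call can be reconstructed from the truncated hunk context below together with the `--dl-trades` flag documented above; a sketch:

```bash
freqtrade download-data --exchange kraken --pairs XRP/EUR ETH/EUR --days 20 --dl-trades
# Append --convert to resample the trades to OHLCV in the same run.
```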
@@ -490,12 +500,6 @@ freqtrade download-data --exchange kraken --pairs XRP/EUR ETH/EUR --days 20 --dl
 !!! Note
     While this method uses async calls, it will be slow, since it requires the result of the previous call to generate the next request to the exchange.

-!!! Warning
-    The historic trades are not available during Freqtrade dry-run and live trade modes because all exchanges tested provide this data with a delay of few 100 candles, so it's not suitable for real-time trading.
-
-!!! Note "Kraken user"
-    Kraken users should read [this](exchanges.md#historic-kraken-data) before starting to download data.
-
 ## Next step

-Great, you now have backtest data downloaded, so you can now start [backtesting](backtesting.md) your strategy.
+Great, you now have some data downloaded, so you can now start [backtesting](backtesting.md) your strategy.
@@ -83,7 +83,7 @@ Details will obviously vary between setups - but this should work to get you sta
 ``` json
 {
     "name": "freqtrade trade",
-    "type": "python",
+    "type": "debugpy",
     "request": "launch",
     "module": "freqtrade",
     "console": "integratedTerminal",
@@ -127,6 +127,13 @@ These settings will be checked on startup, and freqtrade will show an error if t

 Freqtrade will not attempt to change these settings.

+## Bingx
+
+BingX supports [time_in_force](configuration.md#understand-order_time_in_force) with settings "GTC" (good till cancelled), "IOC" (immediate-or-cancel) and "PO" (Post only) settings.
+
+!!! Tip "Stoploss on Exchange"
+    Bingx supports `stoploss_on_exchange` and can use both stop-limit and stop-market orders. It provides great advantages, so we recommend to benefit from it by enabling stoploss on exchange.
+
 ## Kraken

 Kraken supports [time_in_force](configuration.md#understand-order_time_in_force) with settings "GTC" (good till cancelled), "IOC" (immediate-or-cancel) and "PO" (Post only) settings.
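The `time_in_force` support described for BingX and Kraken above is driven by configuration; a minimal sketch, assuming the standard `order_time_in_force` structure from the configuration documentation (values must be supported by the exchange):

```jsonc
{
    // Time-in-force per order side; "GTC" is the default.
    "order_time_in_force": {
        "entry": "GTC",
        "exit": "GTC"
    }
}
```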
@@ -299,7 +306,7 @@ $ pip3 install web3
 Most exchanges return current incomplete candle via their OHLCV/klines API interface.
 By default, Freqtrade assumes that incomplete candle is fetched from the exchange and removes the last candle assuming it's the incomplete candle.

-Whether your exchange returns incomplete candles or not can be checked using [the helper script](developer.md#Incomplete-candles) from the Contributor documentation.
+Whether your exchange returns incomplete candles or not can be checked using [the helper script](developer.md#incomplete-candles) from the Contributor documentation.

 Due to the danger of repainting, Freqtrade does not allow you to use this incomplete candle.
docs/freq-ui.md (new file, 85 lines)

@@ -0,0 +1,85 @@
+# FreqUI
+
+Freqtrade provides a builtin webserver, which can serve [FreqUI](https://github.com/freqtrade/frequi), the freqtrade frontend.
+
+By default, the UI is automatically installed as part of the installation (script, docker).
+freqUI can also be manually installed by using the `freqtrade install-ui` command.
+This same command can also be used to update freqUI to new releases.
+
+Once the bot is started in trade / dry-run mode (with `freqtrade trade`) - the UI will be available under the configured API port (by default `http://127.0.0.1:8080`).
+
+??? Note "Looking to contribute to freqUI?"
+    Developers should not use this method, but instead use the method described in the [freqUI repository](https://github.com/freqtrade/frequi) to get the source-code of freqUI. A working installation of node will be required to build the frontend.
+
+!!! tip "freqUI is not required to run freqtrade"
+    freqUI is an optional component of freqtrade, and is not required to run the bot.
+    It is a frontend that can be used to monitor the bot and to interact with it - but freqtrade itself will work perfectly fine without it.
+
+## Configuration
+
+FreqUI does not have its own configuration file - but assumes a working setup for the [rest-api](rest-api.md) is available.
+Please refer to the corresponding documentation page to get set up with freqUI.
+
+## UI
+
+FreqUI is a modern, responsive web application that can be used to monitor and interact with your bot.
+
+FreqUI provides a light, as well as a dark theme.
+Themes can be easily switched via a prominent button at the top of the page.
+The theme of the screenshots on this page will adapt to the selected documentation Theme, so to see the dark (or light) version, please switch the theme of the Documentation.
+
+### Login
+
+The below screenshot shows the login screen of freqUI.
+
+![FreqUI - login](assets/frequi-login-CORS.png#only-dark)
+![FreqUI - login](assets/frequi-login-CORS-light.png#only-light)
+
+!!! Hint "CORS"
+    The CORS error shown in this screenshot is due to the fact that the UI is running on a different port than the API, and [CORS](#cors) has not been set up correctly yet.
+
+### Trade view
+
+The trade view allows you to visualize the trades that the bot is making and to interact with the bot.
+On this page, you can also interact with the bot by starting and stopping it and - if configured - force trade entries and exits.
+
+![FreqUI - trade view](assets/freqUI-trade-pane-dark.png#only-dark)
+![FreqUI - trade view](assets/freqUI-trade-pane-light.png#only-light)
+
+### Plot Configurator
+
+FreqUI Plots can be configured either via a `plot_config` configuration object in the strategy (which can be loaded via "from strategy" button) or via the UI.
+Multiple plot configurations can be created and switched at will - allowing for flexible, different views into your charts.
+
+The plot configuration can be accessed via the "Plot Configurator" (Cog icon) button in the top right corner of the trade view.
+
+![FreqUI - plot configuration](assets/freqUI-plot-configurator-dark.png#only-dark)
+![FreqUI - plot configuration](assets/freqUI-plot-configurator-light.png#only-light)
+
+### Settings
+
+Several UI related settings can be changed by accessing the settings page.
+
+Things you can change (among others):
+
+* Timezone of the UI
+* Visualization of open trades as part of the favicon (browser tab)
+* Candle colors (up/down -> red/green)
+* Enable / disable in-app notification types
+
+![FreqUI - Settings view](assets/frequi-settings-dark.png#only-dark)
+![FreqUI - Settings view](assets/frequi-settings-light.png#only-light)
+
+## Backtesting
+
+When freqtrade is started in [webserver mode](utils.md#webserver-mode) (freqtrade started with `freqtrade webserver`), the backtesting view becomes available.
+This view allows you to backtest strategies and visualize the results.
+
+You can also load and visualize previous backtest results, as well as compare the results with each other.
+
+![FreqUI - Backtesting](assets/freqUI-backtesting-dark.png#only-dark)
+![FreqUI - Backtesting](assets/freqUI-backtesting-light.png#only-light)
+
+--8<-- "includes/cors.md"
@@ -224,7 +224,7 @@ where $W_i$ is the weight of data point $i$ in a total set of $n$ data points. B

 ## Building the data pipeline

-By default, FreqAI builds a dynamic pipeline based on user congfiguration settings. The default settings are robust and designed to work with a variety of methods. These two steps are a `MinMaxScaler(-1,1)` and a `VarianceThreshold` which removes any column that has 0 variance. Users can activate other steps with more configuration parameters. For example if users add `use_SVM_to_remove_outliers: true` to the `freqai` config, then FreqAI will automatically add the [`SVMOutlierExtractor`](#identifying-outliers-using-a-support-vector-machine-svm) to the pipeline. Likewise, users can add `principal_component_analysis: true` to the `freqai` config to activate PCA. The [DissimilarityIndex](#identifying-outliers-with-the-dissimilarity-index-di) is activated with `DI_threshold: 1`. Finally, noise can also be added to the data with `noise_standard_deviation: 0.1`. Finally, users can add [DBSCAN](#identifying-outliers-with-dbscan) outlier removal with `use_DBSCAN_to_remove_outliers: true`.
+By default, FreqAI builds a dynamic pipeline based on user configuration settings. The default settings are robust and designed to work with a variety of methods. These two steps are a `MinMaxScaler(-1,1)` and a `VarianceThreshold` which removes any column that has 0 variance. Users can activate other steps with more configuration parameters. For example if users add `use_SVM_to_remove_outliers: true` to the `freqai` config, then FreqAI will automatically add the [`SVMOutlierExtractor`](#identifying-outliers-using-a-support-vector-machine-svm) to the pipeline. Likewise, users can add `principal_component_analysis: true` to the `freqai` config to activate PCA. The [DissimilarityIndex](#identifying-outliers-with-the-dissimilarity-index-di) is activated with `DI_threshold: 1`. Finally, noise can also be added to the data with `noise_standard_deviation: 0.1`. Finally, users can add [DBSCAN](#identifying-outliers-with-dbscan) outlier removal with `use_DBSCAN_to_remove_outliers: true`.

 !!! note "More information available"
     Please review the [parameter table](freqai-parameter-table.md) for more information on these parameters.

@@ -235,7 +235,7 @@ By default, FreqAI builds a dynamic pipeline based on user congfiguration settin
 Users are encouraged to customize the data pipeline to their needs by building their own data pipeline. This can be done by simply setting `dk.feature_pipeline` to their desired `Pipeline` object inside their `IFreqaiModel` `train()` function, or if they prefer not to touch the `train()` function, they can override `define_data_pipeline`/`define_label_pipeline` functions in their `IFreqaiModel`:

 !!! note "More information available"
-    FreqAI uses the the [`DataSieve`](https://github.com/emergentmethods/datasieve) pipeline, which follows the SKlearn pipeline API, but adds, among other features, coherence between the X, y, and sample_weight vector point removals, feature removal, feature name following.
+    FreqAI uses the [`DataSieve`](https://github.com/emergentmethods/datasieve) pipeline, which follows the SKlearn pipeline API, but adds, among other features, coherence between the X, y, and sample_weight vector point removals, feature removal, feature name following.

 ```python
 from datasieve.transforms import SKLearnWrapper, DissimilarityIndex
@@ -391,3 +391,18 @@ Given a number of data points $N$, and a distance $\varepsilon$, DBSCAN clusters
 ![dbscan](assets/freqai_dbscan.jpg)

 FreqAI uses `sklearn.cluster.DBSCAN` (details are available on scikit-learn's webpage [here](https://scikit-learn.org/stable/modules/generated/sklearn.cluster.DBSCAN.html) (external website)) with `min_samples` ($N$) taken as 1/4 of the no. of time points (candles) in the feature set. `eps` ($\varepsilon$) is computed automatically as the elbow point in the *k-distance graph* computed from the nearest neighbors in the pairwise distances of all data points in the feature set.
+
+### Data dimensionality reduction with Principal Component Analysis
+
+You can reduce the dimensionality of your features by activating the principal_component_analysis in the config:
+
+```json
+    "freqai": {
+        "feature_parameters" : {
+            "principal_component_analysis": true
+        }
+    }
+```
+
+This will perform PCA on the features and reduce their dimensionality so that the explained variance of the data set is >= 0.999. Reducing data dimensionality makes training the model faster and hence allows for more up-to-date models.
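The 0.999 explained-variance criterion described above maps directly onto scikit-learn's PCA; a minimal standalone sketch (FreqAI wires this into its data pipeline internally, so the data here is illustrative):

```python
import numpy as np
from sklearn.decomposition import PCA

X = np.random.rand(500, 40)    # e.g. 500 candles x 40 engineered features
pca = PCA(n_components=0.999)  # keep components until explained variance >= 0.999
X_reduced = pca.fit_transform(X)
print(X.shape, "->", X_reduced.shape)  # fewer columns, faster training
```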
@@ -36,7 +36,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | `weight_factor` | Weight training data points according to their recency (see details [here](freqai-feature-engineering.md#weighting-features-for-temporal-importance)). <br> **Datatype:** Positive float (typically < 1).
 | `indicator_max_period_candles` | **No longer used (#7325)**. Replaced by `startup_candle_count` which is set in the [strategy](freqai-configuration.md#building-a-freqai-strategy). `startup_candle_count` is timeframe independent and defines the maximum *period* used in `feature_engineering_*()` for indicator creation. FreqAI uses this parameter together with the maximum timeframe in `include_time_frames` to calculate how many data points to download such that the first data point does not include a NaN. <br> **Datatype:** Positive integer.
 | `indicator_periods_candles` | Time periods to calculate indicators for. The indicators are added to the base indicator dataset. <br> **Datatype:** List of positive integers.
-| `principal_component_analysis` | Automatically reduce the dimensionality of the data set using Principal Component Analysis. See details about how it works [here](#reducing-data-dimensionality-with-principal-component-analysis) <br> **Datatype:** Boolean. <br> Default: `False`.
+| `principal_component_analysis` | Automatically reduce the dimensionality of the data set using Principal Component Analysis. See details about how it works [here](freqai-feature-engineering.md#data-dimensionality-reduction-with-principal-component-analysis) <br> **Datatype:** Boolean. <br> Default: `False`.
 | `plot_feature_importances` | Create a feature importance plot for each model for the top/bottom `plot_feature_importances` number of features. Plot is stored in `user_data/models/<identifier>/sub-train-<COIN>_<timestamp>.html`. <br> **Datatype:** Integer. <br> Default: `0`.
 | `DI_threshold` | Activates the use of the Dissimilarity Index for outlier detection when set to > 0. See details about how it works [here](freqai-feature-engineering.md#identifying-outliers-with-the-dissimilarity-index-di). <br> **Datatype:** Positive float (typically < 1).
 | `use_SVM_to_remove_outliers` | Train a support vector machine to detect and remove outliers from the training dataset, as well as from incoming data points. See details about how it works [here](freqai-feature-engineering.md#identifying-outliers-using-a-support-vector-machine-svm). <br> **Datatype:** Boolean.
@@ -14,8 +14,7 @@ To learn how to get data for the pairs and exchange you're interested in, head o

 !!! Note
     Since 2021.4 release you no longer have to write a separate hyperopt class, but can configure the parameters directly in the strategy.
-    The legacy method is still supported, but it is no longer the recommended way of setting up hyperopt.
-    The legacy documentation is available at [Legacy Hyperopt](advanced-hyperopt.md#legacy-hyperopt).
+    The legacy method was supported up to 2021.8 and has been removed in 2021.9.

 ## Install hyperopt dependencies
@@ -765,7 +764,7 @@ Override the `roi_space()` method if you need components of the ROI tables to va
 A sample for these methods can be found in the [overriding pre-defined spaces section](advanced-hyperopt.md#overriding-pre-defined-spaces).

 !!! Note "Reduced search space"
-    To limit the search space further, Decimals are limited to 3 decimal places (a precision of 0.001). This is usually sufficient, every value more precise than this will usually result in overfitted results. You can however [overriding pre-defined spaces](advanced-hyperopt.md#pverriding-pre-defined-spaces) to change this to your needs.
+    To limit the search space further, Decimals are limited to 3 decimal places (a precision of 0.001). This is usually sufficient, every value more precise than this will usually result in overfitted results. You can however [override pre-defined spaces](advanced-hyperopt.md#overriding-pre-defined-spaces) to change this to your needs.

 ### Understand Hyperopt Stoploss results

@@ -807,7 +806,7 @@ If you have the `stoploss_space()` method in your custom hyperopt file, remove i
 Override the `stoploss_space()` method and define the desired range in it if you need stoploss values to vary in other range during hyperoptimization. A sample for this method can be found in the [overriding pre-defined spaces section](advanced-hyperopt.md#overriding-pre-defined-spaces).

 !!! Note "Reduced search space"
-    To limit the search space further, Decimals are limited to 3 decimal places (a precision of 0.001). This is usually sufficient, every value more precise than this will usually result in overfitted results. You can however [overriding pre-defined spaces](advanced-hyperopt.md#pverriding-pre-defined-spaces) to change this to your needs.
+    To limit the search space further, Decimals are limited to 3 decimal places (a precision of 0.001). This is usually sufficient, every value more precise than this will usually result in overfitted results. You can however [override pre-defined spaces](advanced-hyperopt.md#overriding-pre-defined-spaces) to change this to your needs.

 ### Understand Hyperopt Trailing Stop results
docs/includes/cors.md (new file, 43 lines)

@@ -0,0 +1,43 @@
+## CORS
+
+This whole section is only necessary in cross-origin cases (where you have multiple bot APIs running on `localhost:8081`, `localhost:8082`, ...), and want to combine them into one FreqUI instance.
+
+??? info "Technical explanation"
+    All web-based front-ends are subject to [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) - Cross-Origin Resource Sharing.
+    Since most of the requests to the Freqtrade API must be authenticated, a proper CORS policy is key to avoid security problems.
+    Also, the standard disallows `*` CORS policies for requests with credentials, so this setting must be set appropriately.
+
+Users can allow access from different origin URLs to the bot API via the `CORS_origins` configuration setting.
+It consists of a list of allowed URLs that are allowed to consume resources from the bot's API.
+
+Assuming your application is deployed as `https://frequi.freqtrade.io/home/` - this would mean that the following configuration becomes necessary:
+
+```jsonc
+{
+    //...
+    "jwt_secret_key": "somethingrandom",
+    "CORS_origins": ["https://frequi.freqtrade.io"],
+    //...
+}
+```
+
+In the following (pretty common) case, FreqUI is accessible on `http://localhost:8080/trade` (this is what you see in your navbar when navigating to freqUI).
+![freqUI url](assets/frequi_url.png)
+
+The correct configuration for this case is `http://localhost:8080` - the main part of the URL including the port.
+
+```jsonc
+{
+    //...
+    "jwt_secret_key": "somethingrandom",
+    "CORS_origins": ["http://localhost:8080"],
+    //...
+}
+```
+
+!!! Tip "Trailing slash"
+    A trailing slash is not allowed in the `CORS_origins` configuration (e.g. `"http://localhost:8080/"`).
+    Such a configuration will not take effect, and the CORS errors will remain.
+
+!!! Note
+    We strongly recommend to also set `jwt_secret_key` to something random and known only to yourself to avoid unauthorized access to your bot.
@@ -41,6 +41,7 @@ Please read the [exchange specific notes](exchanges.md) to learn about eventual,

 - [X] [Binance](https://www.binance.com/)
+- [X] [Bitmart](https://bitmart.com/)
 - [X] [BingX](https://bingx.com/invite/0EM9RX)
 - [X] [Gate.io](https://www.gate.io/ref/6266643)
 - [X] [HTX](https://www.htx.com/) (Former Huobi)
 - [X] [Kraken](https://kraken.com/)
@@ -51,7 +51,7 @@ These requirements apply to both [Script Installation](#script-installation) and
 ### Install code

 We've included/collected install instructions for Ubuntu, MacOS, and Windows. These are guidelines and your success may vary with other distros.
-OS Specific steps are listed first, the [Common](#common) section below is necessary for all systems.
+OS Specific steps are listed first, the common section below is necessary for all systems.

 !!! Note
     Python3.9 or higher and the corresponding pip are assumed to be available.

@@ -286,7 +286,7 @@ cd freqtrade
 #### Freqtrade install: Conda Environment

 ```bash
-conda create --name freqtrade python=3.11
+conda create --name freqtrade python=3.12
 ```

 !!! Note "Creating Conda Environment"
@@ -17,7 +17,7 @@ If you already have an existing strategy, please read the [strategy migration gu

 ## Shorting

-Shorting is not possible when trading with [`trading_mode`](#understand-tradingmode) set to `spot`. To short trade, `trading_mode` must be set to `margin`(currently unavailable) or [`futures`](#futures), with [`margin_mode`](#margin-mode) set to `cross`(currently unavailable) or [`isolated`](#isolated-margin-mode)
+Shorting is not possible when trading with [`trading_mode`](#leverage-trading-modes) set to `spot`. To short trade, `trading_mode` must be set to `margin`(currently unavailable) or [`futures`](#futures), with [`margin_mode`](#margin-mode) set to `cross`(currently unavailable) or [`isolated`](#isolated-margin-mode)

 For a strategy to short, the strategy class must set the class variable `can_short = True`
@@ -1,6 +1,6 @@
 markdown==3.6
-mkdocs==1.5.3
-mkdocs-material==9.5.18
+mkdocs==1.6.0
+mkdocs-material==9.5.24
 mdx_truly_sane_lists==1.3
-pymdown-extensions==10.8
-jinja2==3.1.3
+pymdown-extensions==10.8.1
+jinja2==3.1.4
@@ -1,16 +1,8 @@
-# REST API & FreqUI
+# REST API

-## FreqUI
-
-Freqtrade provides a builtin webserver, which can serve [FreqUI](https://github.com/freqtrade/frequi), the freqtrade UI.
-
-By default, the UI is not included in the installation (except for docker images), and must be installed explicitly with `freqtrade install-ui`.
-This same command can also be used to update freqUI, should there be a new release.
-
-Once the bot is started in trade / dry-run mode (with `freqtrade trade`) - the UI will be available under the configured port below (usually `http://127.0.0.1:8080`).
-
-!!! Note "developers"
-    Developers should not use this method, but instead use the method described in the [freqUI repository](https://github.com/freqtrade/frequi) to get the source-code of freqUI.
+FreqUI now has its own dedicated [documentation section](frequi.md) - please refer to that section for all information regarding FreqUI.

 ## Configuration
@@ -169,7 +161,7 @@ freqtrade-client --config rest_config.json <command> [optional parameters]
 | `delete_lock <lock_id>` | Deletes (disables) the lock by id.
 | `locks add <pair>, <until>, [side], [reason]` | Locks a pair until "until". (Until will be rounded up to the nearest timeframe).
 | `profit` | Display a summary of your profit/loss from close trades and some stats about your performance.
-| `forceexit <trade_id>` | Instantly exits the given trade (Ignoring `minimum_roi`).
+| `forceexit <trade_id> [order_type] [amount]` | Instantly exits the given trade (ignoring `minimum_roi`), using the given order type ("market" or "limit", uses your config setting if not specified), and the chosen amount (full sell if not specified).
 | `forceexit all` | Instantly exits all open trades (Ignoring `minimum_roi`).
-| `forceenter <pair> [rate]` | Instantly enters the given pair. Rate is optional. (`force_entry_enable` must be set to True)
+| `forceenter <pair> <side> [rate]` | Instantly longs or shorts the given pair. Rate is optional. (`force_entry_enable` must be set to True)
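Combined with the `freqtrade-client` invocation shown in the hunk context above, the extended `forceexit` signature can be exercised like this; a hedged sketch (trade id and amount are illustrative):

```bash
# Exit trade 1 with a market order, selling the given amount:
freqtrade-client --config rest_config.json forceexit 1 market 0.5
```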
|
@ -488,42 +480,4 @@ Since the access token has a short timeout (15 min) - the `token/refresh` reques
|
|||
{"access_token":"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpYXQiOjE1ODkxMTk5NzQsIm5iZiI6MTU4OTExOTk3NCwianRpIjoiMDBjNTlhMWUtMjBmYS00ZTk0LTliZjAtNWQwNTg2MTdiZDIyIiwiZXhwIjoxNTg5MTIwODc0LCJpZGVudGl0eSI6eyJ1IjoiRnJlcXRyYWRlciJ9LCJmcmVzaCI6ZmFsc2UsInR5cGUiOiJhY2Nlc3MifQ.1seHlII3WprjjclY6DpRhen0rqdF4j6jbvxIhUFaSbs"}
|
||||
```
|
||||
|
||||

### CORS

This whole section is only necessary in cross-origin cases (where you have multiple bot APIs running on `localhost:8081`, `localhost:8082`, ...) and want to combine them into one FreqUI instance.

??? info "Technical explanation"
    All web-based front-ends are subject to [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) - Cross-Origin Resource Sharing.
    Since most of the requests to the Freqtrade API must be authenticated, a proper CORS policy is key to avoid security problems.
    Also, the standard disallows `*` CORS policies for requests with credentials, so this setting must be set appropriately.

Users can allow access from different origin URLs to the bot API via the `CORS_origins` configuration setting.
It consists of a list of origin URLs that are allowed to consume resources from the bot's API.

Assuming your application is deployed as `https://frequi.freqtrade.io/home/`, the following configuration becomes necessary:

```jsonc
{
    //...
    "jwt_secret_key": "somethingrandom",
    "CORS_origins": ["https://frequi.freqtrade.io"],
    //...
}
```

In the following (pretty common) case, FreqUI is accessible on `http://localhost:8080/trade` (this is what you see in your navbar when navigating to freqUI).

![freqUI url](assets/frequi_url.png)

The correct configuration for this case is `http://localhost:8080` - the main part of the URL including the port.

```jsonc
{
    //...
    "jwt_secret_key": "somethingrandom",
    "CORS_origins": ["http://localhost:8080"],
    //...
}
```

!!! Note
    We strongly recommend also setting `jwt_secret_key` to something random and known only to yourself to avoid unauthorized access to your bot.
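A quick way to generate such a key - a minimal sketch using only Python's standard library (the same helper freqtrade's `new-config` command uses, as seen further below in this diff):

```python
import secrets

# Print a random hex string suitable for `jwt_secret_key`
print(secrets.token_hex())
```
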
--8<-- "includes/cors.md"

@ -30,6 +30,7 @@ The Order-type will be ignored if only one mode is available.

|----------|-------------|
| Binance | limit |
| Binance Futures | market, limit |
| Bingx | market, limit |
| HTX (former Huobi) | limit |
| kraken | market, limit |
| Gate | limit |
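In a strategy, the chosen order types are declared via the `order_types` attribute - a minimal sketch (the values are illustrative and must match what the table above allows for your exchange):

```python
# Illustrative order-type mapping for a strategy class
order_types = {
    "entry": "limit",
    "exit": "limit",
    "stoploss": "market",
    "stoploss_on_exchange": False,
}
```
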
@ -158,7 +159,7 @@ You could also have a default stop loss when you are in the red with your buy (b

For example, your default stop loss is -10%, but once you have more than 0% profit (example 0.1%) a different trailing stoploss will be used.

!!! Note
    If you want the stoploss to only be changed when you break even or make a profit (what most users want), please refer to the next section with [offset enabled](#Trailing-stop-loss-only-once-the-trade-has-reached-a-certain-offset).
    If you want the stoploss to only be changed when you break even or make a profit (what most users want), please refer to the next section with [offset enabled](#trailing-stop-loss-only-once-the-trade-has-reached-a-certain-offset).

Both values require `trailing_stop` to be set to true and `trailing_stop_positive` to be set to a value.
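In strategy terms, the combination described above looks as follows - a minimal sketch (the numbers are illustrative):

```python
# Default stop loss of -10%; once in profit, trail 1% behind the price
stoploss = -0.10
trailing_stop = True
trailing_stop_positive = 0.01
```
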
@ -209,7 +209,7 @@ def custom_exit(self, pair: str, trade: Trade, current_time: datetime, current_r

## Exit tag

Similar to [Buy Tagging](#buy-tag), you can also specify a sell tag.
Similar to [Entry Tagging](#enter-tag), you can also specify an exit tag.

``` python
def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:

@ -167,7 +167,7 @@ During backtesting, `current_rate` (and `current_profit`) are provided against t

The absolute value of the return value is used (the sign is ignored), so returning `0.05` or `-0.05` has the same result, a stoploss 5% below the current price.
Returning `None` will be interpreted as "no desire to change", and is the only safe way to return when you'd like to not modify the stoploss.
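A minimal sketch of such a callback (the signature follows the documented `custom_stoploss` callback; the timing and distance values are illustrative):

```python
from datetime import datetime, timedelta
from typing import Optional

from freqtrade.persistence import Trade
from freqtrade.strategy import IStrategy


class SampleStrategy(IStrategy):
    stoploss = -0.10
    use_custom_stoploss = True

    def custom_stoploss(self, pair: str, trade: Trade, current_time: datetime,
                        current_rate: float, current_profit: float,
                        **kwargs) -> Optional[float]:
        # Base all time-based logic on current_time, never datetime.now(),
        # so the callback stays backtest-safe.
        if current_time - trade.open_date_utc > timedelta(days=1):
            return 0.05  # 5% below the current price (sign is ignored)
        return None  # "no desire to change" - keep the previous stoploss
```
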
Stoploss on exchange works similar to `trailing_stop`, and the stoploss on exchange is updated as configured in `stoploss_on_exchange_interval` ([More details about stoploss on exchange](stoploss.md#stop-loss-on-exchange-freqtrade)).
Stoploss on exchange works similar to `trailing_stop`, and the stoploss on exchange is updated as configured in `stoploss_on_exchange_interval` ([More details about stoploss on exchange](stoploss.md#stop-loss-on-exchangefreqtrade)).

!!! Note "Use of dates"
    All time-based calculations should be done based on `current_time` - using `datetime.now()` or `datetime.utcnow()` is discouraged, as this will break backtesting support.

@ -450,7 +450,7 @@ Stoploss values returned from `custom_stoploss()` must specify a percentage rela

```

Full examples can be found in the [Custom stoploss](strategy-advanced.md#custom-stoploss) section of the Documentation.
Full examples can be found in the [Custom stoploss](strategy-callbacks.md#custom-stoploss) section of the Documentation.

!!! Note
    Providing invalid input to `stoploss_from_open()` may produce "CustomStoploss function did not return valid stoploss" warnings.
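A sketch of how such a return might look inside the callback (assuming the documented `stoploss_from_open` helper; the 7% target is illustrative):

```python
from freqtrade.strategy import stoploss_from_open


def custom_stoploss(self, pair, trade, current_time, current_rate,
                    current_profit, **kwargs):
    # Place the stop 7% above the open rate, expressed relative to the
    # current rate, which is the format stoploss_from_open() returns.
    return stoploss_from_open(0.07, current_profit, is_short=trade.is_short)
```
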
@ -405,7 +405,7 @@ The metadata-dict (available for `populate_entry_trend`, `populate_exit_trend`, 

Currently this is `pair`, which can be accessed using `metadata['pair']` - and will return a pair in the format `XRP/BTC`.

The metadata-dict should not be modified and does not persist information across multiple calls.
Instead, have a look at the [Storing information](strategy-advanced.md#Storing-information) section.
Instead, have a look at the [Storing information](strategy-advanced.md#storing-information-persistent) section.
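For example, a read-only, pair-specific branch inside a populate callback (a sketch; the pair check is illustrative):

```python
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    # Read - but never modify - the metadata dict
    if metadata["pair"] == "XRP/BTC":
        ...  # pair-specific logic goes here
    return dataframe
```
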
## Strategy file loading

@ -53,7 +53,7 @@ You can use bots in telegram groups by just adding them to the group. You can fi

}
```

For the Freqtrade configuration, you can then use the the full value (including `-` if it's there) as string:
For the Freqtrade configuration, you can then use the full value (including `-` if it's there) as string:

```json
"chat_id": "-1001332619709"

@ -24,7 +24,7 @@ git clone https://github.com/freqtrade/freqtrade.git

Install ta-lib according to the [ta-lib documentation](https://github.com/TA-Lib/ta-lib-python#windows).

As compiling from source on Windows has heavy dependencies (requires a partial Visual Studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the latest 3 Python versions (3.9, 3.10 and 3.11) and for 64bit Windows.
As compiling from source on Windows has heavy dependencies (requires a partial Visual Studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the last 4 Python versions (3.9, 3.10, 3.11 and 3.12) and for 64bit Windows.
These wheels are also used by CI running on Windows, and are therefore tested together with freqtrade.

Other versions must be downloaded from the above link.

@ -1,21 +1,33 @@

""" Freqtrade bot """
__version__ = '2024.4-dev'
"""Freqtrade bot"""

if 'dev' in __version__:
__version__ = "2024.6-dev"

if "dev" in __version__:
    from pathlib import Path

    try:
        import subprocess

        freqtrade_basedir = Path(__file__).parent

        __version__ = __version__ + '-' + subprocess.check_output(
            ['git', 'log', '--format="%h"', '-n 1'],
            stderr=subprocess.DEVNULL, cwd=freqtrade_basedir).decode("utf-8").rstrip().strip('"')
        __version__ = (
            __version__
            + "-"
            + subprocess.check_output(
                ["git", "log", '--format="%h"', "-n 1"],
                stderr=subprocess.DEVNULL,
                cwd=freqtrade_basedir,
            )
            .decode("utf-8")
            .rstrip()
            .strip('"')
        )

    except Exception:  # pragma: no cover
        # git not available, ignore
        try:
            # Try Fallback to freqtrade_commit file (created by CI while building docker image)
            versionfile = Path('./freqtrade_commit')
            versionfile = Path("./freqtrade_commit")
            if versionfile.is_file():
                __version__ = f"docker-{__version__}-{versionfile.read_text()[:8]}"
        except Exception:

@ -9,5 +9,5 @@ To launch Freqtrade as a module

from freqtrade import main


if __name__ == '__main__':
if __name__ == "__main__":
    main.main()

@ -6,22 +6,39 @@ Contains all start-commands, subcommands and CLI Interface creation.

Note: Be careful with file-scoped imports in these subfiles.
as they are parsed on startup, nothing containing optional modules should be loaded.
"""

from freqtrade.commands.analyze_commands import start_analysis_entries_exits
from freqtrade.commands.arguments import Arguments
from freqtrade.commands.build_config_commands import start_new_config, start_show_config
from freqtrade.commands.data_commands import (start_convert_data, start_convert_trades,
                                              start_download_data, start_list_data)
from freqtrade.commands.data_commands import (
    start_convert_data,
    start_convert_trades,
    start_download_data,
    start_list_data,
)
from freqtrade.commands.db_commands import start_convert_db
from freqtrade.commands.deploy_commands import (start_create_userdir, start_install_ui,
                                                start_new_strategy)
from freqtrade.commands.deploy_commands import (
    start_create_userdir,
    start_install_ui,
    start_new_strategy,
)
from freqtrade.commands.hyperopt_commands import start_hyperopt_list, start_hyperopt_show
from freqtrade.commands.list_commands import (start_list_exchanges, start_list_freqAI_models,
                                              start_list_markets, start_list_strategies,
                                              start_list_timeframes, start_show_trades)
from freqtrade.commands.optimize_commands import (start_backtesting, start_backtesting_show,
                                                  start_edge, start_hyperopt,
                                                  start_lookahead_analysis,
                                                  start_recursive_analysis)
from freqtrade.commands.list_commands import (
    start_list_exchanges,
    start_list_freqAI_models,
    start_list_markets,
    start_list_strategies,
    start_list_timeframes,
    start_show_trades,
)
from freqtrade.commands.optimize_commands import (
    start_backtesting,
    start_backtesting_show,
    start_edge,
    start_hyperopt,
    start_lookahead_analysis,
    start_recursive_analysis,
)
from freqtrade.commands.pairlist_commands import start_test_pairlist
from freqtrade.commands.plot_commands import start_plot_dataframe, start_plot_profit
from freqtrade.commands.strategy_utils_commands import start_strategy_update

@ -20,25 +20,25 @@ def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[s

    config = setup_utils_configuration(args, method)

    no_unlimited_runmodes = {
        RunMode.BACKTEST: 'backtesting',
        RunMode.BACKTEST: "backtesting",
    }
    if method in no_unlimited_runmodes.keys():
        from freqtrade.data.btanalysis import get_latest_backtest_filename

        if 'exportfilename' in config:
            if config['exportfilename'].is_dir():
                btfile = Path(get_latest_backtest_filename(config['exportfilename']))
        if "exportfilename" in config:
            if config["exportfilename"].is_dir():
                btfile = Path(get_latest_backtest_filename(config["exportfilename"]))
                signals_file = f"{config['exportfilename']}/{btfile.stem}_signals.pkl"
            else:
                if config['exportfilename'].exists():
                    btfile = Path(config['exportfilename'])
                if config["exportfilename"].exists():
                    btfile = Path(config["exportfilename"])
                    signals_file = f"{btfile.parent}/{btfile.stem}_signals.pkl"
                else:
                    raise ConfigurationError(f"{config['exportfilename']} does not exist.")
        else:
            raise ConfigurationError('exportfilename not in config.')
            raise ConfigurationError("exportfilename not in config.")

        if (not Path(signals_file).exists()):
        if not Path(signals_file).exists():
            raise OperationalException(
                f"Cannot find latest backtest signals file: {signals_file}."
                "Run backtesting with `--export signals`."

@ -58,6 +58,6 @@ def start_analysis_entries_exits(args: Dict[str, Any]) -> None:

    # Initialize configuration
    config = setup_analyze_configuration(args, RunMode.BACKTEST)

    logger.info('Starting freqtrade in analysis mode')
    logger.info("Starting freqtrade in analysis mode")

    process_entry_exit_reasons(config)

@ -1,6 +1,7 @@

"""
This module contains the argument manager class
"""

import argparse
from functools import partial
from pathlib import Path
@ -12,35 +13,72 @@ from freqtrade.constants import DEFAULT_CONFIG
|
|||
|
||||
ARGS_COMMON = ["verbosity", "logfile", "version", "config", "datadir", "user_data_dir"]
|
||||
|
||||
ARGS_STRATEGY = ["strategy", "strategy_path", "recursive_strategy_search", "freqaimodel",
|
||||
"freqaimodel_path"]
|
||||
ARGS_STRATEGY = [
|
||||
"strategy",
|
||||
"strategy_path",
|
||||
"recursive_strategy_search",
|
||||
"freqaimodel",
|
||||
"freqaimodel_path",
|
||||
]
|
||||
|
||||
ARGS_TRADE = ["db_url", "sd_notify", "dry_run", "dry_run_wallet", "fee"]
|
||||
|
||||
ARGS_WEBSERVER: List[str] = []
|
||||
|
||||
ARGS_COMMON_OPTIMIZE = ["timeframe", "timerange", "dataformat_ohlcv",
|
||||
"max_open_trades", "stake_amount", "fee", "pairs"]
|
||||
ARGS_COMMON_OPTIMIZE = [
|
||||
"timeframe",
|
||||
"timerange",
|
||||
"dataformat_ohlcv",
|
||||
"max_open_trades",
|
||||
"stake_amount",
|
||||
"fee",
|
||||
"pairs",
|
||||
]
|
||||
|
||||
ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + ["position_stacking", "use_max_market_positions",
|
||||
"enable_protections", "dry_run_wallet", "timeframe_detail",
|
||||
"strategy_list", "export", "exportfilename",
|
||||
"backtest_breakdown", "backtest_cache",
|
||||
"freqai_backtest_live_models"]
|
||||
ARGS_BACKTEST = ARGS_COMMON_OPTIMIZE + [
|
||||
"position_stacking",
|
||||
"use_max_market_positions",
|
||||
"enable_protections",
|
||||
"dry_run_wallet",
|
||||
"timeframe_detail",
|
||||
"strategy_list",
|
||||
"export",
|
||||
"exportfilename",
|
||||
"backtest_breakdown",
|
||||
"backtest_cache",
|
||||
"freqai_backtest_live_models",
|
||||
]
|
||||
|
||||
ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + ["hyperopt", "hyperopt_path",
|
||||
"position_stacking", "use_max_market_positions",
|
||||
"enable_protections", "dry_run_wallet", "timeframe_detail",
|
||||
"epochs", "spaces", "print_all",
|
||||
"print_colorized", "print_json", "hyperopt_jobs",
|
||||
"hyperopt_random_state", "hyperopt_min_trades",
|
||||
"hyperopt_loss", "disableparamexport",
|
||||
"hyperopt_ignore_missing_space", "analyze_per_epoch"]
|
||||
ARGS_HYPEROPT = ARGS_COMMON_OPTIMIZE + [
|
||||
"hyperopt",
|
||||
"hyperopt_path",
|
||||
"position_stacking",
|
||||
"use_max_market_positions",
|
||||
"enable_protections",
|
||||
"dry_run_wallet",
|
||||
"timeframe_detail",
|
||||
"epochs",
|
||||
"spaces",
|
||||
"print_all",
|
||||
"print_colorized",
|
||||
"print_json",
|
||||
"hyperopt_jobs",
|
||||
"hyperopt_random_state",
|
||||
"hyperopt_min_trades",
|
||||
"hyperopt_loss",
|
||||
"disableparamexport",
|
||||
"hyperopt_ignore_missing_space",
|
||||
"analyze_per_epoch",
|
||||
]
|
||||
|
||||
ARGS_EDGE = ARGS_COMMON_OPTIMIZE + ["stoploss_range"]
|
||||
|
||||
ARGS_LIST_STRATEGIES = ["strategy_path", "print_one_column", "print_colorized",
|
||||
"recursive_strategy_search"]
|
||||
ARGS_LIST_STRATEGIES = [
|
||||
"strategy_path",
|
||||
"print_one_column",
|
||||
"print_colorized",
|
||||
"recursive_strategy_search",
|
||||
]
|
||||
|
||||
ARGS_LIST_FREQAIMODELS = ["freqaimodel_path", "print_one_column", "print_colorized"]
|
||||
|
||||
|
@ -52,12 +90,27 @@ ARGS_LIST_EXCHANGES = ["print_one_column", "list_exchanges_all"]
|
|||
|
||||
ARGS_LIST_TIMEFRAMES = ["exchange", "print_one_column"]
|
||||
|
||||
ARGS_LIST_PAIRS = ["exchange", "print_list", "list_pairs_print_json", "print_one_column",
|
||||
"print_csv", "base_currencies", "quote_currencies", "list_pairs_all",
|
||||
"trading_mode"]
|
||||
ARGS_LIST_PAIRS = [
|
||||
"exchange",
|
||||
"print_list",
|
||||
"list_pairs_print_json",
|
||||
"print_one_column",
|
||||
"print_csv",
|
||||
"base_currencies",
|
||||
"quote_currencies",
|
||||
"list_pairs_all",
|
||||
"trading_mode",
|
||||
]
|
||||
|
||||
ARGS_TEST_PAIRLIST = ["user_data_dir", "verbosity", "config", "quote_currencies",
|
||||
"print_one_column", "list_pairs_print_json", "exchange"]
|
||||
ARGS_TEST_PAIRLIST = [
|
||||
"user_data_dir",
|
||||
"verbosity",
|
||||
"config",
|
||||
"quote_currencies",
|
||||
"print_one_column",
|
||||
"list_pairs_print_json",
|
||||
"exchange",
|
||||
]
|
||||
|
||||
ARGS_CREATE_USERDIR = ["user_data_dir", "reset"]
|
||||
|
||||
|
@ -70,22 +123,59 @@ ARGS_CONVERT_DATA_TRADES = ["pairs", "format_from_trades", "format_to", "erase",
|
|||
ARGS_CONVERT_DATA = ["pairs", "format_from", "format_to", "erase", "exchange"]
|
||||
ARGS_CONVERT_DATA_OHLCV = ARGS_CONVERT_DATA + ["timeframes", "trading_mode", "candle_types"]
|
||||
|
||||
ARGS_CONVERT_TRADES = ["pairs", "timeframes", "exchange", "dataformat_ohlcv", "dataformat_trades",
|
||||
"trading_mode"]
|
||||
ARGS_CONVERT_TRADES = [
|
||||
"pairs",
|
||||
"timeframes",
|
||||
"exchange",
|
||||
"dataformat_ohlcv",
|
||||
"dataformat_trades",
|
||||
"trading_mode",
|
||||
]
|
||||
|
||||
ARGS_LIST_DATA = ["exchange", "dataformat_ohlcv", "pairs", "trading_mode", "show_timerange"]
|
||||
|
||||
ARGS_DOWNLOAD_DATA = ["pairs", "pairs_file", "days", "new_pairs_days", "include_inactive",
|
||||
"timerange", "download_trades", "exchange", "timeframes",
|
||||
"erase", "dataformat_ohlcv", "dataformat_trades", "trading_mode",
|
||||
"prepend_data"]
|
||||
ARGS_DOWNLOAD_DATA = [
|
||||
"pairs",
|
||||
"pairs_file",
|
||||
"days",
|
||||
"new_pairs_days",
|
||||
"include_inactive",
|
||||
"timerange",
|
||||
"download_trades",
|
||||
"convert_trades",
|
||||
"exchange",
|
||||
"timeframes",
|
||||
"erase",
|
||||
"dataformat_ohlcv",
|
||||
"dataformat_trades",
|
||||
"trading_mode",
|
||||
"prepend_data",
|
||||
]
|
||||
|
||||
ARGS_PLOT_DATAFRAME = ["pairs", "indicators1", "indicators2", "plot_limit",
|
||||
"db_url", "trade_source", "export", "exportfilename",
|
||||
"timerange", "timeframe", "no_trades"]
|
||||
ARGS_PLOT_DATAFRAME = [
|
||||
"pairs",
|
||||
"indicators1",
|
||||
"indicators2",
|
||||
"plot_limit",
|
||||
"db_url",
|
||||
"trade_source",
|
||||
"export",
|
||||
"exportfilename",
|
||||
"timerange",
|
||||
"timeframe",
|
||||
"no_trades",
|
||||
]
|
||||
|
||||
ARGS_PLOT_PROFIT = ["pairs", "timerange", "export", "exportfilename", "db_url",
|
||||
"trade_source", "timeframe", "plot_auto_open", ]
|
||||
ARGS_PLOT_PROFIT = [
|
||||
"pairs",
|
||||
"timerange",
|
||||
"export",
|
||||
"exportfilename",
|
||||
"db_url",
|
||||
"trade_source",
|
||||
"timeframe",
|
||||
"plot_auto_open",
|
||||
]
|
||||
|
||||
ARGS_CONVERT_DB = ["db_url", "db_url_from"]
|
||||
|
||||
|
@ -93,36 +183,76 @@ ARGS_INSTALL_UI = ["erase_ui_only", "ui_version"]
|
|||
|
||||
ARGS_SHOW_TRADES = ["db_url", "trade_ids", "print_json"]
|
||||
|
||||
ARGS_HYPEROPT_LIST = ["hyperopt_list_best", "hyperopt_list_profitable",
|
||||
"hyperopt_list_min_trades", "hyperopt_list_max_trades",
|
||||
"hyperopt_list_min_avg_time", "hyperopt_list_max_avg_time",
|
||||
"hyperopt_list_min_avg_profit", "hyperopt_list_max_avg_profit",
|
||||
"hyperopt_list_min_total_profit", "hyperopt_list_max_total_profit",
|
||||
"hyperopt_list_min_objective", "hyperopt_list_max_objective",
|
||||
"print_colorized", "print_json", "hyperopt_list_no_details",
|
||||
"hyperoptexportfilename", "export_csv"]
|
||||
ARGS_HYPEROPT_LIST = [
|
||||
"hyperopt_list_best",
|
||||
"hyperopt_list_profitable",
|
||||
"hyperopt_list_min_trades",
|
||||
"hyperopt_list_max_trades",
|
||||
"hyperopt_list_min_avg_time",
|
||||
"hyperopt_list_max_avg_time",
|
||||
"hyperopt_list_min_avg_profit",
|
||||
"hyperopt_list_max_avg_profit",
|
||||
"hyperopt_list_min_total_profit",
|
||||
"hyperopt_list_max_total_profit",
|
||||
"hyperopt_list_min_objective",
|
||||
"hyperopt_list_max_objective",
|
||||
"print_colorized",
|
||||
"print_json",
|
||||
"hyperopt_list_no_details",
|
||||
"hyperoptexportfilename",
|
||||
"export_csv",
|
||||
]
|
||||
|
||||
ARGS_HYPEROPT_SHOW = ["hyperopt_list_best", "hyperopt_list_profitable", "hyperopt_show_index",
|
||||
"print_json", "hyperoptexportfilename", "hyperopt_show_no_header",
|
||||
"disableparamexport", "backtest_breakdown"]
|
||||
ARGS_HYPEROPT_SHOW = [
|
||||
"hyperopt_list_best",
|
||||
"hyperopt_list_profitable",
|
||||
"hyperopt_show_index",
|
||||
"print_json",
|
||||
"hyperoptexportfilename",
|
||||
"hyperopt_show_no_header",
|
||||
"disableparamexport",
|
||||
"backtest_breakdown",
|
||||
]
|
||||
|
||||
ARGS_ANALYZE_ENTRIES_EXITS = ["exportfilename", "analysis_groups", "enter_reason_list",
|
||||
"exit_reason_list", "indicator_list", "timerange",
|
||||
"analysis_rejected", "analysis_to_csv", "analysis_csv_path"]
|
||||
ARGS_ANALYZE_ENTRIES_EXITS = [
|
||||
"exportfilename",
|
||||
"analysis_groups",
|
||||
"enter_reason_list",
|
||||
"exit_reason_list",
|
||||
"indicator_list",
|
||||
"timerange",
|
||||
"analysis_rejected",
|
||||
"analysis_to_csv",
|
||||
"analysis_csv_path",
|
||||
]
|
||||
|
||||
NO_CONF_REQURIED = ["convert-data", "convert-trade-data", "download-data", "list-timeframes",
|
||||
"list-markets", "list-pairs", "list-strategies", "list-freqaimodels",
|
||||
"list-data", "hyperopt-list", "hyperopt-show", "backtest-filter",
|
||||
"plot-dataframe", "plot-profit", "show-trades", "trades-to-ohlcv",
|
||||
"strategy-updater"]
|
||||
NO_CONF_REQURIED = [
|
||||
"convert-data",
|
||||
"convert-trade-data",
|
||||
"download-data",
|
||||
"list-timeframes",
|
||||
"list-markets",
|
||||
"list-pairs",
|
||||
"list-strategies",
|
||||
"list-freqaimodels",
|
||||
"list-data",
|
||||
"hyperopt-list",
|
||||
"hyperopt-show",
|
||||
"backtest-filter",
|
||||
"plot-dataframe",
|
||||
"plot-profit",
|
||||
"show-trades",
|
||||
"trades-to-ohlcv",
|
||||
"strategy-updater",
|
||||
]
|
||||
|
||||
NO_CONF_ALLOWED = ["create-userdir", "list-exchanges", "new-strategy"]
|
||||
|
||||
ARGS_STRATEGY_UPDATER = ["strategy_list", "strategy_path", "recursive_strategy_search"]
|
||||
|
||||
ARGS_LOOKAHEAD_ANALYSIS = [
|
||||
a for a in ARGS_BACKTEST if a not in ("position_stacking", "use_max_market_positions", 'cache')
|
||||
] + ["minimum_trade_amount", "targeted_trade_amount", "lookahead_analysis_exportfilename"]
|
||||
a for a in ARGS_BACKTEST if a not in ("position_stacking", "use_max_market_positions", "cache")
|
||||
] + ["minimum_trade_amount", "targeted_trade_amount", "lookahead_analysis_exportfilename"]
|
||||
|
||||
ARGS_RECURSIVE_ANALYSIS = ["timeframe", "timerange", "dataformat_ohlcv", "pairs", "startup_candle"]
|
||||
|
||||
|
@ -156,14 +286,14 @@ class Arguments:
|
|||
# Workaround issue in argparse with action='append' and default value
|
||||
# (see https://bugs.python.org/issue16399)
|
||||
# Allow no-config for certain commands (like downloading / plotting)
|
||||
if ('config' in parsed_arg and parsed_arg.config is None):
|
||||
conf_required = ('command' in parsed_arg and parsed_arg.command in NO_CONF_REQURIED)
|
||||
if "config" in parsed_arg and parsed_arg.config is None:
|
||||
conf_required = "command" in parsed_arg and parsed_arg.command in NO_CONF_REQURIED
|
||||
|
||||
if 'user_data_dir' in parsed_arg and parsed_arg.user_data_dir is not None:
|
||||
if "user_data_dir" in parsed_arg and parsed_arg.user_data_dir is not None:
|
||||
user_dir = parsed_arg.user_data_dir
|
||||
else:
|
||||
# Default case
|
||||
user_dir = 'user_data'
|
||||
user_dir = "user_data"
|
||||
# Try loading from "user_data/config.json"
|
||||
cfgfile = Path(user_dir) / DEFAULT_CONFIG
|
||||
if cfgfile.is_file():
|
||||
|
@ -177,7 +307,6 @@ class Arguments:
|
|||
return parsed_arg
|
||||
|
||||
def _build_args(self, optionlist, parser):
|
||||
|
||||
for val in optionlist:
|
||||
opt = AVAILABLE_CLI_OPTIONS[val]
|
||||
parser.add_argument(*opt.cli, dest=val, **opt.kwargs)
|
||||
|
@ -197,41 +326,62 @@ class Arguments:
|
|||
self._build_args(optionlist=ARGS_STRATEGY, parser=strategy_group)
|
||||
|
||||
# Build main command
|
||||
self.parser = argparse.ArgumentParser(description='Free, open source crypto trading bot')
|
||||
self._build_args(optionlist=['version'], parser=self.parser)
|
||||
self.parser = argparse.ArgumentParser(
|
||||
prog="freqtrade", description="Free, open source crypto trading bot"
|
||||
)
|
||||
self._build_args(optionlist=["version"], parser=self.parser)
|
||||
|
||||
from freqtrade.commands import (start_analysis_entries_exits, start_backtesting,
|
||||
start_backtesting_show, start_convert_data,
|
||||
start_convert_db, start_convert_trades,
|
||||
start_create_userdir, start_download_data, start_edge,
|
||||
start_hyperopt, start_hyperopt_list, start_hyperopt_show,
|
||||
start_install_ui, start_list_data, start_list_exchanges,
|
||||
start_list_freqAI_models, start_list_markets,
|
||||
start_list_strategies, start_list_timeframes,
|
||||
start_lookahead_analysis, start_new_config,
|
||||
start_new_strategy, start_plot_dataframe, start_plot_profit,
|
||||
start_recursive_analysis, start_show_config,
|
||||
start_show_trades, start_strategy_update,
|
||||
start_test_pairlist, start_trading, start_webserver)
|
||||
from freqtrade.commands import (
|
||||
start_analysis_entries_exits,
|
||||
start_backtesting,
|
||||
start_backtesting_show,
|
||||
start_convert_data,
|
||||
start_convert_db,
|
||||
start_convert_trades,
|
||||
start_create_userdir,
|
||||
start_download_data,
|
||||
start_edge,
|
||||
start_hyperopt,
|
||||
start_hyperopt_list,
|
||||
start_hyperopt_show,
|
||||
start_install_ui,
|
||||
start_list_data,
|
||||
start_list_exchanges,
|
||||
start_list_freqAI_models,
|
||||
start_list_markets,
|
||||
start_list_strategies,
|
||||
start_list_timeframes,
|
||||
start_lookahead_analysis,
|
||||
start_new_config,
|
||||
start_new_strategy,
|
||||
start_plot_dataframe,
|
||||
start_plot_profit,
|
||||
start_recursive_analysis,
|
||||
start_show_config,
|
||||
start_show_trades,
|
||||
start_strategy_update,
|
||||
start_test_pairlist,
|
||||
start_trading,
|
||||
start_webserver,
|
||||
)
|
||||
|
||||
subparsers = self.parser.add_subparsers(dest='command',
|
||||
# Use custom message when no subhandler is added
|
||||
# shown from `main.py`
|
||||
# required=True
|
||||
)
|
||||
subparsers = self.parser.add_subparsers(
|
||||
dest="command",
|
||||
# Use custom message when no subhandler is added
|
||||
# shown from `main.py`
|
||||
# required=True
|
||||
)
|
||||
|
||||
# Add trade subcommand
|
||||
trade_cmd = subparsers.add_parser(
|
||||
'trade',
|
||||
help='Trade module.',
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
"trade", help="Trade module.", parents=[_common_parser, _strategy_parser]
|
||||
)
|
||||
trade_cmd.set_defaults(func=start_trading)
|
||||
self._build_args(optionlist=ARGS_TRADE, parser=trade_cmd)
|
||||
|
||||
# add create-userdir subcommand
|
||||
create_userdir_cmd = subparsers.add_parser(
|
||||
'create-userdir',
|
||||
"create-userdir",
|
||||
help="Create user-data directory.",
|
||||
)
|
||||
create_userdir_cmd.set_defaults(func=start_create_userdir)
|
||||
|
@ -239,7 +389,7 @@ class Arguments:
|
|||
|
||||
# add new-config subcommand
|
||||
build_config_cmd = subparsers.add_parser(
|
||||
'new-config',
|
||||
"new-config",
|
||||
help="Create new config",
|
||||
)
|
||||
build_config_cmd.set_defaults(func=start_new_config)
|
||||
|
@ -247,7 +397,7 @@ class Arguments:
|
|||
|
||||
# add show-config subcommand
|
||||
show_config_cmd = subparsers.add_parser(
|
||||
'show-config',
|
||||
"show-config",
|
||||
help="Show resolved config",
|
||||
)
|
||||
show_config_cmd.set_defaults(func=start_show_config)
|
||||
|
@ -255,7 +405,7 @@ class Arguments:
|
|||
|
||||
# add new-strategy subcommand
|
||||
build_strategy_cmd = subparsers.add_parser(
|
||||
'new-strategy',
|
||||
"new-strategy",
|
||||
help="Create new strategy",
|
||||
)
|
||||
build_strategy_cmd.set_defaults(func=start_new_strategy)
|
||||
|
@ -263,8 +413,8 @@ class Arguments:
|
|||
|
||||
# Add download-data subcommand
|
||||
download_data_cmd = subparsers.add_parser(
|
||||
'download-data',
|
||||
help='Download backtesting data.',
|
||||
"download-data",
|
||||
help="Download backtesting data.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
download_data_cmd.set_defaults(func=start_download_data)
|
||||
|
@ -272,8 +422,8 @@ class Arguments:
|
|||
|
||||
# Add convert-data subcommand
|
||||
convert_data_cmd = subparsers.add_parser(
|
||||
'convert-data',
|
||||
help='Convert candle (OHLCV) data from one format to another.',
|
||||
"convert-data",
|
||||
help="Convert candle (OHLCV) data from one format to another.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
convert_data_cmd.set_defaults(func=partial(start_convert_data, ohlcv=True))
|
||||
|
@ -281,8 +431,8 @@ class Arguments:
|
|||
|
||||
# Add convert-trade-data subcommand
|
||||
convert_trade_data_cmd = subparsers.add_parser(
|
||||
'convert-trade-data',
|
||||
help='Convert trade data from one format to another.',
|
||||
"convert-trade-data",
|
||||
help="Convert trade data from one format to another.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
convert_trade_data_cmd.set_defaults(func=partial(start_convert_data, ohlcv=False))
|
||||
|
@ -290,8 +440,8 @@ class Arguments:
|
|||
|
||||
# Add trades-to-ohlcv subcommand
|
||||
convert_trade_data_cmd = subparsers.add_parser(
|
||||
'trades-to-ohlcv',
|
||||
help='Convert trade data to OHLCV data.',
|
||||
"trades-to-ohlcv",
|
||||
help="Convert trade data to OHLCV data.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
convert_trade_data_cmd.set_defaults(func=start_convert_trades)
|
||||
|
@ -299,8 +449,8 @@ class Arguments:
|
|||
|
||||
# Add list-data subcommand
|
||||
list_data_cmd = subparsers.add_parser(
|
||||
'list-data',
|
||||
help='List downloaded data.',
|
||||
"list-data",
|
||||
help="List downloaded data.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_data_cmd.set_defaults(func=start_list_data)
|
||||
|
@ -308,17 +458,15 @@ class Arguments:
|
|||
|
||||
# Add backtesting subcommand
|
||||
backtesting_cmd = subparsers.add_parser(
|
||||
'backtesting',
|
||||
help='Backtesting module.',
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
"backtesting", help="Backtesting module.", parents=[_common_parser, _strategy_parser]
|
||||
)
|
||||
backtesting_cmd.set_defaults(func=start_backtesting)
|
||||
self._build_args(optionlist=ARGS_BACKTEST, parser=backtesting_cmd)
|
||||
|
||||
# Add backtesting-show subcommand
|
||||
backtesting_show_cmd = subparsers.add_parser(
|
||||
'backtesting-show',
|
||||
help='Show past Backtest results',
|
||||
"backtesting-show",
|
||||
help="Show past Backtest results",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
backtesting_show_cmd.set_defaults(func=start_backtesting_show)
|
||||
|
@ -326,26 +474,22 @@ class Arguments:
|
|||
|
||||
# Add backtesting analysis subcommand
|
||||
analysis_cmd = subparsers.add_parser(
|
||||
'backtesting-analysis',
|
||||
help='Backtest Analysis module.',
|
||||
parents=[_common_parser]
|
||||
"backtesting-analysis", help="Backtest Analysis module.", parents=[_common_parser]
|
||||
)
|
||||
analysis_cmd.set_defaults(func=start_analysis_entries_exits)
|
||||
self._build_args(optionlist=ARGS_ANALYZE_ENTRIES_EXITS, parser=analysis_cmd)
|
||||
|
||||
# Add edge subcommand
|
||||
edge_cmd = subparsers.add_parser(
|
||||
'edge',
|
||||
help='Edge module.',
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
"edge", help="Edge module.", parents=[_common_parser, _strategy_parser]
|
||||
)
|
||||
edge_cmd.set_defaults(func=start_edge)
|
||||
self._build_args(optionlist=ARGS_EDGE, parser=edge_cmd)
|
||||
|
||||
# Add hyperopt subcommand
|
||||
hyperopt_cmd = subparsers.add_parser(
|
||||
'hyperopt',
|
||||
help='Hyperopt module.',
|
||||
"hyperopt",
|
||||
help="Hyperopt module.",
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
hyperopt_cmd.set_defaults(func=start_hyperopt)
|
||||
|
@ -353,8 +497,8 @@ class Arguments:
|
|||
|
||||
# Add hyperopt-list subcommand
|
||||
hyperopt_list_cmd = subparsers.add_parser(
|
||||
'hyperopt-list',
|
||||
help='List Hyperopt results',
|
||||
"hyperopt-list",
|
||||
help="List Hyperopt results",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
hyperopt_list_cmd.set_defaults(func=start_hyperopt_list)
|
||||
|
@ -362,8 +506,8 @@ class Arguments:
|
|||
|
||||
# Add hyperopt-show subcommand
|
||||
hyperopt_show_cmd = subparsers.add_parser(
|
||||
'hyperopt-show',
|
||||
help='Show details of Hyperopt results',
|
||||
"hyperopt-show",
|
||||
help="Show details of Hyperopt results",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
hyperopt_show_cmd.set_defaults(func=start_hyperopt_show)
|
||||
|
@ -371,8 +515,8 @@ class Arguments:
|
|||
|
||||
# Add list-exchanges subcommand
|
||||
list_exchanges_cmd = subparsers.add_parser(
|
||||
'list-exchanges',
|
||||
help='Print available exchanges.',
|
||||
"list-exchanges",
|
||||
help="Print available exchanges.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_exchanges_cmd.set_defaults(func=start_list_exchanges)
|
||||
|
@ -380,8 +524,8 @@ class Arguments:
|
|||
|
||||
# Add list-markets subcommand
|
||||
list_markets_cmd = subparsers.add_parser(
|
||||
'list-markets',
|
||||
help='Print markets on exchange.',
|
||||
"list-markets",
|
||||
help="Print markets on exchange.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_markets_cmd.set_defaults(func=partial(start_list_markets, pairs_only=False))
|
||||
|
@ -389,8 +533,8 @@ class Arguments:
|
|||
|
||||
# Add list-pairs subcommand
|
||||
list_pairs_cmd = subparsers.add_parser(
|
||||
'list-pairs',
|
||||
help='Print pairs on exchange.',
|
||||
"list-pairs",
|
||||
help="Print pairs on exchange.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_pairs_cmd.set_defaults(func=partial(start_list_markets, pairs_only=True))
|
||||
|
@ -398,8 +542,8 @@ class Arguments:
|
|||
|
||||
# Add list-strategies subcommand
|
||||
list_strategies_cmd = subparsers.add_parser(
|
||||
'list-strategies',
|
||||
help='Print available strategies.',
|
||||
"list-strategies",
|
||||
help="Print available strategies.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_strategies_cmd.set_defaults(func=start_list_strategies)
|
||||
|
@ -407,8 +551,8 @@ class Arguments:
|
|||
|
||||
# Add list-freqAI Models subcommand
|
||||
list_freqaimodels_cmd = subparsers.add_parser(
|
||||
'list-freqaimodels',
|
||||
help='Print available freqAI models.',
|
||||
"list-freqaimodels",
|
||||
help="Print available freqAI models.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_freqaimodels_cmd.set_defaults(func=start_list_freqAI_models)
|
||||
|
@ -416,8 +560,8 @@ class Arguments:
|
|||
|
||||
# Add list-timeframes subcommand
|
||||
list_timeframes_cmd = subparsers.add_parser(
|
||||
'list-timeframes',
|
||||
help='Print available timeframes for the exchange.',
|
||||
"list-timeframes",
|
||||
help="Print available timeframes for the exchange.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_timeframes_cmd.set_defaults(func=start_list_timeframes)
|
||||
|
@ -425,8 +569,8 @@ class Arguments:
|
|||
|
||||
# Add show-trades subcommand
|
||||
show_trades = subparsers.add_parser(
|
||||
'show-trades',
|
||||
help='Show trades.',
|
||||
"show-trades",
|
||||
help="Show trades.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
show_trades.set_defaults(func=start_show_trades)
|
||||
|
@ -434,8 +578,8 @@ class Arguments:
|
|||
|
||||
# Add test-pairlist subcommand
|
||||
test_pairlist_cmd = subparsers.add_parser(
|
||||
'test-pairlist',
|
||||
help='Test your pairlist configuration.',
|
||||
"test-pairlist",
|
||||
help="Test your pairlist configuration.",
|
||||
)
|
||||
test_pairlist_cmd.set_defaults(func=start_test_pairlist)
|
||||
self._build_args(optionlist=ARGS_TEST_PAIRLIST, parser=test_pairlist_cmd)
|
||||
|
@ -450,16 +594,16 @@ class Arguments:
|
|||
|
||||
# Add install-ui subcommand
|
||||
install_ui_cmd = subparsers.add_parser(
|
||||
'install-ui',
|
||||
help='Install FreqUI',
|
||||
"install-ui",
|
||||
help="Install FreqUI",
|
||||
)
|
||||
install_ui_cmd.set_defaults(func=start_install_ui)
|
||||
self._build_args(optionlist=ARGS_INSTALL_UI, parser=install_ui_cmd)
|
||||
|
||||
# Add Plotting subcommand
|
||||
plot_dataframe_cmd = subparsers.add_parser(
|
||||
'plot-dataframe',
|
||||
help='Plot candles with indicators.',
|
||||
"plot-dataframe",
|
||||
help="Plot candles with indicators.",
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
plot_dataframe_cmd.set_defaults(func=start_plot_dataframe)
|
||||
|
@ -467,8 +611,8 @@ class Arguments:
|
|||
|
||||
# Plot profit
|
||||
plot_profit_cmd = subparsers.add_parser(
|
||||
'plot-profit',
|
||||
help='Generate plot showing profits.',
|
||||
"plot-profit",
|
||||
help="Generate plot showing profits.",
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
plot_profit_cmd.set_defaults(func=start_plot_profit)
|
||||
|
@ -476,40 +620,36 @@ class Arguments:
|
|||
|
||||
# Add webserver subcommand
|
||||
webserver_cmd = subparsers.add_parser(
|
||||
'webserver',
|
||||
help='Webserver module.',
|
||||
parents=[_common_parser]
|
||||
"webserver", help="Webserver module.", parents=[_common_parser]
|
||||
)
|
||||
webserver_cmd.set_defaults(func=start_webserver)
|
||||
self._build_args(optionlist=ARGS_WEBSERVER, parser=webserver_cmd)
|
||||
|
||||
# Add strategy_updater subcommand
|
||||
strategy_updater_cmd = subparsers.add_parser(
|
||||
'strategy-updater',
|
||||
help='updates outdated strategy files to the current version',
|
||||
parents=[_common_parser]
|
||||
"strategy-updater",
|
||||
help="updates outdated strategy files to the current version",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
strategy_updater_cmd.set_defaults(func=start_strategy_update)
|
||||
self._build_args(optionlist=ARGS_STRATEGY_UPDATER, parser=strategy_updater_cmd)
|
||||
|
||||
# Add lookahead_analysis subcommand
|
||||
lookahead_analayis_cmd = subparsers.add_parser(
|
||||
'lookahead-analysis',
|
||||
"lookahead-analysis",
|
||||
help="Check for potential look ahead bias.",
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
lookahead_analayis_cmd.set_defaults(func=start_lookahead_analysis)
|
||||
|
||||
self._build_args(optionlist=ARGS_LOOKAHEAD_ANALYSIS,
|
||||
parser=lookahead_analayis_cmd)
|
||||
self._build_args(optionlist=ARGS_LOOKAHEAD_ANALYSIS, parser=lookahead_analayis_cmd)
|
||||
|
||||
# Add recursive_analysis subcommand
|
||||
recursive_analayis_cmd = subparsers.add_parser(
|
||||
'recursive-analysis',
|
||||
"recursive-analysis",
|
||||
help="Check for potential recursive formula issue.",
|
||||
parents=[_common_parser, _strategy_parser]
|
||||
parents=[_common_parser, _strategy_parser],
|
||||
)
|
||||
recursive_analayis_cmd.set_defaults(func=start_recursive_analysis)
|
||||
|
||||
self._build_args(optionlist=ARGS_RECURSIVE_ANALYSIS,
|
||||
parser=recursive_analayis_cmd)
|
||||
self._build_args(optionlist=ARGS_RECURSIVE_ANALYSIS, parser=recursive_analayis_cmd)
|
||||
|
|
|
@ -45,7 +45,7 @@ def ask_user_overwrite(config_path: Path) -> bool:
|
|||
},
|
||||
]
|
||||
answers = prompt(questions)
|
||||
return answers['overwrite']
|
||||
return answers["overwrite"]
|
||||
|
||||
|
||||
def ask_user_config() -> Dict[str, Any]:
|
||||
|
@ -65,7 +65,7 @@ def ask_user_config() -> Dict[str, Any]:
|
|||
"type": "text",
|
||||
"name": "stake_currency",
|
||||
"message": "Please insert your stake currency:",
|
||||
"default": 'USDT',
|
||||
"default": "USDT",
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
|
@ -73,36 +73,38 @@ def ask_user_config() -> Dict[str, Any]:
|
|||
"message": f"Please insert your stake amount (Number or '{UNLIMITED_STAKE_AMOUNT}'):",
|
||||
"default": "unlimited",
|
||||
"validate": lambda val: val == UNLIMITED_STAKE_AMOUNT or validate_is_float(val),
|
||||
"filter": lambda val: '"' + UNLIMITED_STAKE_AMOUNT + '"'
|
||||
if val == UNLIMITED_STAKE_AMOUNT
|
||||
else val
|
||||
"filter": lambda val: (
|
||||
'"' + UNLIMITED_STAKE_AMOUNT + '"' if val == UNLIMITED_STAKE_AMOUNT else val
|
||||
),
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "max_open_trades",
|
||||
"message": "Please insert max_open_trades (Integer or -1 for unlimited open trades):",
|
||||
"default": "3",
|
||||
"validate": lambda val: validate_is_int(val)
|
||||
"validate": lambda val: validate_is_int(val),
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"name": "timeframe_in_config",
|
||||
"message": "Time",
|
||||
"choices": ["Have the strategy define timeframe.", "Override in configuration."]
|
||||
"choices": ["Have the strategy define timeframe.", "Override in configuration."],
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "timeframe",
|
||||
"message": "Please insert your desired timeframe (e.g. 5m):",
|
||||
"default": "5m",
|
||||
"when": lambda x: x["timeframe_in_config"] == 'Override in configuration.'
|
||||
|
||||
"when": lambda x: x["timeframe_in_config"] == "Override in configuration.",
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "fiat_display_currency",
|
||||
"message": "Please insert your display Currency (for reporting):",
|
||||
"default": 'USD',
|
||||
"message": (
|
||||
"Please insert your display Currency for reporting "
|
||||
"(leave empty to disable FIAT conversion):"
|
||||
),
|
||||
"default": "USD",
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
|
@ -111,6 +113,7 @@ def ask_user_config() -> Dict[str, Any]:
|
|||
"choices": [
|
||||
"binance",
|
||||
"binanceus",
|
||||
"bingx",
|
||||
"gate",
|
||||
"htx",
|
||||
"kraken",
|
||||
|
@ -125,33 +128,33 @@ def ask_user_config() -> Dict[str, Any]:
|
|||
"name": "trading_mode",
|
||||
"message": "Do you want to trade Perpetual Swaps (perpetual futures)?",
|
||||
"default": False,
|
||||
"filter": lambda val: 'futures' if val else 'spot',
|
||||
"when": lambda x: x["exchange_name"] in ['binance', 'gate', 'okx'],
|
||||
"filter": lambda val: "futures" if val else "spot",
|
||||
"when": lambda x: x["exchange_name"] in ["binance", "gate", "okx", "bybit"],
|
||||
},
|
||||
{
|
||||
"type": "autocomplete",
|
||||
"name": "exchange_name",
|
||||
"message": "Type your exchange name (Must be supported by ccxt)",
|
||||
"choices": available_exchanges(),
|
||||
"when": lambda x: x["exchange_name"] == 'other'
|
||||
"when": lambda x: x["exchange_name"] == "other",
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_key",
|
||||
"message": "Insert Exchange Key",
|
||||
"when": lambda x: not x['dry_run']
|
||||
"when": lambda x: not x["dry_run"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_secret",
|
||||
"message": "Insert Exchange Secret",
|
||||
"when": lambda x: not x['dry_run']
|
||||
"when": lambda x: not x["dry_run"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_key_password",
|
||||
"message": "Insert Exchange API Key password",
|
||||
"when": lambda x: not x['dry_run'] and x['exchange_name'] in ('kucoin', 'okx')
|
||||
"when": lambda x: not x["dry_run"] and x["exchange_name"] in ("kucoin", "okx"),
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
|
@ -163,13 +166,13 @@ def ask_user_config() -> Dict[str, Any]:
|
|||
"type": "password",
|
||||
"name": "telegram_token",
|
||||
"message": "Insert Telegram token",
|
||||
"when": lambda x: x['telegram']
|
||||
"when": lambda x: x["telegram"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "telegram_chat_id",
|
||||
"message": "Insert Telegram chat id",
|
||||
"when": lambda x: x['telegram']
|
||||
"when": lambda x: x["telegram"],
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
|
@ -180,23 +183,25 @@ def ask_user_config() -> Dict[str, Any]:
|
|||
{
|
||||
"type": "text",
|
||||
"name": "api_server_listen_addr",
|
||||
"message": ("Insert Api server Listen Address (0.0.0.0 for docker, "
|
||||
"otherwise best left untouched)"),
|
||||
"message": (
|
||||
"Insert Api server Listen Address (0.0.0.0 for docker, "
|
||||
"otherwise best left untouched)"
|
||||
),
|
||||
"default": "127.0.0.1" if not running_in_docker() else "0.0.0.0",
|
||||
"when": lambda x: x['api_server']
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "api_server_username",
|
||||
"message": "Insert api-server username",
|
||||
"default": "freqtrader",
|
||||
"when": lambda x: x['api_server']
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "api_server_password",
|
||||
"message": "Insert api-server password",
|
||||
"when": lambda x: x['api_server']
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
]
|
||||
answers = prompt(questions)
|
||||
|
@ -205,15 +210,11 @@ def ask_user_config() -> Dict[str, Any]:
|
|||
# Interrupted questionary sessions return an empty dict.
|
||||
raise OperationalException("User interrupted interactive questions.")
|
||||
# Ensure default is set for non-futures exchanges
|
||||
answers['trading_mode'] = answers.get('trading_mode', "spot")
|
||||
answers['margin_mode'] = (
|
||||
'isolated'
|
||||
if answers.get('trading_mode') == 'futures'
|
||||
else ''
|
||||
)
|
||||
answers["trading_mode"] = answers.get("trading_mode", "spot")
|
||||
answers["margin_mode"] = "isolated" if answers.get("trading_mode") == "futures" else ""
|
||||
# Force JWT token to be a random string
|
||||
answers['api_server_jwt_key'] = secrets.token_hex()
|
||||
answers['api_server_ws_token'] = secrets.token_urlsafe(25)
|
||||
answers["api_server_jwt_key"] = secrets.token_hex()
|
||||
answers["api_server_ws_token"] = secrets.token_urlsafe(25)
|
||||
|
||||
return answers
|
||||
|
||||
|
@ -225,26 +226,26 @@ def deploy_new_config(config_path: Path, selections: Dict[str, Any]) -> None:
|
|||
:param selections: Dict containing selections taken by the user.
|
||||
"""
|
||||
from jinja2.exceptions import TemplateNotFound
|
||||
|
||||
try:
|
||||
exchange_template = MAP_EXCHANGE_CHILDCLASS.get(
|
||||
selections['exchange_name'], selections['exchange_name'])
|
||||
selections["exchange_name"], selections["exchange_name"]
|
||||
)
|
||||
|
||||
selections['exchange'] = render_template(
|
||||
templatefile=f"subtemplates/exchange_{exchange_template}.j2",
|
||||
arguments=selections
|
||||
selections["exchange"] = render_template(
|
||||
templatefile=f"subtemplates/exchange_{exchange_template}.j2", arguments=selections
|
||||
)
|
||||
except TemplateNotFound:
|
||||
selections['exchange'] = render_template(
|
||||
templatefile="subtemplates/exchange_generic.j2",
|
||||
arguments=selections
|
||||
selections["exchange"] = render_template(
|
||||
templatefile="subtemplates/exchange_generic.j2", arguments=selections
|
||||
)
|
||||
|
||||
config_text = render_template(templatefile='base_config.json.j2',
|
||||
arguments=selections)
|
||||
config_text = render_template(templatefile="base_config.json.j2", arguments=selections)
|
||||
|
||||
logger.info(f"Writing config to `{config_path}`.")
|
||||
logger.info(
|
||||
"Please make sure to check the configuration contents and adjust settings to your needs.")
|
||||
"Please make sure to check the configuration contents and adjust settings to your needs."
|
||||
)
|
||||
|
||||
config_path.write_text(config_text)
|
||||
|
||||
|
@ -255,7 +256,7 @@ def start_new_config(args: Dict[str, Any]) -> None:
|
|||
Asking the user questions to fill out the template accordingly.
|
||||
"""
|
||||
|
||||
config_path = Path(args['config'][0])
|
||||
config_path = Path(args["config"][0])
|
||||
chown_user_directory(config_path.parent)
|
||||
if config_path.exists():
|
||||
overwrite = ask_user_overwrite(config_path)
|
||||
|
@ -264,22 +265,22 @@ def start_new_config(args: Dict[str, Any]) -> None:
|
|||
else:
|
||||
raise OperationalException(
|
||||
f"Configuration file `{config_path}` already exists. "
|
||||
"Please delete it or use a different configuration file name.")
|
||||
"Please delete it or use a different configuration file name."
|
||||
)
|
||||
selections = ask_user_config()
|
||||
deploy_new_config(config_path, selections)
|
||||
|
||||
|
||||
def start_show_config(args: Dict[str, Any]) -> None:
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE, set_dry=False)
|
||||
|
||||
# TODO: Sanitize from sensitive info before printing
|
||||
|
||||
print("Your combined configuration is:")
|
||||
config_sanitized = sanitize_config(
|
||||
config['original_config'],
|
||||
show_sensitive=args.get('show_sensitive', False)
|
||||
config["original_config"], show_sensitive=args.get("show_sensitive", False)
|
||||
)
|
||||
|
||||
from rich import print_json
|
||||
|
||||
print_json(data=config_sanitized)
|
||||
|
|
|
@ -5,8 +5,11 @@ from typing import Any, Dict
|
|||
|
||||
from freqtrade.configuration import TimeRange, setup_utils_configuration
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT, DL_DATA_TIMEFRAMES, Config
|
||||
from freqtrade.data.converter import (convert_ohlcv_format, convert_trades_format,
|
||||
convert_trades_to_ohlcv)
|
||||
from freqtrade.data.converter import (
|
||||
convert_ohlcv_format,
|
||||
convert_trades_format,
|
||||
convert_trades_to_ohlcv,
|
||||
)
|
||||
from freqtrade.data.history import download_data_main
|
||||
from freqtrade.enums import CandleType, RunMode, TradingMode
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
|
@ -20,14 +23,17 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
def _check_data_config_download_sanity(config: Config) -> None:
|
||||
if 'days' in config and 'timerange' in config:
|
||||
raise ConfigurationError("--days and --timerange are mutually exclusive. "
|
||||
"You can only specify one or the other.")
|
||||
if "days" in config and "timerange" in config:
|
||||
raise ConfigurationError(
|
||||
"--days and --timerange are mutually exclusive. "
|
||||
"You can only specify one or the other."
|
||||
)
|
||||
|
||||
if 'pairs' not in config:
|
||||
if "pairs" not in config:
|
||||
raise ConfigurationError(
|
||||
"Downloading data requires a list of pairs. "
|
||||
"Please check the documentation on how to configure this.")
|
||||
"Please check the documentation on how to configure this."
|
||||
)
|
||||
|
||||
|
||||
def start_download_data(args: Dict[str, Any]) -> None:
|
||||
|
@ -46,38 +52,41 @@ def start_download_data(args: Dict[str, Any]) -> None:
|
|||
|
||||
|
||||
def start_convert_trades(args: Dict[str, Any]) -> None:
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
|
||||
|
||||
timerange = TimeRange()
|
||||
|
||||
# Remove stake-currency to skip checks which are not relevant for datadownload
|
||||
config['stake_currency'] = ''
|
||||
config["stake_currency"] = ""
|
||||
|
||||
if 'timeframes' not in config:
|
||||
config['timeframes'] = DL_DATA_TIMEFRAMES
|
||||
if "timeframes" not in config:
|
||||
config["timeframes"] = DL_DATA_TIMEFRAMES
|
||||
|
||||
# Init exchange
|
||||
exchange = ExchangeResolver.load_exchange(config, validate=False)
|
||||
# Manual validations of relevant settings
|
||||
|
||||
for timeframe in config['timeframes']:
|
||||
for timeframe in config["timeframes"]:
|
||||
exchange.validate_timeframes(timeframe)
|
||||
available_pairs = [
|
||||
p for p in exchange.get_markets(
|
||||
tradable_only=True, active_only=not config.get('include_inactive')
|
||||
).keys()
|
||||
p
|
||||
for p in exchange.get_markets(
|
||||
tradable_only=True, active_only=not config.get("include_inactive")
|
||||
).keys()
|
||||
]
|
||||
|
||||
expanded_pairs = dynamic_expand_pairlist(config, available_pairs)
|
||||
|
||||
# Convert downloaded trade data to different timeframes
|
||||
convert_trades_to_ohlcv(
|
||||
pairs=expanded_pairs, timeframes=config['timeframes'],
|
||||
datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
|
||||
data_format_ohlcv=config['dataformat_ohlcv'],
|
||||
data_format_trades=config['dataformat_trades'],
|
||||
candle_type=config.get('candle_type_def', CandleType.SPOT)
|
||||
pairs=expanded_pairs,
|
||||
timeframes=config["timeframes"],
|
||||
datadir=config["datadir"],
|
||||
timerange=timerange,
|
||||
erase=bool(config.get("erase")),
|
||||
data_format_ohlcv=config["dataformat_ohlcv"],
|
||||
data_format_trades=config["dataformat_trades"],
|
||||
candle_type=config.get("candle_type_def", CandleType.SPOT),
|
||||
)
|
||||
|
||||
|
||||
|
@ -88,14 +97,19 @@ def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
|
|||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
if ohlcv:
|
||||
migrate_data(config)
|
||||
convert_ohlcv_format(config,
|
||||
convert_from=args['format_from'],
|
||||
convert_to=args['format_to'],
|
||||
erase=args['erase'])
|
||||
convert_ohlcv_format(
|
||||
config,
|
||||
convert_from=args["format_from"],
|
||||
convert_to=args["format_to"],
|
||||
erase=args["erase"],
|
||||
)
|
||||
else:
|
||||
convert_trades_format(config,
|
||||
convert_from=args['format_from_trades'], convert_to=args['format_to'],
|
||||
erase=args['erase'])
|
||||
convert_trades_format(
|
||||
config,
|
||||
convert_from=args["format_from_trades"],
|
||||
convert_to=args["format_to"],
|
||||
erase=args["erase"],
|
||||
)
|
||||
|
||||
|
||||
def start_list_data(args: Dict[str, Any]) -> None:
|
||||
|
@ -108,45 +122,59 @@ def start_list_data(args: Dict[str, Any]) -> None:
|
|||
from tabulate import tabulate
|
||||
|
||||
from freqtrade.data.history import get_datahandler
|
||||
dhc = get_datahandler(config['datadir'], config['dataformat_ohlcv'])
|
||||
|
||||
dhc = get_datahandler(config["datadir"], config["dataformat_ohlcv"])
|
||||
|
||||
paircombs = dhc.ohlcv_get_available_data(
|
||||
config['datadir'],
|
||||
config.get('trading_mode', TradingMode.SPOT)
|
||||
)
|
||||
config["datadir"], config.get("trading_mode", TradingMode.SPOT)
|
||||
)
|
||||
|
||||
if args['pairs']:
|
||||
paircombs = [comb for comb in paircombs if comb[0] in args['pairs']]
|
||||
if args["pairs"]:
|
||||
paircombs = [comb for comb in paircombs if comb[0] in args["pairs"]]
|
||||
|
||||
print(f"Found {len(paircombs)} pair / timeframe combinations.")
|
||||
if not config.get('show_timerange'):
|
||||
if not config.get("show_timerange"):
|
||||
groupedpair = defaultdict(list)
|
||||
for pair, timeframe, candle_type in sorted(
|
||||
paircombs,
|
||||
key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
|
||||
paircombs, key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
|
||||
):
|
||||
groupedpair[(pair, candle_type)].append(timeframe)
|
||||
|
||||
if groupedpair:
|
||||
print(tabulate([
|
||||
(pair, ', '.join(timeframes), candle_type)
|
||||
for (pair, candle_type), timeframes in groupedpair.items()
|
||||
],
|
||||
headers=("Pair", "Timeframe", "Type"),
|
||||
tablefmt='psql', stralign='right'))
|
||||
print(
|
||||
tabulate(
|
||||
[
|
||||
(pair, ", ".join(timeframes), candle_type)
|
||||
for (pair, candle_type), timeframes in groupedpair.items()
|
||||
],
|
||||
headers=("Pair", "Timeframe", "Type"),
|
||||
tablefmt="psql",
|
||||
stralign="right",
|
||||
)
|
||||
)
|
||||
else:
|
||||
paircombs1 = [(
|
||||
pair, timeframe, candle_type,
|
||||
*dhc.ohlcv_data_min_max(pair, timeframe, candle_type)
|
||||
) for pair, timeframe, candle_type in paircombs]
|
||||
paircombs1 = [
|
||||
(pair, timeframe, candle_type, *dhc.ohlcv_data_min_max(pair, timeframe, candle_type))
|
||||
for pair, timeframe, candle_type in paircombs
|
||||
]
|
||||
|
||||
print(tabulate([
|
||||
(pair, timeframe, candle_type,
|
||||
start.strftime(DATETIME_PRINT_FORMAT),
|
||||
end.strftime(DATETIME_PRINT_FORMAT), length)
|
||||
for pair, timeframe, candle_type, start, end, length in sorted(
|
||||
paircombs1,
|
||||
key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2]))
|
||||
],
|
||||
headers=("Pair", "Timeframe", "Type", 'From', 'To', 'Candles'),
|
||||
tablefmt='psql', stralign='right'))
|
||||
print(
|
||||
tabulate(
|
||||
[
|
||||
(
|
||||
pair,
|
||||
timeframe,
|
||||
candle_type,
|
||||
start.strftime(DATETIME_PRINT_FORMAT),
|
||||
end.strftime(DATETIME_PRINT_FORMAT),
|
||||
length,
|
||||
)
|
||||
for pair, timeframe, candle_type, start, end, length in sorted(
|
||||
paircombs1, key=lambda x: (x[0], timeframe_to_minutes(x[1]), x[2])
|
||||
)
|
||||
],
|
||||
headers=("Pair", "Timeframe", "Type", "From", "To", "Candles"),
|
||||
tablefmt="psql",
|
||||
stralign="right",
|
||||
)
|
||||
)
|
||||
|
|
|
@@ -19,9 +19,9 @@ def start_convert_db(args: Dict[str, Any]) -> None:

     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

-    init_db(config['db_url'])
+    init_db(config["db_url"])
     session_target = Trade.session
-    init_db(config['db_url_from'])
+    init_db(config["db_url_from"])
     logger.info("Starting db migration.")

     trade_count = 0
@@ -47,9 +47,11 @@ def start_convert_db(args: Dict[str, Any]) -> None:
     max_order_id = session_target.scalar(select(func.max(Order.id)))
     max_pairlock_id = session_target.scalar(select(func.max(PairLock.id)))

-    set_sequence_ids(session_target.get_bind(),
-                     trade_id=max_trade_id,
-                     order_id=max_order_id,
-                     pairlock_id=max_pairlock_id)
+    set_sequence_ids(
+        session_target.get_bind(),
+        trade_id=max_trade_id,
+        order_id=max_order_id,
+        pairlock_id=max_pairlock_id,
+    )

     logger.info(f"Migrated {trade_count} Trades, and {pairlock_count} Pairlocks.")

@@ -38,7 +38,7 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
     """
     Deploy new strategy from template to strategy_path
     """
-    fallback = 'full'
+    fallback = "full"
     attributes = render_template_with_fallback(
         templatefile=f"strategy_subtemplates/strategy_attributes_{subtemplate}.j2",
         templatefallbackfile=f"strategy_subtemplates/strategy_attributes_{fallback}.j2",
@@ -64,33 +64,35 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
         templatefallbackfile="strategy_subtemplates/strategy_methods_empty.j2",
     )

-    strategy_text = render_template(templatefile='base_strategy.py.j2',
-                                    arguments={"strategy": strategy_name,
-                                               "attributes": attributes,
-                                               "indicators": indicators,
-                                               "buy_trend": buy_trend,
-                                               "sell_trend": sell_trend,
-                                               "plot_config": plot_config,
-                                               "additional_methods": additional_methods,
-                                               })
+    strategy_text = render_template(
+        templatefile="base_strategy.py.j2",
+        arguments={
+            "strategy": strategy_name,
+            "attributes": attributes,
+            "indicators": indicators,
+            "buy_trend": buy_trend,
+            "sell_trend": sell_trend,
+            "plot_config": plot_config,
+            "additional_methods": additional_methods,
+        },
+    )

     logger.info(f"Writing strategy to `{strategy_path}`.")
     strategy_path.write_text(strategy_text)


 def start_new_strategy(args: Dict[str, Any]) -> None:
-
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

     if "strategy" in args and args["strategy"]:
-
-        new_path = config['user_data_dir'] / USERPATH_STRATEGIES / (args['strategy'] + '.py')
+        new_path = config["user_data_dir"] / USERPATH_STRATEGIES / (args["strategy"] + ".py")

         if new_path.exists():
-            raise OperationalException(f"`{new_path}` already exists. "
-                                       "Please choose another Strategy Name.")
+            raise OperationalException(
+                f"`{new_path}` already exists. Please choose another Strategy Name."
+            )

-        deploy_new_strategy(args['strategy'], new_path, args['template'])
+        deploy_new_strategy(args["strategy"], new_path, args["template"])

     else:
         raise ConfigurationError("`new-strategy` requires --strategy to be set.")
@@ -100,8 +102,8 @@ def clean_ui_subdir(directory: Path):
     if directory.is_dir():
         logger.info("Removing UI directory content.")

-        for p in reversed(list(directory.glob('**/*'))):  # iterate contents from leaves to root
-            if p.name in ('.gitkeep', 'fallback_file.html'):
+        for p in reversed(list(directory.glob("**/*"))):  # iterate contents from leaves to root
+            if p.name in (".gitkeep", "fallback_file.html"):
                 continue
             if p.is_file():
                 p.unlink()
@@ -110,11 +112,11 @@ def clean_ui_subdir(directory: Path):


 def read_ui_version(dest_folder: Path) -> Optional[str]:
-    file = dest_folder / '.uiversion'
+    file = dest_folder / ".uiversion"
     if not file.is_file():
         return None

-    with file.open('r') as f:
+    with file.open("r") as f:
         return f.read()


@@ -133,12 +135,12 @@ def download_and_install_ui(dest_folder: Path, dl_url: str, version: str):
             destfile.mkdir(exist_ok=True)
         else:
             destfile.write_bytes(x.read())
-    with (dest_folder / '.uiversion').open('w') as f:
+    with (dest_folder / ".uiversion").open("w") as f:
         f.write(version)


 def get_ui_download_url(version: Optional[str] = None) -> Tuple[str, str]:
-    base_url = 'https://api.github.com/repos/freqtrade/frequi/'
+    base_url = "https://api.github.com/repos/freqtrade/frequi/"
     # Get base UI Repo path

     resp = requests.get(f"{base_url}releases", timeout=req_timeout)
@@ -146,42 +148,41 @@ def get_ui_download_url(version: Optional[str] = None) -> Tuple[str, str]:
     r = resp.json()

     if version:
-        tmp = [x for x in r if x['name'] == version]
+        tmp = [x for x in r if x["name"] == version]
         if tmp:
-            latest_version = tmp[0]['name']
-            assets = tmp[0].get('assets', [])
+            latest_version = tmp[0]["name"]
+            assets = tmp[0].get("assets", [])
         else:
             raise ValueError("UI-Version not found.")
     else:
-        latest_version = r[0]['name']
-        assets = r[0].get('assets', [])
-    dl_url = ''
+        latest_version = r[0]["name"]
+        assets = r[0].get("assets", [])
+    dl_url = ""
     if assets and len(assets) > 0:
-        dl_url = assets[0]['browser_download_url']
+        dl_url = assets[0]["browser_download_url"]

     # URL not found - try assets url
     if not dl_url:
-        assets = r[0]['assets_url']
+        assets = r[0]["assets_url"]
         resp = requests.get(assets, timeout=req_timeout)
         r = resp.json()
-        dl_url = r[0]['browser_download_url']
+        dl_url = r[0]["browser_download_url"]

     return dl_url, latest_version


 def start_install_ui(args: Dict[str, Any]) -> None:
-
-    dest_folder = Path(__file__).parents[1] / 'rpc/api_server/ui/installed/'
+    dest_folder = Path(__file__).parents[1] / "rpc/api_server/ui/installed/"
     # First make sure the assets are removed.
-    dl_url, latest_version = get_ui_download_url(args.get('ui_version'))
+    dl_url, latest_version = get_ui_download_url(args.get("ui_version"))

     curr_version = read_ui_version(dest_folder)
-    if curr_version == latest_version and not args.get('erase_ui_only'):
+    if curr_version == latest_version and not args.get("erase_ui_only"):
         logger.info(f"UI already up-to-date, FreqUI Version {curr_version}.")
         return

     clean_ui_subdir(dest_folder)
-    if args.get('erase_ui_only'):
+    if args.get("erase_ui_only"):
         logger.info("Erased UI directory content. Not downloading new version.")
     else:
         # Download a new version

@@ -22,15 +22,15 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:

     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

-    print_colorized = config.get('print_colorized', False)
-    print_json = config.get('print_json', False)
-    export_csv = config.get('export_csv')
-    no_details = config.get('hyperopt_list_no_details', False)
+    print_colorized = config.get("print_colorized", False)
+    print_json = config.get("print_json", False)
+    export_csv = config.get("export_csv")
+    no_details = config.get("hyperopt_list_no_details", False)
     no_header = False

     results_file = get_latest_hyperopt_file(
-        config['user_data_dir'] / 'hyperopt_results',
-        config.get('hyperoptexportfilename'))
+        config["user_data_dir"] / "hyperopt_results", config.get("hyperoptexportfilename")
+    )

     # Previous evaluations
     epochs, total_epochs = HyperoptTools.load_filtered_results(results_file, config)
@@ -40,21 +40,26 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:

     if not export_csv:
         try:
-            print(HyperoptTools.get_result_table(config, epochs, total_epochs,
-                                                 not config.get('hyperopt_list_best', False),
-                                                 print_colorized, 0))
+            print(
+                HyperoptTools.get_result_table(
+                    config,
+                    epochs,
+                    total_epochs,
+                    not config.get("hyperopt_list_best", False),
+                    print_colorized,
+                    0,
+                )
+            )
         except KeyboardInterrupt:
-            print('User interrupted..')
+            print("User interrupted..")

     if epochs and not no_details:
-        sorted_epochs = sorted(epochs, key=itemgetter('loss'))
+        sorted_epochs = sorted(epochs, key=itemgetter("loss"))
         results = sorted_epochs[0]
         HyperoptTools.show_epoch_details(results, total_epochs, print_json, no_header)

     if epochs and export_csv:
-        HyperoptTools.export_csv_file(
-            config, epochs, export_csv
-        )
+        HyperoptTools.export_csv_file(config, epochs, export_csv)


 def start_hyperopt_show(args: Dict[str, Any]) -> None:
@@ -65,13 +70,13 @@ def start_hyperopt_show(args: Dict[str, Any]) -> None:

     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

-    print_json = config.get('print_json', False)
-    no_header = config.get('hyperopt_show_no_header', False)
+    print_json = config.get("print_json", False)
+    no_header = config.get("hyperopt_show_no_header", False)
     results_file = get_latest_hyperopt_file(
-        config['user_data_dir'] / 'hyperopt_results',
-        config.get('hyperoptexportfilename'))
+        config["user_data_dir"] / "hyperopt_results", config.get("hyperoptexportfilename")
+    )

-    n = config.get('hyperopt_show_index', -1)
+    n = config.get("hyperopt_show_index", -1)

     # Previous evaluations
     epochs, total_epochs = HyperoptTools.load_filtered_results(results_file, config)
@@ -80,10 +85,12 @@ def start_hyperopt_show(args: Dict[str, Any]) -> None:

     if n > filtered_epochs:
         raise OperationalException(
-            f"The index of the epoch to show should be less than {filtered_epochs + 1}.")
+            f"The index of the epoch to show should be less than {filtered_epochs + 1}."
+        )
     if n < -filtered_epochs:
         raise OperationalException(
-            f"The index of the epoch to show should be greater than {-filtered_epochs - 1}.")
+            f"The index of the epoch to show should be greater than {-filtered_epochs - 1}."
+        )

     # Translate epoch index from human-readable format to pythonic
     if n > 0:
@@ -92,13 +99,18 @@ def start_hyperopt_show(args: Dict[str, Any]) -> None:
     if epochs:
         val = epochs[n]

-        metrics = val['results_metrics']
-        if 'strategy_name' in metrics:
-            strategy_name = metrics['strategy_name']
-            show_backtest_result(strategy_name, metrics,
-                                 metrics['stake_currency'], config.get('backtest_breakdown', []))
+        metrics = val["results_metrics"]
+        if "strategy_name" in metrics:
+            strategy_name = metrics["strategy_name"]
+            show_backtest_result(
+                strategy_name,
+                metrics,
+                metrics["stake_currency"],
+                config.get("backtest_breakdown", []),
+            )

             HyperoptTools.try_export_params(config, strategy_name, val)

-        HyperoptTools.show_epoch_details(val, total_epochs, print_json, no_header,
-                                         header_str="Epoch details")
+        HyperoptTools.show_epoch_details(
+            val, total_epochs, print_json, no_header, header_str="Epoch details"
+        )

@@ -26,42 +26,47 @@ def start_list_exchanges(args: Dict[str, Any]) -> None:
     :param args: Cli args from Arguments()
     :return: None
     """
-    exchanges = list_available_exchanges(args['list_exchanges_all'])
+    exchanges = list_available_exchanges(args["list_exchanges_all"])

-    if args['print_one_column']:
-        print('\n'.join([e['name'] for e in exchanges]))
+    if args["print_one_column"]:
+        print("\n".join([e["name"] for e in exchanges]))
     else:
         headers = {
-            'name': 'Exchange name',
-            'supported': 'Supported',
-            'trade_modes': 'Markets',
-            'comment': 'Reason',
-        }
-        headers.update({'valid': 'Valid'} if args['list_exchanges_all'] else {})
+            "name": "Exchange name",
+            "supported": "Supported",
+            "trade_modes": "Markets",
+            "comment": "Reason",
+        }
+        headers.update({"valid": "Valid"} if args["list_exchanges_all"] else {})

         def build_entry(exchange: ValidExchangesType, valid: bool):
-            valid_entry = {'valid': exchange['valid']} if valid else {}
+            valid_entry = {"valid": exchange["valid"]} if valid else {}
             result: Dict[str, Union[str, bool]] = {
-                'name': exchange['name'],
+                "name": exchange["name"],
                 **valid_entry,
-                'supported': 'Official' if exchange['supported'] else '',
-                'trade_modes': ', '.join(
-                    (f"{a['margin_mode']} " if a['margin_mode'] else '') + a['trading_mode']
-                    for a in exchange['trade_modes']
+                "supported": "Official" if exchange["supported"] else "",
+                "trade_modes": ", ".join(
+                    (f"{a['margin_mode']} " if a["margin_mode"] else "") + a["trading_mode"]
+                    for a in exchange["trade_modes"]
                 ),
-                'comment': exchange['comment'],
+                "comment": exchange["comment"],
             }

             return result

-        if args['list_exchanges_all']:
+        if args["list_exchanges_all"]:
             print("All exchanges supported by the ccxt library:")
             exchanges = [build_entry(e, True) for e in exchanges]
         else:
             print("Exchanges available for Freqtrade:")
-            exchanges = [build_entry(e, False) for e in exchanges if e['valid'] is not False]
+            exchanges = [build_entry(e, False) for e in exchanges if e["valid"] is not False]

-        print(tabulate(exchanges, headers=headers, ))
+        print(
+            tabulate(
+                exchanges,
+                headers=headers,
+            )
+        )


 def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
@@ -71,26 +76,35 @@ def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
         yellow = Fore.YELLOW
         reset = Style.RESET_ALL
     else:
-        red = ''
-        yellow = ''
-        reset = ''
+        red = ""
+        yellow = ""
+        reset = ""

-    names = [s['name'] for s in objs]
-    objs_to_print = [{
-        'name': s['name'] if s['name'] else "--",
-        'location': s['location_rel'],
-        'status': (red + "LOAD FAILED" + reset if s['class'] is None
-                   else "OK" if names.count(s['name']) == 1
-                   else yellow + "DUPLICATE NAME" + reset)
-    } for s in objs]
+    names = [s["name"] for s in objs]
+    objs_to_print = [
+        {
+            "name": s["name"] if s["name"] else "--",
+            "location": s["location_rel"],
+            "status": (
+                red + "LOAD FAILED" + reset
+                if s["class"] is None
+                else "OK"
+                if names.count(s["name"]) == 1
+                else yellow + "DUPLICATE NAME" + reset
+            ),
+        }
+        for s in objs
+    ]
     for idx, s in enumerate(objs):
-        if 'hyperoptable' in s:
-            objs_to_print[idx].update({
-                'hyperoptable': "Yes" if s['hyperoptable']['count'] > 0 else "No",
-                'buy-Params': len(s['hyperoptable'].get('buy', [])),
-                'sell-Params': len(s['hyperoptable'].get('sell', [])),
-            })
-    print(tabulate(objs_to_print, headers='keys', tablefmt='psql', stralign='right'))
+        if "hyperoptable" in s:
+            objs_to_print[idx].update(
+                {
+                    "hyperoptable": "Yes" if s["hyperoptable"]["count"] > 0 else "No",
+                    "buy-Params": len(s["hyperoptable"].get("buy", [])),
+                    "sell-Params": len(s["hyperoptable"].get("sell", [])),
+                }
+            )
+    print(tabulate(objs_to_print, headers="keys", tablefmt="psql", stralign="right"))


 def start_list_strategies(args: Dict[str, Any]) -> None:
@@ -100,19 +114,20 @@ def start_list_strategies(args: Dict[str, Any]) -> None:
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

     strategy_objs = StrategyResolver.search_all_objects(
-        config, not args['print_one_column'], config.get('recursive_strategy_search', False))
+        config, not args["print_one_column"], config.get("recursive_strategy_search", False)
+    )
     # Sort alphabetically
-    strategy_objs = sorted(strategy_objs, key=lambda x: x['name'])
+    strategy_objs = sorted(strategy_objs, key=lambda x: x["name"])
     for obj in strategy_objs:
-        if obj['class']:
-            obj['hyperoptable'] = obj['class'].detect_all_parameters()
+        if obj["class"]:
+            obj["hyperoptable"] = obj["class"].detect_all_parameters()
         else:
-            obj['hyperoptable'] = {'count': 0}
+            obj["hyperoptable"] = {"count": 0}

-    if args['print_one_column']:
-        print('\n'.join([s['name'] for s in strategy_objs]))
+    if args["print_one_column"]:
+        print("\n".join([s["name"] for s in strategy_objs]))
     else:
-        _print_objs_tabular(strategy_objs, config.get('print_colorized', False))
+        _print_objs_tabular(strategy_objs, config.get("print_colorized", False))


 def start_list_freqAI_models(args: Dict[str, Any]) -> None:
@@ -121,13 +136,14 @@ def start_list_freqAI_models(args: Dict[str, Any]) -> None:
     """
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
     from freqtrade.resolvers.freqaimodel_resolver import FreqaiModelResolver
-    model_objs = FreqaiModelResolver.search_all_objects(config, not args['print_one_column'])
+
+    model_objs = FreqaiModelResolver.search_all_objects(config, not args["print_one_column"])
     # Sort alphabetically
-    model_objs = sorted(model_objs, key=lambda x: x['name'])
-    if args['print_one_column']:
-        print('\n'.join([s['name'] for s in model_objs]))
+    model_objs = sorted(model_objs, key=lambda x: x["name"])
+    if args["print_one_column"]:
+        print("\n".join([s["name"] for s in model_objs]))
     else:
-        _print_objs_tabular(model_objs, config.get('print_colorized', False))
+        _print_objs_tabular(model_objs, config.get("print_colorized", False))


 def start_list_timeframes(args: Dict[str, Any]) -> None:
@@ -136,16 +152,18 @@ def start_list_timeframes(args: Dict[str, Any]) -> None:
     """
     config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
     # Do not use timeframe set in the config
-    config['timeframe'] = None
+    config["timeframe"] = None

     # Init exchange
     exchange = ExchangeResolver.load_exchange(config, validate=False)

-    if args['print_one_column']:
-        print('\n'.join(exchange.timeframes))
+    if args["print_one_column"]:
+        print("\n".join(exchange.timeframes))
     else:
-        print(f"Timeframes available for the exchange `{exchange.name}`: "
-              f"{', '.join(exchange.timeframes)}")
+        print(
+            f"Timeframes available for the exchange `{exchange.name}`: "
+            f"{', '.join(exchange.timeframes)}"
+        )


 def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
@@ -161,51 +179,75 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
     exchange = ExchangeResolver.load_exchange(config, validate=False)

     # By default only active pairs/markets are to be shown
-    active_only = not args.get('list_pairs_all', False)
+    active_only = not args.get("list_pairs_all", False)

-    base_currencies = args.get('base_currencies', [])
-    quote_currencies = args.get('quote_currencies', [])
+    base_currencies = args.get("base_currencies", [])
+    quote_currencies = args.get("quote_currencies", [])

     try:
-        pairs = exchange.get_markets(base_currencies=base_currencies,
-                                     quote_currencies=quote_currencies,
-                                     tradable_only=pairs_only,
-                                     active_only=active_only)
+        pairs = exchange.get_markets(
+            base_currencies=base_currencies,
+            quote_currencies=quote_currencies,
+            tradable_only=pairs_only,
+            active_only=active_only,
+        )
         # Sort the pairs/markets by symbol
         pairs = dict(sorted(pairs.items()))
     except Exception as e:
         raise OperationalException(f"Cannot get markets. Reason: {e}") from e

     else:
-        summary_str = ((f"Exchange {exchange.name} has {len(pairs)} ") +
-                       ("active " if active_only else "") +
-                       (plural(len(pairs), "pair" if pairs_only else "market")) +
-                       (f" with {', '.join(base_currencies)} as base "
-                        f"{plural(len(base_currencies), 'currency', 'currencies')}"
-                        if base_currencies else "") +
-                       (" and" if base_currencies and quote_currencies else "") +
-                       (f" with {', '.join(quote_currencies)} as quote "
-                        f"{plural(len(quote_currencies), 'currency', 'currencies')}"
-                        if quote_currencies else ""))
+        summary_str = (
+            (f"Exchange {exchange.name} has {len(pairs)} ")
+            + ("active " if active_only else "")
+            + (plural(len(pairs), "pair" if pairs_only else "market"))
+            + (
+                f" with {', '.join(base_currencies)} as base "
+                f"{plural(len(base_currencies), 'currency', 'currencies')}"
+                if base_currencies
+                else ""
+            )
+            + (" and" if base_currencies and quote_currencies else "")
+            + (
+                f" with {', '.join(quote_currencies)} as quote "
+                f"{plural(len(quote_currencies), 'currency', 'currencies')}"
+                if quote_currencies
+                else ""
+            )
+        )

-        headers = ["Id", "Symbol", "Base", "Quote", "Active",
-                   "Spot", "Margin", "Future", "Leverage"]
+        headers = [
+            "Id",
+            "Symbol",
+            "Base",
+            "Quote",
+            "Active",
+            "Spot",
+            "Margin",
+            "Future",
+            "Leverage",
+        ]

-        tabular_data = [{
-            'Id': v['id'],
-            'Symbol': v['symbol'],
-            'Base': v['base'],
-            'Quote': v['quote'],
-            'Active': market_is_active(v),
-            'Spot': 'Spot' if exchange.market_is_spot(v) else '',
-            'Margin': 'Margin' if exchange.market_is_margin(v) else '',
-            'Future': 'Future' if exchange.market_is_future(v) else '',
-            'Leverage': exchange.get_max_leverage(v['symbol'], 20)
-        } for _, v in pairs.items()]
+        tabular_data = [
+            {
+                "Id": v["id"],
+                "Symbol": v["symbol"],
+                "Base": v["base"],
+                "Quote": v["quote"],
+                "Active": market_is_active(v),
+                "Spot": "Spot" if exchange.market_is_spot(v) else "",
+                "Margin": "Margin" if exchange.market_is_margin(v) else "",
+                "Future": "Future" if exchange.market_is_future(v) else "",
+                "Leverage": exchange.get_max_leverage(v["symbol"], 20),
+            }
+            for _, v in pairs.items()
+        ]

-        if (args.get('print_one_column', False) or
-                args.get('list_pairs_print_json', False) or
-                args.get('print_csv', False)):
+        if (
+            args.get("print_one_column", False)
+            or args.get("list_pairs_print_json", False)
+            or args.get("print_csv", False)
+        ):
             # Print summary string in the log in case of machine-readable
             # regular formats.
             logger.info(f"{summary_str}.")
@@ -215,24 +257,26 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
         print()

     if pairs:
-        if args.get('print_list', False):
+        if args.get("print_list", False):
             # print data as a list, with human-readable summary
             print(f"{summary_str}: {', '.join(pairs.keys())}.")
-        elif args.get('print_one_column', False):
-            print('\n'.join(pairs.keys()))
-        elif args.get('list_pairs_print_json', False):
+        elif args.get("print_one_column", False):
+            print("\n".join(pairs.keys()))
+        elif args.get("list_pairs_print_json", False):
             print(rapidjson.dumps(list(pairs.keys()), default=str))
-        elif args.get('print_csv', False):
+        elif args.get("print_csv", False):
             writer = csv.DictWriter(sys.stdout, fieldnames=headers)
             writer.writeheader()
             writer.writerows(tabular_data)
         else:
             # print data as a table, with the human-readable summary
             print(f"{summary_str}:")
-            print(tabulate(tabular_data, headers='keys', tablefmt='psql', stralign='right'))
-    elif not (args.get('print_one_column', False) or
-              args.get('list_pairs_print_json', False) or
-              args.get('print_csv', False)):
+            print(tabulate(tabular_data, headers="keys", tablefmt="psql", stralign="right"))
+    elif not (
+        args.get("print_one_column", False)
+        or args.get("list_pairs_print_json", False)
+        or args.get("print_csv", False)
+    ):
         print(f"{summary_str}.")


@@ -243,21 +287,22 @@ def start_show_trades(args: Dict[str, Any]) -> None:
     import json

     from freqtrade.persistence import Trade, init_db

     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

-    if 'db_url' not in config:
+    if "db_url" not in config:
         raise ConfigurationError("--db-url is required for this command.")

     logger.info(f'Using DB: "{parse_db_uri_for_logging(config["db_url"])}"')
-    init_db(config['db_url'])
+    init_db(config["db_url"])
     tfilter = []

-    if config.get('trade_ids'):
-        tfilter.append(Trade.id.in_(config['trade_ids']))
+    if config.get("trade_ids"):
+        tfilter.append(Trade.id.in_(config["trade_ids"]))

     trades = Trade.get_trades(tfilter).all()
     logger.info(f"Printing {len(trades)} Trades: ")
-    if config.get('print_json', False):
+    if config.get("print_json", False):
         print(json.dumps([trade.to_json() for trade in trades], indent=4))
     else:
         for trade in trades:

@@ -21,20 +21,22 @@ def setup_optimize_configuration(args: Dict[str, Any], method: RunMode) -> Dict[
     config = setup_utils_configuration(args, method)

     no_unlimited_runmodes = {
-        RunMode.BACKTEST: 'backtesting',
-        RunMode.HYPEROPT: 'hyperoptimization',
+        RunMode.BACKTEST: "backtesting",
+        RunMode.HYPEROPT: "hyperoptimization",
     }
     if method in no_unlimited_runmodes.keys():
-        wallet_size = config['dry_run_wallet'] * config['tradable_balance_ratio']
+        wallet_size = config["dry_run_wallet"] * config["tradable_balance_ratio"]
         # tradable_balance_ratio
-        if (config['stake_amount'] != constants.UNLIMITED_STAKE_AMOUNT
-                and config['stake_amount'] > wallet_size):
-            wallet = fmt_coin(wallet_size, config['stake_currency'])
-            stake = fmt_coin(config['stake_amount'], config['stake_currency'])
+        if (
+            config["stake_amount"] != constants.UNLIMITED_STAKE_AMOUNT
+            and config["stake_amount"] > wallet_size
+        ):
+            wallet = fmt_coin(wallet_size, config["stake_currency"])
+            stake = fmt_coin(config["stake_amount"], config["stake_currency"])
             raise ConfigurationError(
                 f"Starting balance ({wallet}) is smaller than stake_amount {stake}. "
                 f"Wallet is calculated as `dry_run_wallet * tradable_balance_ratio`."
-            )
+            )

     return config

@@ -51,7 +53,7 @@ def start_backtesting(args: Dict[str, Any]) -> None:
     # Initialize configuration
     config = setup_optimize_configuration(args, RunMode.BACKTEST)

-    logger.info('Starting freqtrade in Backtesting mode')
+    logger.info("Starting freqtrade in Backtesting mode")

     # Initialize backtesting object
     backtesting = Backtesting(config)
@@ -68,7 +70,7 @@ def start_backtesting_show(args: Dict[str, Any]) -> None:
     from freqtrade.data.btanalysis import load_backtest_stats
     from freqtrade.optimize.optimize_reports import show_backtest_results, show_sorted_pairlist

-    results = load_backtest_stats(config['exportfilename'])
+    results = load_backtest_stats(config["exportfilename"])

     show_backtest_results(config, results)
     show_sorted_pairlist(config, results)
@@ -87,20 +89,20 @@ def start_hyperopt(args: Dict[str, Any]) -> None:
         from freqtrade.optimize.hyperopt import Hyperopt
     except ImportError as e:
         raise OperationalException(
-            f"{e}. Please ensure that the hyperopt dependencies are installed.") from e
+            f"{e}. Please ensure that the hyperopt dependencies are installed."
+        ) from e
     # Initialize configuration
     config = setup_optimize_configuration(args, RunMode.HYPEROPT)

-    logger.info('Starting freqtrade in Hyperopt mode')
+    logger.info("Starting freqtrade in Hyperopt mode")

     lock = FileLock(Hyperopt.get_lock_filename(config))

     try:
         with lock.acquire(timeout=1):
-
             # Remove noisy log messages
-            logging.getLogger('hyperopt.tpe').setLevel(logging.WARNING)
-            logging.getLogger('filelock').setLevel(logging.WARNING)
+            logging.getLogger("hyperopt.tpe").setLevel(logging.WARNING)
+            logging.getLogger("filelock").setLevel(logging.WARNING)

             # Initialize backtesting object
             hyperopt = Hyperopt(config)
@@ -108,9 +110,11 @@ def start_hyperopt(args: Dict[str, Any]) -> None:

     except Timeout:
         logger.info("Another running instance of freqtrade Hyperopt detected.")
-        logger.info("Simultaneous execution of multiple Hyperopt commands is not supported. "
-                    "Hyperopt module is resource hungry. Please run your Hyperopt sequentially "
-                    "or on separate machines.")
+        logger.info(
+            "Simultaneous execution of multiple Hyperopt commands is not supported. "
+            "Hyperopt module is resource hungry. Please run your Hyperopt sequentially "
+            "or on separate machines."
+        )
         logger.info("Quitting now.")
         # TODO: return False here in order to help freqtrade to exit
         # with non-zero exit code...
@@ -127,7 +131,7 @@ def start_edge(args: Dict[str, Any]) -> None:

     # Initialize configuration
     config = setup_optimize_configuration(args, RunMode.EDGE)
-    logger.info('Starting freqtrade in Edge mode')
+    logger.info("Starting freqtrade in Edge mode")

     # Initialize Edge object
     edge_cli = EdgeCli(config)

@@ -17,28 +17,29 @@ def start_test_pairlist(args: Dict[str, Any]) -> None:
     """
     from freqtrade.persistence import FtNoDBContext
     from freqtrade.plugins.pairlistmanager import PairListManager

     config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)

     exchange = ExchangeResolver.load_exchange(config, validate=False)

-    quote_currencies = args.get('quote_currencies')
+    quote_currencies = args.get("quote_currencies")
     if not quote_currencies:
-        quote_currencies = [config.get('stake_currency')]
+        quote_currencies = [config.get("stake_currency")]
     results = {}
     with FtNoDBContext():
         for curr in quote_currencies:
-            config['stake_currency'] = curr
+            config["stake_currency"] = curr
             pairlists = PairListManager(exchange, config)
             pairlists.refresh_pairlist()
             results[curr] = pairlists.whitelist

     for curr, pairlist in results.items():
-        if not args.get('print_one_column', False) and not args.get('list_pairs_print_json', False):
+        if not args.get("print_one_column", False) and not args.get("list_pairs_print_json", False):
             print(f"Pairs for {curr}: ")

-        if args.get('print_one_column', False):
-            print('\n'.join(pairlist))
-        elif args.get('list_pairs_print_json', False):
+        if args.get("print_one_column", False):
+            print("\n".join(pairlist))
+        elif args.get("list_pairs_print_json", False):
             print(rapidjson.dumps(list(pairlist), default=str))
         else:
             print(pairlist)

@@ -6,10 +6,11 @@ from freqtrade.exceptions import ConfigurationError


 def validate_plot_args(args: Dict[str, Any]) -> None:
-    if not args.get('datadir') and not args.get('config'):
+    if not args.get("datadir") and not args.get("config"):
         raise ConfigurationError(
             "You need to specify either `--datadir` or `--config` "
-            "for plot-profit and plot-dataframe.")
+            "for plot-profit and plot-dataframe."
+        )


 def start_plot_dataframe(args: Dict[str, Any]) -> None:
@@ -18,6 +19,7 @@ def start_plot_dataframe(args: Dict[str, Any]) -> None:
     """
     # Import here to avoid errors if plot-dependencies are not installed.
     from freqtrade.plot.plotting import load_and_plot_trades
+
     validate_plot_args(args)
     config = setup_utils_configuration(args, RunMode.PLOT)

@@ -30,6 +32,7 @@ def start_plot_profit(args: Dict[str, Any]) -> None:
     """
     # Import here to avoid errors if plot-dependencies are not installed.
     from freqtrade.plot.plotting import plot_profit
+
     validate_plot_args(args)
     config = setup_utils_configuration(args, RunMode.PLOT)

@@ -26,13 +26,15 @@ def start_strategy_update(args: Dict[str, Any]) -> None:
     config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

     strategy_objs = StrategyResolver.search_all_objects(
-        config, enum_failed=False, recursive=config.get('recursive_strategy_search', False))
+        config, enum_failed=False, recursive=config.get("recursive_strategy_search", False)
+    )

     filtered_strategy_objs = []
-    if args['strategy_list']:
+    if args["strategy_list"]:
         filtered_strategy_objs = [
-            strategy_obj for strategy_obj in strategy_objs
-            if strategy_obj['name'] in args['strategy_list']
+            strategy_obj
+            for strategy_obj in strategy_objs
+            if strategy_obj["name"] in args["strategy_list"]
         ]

     else:
@@ -41,8 +43,8 @@ def start_strategy_update(args: Dict[str, Any]) -> None:

     processed_locations = set()
     for strategy_obj in filtered_strategy_objs:
-        if strategy_obj['location'] not in processed_locations:
-            processed_locations.add(strategy_obj['location'])
+        if strategy_obj["location"] not in processed_locations:
+            processed_locations.add(strategy_obj["location"])
             start_conversion(strategy_obj, config)


@@ -24,13 +24,13 @@ def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:
     ]
     config = deepcopy(config)
     for key in keys_to_remove:
-        if '.' in key:
-            nested_keys = key.split('.')
+        if "." in key:
+            nested_keys = key.split(".")
             nested_config = config
             for nested_key in nested_keys[:-1]:
                 nested_config = nested_config.get(nested_key, {})
-            nested_config[nested_keys[-1]] = 'REDACTED'
+            nested_config[nested_keys[-1]] = "REDACTED"
         else:
-            config[key] = 'REDACTED'
+            config[key] = "REDACTED"

     return config

@@ -11,7 +11,8 @@ logger = logging.getLogger(__name__)


 def setup_utils_configuration(
-        args: Dict[str, Any], method: RunMode, *, set_dry: bool = True) -> Dict[str, Any]:
+    args: Dict[str, Any], method: RunMode, *, set_dry: bool = True
+) -> Dict[str, Any]:
     """
     Prepare the configuration for utils subcommands
     :param args: Cli args from Arguments()
@@ -23,7 +24,7 @@ def setup_utils_configuration(

     # Ensure these modes are using Dry-run
     if set_dry:
-        config['dry_run'] = True
+        config["dry_run"] = True
     validate_config_consistency(config, preliminary=True)

     return config

@ -20,18 +20,16 @@ def _extend_validator(validator_class):
|
|||
Extended validator for the Freqtrade configuration JSON Schema.
|
||||
Currently it only handles defaults for subschemas.
|
||||
"""
|
||||
validate_properties = validator_class.VALIDATORS['properties']
|
||||
validate_properties = validator_class.VALIDATORS["properties"]
|
||||
|
||||
def set_defaults(validator, properties, instance, schema):
|
||||
for prop, subschema in properties.items():
|
||||
if 'default' in subschema:
|
||||
instance.setdefault(prop, subschema['default'])
|
||||
if "default" in subschema:
|
||||
instance.setdefault(prop, subschema["default"])
|
||||
|
||||
yield from validate_properties(validator, properties, instance, schema)
|
||||
|
||||
return validators.extend(
|
||||
validator_class, {'properties': set_defaults}
|
||||
)
|
||||
return validators.extend(validator_class, {"properties": set_defaults})
|
||||
|
||||
|
||||
FreqtradeValidator = _extend_validator(Draft4Validator)
|
||||
|
@ -44,27 +42,23 @@ def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> D
|
|||
:return: Returns the config if valid, otherwise throw an exception
|
||||
"""
|
||||
conf_schema = deepcopy(constants.CONF_SCHEMA)
|
||||
if conf.get('runmode', RunMode.OTHER) in (RunMode.DRY_RUN, RunMode.LIVE):
|
||||
conf_schema['required'] = constants.SCHEMA_TRADE_REQUIRED
|
||||
elif conf.get('runmode', RunMode.OTHER) in (RunMode.BACKTEST, RunMode.HYPEROPT):
|
||||
if conf.get("runmode", RunMode.OTHER) in (RunMode.DRY_RUN, RunMode.LIVE):
|
||||
conf_schema["required"] = constants.SCHEMA_TRADE_REQUIRED
|
||||
elif conf.get("runmode", RunMode.OTHER) in (RunMode.BACKTEST, RunMode.HYPEROPT):
|
||||
if preliminary:
|
||||
conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED
|
||||
conf_schema["required"] = constants.SCHEMA_BACKTEST_REQUIRED
|
||||
else:
|
||||
conf_schema['required'] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL
|
||||
elif conf.get('runmode', RunMode.OTHER) == RunMode.WEBSERVER:
|
||||
conf_schema['required'] = constants.SCHEMA_MINIMAL_WEBSERVER
|
||||
conf_schema["required"] = constants.SCHEMA_BACKTEST_REQUIRED_FINAL
|
||||
elif conf.get("runmode", RunMode.OTHER) == RunMode.WEBSERVER:
|
||||
conf_schema["required"] = constants.SCHEMA_MINIMAL_WEBSERVER
|
||||
else:
|
||||
conf_schema['required'] = constants.SCHEMA_MINIMAL_REQUIRED
|
||||
conf_schema["required"] = constants.SCHEMA_MINIMAL_REQUIRED
|
||||
try:
|
||||
FreqtradeValidator(conf_schema).validate(conf)
|
||||
return conf
|
||||
except ValidationError as e:
|
||||
logger.critical(
|
||||
f"Invalid configuration. Reason: {e}"
|
||||
)
|
||||
raise ValidationError(
|
||||
best_match(Draft4Validator(conf_schema).iter_errors(conf)).message
|
||||
)
|
||||
logger.critical(f"Invalid configuration. Reason: {e}")
|
||||
raise ValidationError(best_match(Draft4Validator(conf_schema).iter_errors(conf)).message)
|
||||
|
||||
|
||||
def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = False) -> None:
|
||||
|
@ -91,7 +85,7 @@ def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = Fal
|
|||
validate_migrated_strategy_settings(conf)
|
||||
|
||||
# validate configuration before returning
|
||||
logger.info('Validating configuration ...')
|
||||
logger.info("Validating configuration ...")
|
||||
validate_config_schema(conf, preliminary=preliminary)
|
||||
|
||||
|
||||
|
@ -100,9 +94,11 @@ def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
|
|||
If edge is disabled, either max_open_trades or stake_amount need to be set.
|
||||
:raise: ConfigurationError if config validation failed
|
||||
"""
|
||||
if (not conf.get('edge', {}).get('enabled')
|
||||
and conf.get('max_open_trades') == float('inf')
|
||||
and conf.get('stake_amount') == constants.UNLIMITED_STAKE_AMOUNT):
|
||||
if (
|
||||
not conf.get("edge", {}).get("enabled")
|
||||
and conf.get("max_open_trades") == float("inf")
|
||||
and conf.get("stake_amount") == constants.UNLIMITED_STAKE_AMOUNT
|
||||
):
|
||||
raise ConfigurationError("`max_open_trades` and `stake_amount` cannot both be unlimited.")
|
||||
|
||||
|
||||
|
@ -111,45 +107,47 @@ def _validate_price_config(conf: Dict[str, Any]) -> None:
|
|||
When using market orders, price sides must be using the "other" side of the price
|
||||
"""
|
||||
# TODO: The below could be an enforced setting when using market orders
|
||||
if (conf.get('order_types', {}).get('entry') == 'market'
|
||||
and conf.get('entry_pricing', {}).get('price_side') not in ('ask', 'other')):
|
||||
raise ConfigurationError(
|
||||
'Market entry orders require entry_pricing.price_side = "other".')
|
||||
if conf.get("order_types", {}).get("entry") == "market" and conf.get("entry_pricing", {}).get(
|
||||
"price_side"
|
||||
) not in ("ask", "other"):
|
||||
raise ConfigurationError('Market entry orders require entry_pricing.price_side = "other".')
|
||||
|
||||
if (conf.get('order_types', {}).get('exit') == 'market'
|
||||
and conf.get('exit_pricing', {}).get('price_side') not in ('bid', 'other')):
|
||||
if conf.get("order_types", {}).get("exit") == "market" and conf.get("exit_pricing", {}).get(
|
||||
"price_side"
|
||||
) not in ("bid", "other"):
|
||||
raise ConfigurationError('Market exit orders require exit_pricing.price_side = "other".')
|
||||
|
||||
|
||||
def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
|
||||
|
||||
if conf.get('stoploss') == 0.0:
|
||||
if conf.get("stoploss") == 0.0:
|
||||
raise ConfigurationError(
|
||||
'The config stoploss needs to be different from 0 to avoid problems with sell orders.'
|
||||
"The config stoploss needs to be different from 0 to avoid problems with sell orders."
|
||||
)
|
||||
# Skip if trailing stoploss is not activated
|
||||
if not conf.get('trailing_stop', False):
|
||||
if not conf.get("trailing_stop", False):
|
||||
return
|
||||
|
||||
tsl_positive = float(conf.get('trailing_stop_positive', 0))
|
||||
tsl_offset = float(conf.get('trailing_stop_positive_offset', 0))
|
||||
tsl_only_offset = conf.get('trailing_only_offset_is_reached', False)
|
||||
tsl_positive = float(conf.get("trailing_stop_positive", 0))
|
||||
tsl_offset = float(conf.get("trailing_stop_positive_offset", 0))
|
||||
tsl_only_offset = conf.get("trailing_only_offset_is_reached", False)
|
||||
|
||||
if tsl_only_offset:
|
||||
if tsl_positive == 0.0:
|
||||
raise ConfigurationError(
|
||||
'The config trailing_only_offset_is_reached needs '
|
||||
'trailing_stop_positive_offset to be more than 0 in your config.')
|
||||
"The config trailing_only_offset_is_reached needs "
|
||||
"trailing_stop_positive_offset to be more than 0 in your config."
|
||||
)
|
||||
if tsl_positive > 0 and 0 < tsl_offset <= tsl_positive:
|
||||
raise ConfigurationError(
|
||||
'The config trailing_stop_positive_offset needs '
|
||||
'to be greater than trailing_stop_positive in your config.')
|
||||
"The config trailing_stop_positive_offset needs "
|
||||
"to be greater than trailing_stop_positive in your config."
|
||||
)
|
||||
|
||||
# Fetch again without default
|
||||
if 'trailing_stop_positive' in conf and float(conf['trailing_stop_positive']) == 0.0:
|
||||
if "trailing_stop_positive" in conf and float(conf["trailing_stop_positive"]) == 0.0:
|
||||
raise ConfigurationError(
|
||||
'The config trailing_stop_positive needs to be different from 0 '
|
||||
'to avoid problems with sell orders.'
|
||||
"The config trailing_stop_positive needs to be different from 0 "
|
||||
"to avoid problems with sell orders."
|
||||
)
|
||||
|
||||
|
||||
|
@ -158,10 +156,10 @@ def _validate_edge(conf: Dict[str, Any]) -> None:
|
|||
Edge and Dynamic whitelist should not both be enabled, since edge overrides dynamic whitelists.
|
||||
"""
|
||||
|
||||
if not conf.get('edge', {}).get('enabled'):
|
||||
if not conf.get("edge", {}).get("enabled"):
|
||||
return
|
||||
|
||||
if not conf.get('use_exit_signal', True):
|
||||
if not conf.get("use_exit_signal", True):
|
||||
raise ConfigurationError(
|
||||
"Edge requires `use_exit_signal` to be True, otherwise no sells will happen."
|
||||
)
|
||||
|
@ -171,13 +169,20 @@ def _validate_whitelist(conf: Dict[str, Any]) -> None:
|
|||
"""
|
||||
Dynamic whitelist does not require pair_whitelist to be set - however StaticWhitelist does.
|
||||
"""
|
||||
if conf.get('runmode', RunMode.OTHER) in [RunMode.OTHER, RunMode.PLOT,
|
||||
RunMode.UTIL_NO_EXCHANGE, RunMode.UTIL_EXCHANGE]:
|
||||
if conf.get("runmode", RunMode.OTHER) in [
|
||||
RunMode.OTHER,
|
||||
RunMode.PLOT,
|
||||
RunMode.UTIL_NO_EXCHANGE,
|
||||
RunMode.UTIL_EXCHANGE,
|
||||
]:
|
||||
return
|
||||
|
||||
for pl in conf.get('pairlists', [{'method': 'StaticPairList'}]):
|
||||
if (isinstance(pl, dict) and pl.get('method') == 'StaticPairList'
|
||||
and not conf.get('exchange', {}).get('pair_whitelist')):
|
||||
for pl in conf.get("pairlists", [{"method": "StaticPairList"}]):
|
||||
if (
|
||||
isinstance(pl, dict)
|
||||
and pl.get("method") == "StaticPairList"
|
||||
and not conf.get("exchange", {}).get("pair_whitelist")
|
||||
):
|
||||
raise ConfigurationError("StaticPairList requires pair_whitelist to be set.")
|
||||
|
||||
|
||||
|
@ -186,14 +191,14 @@ def _validate_protections(conf: Dict[str, Any]) -> None:
|
|||
Validate protection configuration validity
|
||||
"""
|
||||
|
||||
for prot in conf.get('protections', []):
|
||||
if ('stop_duration' in prot and 'stop_duration_candles' in prot):
|
||||
for prot in conf.get("protections", []):
|
||||
if "stop_duration" in prot and "stop_duration_candles" in prot:
|
||||
raise ConfigurationError(
|
||||
"Protections must specify either `stop_duration` or `stop_duration_candles`.\n"
|
||||
f"Please fix the protection {prot.get('method')}"
|
||||
)
|
||||
|
||||
if ('lookback_period' in prot and 'lookback_period_candles' in prot):
|
||||
if "lookback_period" in prot and "lookback_period_candles" in prot:
|
||||
raise ConfigurationError(
|
||||
"Protections must specify either `lookback_period` or `lookback_period_candles`.\n"
|
||||
f"Please fix the protection {prot.get('method')}"
|
||||
|
@ -201,10 +206,10 @@ def _validate_protections(conf: Dict[str, Any]) -> None:
|
|||
|
||||
|
||||
def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
|
||||
ask_strategy = conf.get('exit_pricing', {})
|
||||
ob_min = ask_strategy.get('order_book_min')
|
||||
ob_max = ask_strategy.get('order_book_max')
|
||||
if ob_min is not None and ob_max is not None and ask_strategy.get('use_order_book'):
|
||||
ask_strategy = conf.get("exit_pricing", {})
|
||||
ob_min = ask_strategy.get("order_book_min")
|
||||
ob_max = ask_strategy.get("order_book_max")
|
||||
if ob_min is not None and ob_max is not None and ask_strategy.get("use_order_book"):
|
||||
if ob_min != ob_max:
|
||||
raise ConfigurationError(
|
||||
"Using order_book_max != order_book_min in exit_pricing is no longer supported."
|
||||
|
@ -212,7 +217,7 @@ def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
else:
|
||||
# Move value to order_book_top
|
||||
ask_strategy['order_book_top'] = ob_min
|
||||
ask_strategy["order_book_top"] = ob_min
|
||||
logger.warning(
|
||||
"DEPRECATED: "
|
||||
"Please use `order_book_top` instead of `order_book_min` and `order_book_max` "
|
||||
|
@ -221,7 +226,6 @@ def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
|
|||
|
||||
|
||||
def validate_migrated_strategy_settings(conf: Dict[str, Any]) -> None:
|
||||
|
||||
_validate_time_in_force(conf)
|
||||
_validate_order_types(conf)
|
||||
_validate_unfilledtimeout(conf)
|
||||
|
@ -230,119 +234,129 @@ def validate_migrated_strategy_settings(conf: Dict[str, Any]) -> None:
|
|||
|
||||
|
||||
def _validate_time_in_force(conf: Dict[str, Any]) -> None:
|
||||
|
||||
time_in_force = conf.get('order_time_in_force', {})
|
||||
if 'buy' in time_in_force or 'sell' in time_in_force:
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
time_in_force = conf.get("order_time_in_force", {})
|
||||
if "buy" in time_in_force or "sell" in time_in_force:
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError(
|
||||
"Please migrate your time_in_force settings to use 'entry' and 'exit'.")
|
||||
"Please migrate your time_in_force settings to use 'entry' and 'exit'."
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"DEPRECATED: Using 'buy' and 'sell' for time_in_force is deprecated."
|
||||
"Please migrate your time_in_force settings to use 'entry' and 'exit'."
|
||||
)
|
||||
process_deprecated_setting(
|
||||
conf, 'order_time_in_force', 'buy', 'order_time_in_force', 'entry')
|
||||
conf, "order_time_in_force", "buy", "order_time_in_force", "entry"
|
||||
)
|
||||
|
||||
process_deprecated_setting(
|
||||
conf, 'order_time_in_force', 'sell', 'order_time_in_force', 'exit')
|
||||
conf, "order_time_in_force", "sell", "order_time_in_force", "exit"
|
||||
)
|
||||
|
||||
|
||||
def _validate_order_types(conf: Dict[str, Any]) -> None:
|
||||
|
||||
order_types = conf.get('order_types', {})
|
||||
old_order_types = ['buy', 'sell', 'emergencysell', 'forcebuy',
|
||||
'forcesell', 'emergencyexit', 'forceexit', 'forceentry']
|
||||
order_types = conf.get("order_types", {})
|
||||
old_order_types = [
|
||||
"buy",
|
||||
"sell",
|
||||
"emergencysell",
|
||||
"forcebuy",
|
||||
"forcesell",
|
||||
"emergencyexit",
|
||||
"forceexit",
|
||||
"forceentry",
|
||||
]
|
||||
if any(x in order_types for x in old_order_types):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError(
|
||||
"Please migrate your order_types settings to use the new wording.")
|
||||
"Please migrate your order_types settings to use the new wording."
|
||||
)
|
||||
else:
|
||||
logger.warning(
|
||||
"DEPRECATED: Using 'buy' and 'sell' for order_types is deprecated."
|
||||
"Please migrate your order_types settings to use 'entry' and 'exit' wording."
|
||||
)
|
||||
for o, n in [
|
||||
('buy', 'entry'),
|
||||
('sell', 'exit'),
|
||||
('emergencysell', 'emergency_exit'),
|
||||
('forcesell', 'force_exit'),
|
||||
('forcebuy', 'force_entry'),
|
||||
('emergencyexit', 'emergency_exit'),
|
||||
('forceexit', 'force_exit'),
|
||||
('forceentry', 'force_entry'),
|
||||
("buy", "entry"),
|
||||
("sell", "exit"),
|
||||
("emergencysell", "emergency_exit"),
|
||||
("forcesell", "force_exit"),
|
||||
("forcebuy", "force_entry"),
|
||||
("emergencyexit", "emergency_exit"),
|
||||
("forceexit", "force_exit"),
|
||||
("forceentry", "force_entry"),
|
||||
]:
|
||||
|
||||
process_deprecated_setting(conf, 'order_types', o, 'order_types', n)
|
||||
process_deprecated_setting(conf, "order_types", o, "order_types", n)
|
||||
|
||||
|
||||
def _validate_unfilledtimeout(conf: Dict[str, Any]) -> None:
|
||||
unfilledtimeout = conf.get('unfilledtimeout', {})
|
||||
if any(x in unfilledtimeout for x in ['buy', 'sell']):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
unfilledtimeout = conf.get("unfilledtimeout", {})
|
||||
if any(x in unfilledtimeout for x in ["buy", "sell"]):
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError(
|
||||
"Please migrate your unfilledtimeout settings to use the new wording.")
|
||||
"Please migrate your unfilledtimeout settings to use the new wording."
|
||||
)
|
||||
else:
|
||||
|
||||
logger.warning(
|
||||
"DEPRECATED: Using 'buy' and 'sell' for unfilledtimeout is deprecated."
|
||||
"Please migrate your unfilledtimeout settings to use 'entry' and 'exit' wording."
|
||||
)
|
||||
for o, n in [
|
||||
('buy', 'entry'),
|
||||
('sell', 'exit'),
|
||||
("buy", "entry"),
|
||||
("sell", "exit"),
|
||||
]:
|
||||
|
||||
process_deprecated_setting(conf, 'unfilledtimeout', o, 'unfilledtimeout', n)
|
||||
process_deprecated_setting(conf, "unfilledtimeout", o, "unfilledtimeout", n)
|
||||
|
||||
|
||||
def _validate_pricing_rules(conf: Dict[str, Any]) -> None:
|
||||
|
||||
if conf.get('ask_strategy') or conf.get('bid_strategy'):
|
||||
if conf.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError(
|
||||
"Please migrate your pricing settings to use the new wording.")
|
||||
if conf.get("ask_strategy") or conf.get("bid_strategy"):
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError("Please migrate your pricing settings to use the new wording.")
|
||||
else:
|
||||
|
||||
logger.warning(
|
||||
"DEPRECATED: Using 'ask_strategy' and 'bid_strategy' is deprecated."
|
||||
"Please migrate your settings to use 'entry_pricing' and 'exit_pricing'."
|
||||
)
|
||||
conf['entry_pricing'] = {}
|
||||
for obj in list(conf.get('bid_strategy', {}).keys()):
|
||||
if obj == 'ask_last_balance':
|
||||
process_deprecated_setting(conf, 'bid_strategy', obj,
|
||||
'entry_pricing', 'price_last_balance')
|
||||
conf["entry_pricing"] = {}
|
||||
for obj in list(conf.get("bid_strategy", {}).keys()):
|
||||
if obj == "ask_last_balance":
|
||||
process_deprecated_setting(
|
||||
conf, "bid_strategy", obj, "entry_pricing", "price_last_balance"
|
||||
)
|
||||
else:
|
||||
process_deprecated_setting(conf, 'bid_strategy', obj, 'entry_pricing', obj)
|
||||
del conf['bid_strategy']
|
||||
process_deprecated_setting(conf, "bid_strategy", obj, "entry_pricing", obj)
|
||||
del conf["bid_strategy"]
|
||||
|
||||
conf['exit_pricing'] = {}
|
||||
for obj in list(conf.get('ask_strategy', {}).keys()):
|
||||
if obj == 'bid_last_balance':
|
||||
process_deprecated_setting(conf, 'ask_strategy', obj,
|
||||
'exit_pricing', 'price_last_balance')
|
||||
conf["exit_pricing"] = {}
|
||||
for obj in list(conf.get("ask_strategy", {}).keys()):
|
||||
if obj == "bid_last_balance":
|
||||
            process_deprecated_setting(
                conf, "ask_strategy", obj, "exit_pricing", "price_last_balance"
            )
    else:
        process_deprecated_setting(conf, 'ask_strategy', obj, 'exit_pricing', obj)
        del conf['ask_strategy']
        process_deprecated_setting(conf, "ask_strategy", obj, "exit_pricing", obj)
        del conf["ask_strategy"]


def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
    freqai_enabled = conf.get('freqai', {}).get('enabled', False)
    analyze_per_epoch = conf.get('analyze_per_epoch', False)
    freqai_enabled = conf.get("freqai", {}).get("enabled", False)
    analyze_per_epoch = conf.get("analyze_per_epoch", False)
    if analyze_per_epoch and freqai_enabled:
        raise ConfigurationError(
            'Using analyze-per-epoch parameter is not supported with a FreqAI strategy.')
            "Using analyze-per-epoch parameter is not supported with a FreqAI strategy."
        )


def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool) -> None:
    freqai_enabled = conf.get('freqai', {}).get('enabled', False)
    freqai_enabled = conf.get("freqai", {}).get("enabled", False)
    if freqai_enabled:
        main_tf = conf.get('timeframe', '5m')
        freqai_include_timeframes = conf.get('freqai', {}).get('feature_parameters', {}
                                             ).get('include_timeframes', [])
        main_tf = conf.get("timeframe", "5m")
        freqai_include_timeframes = (
            conf.get("freqai", {}).get("feature_parameters", {}).get("include_timeframes", [])
        )

        from freqtrade.exchange import timeframe_to_seconds

        main_tf_s = timeframe_to_seconds(main_tf)
        offending_lines = []
        for tf in freqai_include_timeframes:

@@ -352,57 +366,65 @@ def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool)
        if offending_lines:
            raise ConfigurationError(
                f"Main timeframe of {main_tf} must be smaller or equal to FreqAI "
                f"`include_timeframes`.Offending include-timeframes: {', '.join(offending_lines)}")
                f"`include_timeframes`.Offending include-timeframes: {', '.join(offending_lines)}"
            )

        # Ensure that the base timeframe is included in the include_timeframes list
        if not preliminary and main_tf not in freqai_include_timeframes:
            feature_parameters = conf.get('freqai', {}).get('feature_parameters', {})
            feature_parameters = conf.get("freqai", {}).get("feature_parameters", {})
            include_timeframes = [main_tf] + freqai_include_timeframes
            conf.get('freqai', {}).get('feature_parameters', {}) \
                .update({**feature_parameters, 'include_timeframes': include_timeframes})
            conf.get("freqai", {}).get("feature_parameters", {}).update(
                {**feature_parameters, "include_timeframes": include_timeframes}
            )


def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
    if conf.get('runmode', RunMode.OTHER) == RunMode.BACKTEST:
        freqai_enabled = conf.get('freqai', {}).get('enabled', False)
        timerange = conf.get('timerange')
        freqai_backtest_live_models = conf.get('freqai_backtest_live_models', False)
    if conf.get("runmode", RunMode.OTHER) == RunMode.BACKTEST:
        freqai_enabled = conf.get("freqai", {}).get("enabled", False)
        timerange = conf.get("timerange")
        freqai_backtest_live_models = conf.get("freqai_backtest_live_models", False)
        if freqai_backtest_live_models and freqai_enabled and timerange:
            raise ConfigurationError(
                'Using timerange parameter is not supported with '
                '--freqai-backtest-live-models parameter.')
                "Using timerange parameter is not supported with "
                "--freqai-backtest-live-models parameter."
            )

        if freqai_backtest_live_models and not freqai_enabled:
            raise ConfigurationError(
                'Using --freqai-backtest-live-models parameter is only '
                'supported with a FreqAI strategy.')
                "Using --freqai-backtest-live-models parameter is only "
                "supported with a FreqAI strategy."
            )

        if freqai_enabled and not freqai_backtest_live_models and not timerange:
            raise ConfigurationError(
                'Please pass --timerange if you intend to use FreqAI for backtesting.')
                "Please pass --timerange if you intend to use FreqAI for backtesting."
            )


def _validate_consumers(conf: Dict[str, Any]) -> None:
    emc_conf = conf.get('external_message_consumer', {})
    if emc_conf.get('enabled', False):
        if len(emc_conf.get('producers', [])) < 1:
    emc_conf = conf.get("external_message_consumer", {})
    if emc_conf.get("enabled", False):
        if len(emc_conf.get("producers", [])) < 1:
            raise ConfigurationError("You must specify at least 1 Producer to connect to.")

        producer_names = [p['name'] for p in emc_conf.get('producers', [])]
        producer_names = [p["name"] for p in emc_conf.get("producers", [])]
        duplicates = [item for item, count in Counter(producer_names).items() if count > 1]
        if duplicates:
            raise ConfigurationError(
                f"Producer names must be unique. Duplicate: {', '.join(duplicates)}")
        if conf.get('process_only_new_candles', True):
                f"Producer names must be unique. Duplicate: {', '.join(duplicates)}"
            )
        if conf.get("process_only_new_candles", True):
            # Warning here or require it?
            logger.warning("To receive best performance with external data, "
                           "please set `process_only_new_candles` to False")
            logger.warning(
                "To receive best performance with external data, "
                "please set `process_only_new_candles` to False"
            )


def _strategy_settings(conf: Dict[str, Any]) -> None:

    process_deprecated_setting(conf, None, 'use_sell_signal', None, 'use_exit_signal')
    process_deprecated_setting(conf, None, 'sell_profit_only', None, 'exit_profit_only')
    process_deprecated_setting(conf, None, 'sell_profit_offset', None, 'exit_profit_offset')
    process_deprecated_setting(conf, None, 'ignore_roi_if_buy_signal',
                               None, 'ignore_roi_if_entry_signal')
    process_deprecated_setting(conf, None, "use_sell_signal", None, "use_exit_signal")
    process_deprecated_setting(conf, None, "sell_profit_only", None, "exit_profit_only")
    process_deprecated_setting(conf, None, "sell_profit_offset", None, "exit_profit_offset")
    process_deprecated_setting(
        conf, None, "ignore_roi_if_buy_signal", None, "ignore_roi_if_entry_signal"
    )

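# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# The include_timeframes check above rejects any FreqAI timeframe smaller than the
# main timeframe. `tf_to_seconds` below is a simplified stand-in for
# freqtrade.exchange.timeframe_to_seconds, used only to keep this sketch
# self-contained.
def tf_to_seconds(tf: str) -> int:
    units = {"m": 60, "h": 3600, "d": 86400}
    return int(tf[:-1]) * units[tf[-1]]

main_tf = "5m"
include_timeframes = ["1m", "5m", "1h"]
offending = [tf for tf in include_timeframes if tf_to_seconds(tf) < tf_to_seconds(main_tf)]
print(offending)  # ['1m'] -> would trigger the ConfigurationError above
# -----------------------------------------------------------------------------------
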
@@ -1,6 +1,7 @@
"""
This module contains the configuration class
"""

import logging
import warnings
from copy import deepcopy

@@ -56,7 +57,7 @@ class Configuration:
        :return: configuration dictionary
        """
        # Keep this method as staticmethod, so it can be used from interactive environments
        c = Configuration({'config': files}, RunMode.OTHER)
        c = Configuration({"config": files}, RunMode.OTHER)
        return c.get_config()

    def load_config(self) -> Dict[str, Any]:

@@ -69,19 +70,20 @@ class Configuration:

        # Load environment variables
        from freqtrade.commands.arguments import NO_CONF_ALLOWED
        if self.args.get('command') not in NO_CONF_ALLOWED:

        if self.args.get("command") not in NO_CONF_ALLOWED:
            env_data = enironment_vars_to_dict()
            config = deep_merge_dicts(env_data, config)

        # Normalize config
        if 'internals' not in config:
            config['internals'] = {}
        if "internals" not in config:
            config["internals"] = {}

        if 'pairlists' not in config:
            config['pairlists'] = []
        if "pairlists" not in config:
            config["pairlists"] = []

        # Keep a copy of the original configuration file
        config['original_config'] = deepcopy(config)
        config["original_config"] = deepcopy(config)

        self._process_logging_options(config)

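# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# Merge order in load_config above: environment-derived values are merged *over*
# the file-based config, so they win on conflicts. `deep_merge` is a minimal
# stand-in for freqtrade.misc.deep_merge_dicts.
def deep_merge(source: dict, destination: dict) -> dict:
    for key, value in source.items():
        if isinstance(value, dict):
            deep_merge(value, destination.setdefault(key, {}))
        else:
            destination[key] = value
    return destination

file_config = {"exchange": {"name": "binance"}, "stake_amount": 100}
env_data = {"stake_amount": 50}  # e.g. derived from FREQTRADE__STAKE_AMOUNT=50
print(deep_merge(env_data, file_config))  # stake_amount ends up as 50
# -----------------------------------------------------------------------------------
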
@@ -105,7 +107,7 @@ class Configuration:
        from freqtrade.exchange.check_exchange import check_exchange

        # Check if the exchange set by the user is supported
        check_exchange(config, config.get('experimental', {}).get('block_bad_exchanges', True))
        check_exchange(config, config.get("experimental", {}).get("block_bad_exchanges", True))

        self._resolve_pairs_list(config)

@@ -119,52 +121,56 @@ class Configuration:
        the -v/--verbose, --logfile options
        """
        # Log level
        config.update({'verbosity': self.args.get('verbosity', 0)})
        config.update({"verbosity": self.args.get("verbosity", 0)})

        if 'logfile' in self.args and self.args['logfile']:
            config.update({'logfile': self.args['logfile']})
        if "logfile" in self.args and self.args["logfile"]:
            config.update({"logfile": self.args["logfile"]})

        setup_logging(config)

    def _process_trading_options(self, config: Config) -> None:
        if config['runmode'] not in TRADE_MODES:
        if config["runmode"] not in TRADE_MODES:
            return

        if config.get('dry_run', False):
            logger.info('Dry run is enabled')
            if config.get('db_url') in [None, constants.DEFAULT_DB_PROD_URL]:
        if config.get("dry_run", False):
            logger.info("Dry run is enabled")
            if config.get("db_url") in [None, constants.DEFAULT_DB_PROD_URL]:
                # Default to in-memory db for dry_run if not specified
                config['db_url'] = constants.DEFAULT_DB_DRYRUN_URL
                config["db_url"] = constants.DEFAULT_DB_DRYRUN_URL
        else:
            if not config.get('db_url'):
                config['db_url'] = constants.DEFAULT_DB_PROD_URL
            logger.info('Dry run is disabled')
            if not config.get("db_url"):
                config["db_url"] = constants.DEFAULT_DB_PROD_URL
            logger.info("Dry run is disabled")

        logger.info(f'Using DB: "{parse_db_uri_for_logging(config["db_url"])}"')

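# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# Condensed view of the db_url defaulting in _process_trading_options above. The two
# URL constants mirror freqtrade.constants; their values are assumed here, verify
# against the repository.
DEFAULT_DB_PROD_URL = "sqlite:///tradesv3.sqlite"
DEFAULT_DB_DRYRUN_URL = "sqlite://"

def resolve_db_url(config: dict) -> str:
    if config.get("dry_run", False):
        if config.get("db_url") in (None, DEFAULT_DB_PROD_URL):
            return DEFAULT_DB_DRYRUN_URL  # in-memory db for dry-run
        return config["db_url"]
    return config.get("db_url") or DEFAULT_DB_PROD_URL

print(resolve_db_url({"dry_run": True}))   # sqlite:// (in-memory)
print(resolve_db_url({"dry_run": False}))  # sqlite:///tradesv3.sqlite
# -----------------------------------------------------------------------------------
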
    def _process_common_options(self, config: Config) -> None:

        # Set strategy if not specified in config and or if it's non default
        if self.args.get('strategy') or not config.get('strategy'):
            config.update({'strategy': self.args.get('strategy')})
        if self.args.get("strategy") or not config.get("strategy"):
            config.update({"strategy": self.args.get("strategy")})

        self._args_to_config(config, argname='strategy_path',
                             logstring='Using additional Strategy lookup path: {}')
        self._args_to_config(
            config, argname="strategy_path", logstring="Using additional Strategy lookup path: {}"
        )

        if ('db_url' in self.args and self.args['db_url'] and
                self.args['db_url'] != constants.DEFAULT_DB_PROD_URL):
            config.update({'db_url': self.args['db_url']})
            logger.info('Parameter --db-url detected ...')
        if (
            "db_url" in self.args
            and self.args["db_url"]
            and self.args["db_url"] != constants.DEFAULT_DB_PROD_URL
        ):
            config.update({"db_url": self.args["db_url"]})
            logger.info("Parameter --db-url detected ...")

        self._args_to_config(config, argname='db_url_from',
                             logstring='Parameter --db-url-from detected ...')
        self._args_to_config(
            config, argname="db_url_from", logstring="Parameter --db-url-from detected ..."
        )

        if config.get('force_entry_enable', False):
            logger.warning('`force_entry_enable` RPC message enabled.')
        if config.get("force_entry_enable", False):
            logger.warning("`force_entry_enable` RPC message enabled.")

        # Support for sd_notify
        if 'sd_notify' in self.args and self.args['sd_notify']:
            config['internals'].update({'sd_notify': True})
        if "sd_notify" in self.args and self.args["sd_notify"]:
            config["internals"].update({"sd_notify": True})

    def _process_datadir_options(self, config: Config) -> None:
        """

@@ -172,245 +178,275 @@ class Configuration:
        --user-data, --datadir
        """
        # Check exchange parameter here - otherwise `datadir` might be wrong.
        if 'exchange' in self.args and self.args['exchange']:
            config['exchange']['name'] = self.args['exchange']
        if "exchange" in self.args and self.args["exchange"]:
            config["exchange"]["name"] = self.args["exchange"]
            logger.info(f"Using exchange {config['exchange']['name']}")

        if 'pair_whitelist' not in config['exchange']:
            config['exchange']['pair_whitelist'] = []
        if "pair_whitelist" not in config["exchange"]:
            config["exchange"]["pair_whitelist"] = []

        if 'user_data_dir' in self.args and self.args['user_data_dir']:
            config.update({'user_data_dir': self.args['user_data_dir']})
        elif 'user_data_dir' not in config:
        if "user_data_dir" in self.args and self.args["user_data_dir"]:
            config.update({"user_data_dir": self.args["user_data_dir"]})
        elif "user_data_dir" not in config:
            # Default to cwd/user_data (legacy option ...)
            config.update({'user_data_dir': str(Path.cwd() / 'user_data')})
            config.update({"user_data_dir": str(Path.cwd() / "user_data")})

        # reset to user_data_dir so this contains the absolute path.
        config['user_data_dir'] = create_userdata_dir(config['user_data_dir'], create_dir=False)
        logger.info('Using user-data directory: %s ...', config['user_data_dir'])
        config["user_data_dir"] = create_userdata_dir(config["user_data_dir"], create_dir=False)
        logger.info("Using user-data directory: %s ...", config["user_data_dir"])

        config.update({'datadir': create_datadir(config, self.args.get('datadir'))})
        logger.info('Using data directory: %s ...', config.get('datadir'))
        config.update({"datadir": create_datadir(config, self.args.get("datadir"))})
        logger.info("Using data directory: %s ...", config.get("datadir"))

        if self.args.get('exportfilename'):
            self._args_to_config(config, argname='exportfilename',
                                 logstring='Storing backtest results to {} ...')
            config['exportfilename'] = Path(config['exportfilename'])
        if self.args.get("exportfilename"):
            self._args_to_config(
                config, argname="exportfilename", logstring="Storing backtest results to {} ..."
            )
            config["exportfilename"] = Path(config["exportfilename"])
        else:
            config['exportfilename'] = (config['user_data_dir']
                                        / 'backtest_results')
            config["exportfilename"] = config["user_data_dir"] / "backtest_results"

        if self.args.get('show_sensitive'):
        if self.args.get("show_sensitive"):
            logger.warning(
                "Sensitive information will be shown in the upcoming output. "
                "Please make sure to never share this output without redacting "
                "the information yourself.")
                "the information yourself."
            )

    def _process_optimize_options(self, config: Config) -> None:

        # This will override the strategy configuration
        self._args_to_config(config, argname='timeframe',
                             logstring='Parameter -i/--timeframe detected ... '
                             'Using timeframe: {} ...')

        self._args_to_config(config, argname='position_stacking',
                             logstring='Parameter --enable-position-stacking detected ...')
        self._args_to_config(
            config,
            argname="timeframe",
            logstring="Parameter -i/--timeframe detected ... Using timeframe: {} ...",
        )

        self._args_to_config(
            config, argname='enable_protections',
            logstring='Parameter --enable-protections detected, enabling Protections. ...')
            config,
            argname="position_stacking",
            logstring="Parameter --enable-position-stacking detected ...",
        )

        if 'use_max_market_positions' in self.args and not self.args["use_max_market_positions"]:
            config.update({'use_max_market_positions': False})
            logger.info('Parameter --disable-max-market-positions detected ...')
            logger.info('max_open_trades set to unlimited ...')
        elif 'max_open_trades' in self.args and self.args['max_open_trades']:
            config.update({'max_open_trades': self.args['max_open_trades']})
            logger.info('Parameter --max-open-trades detected, '
                        'overriding max_open_trades to: %s ...', config.get('max_open_trades'))
        elif config['runmode'] in NON_UTIL_MODES:
            logger.info('Using max_open_trades: %s ...', config.get('max_open_trades'))
        self._args_to_config(
            config,
            argname="enable_protections",
            logstring="Parameter --enable-protections detected, enabling Protections. ...",
        )

        if "use_max_market_positions" in self.args and not self.args["use_max_market_positions"]:
            config.update({"use_max_market_positions": False})
            logger.info("Parameter --disable-max-market-positions detected ...")
            logger.info("max_open_trades set to unlimited ...")
        elif "max_open_trades" in self.args and self.args["max_open_trades"]:
            config.update({"max_open_trades": self.args["max_open_trades"]})
            logger.info(
                "Parameter --max-open-trades detected, overriding max_open_trades to: %s ...",
                config.get("max_open_trades"),
            )
        elif config["runmode"] in NON_UTIL_MODES:
            logger.info("Using max_open_trades: %s ...", config.get("max_open_trades"))
        # Setting max_open_trades to infinite if -1
        if config.get('max_open_trades') == -1:
            config['max_open_trades'] = float('inf')
        if config.get("max_open_trades") == -1:
            config["max_open_trades"] = float("inf")

        if self.args.get('stake_amount'):
        if self.args.get("stake_amount"):
            # Convert explicitly to float to support CLI argument for both unlimited and value
            try:
                self.args['stake_amount'] = float(self.args['stake_amount'])
                self.args["stake_amount"] = float(self.args["stake_amount"])
            except ValueError:
                pass

        configurations = [
            ('timeframe_detail',
             'Parameter --timeframe-detail detected, using {} for intra-candle backtesting ...'),
            ('backtest_show_pair_list', 'Parameter --show-pair-list detected.'),
            ('stake_amount',
             'Parameter --stake-amount detected, overriding stake_amount to: {} ...'),
            ('dry_run_wallet',
             'Parameter --dry-run-wallet detected, overriding dry_run_wallet to: {} ...'),
            ('fee', 'Parameter --fee detected, setting fee to: {} ...'),
            ('timerange', 'Parameter --timerange detected: {} ...'),
        ]
            (
                "timeframe_detail",
                "Parameter --timeframe-detail detected, using {} for intra-candle backtesting ...",
            ),
            ("backtest_show_pair_list", "Parameter --show-pair-list detected."),
            (
                "stake_amount",
                "Parameter --stake-amount detected, overriding stake_amount to: {} ...",
            ),
            (
                "dry_run_wallet",
                "Parameter --dry-run-wallet detected, overriding dry_run_wallet to: {} ...",
            ),
            ("fee", "Parameter --fee detected, setting fee to: {} ..."),
            ("timerange", "Parameter --timerange detected: {} ..."),
        ]

        self._args_to_config_loop(config, configurations)

        self._process_datadir_options(config)

        self._args_to_config(config, argname='strategy_list',
                             logstring='Using strategy list of {} strategies', logfun=len)
        self._args_to_config(
            config,
            argname="strategy_list",
            logstring="Using strategy list of {} strategies",
            logfun=len,
        )

        configurations = [
            ('recursive_strategy_search',
             'Recursively searching for a strategy in the strategies folder.'),
            ('timeframe', 'Overriding timeframe with Command line argument'),
            ('export', 'Parameter --export detected: {} ...'),
            ('backtest_breakdown', 'Parameter --breakdown detected ...'),
            ('backtest_cache', 'Parameter --cache={} detected ...'),
            ('disableparamexport', 'Parameter --disableparamexport detected: {} ...'),
            ('freqai_backtest_live_models',
             'Parameter --freqai-backtest-live-models detected ...'),
            (
                "recursive_strategy_search",
                "Recursively searching for a strategy in the strategies folder.",
            ),
            ("timeframe", "Overriding timeframe with Command line argument"),
            ("export", "Parameter --export detected: {} ..."),
            ("backtest_breakdown", "Parameter --breakdown detected ..."),
            ("backtest_cache", "Parameter --cache={} detected ..."),
            ("disableparamexport", "Parameter --disableparamexport detected: {} ..."),
            ("freqai_backtest_live_models", "Parameter --freqai-backtest-live-models detected ..."),
        ]
        self._args_to_config_loop(config, configurations)

        # Edge section:
        if 'stoploss_range' in self.args and self.args["stoploss_range"]:
        if "stoploss_range" in self.args and self.args["stoploss_range"]:
            txt_range = eval(self.args["stoploss_range"])
            config['edge'].update({'stoploss_range_min': txt_range[0]})
            config['edge'].update({'stoploss_range_max': txt_range[1]})
            config['edge'].update({'stoploss_range_step': txt_range[2]})
            logger.info('Parameter --stoplosses detected: %s ...', self.args["stoploss_range"])
            config["edge"].update({"stoploss_range_min": txt_range[0]})
            config["edge"].update({"stoploss_range_max": txt_range[1]})
            config["edge"].update({"stoploss_range_step": txt_range[2]})
            logger.info("Parameter --stoplosses detected: %s ...", self.args["stoploss_range"])

        # Hyperopt section

        configurations = [
            ('hyperopt', 'Using Hyperopt class name: {}'),
            ('hyperopt_path', 'Using additional Hyperopt lookup path: {}'),
            ('hyperoptexportfilename', 'Using hyperopt file: {}'),
            ('lookahead_analysis_exportfilename', 'Saving lookahead analysis results into {} ...'),
            ('epochs', 'Parameter --epochs detected ... Will run Hyperopt with for {} epochs ...'),
            ('spaces', 'Parameter -s/--spaces detected: {}'),
            ('analyze_per_epoch', 'Parameter --analyze-per-epoch detected.'),
            ('print_all', 'Parameter --print-all detected ...'),
            ("hyperopt", "Using Hyperopt class name: {}"),
            ("hyperopt_path", "Using additional Hyperopt lookup path: {}"),
            ("hyperoptexportfilename", "Using hyperopt file: {}"),
            ("lookahead_analysis_exportfilename", "Saving lookahead analysis results into {} ..."),
            ("epochs", "Parameter --epochs detected ... Will run Hyperopt with for {} epochs ..."),
            ("spaces", "Parameter -s/--spaces detected: {}"),
            ("analyze_per_epoch", "Parameter --analyze-per-epoch detected."),
            ("print_all", "Parameter --print-all detected ..."),
        ]
        self._args_to_config_loop(config, configurations)

        if 'print_colorized' in self.args and not self.args["print_colorized"]:
            logger.info('Parameter --no-color detected ...')
            config.update({'print_colorized': False})
        if "print_colorized" in self.args and not self.args["print_colorized"]:
            logger.info("Parameter --no-color detected ...")
            config.update({"print_colorized": False})
        else:
            config.update({'print_colorized': True})
            config.update({"print_colorized": True})

        configurations = [
            ('print_json', 'Parameter --print-json detected ...'),
            ('export_csv', 'Parameter --export-csv detected: {}'),
            ('hyperopt_jobs', 'Parameter -j/--job-workers detected: {}'),
            ('hyperopt_random_state', 'Parameter --random-state detected: {}'),
            ('hyperopt_min_trades', 'Parameter --min-trades detected: {}'),
            ('hyperopt_loss', 'Using Hyperopt loss class name: {}'),
            ('hyperopt_show_index', 'Parameter -n/--index detected: {}'),
            ('hyperopt_list_best', 'Parameter --best detected: {}'),
            ('hyperopt_list_profitable', 'Parameter --profitable detected: {}'),
            ('hyperopt_list_min_trades', 'Parameter --min-trades detected: {}'),
            ('hyperopt_list_max_trades', 'Parameter --max-trades detected: {}'),
            ('hyperopt_list_min_avg_time', 'Parameter --min-avg-time detected: {}'),
            ('hyperopt_list_max_avg_time', 'Parameter --max-avg-time detected: {}'),
            ('hyperopt_list_min_avg_profit', 'Parameter --min-avg-profit detected: {}'),
            ('hyperopt_list_max_avg_profit', 'Parameter --max-avg-profit detected: {}'),
            ('hyperopt_list_min_total_profit', 'Parameter --min-total-profit detected: {}'),
            ('hyperopt_list_max_total_profit', 'Parameter --max-total-profit detected: {}'),
            ('hyperopt_list_min_objective', 'Parameter --min-objective detected: {}'),
            ('hyperopt_list_max_objective', 'Parameter --max-objective detected: {}'),
            ('hyperopt_list_no_details', 'Parameter --no-details detected: {}'),
            ('hyperopt_show_no_header', 'Parameter --no-header detected: {}'),
            ('hyperopt_ignore_missing_space', 'Paramter --ignore-missing-space detected: {}'),
            ("print_json", "Parameter --print-json detected ..."),
            ("export_csv", "Parameter --export-csv detected: {}"),
            ("hyperopt_jobs", "Parameter -j/--job-workers detected: {}"),
            ("hyperopt_random_state", "Parameter --random-state detected: {}"),
            ("hyperopt_min_trades", "Parameter --min-trades detected: {}"),
            ("hyperopt_loss", "Using Hyperopt loss class name: {}"),
            ("hyperopt_show_index", "Parameter -n/--index detected: {}"),
            ("hyperopt_list_best", "Parameter --best detected: {}"),
            ("hyperopt_list_profitable", "Parameter --profitable detected: {}"),
            ("hyperopt_list_min_trades", "Parameter --min-trades detected: {}"),
            ("hyperopt_list_max_trades", "Parameter --max-trades detected: {}"),
            ("hyperopt_list_min_avg_time", "Parameter --min-avg-time detected: {}"),
            ("hyperopt_list_max_avg_time", "Parameter --max-avg-time detected: {}"),
            ("hyperopt_list_min_avg_profit", "Parameter --min-avg-profit detected: {}"),
            ("hyperopt_list_max_avg_profit", "Parameter --max-avg-profit detected: {}"),
            ("hyperopt_list_min_total_profit", "Parameter --min-total-profit detected: {}"),
            ("hyperopt_list_max_total_profit", "Parameter --max-total-profit detected: {}"),
            ("hyperopt_list_min_objective", "Parameter --min-objective detected: {}"),
            ("hyperopt_list_max_objective", "Parameter --max-objective detected: {}"),
            ("hyperopt_list_no_details", "Parameter --no-details detected: {}"),
            ("hyperopt_show_no_header", "Parameter --no-header detected: {}"),
            ("hyperopt_ignore_missing_space", "Parameter --ignore-missing-space detected: {}"),
        ]

        self._args_to_config_loop(config, configurations)

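# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# "-1" on the CLI means unlimited open trades; storing float("inf") keeps numeric
# comparisons such as `open_trade_count < max_open_trades` working unchanged.
max_open_trades = -1
if max_open_trades == -1:
    max_open_trades = float("inf")
print(1000 < max_open_trades)  # True - any number of concurrent trades is allowed
# -----------------------------------------------------------------------------------
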
    def _process_plot_options(self, config: Config) -> None:

        configurations = [
            ('pairs', 'Using pairs {}'),
            ('indicators1', 'Using indicators1: {}'),
            ('indicators2', 'Using indicators2: {}'),
            ('trade_ids', 'Filtering on trade_ids: {}'),
            ('plot_limit', 'Limiting plot to: {}'),
            ('plot_auto_open', 'Parameter --auto-open detected.'),
            ('trade_source', 'Using trades from: {}'),
            ('prepend_data', 'Prepend detected. Allowing data prepending.'),
            ('erase', 'Erase detected. Deleting existing data.'),
            ('no_trades', 'Parameter --no-trades detected.'),
            ('timeframes', 'timeframes --timeframes: {}'),
            ('days', 'Detected --days: {}'),
            ('include_inactive', 'Detected --include-inactive-pairs: {}'),
            ('download_trades', 'Detected --dl-trades: {}'),
            ('dataformat_ohlcv', 'Using "{}" to store OHLCV data.'),
            ('dataformat_trades', 'Using "{}" to store trades data.'),
            ('show_timerange', 'Detected --show-timerange'),
            ("pairs", "Using pairs {}"),
            ("indicators1", "Using indicators1: {}"),
            ("indicators2", "Using indicators2: {}"),
            ("trade_ids", "Filtering on trade_ids: {}"),
            ("plot_limit", "Limiting plot to: {}"),
            ("plot_auto_open", "Parameter --auto-open detected."),
            ("trade_source", "Using trades from: {}"),
            ("prepend_data", "Prepend detected. Allowing data prepending."),
            ("erase", "Erase detected. Deleting existing data."),
            ("no_trades", "Parameter --no-trades detected."),
            ("timeframes", "timeframes --timeframes: {}"),
            ("days", "Detected --days: {}"),
            ("include_inactive", "Detected --include-inactive-pairs: {}"),
            ("download_trades", "Detected --dl-trades: {}"),
            ("convert_trades", "Detected --convert: {} - Converting Trade data to OHCV {}"),
            ("dataformat_ohlcv", 'Using "{}" to store OHLCV data.'),
            ("dataformat_trades", 'Using "{}" to store trades data.'),
            ("show_timerange", "Detected --show-timerange"),
        ]
        self._args_to_config_loop(config, configurations)

    def _process_data_options(self, config: Config) -> None:
        self._args_to_config(config, argname='new_pairs_days',
                             logstring='Detected --new-pairs-days: {}')
        self._args_to_config(config, argname='trading_mode',
                             logstring='Detected --trading-mode: {}')
        config['candle_type_def'] = CandleType.get_default(
            config.get('trading_mode', 'spot') or 'spot')
        config['trading_mode'] = TradingMode(config.get('trading_mode', 'spot') or 'spot')
        self._args_to_config(config, argname='candle_types',
                             logstring='Detected --candle-types: {}')
        self._args_to_config(
            config, argname="new_pairs_days", logstring="Detected --new-pairs-days: {}"
        )
        self._args_to_config(
            config, argname="trading_mode", logstring="Detected --trading-mode: {}"
        )
        config["candle_type_def"] = CandleType.get_default(
            config.get("trading_mode", "spot") or "spot"
        )
        config["trading_mode"] = TradingMode(config.get("trading_mode", "spot") or "spot")
        self._args_to_config(
            config, argname="candle_types", logstring="Detected --candle-types: {}"
        )

    def _process_analyze_options(self, config: Config) -> None:
        configurations = [
            ('analysis_groups', 'Analysis reason groups: {}'),
            ('enter_reason_list', 'Analysis enter tag list: {}'),
            ('exit_reason_list', 'Analysis exit tag list: {}'),
            ('indicator_list', 'Analysis indicator list: {}'),
            ('timerange', 'Filter trades by timerange: {}'),
            ('analysis_rejected', 'Analyse rejected signals: {}'),
            ('analysis_to_csv', 'Store analysis tables to CSV: {}'),
            ('analysis_csv_path', 'Path to store analysis CSVs: {}'),
            ("analysis_groups", "Analysis reason groups: {}"),
            ("enter_reason_list", "Analysis enter tag list: {}"),
            ("exit_reason_list", "Analysis exit tag list: {}"),
            ("indicator_list", "Analysis indicator list: {}"),
            ("timerange", "Filter trades by timerange: {}"),
            ("analysis_rejected", "Analyse rejected signals: {}"),
            ("analysis_to_csv", "Store analysis tables to CSV: {}"),
            ("analysis_csv_path", "Path to store analysis CSVs: {}"),
            # Lookahead analysis results
            ('targeted_trade_amount', 'Targeted Trade amount: {}'),
            ('minimum_trade_amount', 'Minimum Trade amount: {}'),
            ('lookahead_analysis_exportfilename', 'Path to store lookahead-analysis-results: {}'),
            ('startup_candle', 'Startup candle to be used on recursive analysis: {}'),
            ("targeted_trade_amount", "Targeted Trade amount: {}"),
            ("minimum_trade_amount", "Minimum Trade amount: {}"),
            ("lookahead_analysis_exportfilename", "Path to store lookahead-analysis-results: {}"),
            ("startup_candle", "Startup candle to be used on recursive analysis: {}"),
        ]
        self._args_to_config_loop(config, configurations)

    def _args_to_config_loop(self, config, configurations: List[Tuple[str, str]]) -> None:

        for argname, logstring in configurations:
            self._args_to_config(config, argname=argname, logstring=logstring)

    def _process_runmode(self, config: Config) -> None:

        self._args_to_config(config, argname='dry_run',
                             logstring='Parameter --dry-run detected, '
                             'overriding dry_run to: {} ...')
        self._args_to_config(
            config,
            argname="dry_run",
            logstring="Parameter --dry-run detected, overriding dry_run to: {} ...",
        )

        if not self.runmode:
            # Handle real mode, infer dry/live from config
            self.runmode = RunMode.DRY_RUN if config.get('dry_run', True) else RunMode.LIVE
            self.runmode = RunMode.DRY_RUN if config.get("dry_run", True) else RunMode.LIVE
            logger.info(f"Runmode set to {self.runmode.value}.")

        config.update({'runmode': self.runmode})
        config.update({"runmode": self.runmode})

    def _process_freqai_options(self, config: Config) -> None:
        self._args_to_config(
            config, argname="freqaimodel", logstring="Using freqaimodel class name: {}"
        )

        self._args_to_config(config, argname='freqaimodel',
                             logstring='Using freqaimodel class name: {}')

        self._args_to_config(config, argname='freqaimodel_path',
                             logstring='Using freqaimodel path: {}')
        self._args_to_config(
            config, argname="freqaimodel_path", logstring="Using freqaimodel path: {}"
        )

        return

    def _args_to_config(self, config: Config, argname: str,
                        logstring: str, logfun: Optional[Callable] = None,
                        deprecated_msg: Optional[str] = None) -> None:
    def _args_to_config(
        self,
        config: Config,
        argname: str,
        logstring: str,
        logfun: Optional[Callable] = None,
        deprecated_msg: Optional[str] = None,
    ) -> None:
        """
        :param config: Configuration dictionary
        :param argname: Argumentname in self.args - will be copied to config dict.

@@ -420,9 +456,11 @@ class Configuration:
        sample: logfun=len (prints the length of the found
        configuration instead of the content)
        """
        if (argname in self.args and self.args[argname] is not None
                and self.args[argname] is not False):

        if (
            argname in self.args
            and self.args[argname] is not None
            and self.args[argname] is not False
        ):
            config.update({argname: self.args[argname]})
            if logfun:
                logger.info(logstring.format(logfun(config[argname])))

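# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# The _args_to_config pattern above, reduced to a standalone function: copy a CLI
# argument into the config only if it was actually supplied (not None / not False),
# then log it. The real method additionally supports logfun and deprecation messages.
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("sketch")

def args_to_config(config: dict, args: dict, argname: str, logstring: str) -> None:
    if argname in args and args[argname] is not None and args[argname] is not False:
        config[argname] = args[argname]
        logger.info(logstring.format(config[argname]))

config: dict = {}
args_to_config(config, {"timerange": "20200101-"}, "timerange",
               "Parameter --timerange detected: {} ...")
print(config)  # {'timerange': '20200101-'}
# -----------------------------------------------------------------------------------
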
@@ -441,7 +479,7 @@ class Configuration:
        """

        if "pairs" in config:
            config['exchange']['pair_whitelist'] = config['pairs']
            config["exchange"]["pair_whitelist"] = config["pairs"]
            return

        if "pairs_file" in self.args and self.args["pairs_file"]:

@@ -451,19 +489,19 @@ class Configuration:
            # or if pairs file is specified explicitly
            if not pairs_file.exists():
                raise OperationalException(f'No pairs file found with path "{pairs_file}".')
            config['pairs'] = load_file(pairs_file)
            if isinstance(config['pairs'], list):
                config['pairs'].sort()
            config["pairs"] = load_file(pairs_file)
            if isinstance(config["pairs"], list):
                config["pairs"].sort()
            return

        if 'config' in self.args and self.args['config']:
        if "config" in self.args and self.args["config"]:
            logger.info("Using pairlist from configuration.")
            config['pairs'] = config.get('exchange', {}).get('pair_whitelist')
            config["pairs"] = config.get("exchange", {}).get("pair_whitelist")
        else:
            # Fall back to /dl_path/pairs.json
            pairs_file = config['datadir'] / 'pairs.json'
            pairs_file = config["datadir"] / "pairs.json"
            if pairs_file.exists():
                logger.info(f'Reading pairs file "{pairs_file}".')
                config['pairs'] = load_file(pairs_file)
                if 'pairs' in config and isinstance(config['pairs'], list):
                    config['pairs'].sort()
                config["pairs"] = load_file(pairs_file)
                if "pairs" in config and isinstance(config["pairs"], list):
                    config["pairs"].sort()

@@ -12,9 +12,13 @@ from freqtrade.exceptions import ConfigurationError, OperationalException
logger = logging.getLogger(__name__)


def check_conflicting_settings(config: Config,
                               section_old: Optional[str], name_old: str,
                               section_new: Optional[str], name_new: str) -> None:
def check_conflicting_settings(
    config: Config,
    section_old: Optional[str],
    name_old: str,
    section_new: Optional[str],
    name_new: str,
) -> None:
    section_new_config = config.get(section_new, {}) if section_new else config
    section_old_config = config.get(section_old, {}) if section_old else config
    if name_new in section_new_config and name_old in section_old_config:

@@ -29,9 +33,9 @@ def check_conflicting_settings(config: Config,
    )


def process_removed_setting(config: Config,
                            section1: str, name1: str,
                            section2: Optional[str], name2: str) -> None:
def process_removed_setting(
    config: Config, section1: str, name1: str, section2: Optional[str], name2: str
) -> None:
    """
    :param section1: Removed section
    :param name1: Removed setting name

@@ -48,10 +52,13 @@ def process_removed_setting(config: Config,
    )


def process_deprecated_setting(config: Config,
                               section_old: Optional[str], name_old: str,
                               section_new: Optional[str], name_new: str
                               ) -> None:
def process_deprecated_setting(
    config: Config,
    section_old: Optional[str],
    name_old: str,
    section_new: Optional[str],
    name_new: str,
) -> None:
    check_conflicting_settings(config, section_old, name_old, section_new, name_new)
    section_old_config = config.get(section_old, {}) if section_old else config

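# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# What process_deprecated_setting boils down to for top-level keys: warn, copy the
# old value to the new key, and drop the old key. Simplified - the real function
# also handles nested sections and conflicting old/new values.
import warnings

def migrate_setting(config: dict, name_old: str, name_new: str) -> None:
    if name_old in config:
        warnings.warn(f"DEPRECATED: '{name_old}' detected, please use '{name_new}'.")
        config[name_new] = config.pop(name_old)

config = {"forcebuy_enable": True}
migrate_setting(config, "forcebuy_enable", "force_entry_enable")
print(config)  # {'force_entry_enable': True}
# -----------------------------------------------------------------------------------
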
@@ -71,57 +78,91 @@ def process_deprecated_setting(config: Config,


def process_temporary_deprecated_settings(config: Config) -> None:

    # Kept for future deprecated / moved settings
    # check_conflicting_settings(config, 'ask_strategy', 'use_sell_signal',
    #                            'experimental', 'use_sell_signal')

    process_deprecated_setting(config, 'ask_strategy', 'ignore_buying_expired_candle_after',
                               None, 'ignore_buying_expired_candle_after')
    process_deprecated_setting(
        config,
        "ask_strategy",
        "ignore_buying_expired_candle_after",
        None,
        "ignore_buying_expired_candle_after",
    )

    process_deprecated_setting(config, None, 'forcebuy_enable', None, 'force_entry_enable')
    process_deprecated_setting(config, None, "forcebuy_enable", None, "force_entry_enable")

    # New settings
    if config.get('telegram'):
        process_deprecated_setting(config['telegram'], 'notification_settings', 'sell',
                                   'notification_settings', 'exit')
        process_deprecated_setting(config['telegram'], 'notification_settings', 'sell_fill',
                                   'notification_settings', 'exit_fill')
        process_deprecated_setting(config['telegram'], 'notification_settings', 'sell_cancel',
                                   'notification_settings', 'exit_cancel')
        process_deprecated_setting(config['telegram'], 'notification_settings', 'buy',
                                   'notification_settings', 'entry')
        process_deprecated_setting(config['telegram'], 'notification_settings', 'buy_fill',
                                   'notification_settings', 'entry_fill')
        process_deprecated_setting(config['telegram'], 'notification_settings', 'buy_cancel',
                                   'notification_settings', 'entry_cancel')
    if config.get('webhook'):
        process_deprecated_setting(config, 'webhook', 'webhookbuy', 'webhook', 'webhookentry')
        process_deprecated_setting(config, 'webhook', 'webhookbuycancel',
                                   'webhook', 'webhookentrycancel')
        process_deprecated_setting(config, 'webhook', 'webhookbuyfill',
                                   'webhook', 'webhookentryfill')
        process_deprecated_setting(config, 'webhook', 'webhooksell', 'webhook', 'webhookexit')
        process_deprecated_setting(config, 'webhook', 'webhooksellcancel',
                                   'webhook', 'webhookexitcancel')
        process_deprecated_setting(config, 'webhook', 'webhooksellfill',
                                   'webhook', 'webhookexitfill')
    if config.get("telegram"):
        process_deprecated_setting(
            config["telegram"], "notification_settings", "sell", "notification_settings", "exit"
        )
        process_deprecated_setting(
            config["telegram"],
            "notification_settings",
            "sell_fill",
            "notification_settings",
            "exit_fill",
        )
        process_deprecated_setting(
            config["telegram"],
            "notification_settings",
            "sell_cancel",
            "notification_settings",
            "exit_cancel",
        )
        process_deprecated_setting(
            config["telegram"], "notification_settings", "buy", "notification_settings", "entry"
        )
        process_deprecated_setting(
            config["telegram"],
            "notification_settings",
            "buy_fill",
            "notification_settings",
            "entry_fill",
        )
        process_deprecated_setting(
            config["telegram"],
            "notification_settings",
            "buy_cancel",
            "notification_settings",
            "entry_cancel",
        )
    if config.get("webhook"):
        process_deprecated_setting(config, "webhook", "webhookbuy", "webhook", "webhookentry")
        process_deprecated_setting(
            config, "webhook", "webhookbuycancel", "webhook", "webhookentrycancel"
        )
        process_deprecated_setting(
            config, "webhook", "webhookbuyfill", "webhook", "webhookentryfill"
        )
        process_deprecated_setting(config, "webhook", "webhooksell", "webhook", "webhookexit")
        process_deprecated_setting(
            config, "webhook", "webhooksellcancel", "webhook", "webhookexitcancel"
        )
        process_deprecated_setting(
            config, "webhook", "webhooksellfill", "webhook", "webhookexitfill"
        )

    # Legacy way - having them in experimental ...

    process_removed_setting(config, 'experimental', 'use_sell_signal', None, 'use_exit_signal')
    process_removed_setting(config, 'experimental', 'sell_profit_only', None, 'exit_profit_only')
    process_removed_setting(config, 'experimental', 'ignore_roi_if_buy_signal',
                            None, 'ignore_roi_if_entry_signal')
    process_removed_setting(config, "experimental", "use_sell_signal", None, "use_exit_signal")
    process_removed_setting(config, "experimental", "sell_profit_only", None, "exit_profit_only")
    process_removed_setting(
        config, "experimental", "ignore_roi_if_buy_signal", None, "ignore_roi_if_entry_signal"
    )

    process_removed_setting(config, 'ask_strategy', 'use_sell_signal', None, 'use_exit_signal')
    process_removed_setting(config, 'ask_strategy', 'sell_profit_only', None, 'exit_profit_only')
    process_removed_setting(config, 'ask_strategy', 'sell_profit_offset',
                            None, 'exit_profit_offset')
    process_removed_setting(config, 'ask_strategy', 'ignore_roi_if_buy_signal',
                            None, 'ignore_roi_if_entry_signal')
    if (config.get('edge', {}).get('enabled', False)
            and 'capital_available_percentage' in config.get('edge', {})):
    process_removed_setting(config, "ask_strategy", "use_sell_signal", None, "use_exit_signal")
    process_removed_setting(config, "ask_strategy", "sell_profit_only", None, "exit_profit_only")
    process_removed_setting(
        config, "ask_strategy", "sell_profit_offset", None, "exit_profit_offset"
    )
    process_removed_setting(
        config, "ask_strategy", "ignore_roi_if_buy_signal", None, "ignore_roi_if_entry_signal"
    )
    if config.get("edge", {}).get(
        "enabled", False
    ) and "capital_available_percentage" in config.get("edge", {}):
        raise ConfigurationError(
            "DEPRECATED: "
            "Using 'edge.capital_available_percentage' has been deprecated in favor of "
@@ -129,12 +170,11 @@ def process_temporary_deprecated_settings(config: Config) -> None:
            "'tradable_balance_ratio' and remove 'capital_available_percentage' "
            "from the edge configuration."
        )
    if 'ticker_interval' in config:

    if "ticker_interval" in config:
        raise ConfigurationError(
            "DEPRECATED: 'ticker_interval' detected. "
            "Please use 'timeframe' instead of 'ticker_interval'."
        )

    if 'protections' in config:
    if "protections" in config:
        logger.warning("DEPRECATED: Setting 'protections' in the configuration is deprecated.")

@@ -5,4 +5,4 @@ def running_in_docker() -> bool:
    """
    Check if we are running in a docker container
    """
    return os.environ.get('FT_APP_ENV') == 'docker'
    return os.environ.get("FT_APP_ENV") == "docker"

@@ -4,8 +4,14 @@ from pathlib import Path
from typing import Optional

from freqtrade.configuration.detect_environment import running_in_docker
from freqtrade.constants import (USER_DATA_FILES, USERPATH_FREQAIMODELS, USERPATH_HYPEROPTS,
                                 USERPATH_NOTEBOOKS, USERPATH_STRATEGIES, Config)
from freqtrade.constants import (
    USER_DATA_FILES,
    USERPATH_FREQAIMODELS,
    USERPATH_HYPEROPTS,
    USERPATH_NOTEBOOKS,
    USERPATH_STRATEGIES,
    Config,
)
from freqtrade.exceptions import OperationalException


@@ -13,16 +19,15 @@ logger = logging.getLogger(__name__)


def create_datadir(config: Config, datadir: Optional[str] = None) -> Path:

    folder = Path(datadir) if datadir else Path(f"{config['user_data_dir']}/data")
    if not datadir:
        # set datadir
        exchange_name = config.get('exchange', {}).get('name', '').lower()
        exchange_name = config.get("exchange", {}).get("name", "").lower()
        folder = folder.joinpath(exchange_name)

    if not folder.is_dir():
        folder.mkdir(parents=True)
        logger.info(f'Created data directory: {datadir}')
        logger.info(f"Created data directory: {datadir}")
    return folder


@@ -34,8 +39,8 @@ def chown_user_directory(directory: Path) -> None:
    if running_in_docker():
        try:
            import subprocess
            subprocess.check_output(
                ['sudo', 'chown', '-R', 'ftuser:', str(directory.resolve())])

            subprocess.check_output(["sudo", "chown", "-R", "ftuser:", str(directory.resolve())])
        except Exception:
            logger.warning(f"Could not chown {directory}")

@@ -50,18 +55,28 @@ def create_userdata_dir(directory: str, create_dir: bool = False) -> Path:
    :param create_dir: Create directory if it does not exist.
    :return: Path object containing the directory
    """
    sub_dirs = ["backtest_results", "data", USERPATH_HYPEROPTS, "hyperopt_results", "logs",
                USERPATH_NOTEBOOKS, "plot", USERPATH_STRATEGIES, USERPATH_FREQAIMODELS]
    sub_dirs = [
        "backtest_results",
        "data",
        USERPATH_HYPEROPTS,
        "hyperopt_results",
        "logs",
        USERPATH_NOTEBOOKS,
        "plot",
        USERPATH_STRATEGIES,
        USERPATH_FREQAIMODELS,
    ]
    folder = Path(directory)
    chown_user_directory(folder)
    if not folder.is_dir():
        if create_dir:
            folder.mkdir(parents=True)
            logger.info(f'Created user-data directory: {folder}')
            logger.info(f"Created user-data directory: {folder}")
        else:
            raise OperationalException(
                f"Directory `{folder}` does not exist. "
                "Please use `freqtrade create-userdir` to create a user directory")
                "Please use `freqtrade create-userdir` to create a user directory"
            )

    # Create required subdirectories
    for f in sub_dirs:

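# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# The directory tree created by create_userdata_dir above, rebuilt standalone. The
# literal names stand in for the USERPATH_* constants (values assumed) and the base
# path is hypothetical.
from pathlib import Path

sub_dirs = ["backtest_results", "data", "hyperopts", "hyperopt_results", "logs",
            "notebooks", "plot", "strategies", "freqaimodels"]
base = Path("user_data")
for d in sub_dirs:
    (base / d).mkdir(parents=True, exist_ok=True)  # mirrors the create_dir branch
# -----------------------------------------------------------------------------------
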
@@ -16,9 +16,9 @@ def _get_var_typed(val):
    try:
        return float(val)
    except ValueError:
        if val.lower() in ('t', 'true'):
        if val.lower() in ("t", "true"):
            return True
        elif val.lower() in ('f', 'false'):
        elif val.lower() in ("f", "false"):
            return False
    # keep as string
    return val

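# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# What _get_var_typed above does with raw environment strings. The int attempt sits
# just above this hunk in the full file; it is repeated here so the sketch runs.
def get_var_typed(val: str):
    try:
        return int(val)
    except ValueError:
        try:
            return float(val)
        except ValueError:
            if val.lower() in ("t", "true"):
                return True
            elif val.lower() in ("f", "false"):
                return False
    # keep as string
    return val

print(get_var_typed("8080"), get_var_typed("0.05"), get_var_typed("true"), get_var_typed("kraken"))
# 8080 0.05 True kraken
# -----------------------------------------------------------------------------------
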
@@ -32,16 +32,21 @@ def _flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str
    :param prefix: Prefix to consider (usually FREQTRADE__)
    :return: Nested dict based on available and relevant variables.
    """
    no_convert = ['CHAT_ID', 'PASSWORD']
    no_convert = ["CHAT_ID", "PASSWORD"]
    relevant_vars: Dict[str, Any] = {}

    for env_var, val in sorted(env_dict.items()):
        if env_var.startswith(prefix):
            logger.info(f"Loading variable '{env_var}'")
            key = env_var.replace(prefix, '')
            for k in reversed(key.split('__')):
                val = {k.lower(): _get_var_typed(val)
                       if not isinstance(val, dict) and k not in no_convert else val}
            key = env_var.replace(prefix, "")
            for k in reversed(key.split("__")):
                val = {
                    k.lower(): (
                        _get_var_typed(val)
                        if not isinstance(val, dict) and k not in no_convert
                        else val
                    )
                }
            relevant_vars = deep_merge_dicts(val, relevant_vars)
    return relevant_vars

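# --- Illustrative sketch (editor's addition, not part of the diff) ----------------
# Worked example of the reversed-split trick above: FREQTRADE__EXCHANGE__NAME=binance
# becomes {"exchange": {"name": "binance"}}. Standalone re-implementation, without
# the type conversion and no_convert handling.
def flat_to_nested(env: dict, prefix: str = "FREQTRADE__") -> dict:
    result: dict = {}
    for env_var, val in sorted(env.items()):
        if not env_var.startswith(prefix):
            continue  # unrelated environment variables are ignored
        node = result
        keys = env_var[len(prefix):].split("__")
        for k in keys[:-1]:
            node = node.setdefault(k.lower(), {})
        node[keys[-1].lower()] = val
    return result

print(flat_to_nested({"FREQTRADE__EXCHANGE__NAME": "binance", "HOME": "/root"}))
# {'exchange': {'name': 'binance'}}
# -----------------------------------------------------------------------------------
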
@@ -1,6 +1,7 @@
"""
This module contains functions to load the configuration file
"""

import logging
import re
import sys

@@ -25,25 +26,25 @@ def log_config_error_range(path: str, errmsg: str) -> str:
    """
    Parses configuration file and prints range around error
    """
    if path != '-':
        offsetlist = re.findall(r'(?<=Parse\serror\sat\soffset\s)\d+', errmsg)
    if path != "-":
        offsetlist = re.findall(r"(?<=Parse\serror\sat\soffset\s)\d+", errmsg)
        if offsetlist:
            offset = int(offsetlist[0])
            text = Path(path).read_text()
            # Fetch an offset of 80 characters around the error line
            subtext = text[offset - min(80, offset):offset + 80]
            segments = subtext.split('\n')
            subtext = text[offset - min(80, offset) : offset + 80]
            segments = subtext.split("\n")
            if len(segments) > 3:
                # Remove first and last lines, to avoid odd truncations
                return '\n'.join(segments[1:-1])
                return "\n".join(segments[1:-1])
            else:
                return subtext
    return ''
    return ""


def load_file(path: Path) -> Dict[str, Any]:
    try:
        with path.open('r') as file:
        with path.open("r") as file:
            config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
    except FileNotFoundError:
        raise OperationalException(f'File "{path}" not found!') from None

@@ -58,25 +59,27 @@ def load_config_file(path: str) -> Dict[str, Any]:
    """
    try:
        # Read config from stdin if requested in the options
        with Path(path).open() if path != '-' else sys.stdin as file:
        with Path(path).open() if path != "-" else sys.stdin as file:
            config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
    except FileNotFoundError:
        raise OperationalException(
            f'Config file "{path}" not found!'
            ' Please create a config file or check whether it exists.') from None
            " Please create a config file or check whether it exists."
        ) from None
    except rapidjson.JSONDecodeError as e:
        err_range = log_config_error_range(path, str(e))
        raise ConfigurationError(
            f'{e}\n'
            f'Please verify the following segment of your configuration:\n{err_range}'
            if err_range else 'Please verify your configuration file for syntax errors.'
            f"{e}\nPlease verify the following segment of your configuration:\n{err_range}"
            if err_range
            else "Please verify your configuration file for syntax errors."
        )

    return config


def load_from_files(
        files: List[str], base_path: Optional[Path] = None, level: int = 0) -> Dict[str, Any]:
    files: List[str], base_path: Optional[Path] = None, level: int = 0
) -> Dict[str, Any]:
    """
    Recursively load configuration files if specified.
    Sub-files are assumed to be relative to the initial config.

@@ -90,8 +93,8 @@ def load_from_files(
    files_loaded = []
    # We expect here a list of config filenames
    for filename in files:
        logger.info(f'Using config: {filename} ...')
        if filename == '-':
        logger.info(f"Using config: {filename} ...")
        if filename == "-":
            # Immediately load stdin and return
            return load_config_file(filename)
        file = Path(filename)

@@ -100,10 +103,11 @@ def load_from_files(
            file = base_path / file

        config_tmp = load_config_file(str(file))
        if 'add_config_files' in config_tmp:
        if "add_config_files" in config_tmp:
            config_sub = load_from_files(
                config_tmp['add_config_files'], file.resolve().parent, level + 1)
            files_loaded.extend(config_sub.get('config_files', []))
                config_tmp["add_config_files"], file.resolve().parent, level + 1
            )
            files_loaded.extend(config_sub.get("config_files", []))
            config_tmp = deep_merge_dicts(config_tmp, config_sub)

        files_loaded.insert(0, str(file))

@@ -111,6 +115,6 @@ def load_from_files(
    # Merge config options, overwriting prior values
    config = deep_merge_dicts(config_tmp, config)

    config['config_files'] = files_loaded
    config["config_files"] = files_loaded

    return config

@ -1,6 +1,7 @@
|
|||
"""
|
||||
This module contains the argument manager class
|
||||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
|
@ -22,9 +23,13 @@ class TimeRange:
|
|||
if *type is None, don't use corresponding startvalue.
|
||||
"""
|
||||
|
||||
def __init__(self, starttype: Optional[str] = None, stoptype: Optional[str] = None,
|
||||
startts: int = 0, stopts: int = 0):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
starttype: Optional[str] = None,
|
||||
stoptype: Optional[str] = None,
|
||||
startts: int = 0,
|
||||
stopts: int = 0,
|
||||
):
|
||||
self.starttype: Optional[str] = starttype
|
||||
self.stoptype: Optional[str] = stoptype
|
||||
self.startts: int = startts
|
||||
|
@ -48,12 +53,12 @@ class TimeRange:
|
|||
Returns a string representation of the timerange as used by parse_timerange.
|
||||
Follows the format yyyymmdd-yyyymmdd - leaving out the parts that are not set.
|
||||
"""
|
||||
start = ''
|
||||
stop = ''
|
||||
start = ""
|
||||
stop = ""
|
||||
if startdt := self.startdt:
|
||||
start = startdt.strftime('%Y%m%d')
|
||||
start = startdt.strftime("%Y%m%d")
|
||||
if stopdt := self.stopdt:
|
||||
stop = stopdt.strftime('%Y%m%d')
|
||||
stop = stopdt.strftime("%Y%m%d")
|
||||
return f"{start}-{stop}"
|
||||
|
||||
@property
|
||||
|
@ -61,7 +66,7 @@ class TimeRange:
|
|||
"""
|
||||
Returns a string representation of the start date
|
||||
"""
|
||||
val = 'unbounded'
|
||||
val = "unbounded"
|
||||
if (startdt := self.startdt) is not None:
|
||||
val = startdt.strftime(DATETIME_PRINT_FORMAT)
|
||||
return val
|
||||
|
@ -71,15 +76,19 @@ class TimeRange:
|
|||
"""
|
||||
Returns a string representation of the stop date
|
||||
"""
|
||||
val = 'unbounded'
|
||||
val = "unbounded"
|
||||
if (stopdt := self.stopdt) is not None:
|
||||
val = stopdt.strftime(DATETIME_PRINT_FORMAT)
|
||||
return val
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Override the default Equals behavior"""
|
||||
return (self.starttype == other.starttype and self.stoptype == other.stoptype
|
||||
and self.startts == other.startts and self.stopts == other.stopts)
|
||||
return (
|
||||
self.starttype == other.starttype
|
||||
and self.stoptype == other.stoptype
|
||||
and self.startts == other.startts
|
||||
and self.stopts == other.stopts
|
||||
)
|
||||
|
||||
def subtract_start(self, seconds: int) -> None:
|
||||
"""
|
||||
|
@ -90,8 +99,9 @@ class TimeRange:
|
|||
if self.startts:
|
||||
self.startts = self.startts - seconds
|
||||
|
||||
def adjust_start_if_necessary(self, timeframe_secs: int, startup_candles: int,
|
||||
min_date: datetime) -> None:
|
||||
def adjust_start_if_necessary(
|
||||
self, timeframe_secs: int, startup_candles: int, min_date: datetime
|
||||
) -> None:
|
||||
"""
|
||||
Adjust startts by <startup_candles> candles.
|
||||
Applies only if no startup-candles have been available.
|
||||
|
@@ -101,13 +111,13 @@ class TimeRange:
        has to be moved
        :return: None (Modifies the object in place)
        """
        if (not self.starttype or (startup_candles
                                   and min_date.timestamp() >= self.startts)):
        if not self.starttype or (startup_candles and min_date.timestamp() >= self.startts):
            # If no startts was defined, or backtest-data starts at the defined backtest-date
            logger.warning("Moving start-date by %s candles to account for startup time.",
                           startup_candles)
            logger.warning(
                "Moving start-date by %s candles to account for startup time.", startup_candles
            )
            self.startts = int(min_date.timestamp() + timeframe_secs * startup_candles)
            self.starttype = 'date'
            self.starttype = "date"

    @classmethod
    def parse_timerange(cls, text: Optional[str]) -> Self:
@@ -118,16 +128,17 @@ class TimeRange:
        """
        if not text:
            return cls(None, None, 0, 0)
        syntax = [(r'^-(\d{8})$', (None, 'date')),
                  (r'^(\d{8})-$', ('date', None)),
                  (r'^(\d{8})-(\d{8})$', ('date', 'date')),
                  (r'^-(\d{10})$', (None, 'date')),
                  (r'^(\d{10})-$', ('date', None)),
                  (r'^(\d{10})-(\d{10})$', ('date', 'date')),
                  (r'^-(\d{13})$', (None, 'date')),
                  (r'^(\d{13})-$', ('date', None)),
                  (r'^(\d{13})-(\d{13})$', ('date', 'date')),
                  ]
        syntax = [
            (r"^-(\d{8})$", (None, "date")),
            (r"^(\d{8})-$", ("date", None)),
            (r"^(\d{8})-(\d{8})$", ("date", "date")),
            (r"^-(\d{10})$", (None, "date")),
            (r"^(\d{10})-$", ("date", None)),
            (r"^(\d{10})-(\d{10})$", ("date", "date")),
            (r"^-(\d{13})$", (None, "date")),
            (r"^(\d{13})-$", ("date", None)),
            (r"^(\d{13})-(\d{13})$", ("date", "date")),
        ]
        for rex, stype in syntax:
            # Apply the regular expression to text
            match = re.match(rex, text)
@@ -138,9 +149,12 @@ class TimeRange:
            stop: int = 0
            if stype[0]:
                starts = rvals[index]
                if stype[0] == 'date' and len(starts) == 8:
                    start = int(datetime.strptime(starts, '%Y%m%d').replace(
                        tzinfo=timezone.utc).timestamp())
                if stype[0] == "date" and len(starts) == 8:
                    start = int(
                        datetime.strptime(starts, "%Y%m%d")
                        .replace(tzinfo=timezone.utc)
                        .timestamp()
                    )
                elif len(starts) == 13:
                    start = int(starts) // 1000
                else:
@@ -148,15 +162,19 @@ class TimeRange:
                index += 1
            if stype[1]:
                stops = rvals[index]
                if stype[1] == 'date' and len(stops) == 8:
                    stop = int(datetime.strptime(stops, '%Y%m%d').replace(
                        tzinfo=timezone.utc).timestamp())
                if stype[1] == "date" and len(stops) == 8:
                    stop = int(
                        datetime.strptime(stops, "%Y%m%d")
                        .replace(tzinfo=timezone.utc)
                        .timestamp()
                    )
                elif len(stops) == 13:
                    stop = int(stops) // 1000
                else:
                    stop = int(stops)
            if start > stop > 0:
                raise ConfigurationError(
                    f'Start date is after stop date for timerange "{text}"')
                    f'Start date is after stop date for timerange "{text}"'
                )
            return cls(stype[0], stype[1], start, stop)
        raise ConfigurationError(f'Incorrect syntax for timerange "{text}"')

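The nine patterns above, read top to bottom, accept 8-digit dates, 10-digit unix seconds, and 13-digit unix milliseconds, each in open-ended or bounded form. A minimal illustrative sketch of the parser's behaviour (values made up, assuming the freqtrade package is importable):

from freqtrade.configuration import TimeRange

tr = TimeRange.parse_timerange("20200101-20200201")  # 8-digit date form
print(tr.starttype, tr.stoptype)  # date date
print(tr.startts)                 # 1577836800 (2020-01-01 00:00:00 UTC)

open_ended = TimeRange.parse_timerange("1577836800-")  # 10-digit unix-seconds form
print(open_ended.stoptype)        # None - no stop boundary set
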
@@ -3,6 +3,4 @@ Module to handle data operations for freqtrade
"""

# limit what's imported when using `from freqtrade.data import *`
__all__ = [
    'converter'
]
__all__ = ["converter"]

@@ -1,6 +1,7 @@
"""
Helpers when analyzing backtest data
"""

import logging
from copy import copy
from datetime import datetime, timezone
@@ -21,14 +22,35 @@ from freqtrade.types import BacktestHistoryEntryType, BacktestResultType
logger = logging.getLogger(__name__)

# Newest format
BT_DATA_COLUMNS = ['pair', 'stake_amount', 'max_stake_amount', 'amount',
                   'open_date', 'close_date', 'open_rate', 'close_rate',
                   'fee_open', 'fee_close', 'trade_duration',
                   'profit_ratio', 'profit_abs', 'exit_reason',
                   'initial_stop_loss_abs', 'initial_stop_loss_ratio', 'stop_loss_abs',
                   'stop_loss_ratio', 'min_rate', 'max_rate', 'is_open', 'enter_tag',
                   'leverage', 'is_short', 'open_timestamp', 'close_timestamp', 'orders'
                   ]
BT_DATA_COLUMNS = [
    "pair",
    "stake_amount",
    "max_stake_amount",
    "amount",
    "open_date",
    "close_date",
    "open_rate",
    "close_rate",
    "fee_open",
    "fee_close",
    "trade_duration",
    "profit_ratio",
    "profit_abs",
    "exit_reason",
    "initial_stop_loss_abs",
    "initial_stop_loss_ratio",
    "stop_loss_abs",
    "stop_loss_ratio",
    "min_rate",
    "max_rate",
    "is_open",
    "enter_tag",
    "leverage",
    "is_short",
    "open_timestamp",
    "close_timestamp",
    "orders",
]


def get_latest_optimize_filename(directory: Union[Path, str], variant: str) -> str:
@@ -50,15 +72,16 @@ def get_latest_optimize_filename(directory: Union[Path, str], variant: str) -> s

    if not filename.is_file():
        raise ValueError(
            f"Directory '{directory}' does not seem to contain backtest statistics yet.")
            f"Directory '{directory}' does not seem to contain backtest statistics yet."
        )

    with filename.open() as file:
        data = json_load(file)

    if f'latest_{variant}' not in data:
    if f"latest_{variant}" not in data:
        raise ValueError(f"Invalid '{LAST_BT_RESULT_FN}' format.")

    return data[f'latest_{variant}']
    return data[f"latest_{variant}"]


def get_latest_backtest_filename(directory: Union[Path, str]) -> str:
@@ -71,7 +94,7 @@ def get_latest_backtest_filename(directory: Union[Path, str]) -> str:
        * `directory/.last_result.json` does not exist
        * `directory/.last_result.json` has the wrong content
    """
    return get_latest_optimize_filename(directory, 'backtest')
    return get_latest_optimize_filename(directory, "backtest")


def get_latest_hyperopt_filename(directory: Union[Path, str]) -> str:
@@ -85,14 +108,15 @@ def get_latest_hyperopt_filename(directory: Union[Path, str]) -> str:
        * `directory/.last_result.json` has the wrong content
    """
    try:
        return get_latest_optimize_filename(directory, 'hyperopt')
        return get_latest_optimize_filename(directory, "hyperopt")
    except ValueError:
        # Return default (legacy) pickle filename
        return 'hyperopt_results.pickle'
        return "hyperopt_results.pickle"


def get_latest_hyperopt_file(
        directory: Union[Path, str], predef_filename: Optional[str] = None) -> Path:
    directory: Union[Path, str], predef_filename: Optional[str] = None
) -> Path:
    """
    Get latest hyperopt export based on '.last_result.json'.
    :param directory: Directory to search for last result
@@ -107,7 +131,8 @@ def get_latest_hyperopt_file(
    if predef_filename:
        if Path(predef_filename).is_absolute():
            raise ConfigurationError(
                "--hyperopt-filename expects only the filename, not an absolute path.")
                "--hyperopt-filename expects only the filename, not an absolute path."
            )
        return directory / predef_filename
    return directory / get_latest_hyperopt_filename(directory)

@@ -126,7 +151,7 @@ def load_backtest_metadata(filename: Union[Path, str]) -> Dict[str, Any]:
    except FileNotFoundError:
        return {}
    except Exception as e:
        raise OperationalException('Unexpected error while loading backtest metadata.') from e
        raise OperationalException("Unexpected error while loading backtest metadata.") from e


def load_backtest_stats(filename: Union[Path, str]) -> BacktestResultType:
@@ -147,7 +172,7 @@ def load_backtest_stats(filename: Union[Path, str]) -> BacktestResultType:

    # Legacy list format does not contain metadata.
    if isinstance(data, dict):
        data['metadata'] = load_backtest_metadata(filename)
        data["metadata"] = load_backtest_metadata(filename)
    return data

@@ -159,38 +184,39 @@ def load_and_merge_backtest_result(strategy_name: str, filename: Path, results:
    :param results: dict to merge the result to.
    """
    bt_data = load_backtest_stats(filename)
    k: Literal['metadata', 'strategy']
    for k in ('metadata', 'strategy'):  # type: ignore
    k: Literal["metadata", "strategy"]
    for k in ("metadata", "strategy"):  # type: ignore
        results[k][strategy_name] = bt_data[k][strategy_name]
    results['metadata'][strategy_name]['filename'] = filename.stem
    comparison = bt_data['strategy_comparison']
    results["metadata"][strategy_name]["filename"] = filename.stem
    comparison = bt_data["strategy_comparison"]
    for i in range(len(comparison)):
        if comparison[i]['key'] == strategy_name:
            results['strategy_comparison'].append(comparison[i])
        if comparison[i]["key"] == strategy_name:
            results["strategy_comparison"].append(comparison[i])
            break


def _get_backtest_files(dirname: Path) -> List[Path]:
    # Weird glob expression here avoids including .meta.json files.
    return list(reversed(sorted(dirname.glob('backtest-result-*-[0-9][0-9].json'))))
    return list(reversed(sorted(dirname.glob("backtest-result-*-[0-9][0-9].json"))))


def _extract_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
    metadata = load_backtest_metadata(filename)
    return [
        {
            'filename': filename.stem,
            'strategy': s,
            'run_id': v['run_id'],
            'notes': v.get('notes', ''),
            "filename": filename.stem,
            "strategy": s,
            "run_id": v["run_id"],
            "notes": v.get("notes", ""),
            # Backtest "run" time
            'backtest_start_time': v['backtest_start_time'],
            "backtest_start_time": v["backtest_start_time"],
            # Backtest timerange
            'backtest_start_ts': v.get('backtest_start_ts', None),
            'backtest_end_ts': v.get('backtest_end_ts', None),
            'timeframe': v.get('timeframe', None),
            'timeframe_detail': v.get('timeframe_detail', None),
        } for s, v in metadata.items()
            "backtest_start_ts": v.get("backtest_start_ts", None),
            "backtest_end_ts": v.get("backtest_end_ts", None),
            "timeframe": v.get("timeframe", None),
            "timeframe_detail": v.get("timeframe_detail", None),
        }
        for s, v in metadata.items()
    ]

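The "weird glob" above works because the trailing [0-9][0-9].json requires two digits directly before the suffix, which the .meta.json siblings never have. A small self-contained check with hypothetical filenames:

from fnmatch import fnmatch

pattern = "backtest-result-*-[0-9][0-9].json"
print(fnmatch("backtest-result-2024-05-01_10-30-00.json", pattern))       # True
print(fnmatch("backtest-result-2024-05-01_10-30-00.meta.json", pattern))  # False
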
@@ -218,7 +244,7 @@ def delete_backtest_result(file_abs: Path):
    """
    # *.meta.json
    logger.info(f"Deleting backtest result file: {file_abs.name}")
    file_abs_meta = file_abs.with_suffix('.meta.json')
    file_abs_meta = file_abs.with_suffix(".meta.json")
    file_abs.unlink()
    file_abs_meta.unlink()

@@ -244,12 +270,13 @@ def get_backtest_market_change(filename: Path, include_ts: bool = True) -> pd.Da
    """
    df = pd.read_feather(filename)
    if include_ts:
        df.loc[:, '__date_ts'] = df.loc[:, 'date'].astype(np.int64) // 1000 // 1000
        df.loc[:, "__date_ts"] = df.loc[:, "date"].astype(np.int64) // 1000 // 1000
    return df


def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, str],
                                 min_backtest_date: Optional[datetime] = None) -> Dict[str, Any]:
def find_existing_backtest_stats(
    dirname: Union[Path, str], run_ids: Dict[str, str], min_backtest_date: Optional[datetime] = None
) -> Dict[str, Any]:
    """
    Find existing backtest stats that match specified run IDs and load them.
    :param dirname: pathlib.Path object, or string pointing to the file.
@@ -261,9 +288,9 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s
    run_ids = copy(run_ids)
    dirname = Path(dirname)
    results: Dict[str, Any] = {
        'metadata': {},
        'strategy': {},
        'strategy_comparison': [],
        "metadata": {},
        "strategy": {},
        "strategy_comparison": [],
    }

    for filename in _get_backtest_files(dirname):
@@ -280,14 +307,14 @@ def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, s
                continue

            if min_backtest_date is not None:
                backtest_date = strategy_metadata['backtest_start_time']
                backtest_date = strategy_metadata["backtest_start_time"]
                backtest_date = datetime.fromtimestamp(backtest_date, tz=timezone.utc)
                if backtest_date < min_backtest_date:
                    # Do not use a cached result for this strategy as first result is too old.
                    del run_ids[strategy_name]
                    continue

            if strategy_metadata['run_id'] == run_id:
            if strategy_metadata["run_id"] == run_id:
                del run_ids[strategy_name]
                load_and_merge_backtest_result(strategy_name, filename, results)

@@ -300,20 +327,20 @@ def _load_backtest_data_df_compatibility(df: pd.DataFrame) -> pd.DataFrame:
    """
    Compatibility support for older backtest data.
    """
    df['open_date'] = pd.to_datetime(df['open_date'], utc=True)
    df['close_date'] = pd.to_datetime(df['close_date'], utc=True)
    df["open_date"] = pd.to_datetime(df["open_date"], utc=True)
    df["close_date"] = pd.to_datetime(df["close_date"], utc=True)
    # Compatibility support for pre short Columns
    if 'is_short' not in df.columns:
        df['is_short'] = False
    if 'leverage' not in df.columns:
        df['leverage'] = 1.0
    if 'enter_tag' not in df.columns:
        df['enter_tag'] = df['buy_tag']
        df = df.drop(['buy_tag'], axis=1)
    if 'max_stake_amount' not in df.columns:
        df['max_stake_amount'] = df['stake_amount']
    if 'orders' not in df.columns:
        df['orders'] = None
    if "is_short" not in df.columns:
        df["is_short"] = False
    if "leverage" not in df.columns:
        df["leverage"] = 1.0
    if "enter_tag" not in df.columns:
        df["enter_tag"] = df["buy_tag"]
        df = df.drop(["buy_tag"], axis=1)
    if "max_stake_amount" not in df.columns:
        df["max_stake_amount"] = df["stake_amount"]
    if "orders" not in df.columns:
        df["orders"] = None
    return df

@@ -329,23 +356,25 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = Non
    data = load_backtest_stats(filename)
    if not isinstance(data, list):
        # new, nested format
        if 'strategy' not in data:
        if "strategy" not in data:
            raise ValueError("Unknown dataformat.")

        if not strategy:
            if len(data['strategy']) == 1:
                strategy = list(data['strategy'].keys())[0]
            if len(data["strategy"]) == 1:
                strategy = list(data["strategy"].keys())[0]
            else:
                raise ValueError("Detected backtest result with more than one strategy. "
                                 "Please specify a strategy.")
                raise ValueError(
                    "Detected backtest result with more than one strategy. "
                    "Please specify a strategy."
                )

        if strategy not in data['strategy']:
        if strategy not in data["strategy"]:
            raise ValueError(
                f"Strategy {strategy} not available in the backtest result. "
                f"Available strategies are '{','.join(data['strategy'].keys())}'"
                )
            )

        data = data['strategy'][strategy]['trades']
        data = data["strategy"][strategy]["trades"]
        df = pd.DataFrame(data)
        if not df.empty:
            df = _load_backtest_data_df_compatibility(df)
@@ -353,7 +382,8 @@ def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = Non
    else:
        # old format - only with lists.
        raise OperationalException(
            "Backtest-results with only trades data are no longer supported.")
            "Backtest-results with only trades data are no longer supported."
        )
    if not df.empty:
        df = df.sort_values("open_date").reset_index(drop=True)
    return df
@@ -368,23 +398,26 @@ def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataF
    :return: dataframe with open-counts per time-period in timeframe
    """
    from freqtrade.exchange import timeframe_to_resample_freq

    timeframe_freq = timeframe_to_resample_freq(timeframe)
    dates = [pd.Series(pd.date_range(row[1]['open_date'], row[1]['close_date'],
                                     freq=timeframe_freq))
             for row in results[['open_date', 'close_date']].iterrows()]
    dates = [
        pd.Series(pd.date_range(row[1]["open_date"], row[1]["close_date"], freq=timeframe_freq))
        for row in results[["open_date", "close_date"]].iterrows()
    ]
    deltas = [len(x) for x in dates]
    dates = pd.Series(pd.concat(dates).values, name='date')
    dates = pd.Series(pd.concat(dates).values, name="date")
    df2 = pd.DataFrame(np.repeat(results.values, deltas, axis=0), columns=results.columns)

    df2 = pd.concat([dates, df2], axis=1)
    df2 = df2.set_index('date')
    df_final = df2.resample(timeframe_freq)[['pair']].count()
    df_final = df_final.rename({'pair': 'open_trades'}, axis=1)
    df2 = df2.set_index("date")
    df_final = df2.resample(timeframe_freq)[["pair"]].count()
    df_final = df_final.rename({"pair": "open_trades"}, axis=1)
    return df_final


def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
                          max_open_trades: IntOrInf) -> pd.DataFrame:
def evaluate_result_multi(
    results: pd.DataFrame, timeframe: str, max_open_trades: IntOrInf
) -> pd.DataFrame:
    """
    Find overlapping trades by expanding each trade once per period it was open
    and then counting overlaps
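The expansion trick in analyze_trade_parallelism is easier to see on a toy frame: each trade becomes one row per candle it was open, and a resample then counts rows per candle. A sketch with invented trades:

import numpy as np
import pandas as pd

trades = pd.DataFrame({
    "open_date": pd.to_datetime(["2024-01-01 00:00", "2024-01-01 01:00"], utc=True),
    "close_date": pd.to_datetime(["2024-01-01 02:00", "2024-01-01 03:00"], utc=True),
    "pair": ["BTC/USDT", "ETH/USDT"],
})
# One timestamp per 1h candle each trade was open
dates = [pd.Series(pd.date_range(r["open_date"], r["close_date"], freq="1h"))
         for _, r in trades.iterrows()]
deltas = [len(x) for x in dates]
expanded = pd.DataFrame(np.repeat(trades.values, deltas, axis=0), columns=trades.columns)
expanded = pd.concat([pd.Series(pd.concat(dates).values, name="date"), expanded], axis=1)
print(expanded.set_index("date").resample("1h")[["pair"]].count())  # 01:00 and 02:00 show 2 open trades
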
@@ -394,7 +427,7 @@ def evaluate_result_multi(results: pd.DataFrame, timeframe: str,
    :return: dataframe with open-counts per time-period in freq
    """
    df_final = analyze_trade_parallelism(results, timeframe)
    return df_final[df_final['open_trades'] > max_open_trades]
    return df_final[df_final["open_trades"] > max_open_trades]


def trade_list_to_dataframe(trades: Union[List[Trade], List[LocalTrade]]) -> pd.DataFrame:
@@ -405,9 +438,9 @@ def trade_list_to_dataframe(trades: Union[List[Trade], List[LocalTrade]]) -> pd.
    """
    df = pd.DataFrame.from_records([t.to_json(True) for t in trades], columns=BT_DATA_COLUMNS)
    if len(df) > 0:
        df['close_date'] = pd.to_datetime(df['close_date'], utc=True)
        df['open_date'] = pd.to_datetime(df['open_date'], utc=True)
        df['close_rate'] = df['close_rate'].astype('float64')
        df["close_date"] = pd.to_datetime(df["close_date"], utc=True)
        df["open_date"] = pd.to_datetime(df["open_date"], utc=True)
        df["close_rate"] = df["close_rate"].astype("float64")
    return df

@@ -429,8 +462,13 @@ def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataF
    return trades


def load_trades(source: str, db_url: str, exportfilename: Path,
                no_trades: bool = False, strategy: Optional[str] = None) -> pd.DataFrame:
def load_trades(
    source: str,
    db_url: str,
    exportfilename: Path,
    no_trades: bool = False,
    strategy: Optional[str] = None,
) -> pd.DataFrame:
    """
    Based on configuration option 'trade_source':
    * loads data from DB (using `db_url`)
@@ -451,8 +489,9 @@ def load_trades(source: str, db_url: str, exportfilename: Path,
    return load_backtest_data(exportfilename, strategy)


def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame,
                             date_index=False) -> pd.DataFrame:
def extract_trades_of_period(
    dataframe: pd.DataFrame, trades: pd.DataFrame, date_index=False
) -> pd.DataFrame:
    """
    Compare trades and backtested pair DataFrames to get trades performed on backtested period
    :return: the DataFrame of a trades of period
@@ -461,8 +500,9 @@ def extract_trades_of_period(dataframe: pd.DataFrame, trades: pd.DataFrame,
        trades_start = dataframe.index[0]
        trades_stop = dataframe.index[-1]
    else:
        trades_start = dataframe.iloc[0]['date']
        trades_stop = dataframe.iloc[-1]['date']
    trades = trades.loc[(trades['open_date'] >= trades_start) &
                        (trades['close_date'] <= trades_stop)]
        trades_start = dataframe.iloc[0]["date"]
        trades_stop = dataframe.iloc[-1]["date"]
    trades = trades.loc[
        (trades["open_date"] >= trades_start) & (trades["close_date"] <= trades_stop)
    ]
    return trades

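The boolean mask above keeps only trades opened and closed inside the window; a tiny illustration with invented dates:

import pandas as pd

trades = pd.DataFrame({
    "open_date": pd.to_datetime(["2024-01-02", "2024-03-01"], utc=True),
    "close_date": pd.to_datetime(["2024-01-05", "2024-03-05"], utc=True),
})
start = pd.Timestamp("2024-01-01", tz="UTC")
stop = pd.Timestamp("2024-02-01", tz="UTC")
print(trades.loc[(trades["open_date"] >= start) & (trades["close_date"] <= stop)])  # March trade drops out
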
@@ -1,28 +1,38 @@
from freqtrade.data.converter.converter import (clean_ohlcv_dataframe, convert_ohlcv_format,
                                                ohlcv_fill_up_missing_data, ohlcv_to_dataframe,
                                                order_book_to_dataframe, reduce_dataframe_footprint,
                                                trim_dataframe, trim_dataframes)
from freqtrade.data.converter.trade_converter import (convert_trades_format,
                                                      convert_trades_to_ohlcv, trades_convert_types,
                                                      trades_df_remove_duplicates,
                                                      trades_dict_to_list, trades_list_to_df,
                                                      trades_to_ohlcv)
from freqtrade.data.converter.converter import (
    clean_ohlcv_dataframe,
    convert_ohlcv_format,
    ohlcv_fill_up_missing_data,
    ohlcv_to_dataframe,
    order_book_to_dataframe,
    reduce_dataframe_footprint,
    trim_dataframe,
    trim_dataframes,
)
from freqtrade.data.converter.trade_converter import (
    convert_trades_format,
    convert_trades_to_ohlcv,
    trades_convert_types,
    trades_df_remove_duplicates,
    trades_dict_to_list,
    trades_list_to_df,
    trades_to_ohlcv,
)


__all__ = [
    'clean_ohlcv_dataframe',
    'convert_ohlcv_format',
    'ohlcv_fill_up_missing_data',
    'ohlcv_to_dataframe',
    'order_book_to_dataframe',
    'reduce_dataframe_footprint',
    'trim_dataframe',
    'trim_dataframes',
    'convert_trades_format',
    'convert_trades_to_ohlcv',
    'trades_convert_types',
    'trades_df_remove_duplicates',
    'trades_dict_to_list',
    'trades_list_to_df',
    'trades_to_ohlcv',
    "clean_ohlcv_dataframe",
    "convert_ohlcv_format",
    "ohlcv_fill_up_missing_data",
    "ohlcv_to_dataframe",
    "order_book_to_dataframe",
    "reduce_dataframe_footprint",
    "trim_dataframe",
    "trim_dataframes",
    "convert_trades_format",
    "convert_trades_to_ohlcv",
    "trades_convert_types",
    "trades_df_remove_duplicates",
    "trades_dict_to_list",
    "trades_list_to_df",
    "trades_to_ohlcv",
]

@@ -1,6 +1,7 @@
"""
Functions to convert data from one format to another
"""

import logging
from typing import Dict
@@ -15,8 +16,14 @@ from freqtrade.enums import CandleType, TradingMode
logger = logging.getLogger(__name__)


def ohlcv_to_dataframe(ohlcv: list, timeframe: str, pair: str, *,
                       fill_missing: bool = True, drop_incomplete: bool = True) -> DataFrame:
def ohlcv_to_dataframe(
    ohlcv: list,
    timeframe: str,
    pair: str,
    *,
    fill_missing: bool = True,
    drop_incomplete: bool = True,
) -> DataFrame:
    """
    Converts a list with candle (OHLCV) data (in format returned by ccxt.fetch_ohlcv)
    to a Dataframe
@@ -32,20 +39,28 @@ def ohlcv_to_dataframe(ohlcv: list, timeframe: str, pair: str, *,
    cols = DEFAULT_DATAFRAME_COLUMNS
    df = DataFrame(ohlcv, columns=cols)

    df['date'] = to_datetime(df['date'], unit='ms', utc=True)
    df["date"] = to_datetime(df["date"], unit="ms", utc=True)

    # Some exchanges return int values for Volume and even for OHLC.
    # Convert them since TA-LIB indicators used in the strategy assume floats
    # and fail with exception...
    df = df.astype(dtype={'open': 'float', 'high': 'float', 'low': 'float', 'close': 'float',
                          'volume': 'float'})
    return clean_ohlcv_dataframe(df, timeframe, pair,
                                 fill_missing=fill_missing,
                                 drop_incomplete=drop_incomplete)
    df = df.astype(
        dtype={
            "open": "float",
            "high": "float",
            "low": "float",
            "close": "float",
            "volume": "float",
        }
    )
    return clean_ohlcv_dataframe(
        df, timeframe, pair, fill_missing=fill_missing, drop_incomplete=drop_incomplete
    )

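For reference, a ccxt-style fetch_ohlcv row is [ms-timestamp, open, high, low, close, volume]; a standalone sketch of the conversion performed above (sample row invented):

import pandas as pd

ohlcv = [[1704067200000, 42000, 42500, 41800, 42300, 123]]
df = pd.DataFrame(ohlcv, columns=["date", "open", "high", "low", "close", "volume"])
df["date"] = pd.to_datetime(df["date"], unit="ms", utc=True)
df = df.astype({c: "float" for c in ["open", "high", "low", "close", "volume"]})
print(df.dtypes)  # date becomes datetime64[ns, UTC], everything else float64
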
def clean_ohlcv_dataframe(data: DataFrame, timeframe: str, pair: str, *,
                          fill_missing: bool, drop_incomplete: bool) -> DataFrame:
def clean_ohlcv_dataframe(
    data: DataFrame, timeframe: str, pair: str, *, fill_missing: bool, drop_incomplete: bool
) -> DataFrame:
    """
    Cleanse a OHLCV dataframe by
      * Grouping it by date (removes duplicate tics)
@@ -60,17 +75,19 @@ def clean_ohlcv_dataframe(data: DataFrame, timeframe: str, pair: str, *,
    :return: DataFrame
    """
    # group by index and aggregate results to eliminate duplicate ticks
    data = data.groupby(by='date', as_index=False, sort=True).agg({
        'open': 'first',
        'high': 'max',
        'low': 'min',
        'close': 'last',
        'volume': 'max',
    })
    data = data.groupby(by="date", as_index=False, sort=True).agg(
        {
            "open": "first",
            "high": "max",
            "low": "min",
            "close": "last",
            "volume": "max",
        }
    )
    # eliminate partial candle
    if drop_incomplete:
        data.drop(data.tail(1).index, inplace=True)
        logger.debug('Dropping last candle')
        logger.debug("Dropping last candle")

    if fill_missing:
        return ohlcv_fill_up_missing_data(data, timeframe, pair)
@@ -81,37 +98,35 @@ def clean_ohlcv_dataframe(data: DataFrame, timeframe: str, pair: str, *,
def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str) -> DataFrame:
    """
    Fills up missing data with 0 volume rows,
    using the previous close as price for "open", "high" "low" and "close", volume is set to 0
    using the previous close as price for "open", "high", "low" and "close", volume is set to 0

    """
    from freqtrade.exchange import timeframe_to_resample_freq

    ohlcv_dict = {
        'open': 'first',
        'high': 'max',
        'low': 'min',
        'close': 'last',
        'volume': 'sum'
    }
    ohlcv_dict = {"open": "first", "high": "max", "low": "min", "close": "last", "volume": "sum"}
    resample_interval = timeframe_to_resample_freq(timeframe)
    # Resample to create "NAN" values
    df = dataframe.resample(resample_interval, on='date').agg(ohlcv_dict)
    df = dataframe.resample(resample_interval, on="date").agg(ohlcv_dict)

    # Forwardfill close for missing columns
    df['close'] = df['close'].ffill()
    df["close"] = df["close"].ffill()
    # Use close for "open, high, low"
    df.loc[:, ['open', 'high', 'low']] = df[['open', 'high', 'low']].fillna(
        value={'open': df['close'],
               'high': df['close'],
               'low': df['close'],
               })
    df.loc[:, ["open", "high", "low"]] = df[["open", "high", "low"]].fillna(
        value={
            "open": df["close"],
            "high": df["close"],
            "low": df["close"],
        }
    )
    df.reset_index(inplace=True)
    len_before = len(dataframe)
    len_after = len(df)
    pct_missing = (len_after - len_before) / len_before if len_before > 0 else 0
    if len_before != len_after:
        message = (f"Missing data fillup for {pair}, {timeframe}: "
                   f"before: {len_before} - after: {len_after} - {pct_missing:.2%}")
        message = (
            f"Missing data fillup for {pair}, {timeframe}: "
            f"before: {len_before} - after: {len_after} - {pct_missing:.2%}"
        )
        if pct_missing > 0.01:
            logger.info(message)
        else:
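A compact sketch of the fill-up logic on invented data - resampling creates the missing bucket, close is forward-filled, and open/high/low reuse that close:

import pandas as pd

df = pd.DataFrame({
    "date": pd.to_datetime(["2024-01-01 00:00", "2024-01-01 02:00"], utc=True),
    "open": [1.0, 3.0], "high": [2.0, 4.0], "low": [0.5, 2.5],
    "close": [1.5, 3.5], "volume": [10.0, 20.0],
})
agg = {"open": "first", "high": "max", "low": "min", "close": "last", "volume": "sum"}
filled = df.resample("1h", on="date").agg(agg)
filled["close"] = filled["close"].ffill()
for col in ("open", "high", "low"):
    filled[col] = filled[col].fillna(filled["close"])
print(filled.loc["2024-01-01 01:00"])  # OHLC all 1.5, volume 0.0
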
@@ -120,8 +135,9 @@ def ohlcv_fill_up_missing_data(dataframe: DataFrame, timeframe: str, pair: str)
    return df


def trim_dataframe(df: DataFrame, timerange, *, df_date_col: str = 'date',
                   startup_candles: int = 0) -> DataFrame:
def trim_dataframe(
    df: DataFrame, timerange, *, df_date_col: str = "date", startup_candles: int = 0
) -> DataFrame:
    """
    Trim dataframe based on given timerange
    :param df: Dataframe to trim
@@ -134,15 +150,16 @@ def trim_dataframe(df: DataFrame, timerange, *, df_date_col: str = 'date',
        # Trim candles instead of timeframe in case of given startup_candle count
        df = df.iloc[startup_candles:, :]
    else:
        if timerange.starttype == 'date':
        if timerange.starttype == "date":
            df = df.loc[df[df_date_col] >= timerange.startdt, :]
    if timerange.stoptype == 'date':
    if timerange.stoptype == "date":
        df = df.loc[df[df_date_col] <= timerange.stopdt, :]
    return df


def trim_dataframes(preprocessed: Dict[str, DataFrame], timerange,
                    startup_candles: int) -> Dict[str, DataFrame]:
def trim_dataframes(
    preprocessed: Dict[str, DataFrame], timerange, startup_candles: int
) -> Dict[str, DataFrame]:
    """
    Trim startup period from analyzed dataframes
    :param preprocessed: Dict of pair: dataframe
@@ -157,8 +174,9 @@ def trim_dataframes(preprocessed: Dict[str, DataFrame], timerange,
        if not trimed_df.empty:
            processed[pair] = trimed_df
        else:
            logger.warning(f'{pair} has no data left after adjusting for startup candles, '
                           f'skipping.')
            logger.warning(
                f"{pair} has no data left after adjusting for startup candles, skipping."
            )
    return processed

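A short usage sketch for trim_dataframe (assuming freqtrade is installed; data invented):

import pandas as pd
from freqtrade.configuration import TimeRange
from freqtrade.data.converter import trim_dataframe

df = pd.DataFrame({"date": pd.date_range("2023-12-25", periods=60, freq="1d", tz="UTC"),
                   "close": range(60)})
timerange = TimeRange.parse_timerange("20240101-20240201")
print(len(trim_dataframe(df, timerange)))  # only rows between Jan 1 and Feb 1 remain
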
@@ -170,19 +188,28 @@ def order_book_to_dataframe(bids: list, asks: list) -> DataFrame:
     b_sum       b_size       bids       asks       a_size       a_sum
    -------------------------------------------------------------------
    """
    cols = ['bids', 'b_size']
    cols = ["bids", "b_size"]

    bids_frame = DataFrame(bids, columns=cols)
    # add cumulative sum column
    bids_frame['b_sum'] = bids_frame['b_size'].cumsum()
    cols2 = ['asks', 'a_size']
    bids_frame["b_sum"] = bids_frame["b_size"].cumsum()
    cols2 = ["asks", "a_size"]
    asks_frame = DataFrame(asks, columns=cols2)
    # add cumulative sum column
    asks_frame['a_sum'] = asks_frame['a_size'].cumsum()
    asks_frame["a_sum"] = asks_frame["a_size"].cumsum()

    frame = pd.concat([bids_frame['b_sum'], bids_frame['b_size'], bids_frame['bids'],
                       asks_frame['asks'], asks_frame['a_size'], asks_frame['a_sum']], axis=1,
                      keys=['b_sum', 'b_size', 'bids', 'asks', 'a_size', 'a_sum'])
    frame = pd.concat(
        [
            bids_frame["b_sum"],
            bids_frame["b_size"],
            bids_frame["bids"],
            asks_frame["asks"],
            asks_frame["a_size"],
            asks_frame["a_sum"],
        ],
        axis=1,
        keys=["b_sum", "b_size", "bids", "asks", "a_size", "a_sum"],
    )
    # logger.info('order book %s', frame )
    return frame

@@ -201,47 +228,51 @@ def convert_ohlcv_format(
    :param erase: Erase source data (does not apply if source and target format are identical)
    """
    from freqtrade.data.history import get_datahandler
    src = get_datahandler(config['datadir'], convert_from)
    trg = get_datahandler(config['datadir'], convert_to)
    timeframes = config.get('timeframes', [config.get('timeframe')])

    src = get_datahandler(config["datadir"], convert_from)
    trg = get_datahandler(config["datadir"], convert_to)
    timeframes = config.get("timeframes", [config.get("timeframe")])
    logger.info(f"Converting candle (OHLCV) for timeframe {timeframes}")

    candle_types = [CandleType.from_string(ct) for ct in config.get('candle_types', [
        c.value for c in CandleType])]
    candle_types = [
        CandleType.from_string(ct)
        for ct in config.get("candle_types", [c.value for c in CandleType])
    ]
    logger.info(candle_types)
    paircombs = src.ohlcv_get_available_data(config['datadir'], TradingMode.SPOT)
    paircombs.extend(src.ohlcv_get_available_data(config['datadir'], TradingMode.FUTURES))
    paircombs = src.ohlcv_get_available_data(config["datadir"], TradingMode.SPOT)
    paircombs.extend(src.ohlcv_get_available_data(config["datadir"], TradingMode.FUTURES))

    if 'pairs' in config:
    if "pairs" in config:
        # Filter pairs
        paircombs = [comb for comb in paircombs if comb[0] in config['pairs']]
        paircombs = [comb for comb in paircombs if comb[0] in config["pairs"]]

    if 'timeframes' in config:
        paircombs = [comb for comb in paircombs if comb[1] in config['timeframes']]
    if "timeframes" in config:
        paircombs = [comb for comb in paircombs if comb[1] in config["timeframes"]]
    paircombs = [comb for comb in paircombs if comb[2] in candle_types]

    paircombs = sorted(paircombs, key=lambda x: (x[0], x[1], x[2].value))

    formatted_paircombs = '\n'.join([f"{pair}, {timeframe}, {candle_type}"
                                     for pair, timeframe, candle_type in paircombs])
    formatted_paircombs = "\n".join(
        [f"{pair}, {timeframe}, {candle_type}" for pair, timeframe, candle_type in paircombs]
    )

    logger.info(f"Converting candle (OHLCV) data for the following pair combinations:\n"
                f"{formatted_paircombs}")
    logger.info(
        f"Converting candle (OHLCV) data for the following pair combinations:\n"
        f"{formatted_paircombs}"
    )
    for pair, timeframe, candle_type in paircombs:
        data = src.ohlcv_load(pair=pair, timeframe=timeframe,
                              timerange=None,
                              fill_missing=False,
                              drop_incomplete=False,
                              startup_candles=0,
                              candle_type=candle_type)
        data = src.ohlcv_load(
            pair=pair,
            timeframe=timeframe,
            timerange=None,
            fill_missing=False,
            drop_incomplete=False,
            startup_candles=0,
            candle_type=candle_type,
        )
        logger.info(f"Converting {len(data)} {timeframe} {candle_type} candles for {pair}")
        if len(data) > 0:
            trg.ohlcv_store(
                pair=pair,
                timeframe=timeframe,
                data=data,
                candle_type=candle_type
            )
            trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data, candle_type=candle_type)
            if erase and convert_from != convert_to:
                logger.info(f"Deleting source data for {pair} / {timeframe}")
                src.ohlcv_purge(pair=pair, timeframe=timeframe, candle_type=candle_type)
@@ -254,12 +285,11 @@ def reduce_dataframe_footprint(df: DataFrame) -> DataFrame:
    :return: Dataframe converted to float/int 32s
    """

    logger.debug(f"Memory usage of dataframe is "
                 f"{df.memory_usage().sum() / 1024**2:.2f} MB")
    logger.debug(f"Memory usage of dataframe is {df.memory_usage().sum() / 1024**2:.2f} MB")

    df_dtypes = df.dtypes
    for column, dtype in df_dtypes.items():
        if column in ['open', 'high', 'low', 'close', 'volume']:
        if column in ["open", "high", "low", "close", "volume"]:
            continue
        if dtype == np.float64:
            df_dtypes[column] = np.float32
@@ -267,7 +297,6 @@ def reduce_dataframe_footprint(df: DataFrame) -> DataFrame:
            df_dtypes[column] = np.int32
    df = df.astype(df_dtypes)

    logger.debug(f"Memory usage after optimization is: "
                 f"{df.memory_usage().sum() / 1024**2:.2f} MB")
    logger.debug(f"Memory usage after optimization is: {df.memory_usage().sum() / 1024**2:.2f} MB")

    return df

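The saving comes purely from dtype width; a sketch of the same downcast on a synthetic column:

import numpy as np
import pandas as pd

df = pd.DataFrame({"close": np.random.rand(1000), "feature": np.random.rand(1000)})
before = df.memory_usage().sum()
df["feature"] = df["feature"].astype(np.float32)  # OHLCV columns stay float64, as above
print(before, "->", df.memory_usage().sum())  # roughly 4 bytes per row saved
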
@@ -1,6 +1,7 @@
"""
Functions to convert data from one format to another
"""

import logging
from pathlib import Path
from typing import Dict, List
@@ -9,8 +10,13 @@ import pandas as pd
from pandas import DataFrame, to_datetime

from freqtrade.configuration import TimeRange
from freqtrade.constants import (DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS, TRADES_DTYPES,
                                 Config, TradeList)
from freqtrade.constants import (
    DEFAULT_DATAFRAME_COLUMNS,
    DEFAULT_TRADES_COLUMNS,
    TRADES_DTYPES,
    Config,
    TradeList,
)
from freqtrade.enums import CandleType, TradingMode
from freqtrade.exceptions import OperationalException

@@ -25,7 +31,7 @@ def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
    :param trades: DataFrame with the columns constants.DEFAULT_TRADES_COLUMNS
    :return: DataFrame with duplicates removed based on the 'timestamp' column
    """
    return trades.drop_duplicates(subset=['timestamp', 'id'])
    return trades.drop_duplicates(subset=["timestamp", "id"])


def trades_dict_to_list(trades: List[Dict]) -> TradeList:
@@ -42,7 +48,7 @@ def trades_convert_types(trades: DataFrame) -> DataFrame:
    Convert Trades dtypes and add 'date' column
    """
    trades = trades.astype(TRADES_DTYPES)
    trades['date'] = to_datetime(trades['timestamp'], unit='ms', utc=True)
    trades["date"] = to_datetime(trades["timestamp"], unit="ms", utc=True)
    return trades

@@ -71,13 +77,14 @@ def trades_to_ohlcv(trades: DataFrame, timeframe: str) -> DataFrame:
    :raises: ValueError if no trades are provided
    """
    from freqtrade.exchange import timeframe_to_resample_freq

    if trades.empty:
        raise ValueError('Trade-list empty.')
    df = trades.set_index('date', drop=True)
        raise ValueError("Trade-list empty.")
    df = trades.set_index("date", drop=True)
    resample_interval = timeframe_to_resample_freq(timeframe)
    df_new = df['price'].resample(resample_interval).ohlc()
    df_new['volume'] = df['amount'].resample(resample_interval).sum()
    df_new['date'] = df_new.index
    df_new = df["price"].resample(resample_interval).ohlc()
    df_new["volume"] = df["amount"].resample(resample_interval).sum()
    df_new["date"] = df_new.index
    # Drop 0 volume rows
    df_new = df_new.dropna()
    return df_new.loc[:, DEFAULT_DATAFRAME_COLUMNS]
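pandas' .ohlc() resampler does the heavy lifting here; a standalone sketch with three invented trades:

import pandas as pd

trades = pd.DataFrame({
    "date": pd.to_datetime(["2024-01-01 00:00:10", "2024-01-01 00:00:50",
                            "2024-01-01 00:01:30"], utc=True),
    "price": [100.0, 101.0, 99.5],
    "amount": [1.0, 2.0, 0.5],
})
candles = trades.set_index("date")["price"].resample("1min").ohlc()
candles["volume"] = trades.set_index("date")["amount"].resample("1min").sum()
print(candles)  # 00:00 -> o=100 h=101 l=100 c=101 v=3.0; 00:01 -> flat 99.5, v=0.5
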
@@ -97,24 +104,27 @@ def convert_trades_to_ohlcv(
    Convert stored trades data to ohlcv data
    """
    from freqtrade.data.history import get_datahandler

    data_handler_trades = get_datahandler(datadir, data_format=data_format_trades)
    data_handler_ohlcv = get_datahandler(datadir, data_format=data_format_ohlcv)

    logger.info(f"About to convert pairs: '{', '.join(pairs)}', "
                f"intervals: '{', '.join(timeframes)}' to {datadir}")
    logger.info(
        f"About to convert pairs: '{', '.join(pairs)}', "
        f"intervals: '{', '.join(timeframes)}' to {datadir}"
    )
    trading_mode = TradingMode.FUTURES if candle_type != CandleType.SPOT else TradingMode.SPOT
    for pair in pairs:
        trades = data_handler_trades.trades_load(pair, trading_mode)
        for timeframe in timeframes:
            if erase:
                if data_handler_ohlcv.ohlcv_purge(pair, timeframe, candle_type=candle_type):
                    logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
                    logger.info(f"Deleting existing data for pair {pair}, interval {timeframe}.")
            try:
                ohlcv = trades_to_ohlcv(trades, timeframe)
                # Store ohlcv
                data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv, candle_type=candle_type)
            except ValueError:
                logger.warning(f'Could not convert {pair} to OHLCV.')
                logger.warning(f"Could not convert {pair} to OHLCV.")


def convert_trades_format(config: Config, convert_from: str, convert_to: str, erase: bool):
@@ -125,25 +135,27 @@ def convert_trades_format(config: Config, convert_from: str, convert_to: str, er
    :param convert_to: Target format
    :param erase: Erase source data (does not apply if source and target format are identical)
    """
    if convert_from == 'kraken_csv':
        if config['exchange']['name'] != 'kraken':
    if convert_from == "kraken_csv":
        if config["exchange"]["name"] != "kraken":
            raise OperationalException(
                'Converting from csv is only supported for kraken.'
                'Please refer to the documentation for details about this special mode.'
                "Converting from csv is only supported for kraken."
                "Please refer to the documentation for details about this special mode."
            )
        from freqtrade.data.converter.trade_converter_kraken import import_kraken_trades_from_csv

        import_kraken_trades_from_csv(config, convert_to)
        return

    from freqtrade.data.history import get_datahandler
    src = get_datahandler(config['datadir'], convert_from)
    trg = get_datahandler(config['datadir'], convert_to)

    if 'pairs' not in config:
        config['pairs'] = src.trades_get_pairs(config['datadir'])
    src = get_datahandler(config["datadir"], convert_from)
    trg = get_datahandler(config["datadir"], convert_to)

    if "pairs" not in config:
        config["pairs"] = src.trades_get_pairs(config["datadir"])
    logger.info(f"Converting trades for {config['pairs']}")
    trading_mode: TradingMode = config.get('trading_mode', TradingMode.SPOT)
    for pair in config['pairs']:
    trading_mode: TradingMode = config.get("trading_mode", TradingMode.SPOT)
    for pair in config["pairs"]:
        data = src.trades_load(pair, trading_mode)
        logger.info(f"Converting {len(data)} trades for {pair}")
        trg.trades_store(pair, data, trading_mode)

@@ -4,8 +4,10 @@ from pathlib import Path
import pandas as pd

from freqtrade.constants import DATETIME_PRINT_FORMAT, DEFAULT_TRADES_COLUMNS, Config
from freqtrade.data.converter.trade_converter import (trades_convert_types,
                                                      trades_df_remove_duplicates)
from freqtrade.data.converter.trade_converter import (
    trades_convert_types,
    trades_df_remove_duplicates,
)
from freqtrade.data.history import get_datahandler
from freqtrade.enums import TradingMode
from freqtrade.exceptions import OperationalException
@@ -15,32 +17,33 @@ from freqtrade.resolvers import ExchangeResolver

logger = logging.getLogger(__name__)

KRAKEN_CSV_TRADE_COLUMNS = ['timestamp', 'price', 'amount']
KRAKEN_CSV_TRADE_COLUMNS = ["timestamp", "price", "amount"]


def import_kraken_trades_from_csv(config: Config, convert_to: str):
    """
    Import kraken trades from csv
    """
    if config['exchange']['name'] != 'kraken':
        raise OperationalException('This function is only for the kraken exchange.')
    if config["exchange"]["name"] != "kraken":
        raise OperationalException("This function is only for the kraken exchange.")

    datadir: Path = config['datadir']
    datadir: Path = config["datadir"]
    data_handler = get_datahandler(datadir, data_format=convert_to)

    tradesdir: Path = config['datadir'] / 'trades_csv'
    tradesdir: Path = config["datadir"] / "trades_csv"
    exchange = ExchangeResolver.load_exchange(config, validate=False)
    # iterate through directories in this directory
    data_symbols = {p.stem for p in tradesdir.rglob('*.csv')}
    data_symbols = {p.stem for p in tradesdir.rglob("*.csv")}

    # create pair/filename mapping
    markets = {
        (m['symbol'], m['altname']) for m in exchange.markets.values()
        if m.get('altname') in data_symbols
        (m["symbol"], m["altname"])
        for m in exchange.markets.values()
        if m.get("altname") in data_symbols
    }
    logger.info(f"Found csv files for {', '.join(data_symbols)}.")

    if pairs_raw := config.get('pairs'):
    if pairs_raw := config.get("pairs"):
        pairs = expand_pairlist(pairs_raw, [m[0] for m in markets])
        markets = {m for m in markets if m[0] in pairs}
    if not markets:
@@ -66,18 +69,20 @@ def import_kraken_trades_from_csv(config: Config, convert_to: str):
        trades = pd.concat(dfs, ignore_index=True)
        del dfs

        trades.loc[:, 'timestamp'] = trades['timestamp'] * 1e3
        trades.loc[:, 'cost'] = trades['price'] * trades['amount']
        trades.loc[:, "timestamp"] = trades["timestamp"] * 1e3
        trades.loc[:, "cost"] = trades["price"] * trades["amount"]
        for col in DEFAULT_TRADES_COLUMNS:
            if col not in trades.columns:
                trades.loc[:, col] = ''
                trades.loc[:, col] = ""
        trades = trades[DEFAULT_TRADES_COLUMNS]
        trades = trades_convert_types(trades)

        trades_df = trades_df_remove_duplicates(trades)
        del trades
        logger.info(f"{pair}: {len(trades_df)} trades, from "
                    f"{trades_df['date'].min():{DATETIME_PRINT_FORMAT}} to "
                    f"{trades_df['date'].max():{DATETIME_PRINT_FORMAT}}")
        logger.info(
            f"{pair}: {len(trades_df)} trades, from "
            f"{trades_df['date'].min():{DATETIME_PRINT_FORMAT}} to "
            f"{trades_df['date'].max():{DATETIME_PRINT_FORMAT}}"
        )

        data_handler.trades_store(pair, trades_df, TradingMode.SPOT)

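The two assignments above imply the CSV carries second-resolution timestamps and no cost column; in isolation (values invented):

import pandas as pd

trades = pd.DataFrame({"timestamp": [1704067200], "price": [42000.0], "amount": [0.5]})
trades["timestamp"] = trades["timestamp"] * 1e3      # 1704067200000.0 - milliseconds
trades["cost"] = trades["price"] * trades["amount"]  # 21000.0
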
@@ -4,6 +4,7 @@ Responsible to provide data to the bot
including ticker and orderbook data, live and historical candle (OHLCV) data
Common Interface for bot and strategy to access data.
"""

import logging
from collections import deque
from datetime import datetime, timezone
@@ -12,8 +13,12 @@ from typing import Any, Dict, List, Optional, Tuple
from pandas import DataFrame, Timedelta, Timestamp, to_timedelta

from freqtrade.configuration import TimeRange
from freqtrade.constants import (FULL_DATAFRAME_THRESHOLD, Config, ListPairsWithTimeframes,
                                 PairWithTimeframe)
from freqtrade.constants import (
    FULL_DATAFRAME_THRESHOLD,
    Config,
    ListPairsWithTimeframes,
    PairWithTimeframe,
)
from freqtrade.data.history import load_pair_history
from freqtrade.enums import CandleType, RPCMessageType, RunMode
from freqtrade.exceptions import ExchangeError, OperationalException
@@ -27,18 +32,17 @@ from freqtrade.util import PeriodicCache

logger = logging.getLogger(__name__)

NO_EXCHANGE_EXCEPTION = 'Exchange is not available to DataProvider.'
NO_EXCHANGE_EXCEPTION = "Exchange is not available to DataProvider."
MAX_DATAFRAME_CANDLES = 1000


class DataProvider:

    def __init__(
        self,
        config: Config,
        exchange: Optional[Exchange],
        pairlists=None,
        rpc: Optional[RPCManager] = None
        rpc: Optional[RPCManager] = None,
    ) -> None:
        self._config = config
        self._exchange = exchange
@@ -49,18 +53,20 @@ class DataProvider:
        self.__slice_date: Optional[datetime] = None

        self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {}
        self.__producer_pairs_df: Dict[str,
                                       Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]] = {}
        self.__producer_pairs_df: Dict[
            str, Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]
        ] = {}
        self.__producer_pairs: Dict[str, List[str]] = {}
        self._msg_queue: deque = deque()

        self._default_candle_type = self._config.get('candle_type_def', CandleType.SPOT)
        self._default_timeframe = self._config.get('timeframe', '1h')
        self._default_candle_type = self._config.get("candle_type_def", CandleType.SPOT)
        self._default_timeframe = self._config.get("timeframe", "1h")

        self.__msg_cache = PeriodicCache(
            maxsize=1000, ttl=timeframe_to_seconds(self._default_timeframe))
            maxsize=1000, ttl=timeframe_to_seconds(self._default_timeframe)
        )

        self.producers = self._config.get('external_message_consumer', {}).get('producers', [])
        self.producers = self._config.get("external_message_consumer", {}).get("producers", [])
        self.external_data_enabled = len(self.producers) > 0

    def _set_dataframe_max_index(self, limit_index: int):
@@ -80,11 +86,7 @@ class DataProvider:
        self.__slice_date = limit_date

    def _set_cached_df(
        self,
        pair: str,
        timeframe: str,
        dataframe: DataFrame,
        candle_type: CandleType
        self, pair: str, timeframe: str, dataframe: DataFrame, candle_type: CandleType
    ) -> None:
        """
        Store cached Dataframe.
@@ -96,8 +98,7 @@ class DataProvider:
        :param candle_type: Any of the enum CandleType (must match trading mode!)
        """
        pair_key = (pair, timeframe, candle_type)
        self.__cached_pairs[pair_key] = (
            dataframe, datetime.now(timezone.utc))
        self.__cached_pairs[pair_key] = (dataframe, datetime.now(timezone.utc))

    # For multiple producers we will want to merge the pairlists instead of overwriting
    def _set_producer_pairs(self, pairlist: List[str], producer_name: str = "default"):
@@ -116,12 +117,7 @@ class DataProvider:
        """
        return self.__producer_pairs.get(producer_name, []).copy()

    def _emit_df(
        self,
        pair_key: PairWithTimeframe,
        dataframe: DataFrame,
        new_candle: bool
    ) -> None:
    def _emit_df(self, pair_key: PairWithTimeframe, dataframe: DataFrame, new_candle: bool) -> None:
        """
        Send this dataframe as an ANALYZED_DF message to RPC

@@ -131,19 +127,21 @@ class DataProvider:
        """
        if self.__rpc:
            msg: RPCAnalyzedDFMsg = {
                'type': RPCMessageType.ANALYZED_DF,
                'data': {
                    'key': pair_key,
                    'df': dataframe.tail(1),
                    'la': datetime.now(timezone.utc)
                }
            }
                "type": RPCMessageType.ANALYZED_DF,
                "data": {
                    "key": pair_key,
                    "df": dataframe.tail(1),
                    "la": datetime.now(timezone.utc),
                },
            }
            self.__rpc.send_msg(msg)
            if new_candle:
                self.__rpc.send_msg({
                    'type': RPCMessageType.NEW_CANDLE,
                    'data': pair_key,
                })
                self.__rpc.send_msg(
                    {
                        "type": RPCMessageType.NEW_CANDLE,
                        "data": pair_key,
                    }
                )

    def _replace_external_df(
        self,
@@ -152,7 +150,7 @@ class DataProvider:
        last_analyzed: datetime,
        timeframe: str,
        candle_type: CandleType,
        producer_name: str = "default"
        producer_name: str = "default",
    ) -> None:
        """
        Add the pair data to this class from an external source.
@@ -178,7 +176,7 @@ class DataProvider:
        last_analyzed: datetime,
        timeframe: str,
        candle_type: CandleType,
        producer_name: str = "default"
        producer_name: str = "default",
    ) -> Tuple[bool, int]:
        """
        Append a candle to the existing external dataframe. The incoming dataframe
@@ -204,12 +202,14 @@ class DataProvider:
                last_analyzed=last_analyzed,
                timeframe=timeframe,
                candle_type=candle_type,
                producer_name=producer_name
                producer_name=producer_name,
            )
            return (True, 0)

        if (producer_name not in self.__producer_pairs_df
                or pair_key not in self.__producer_pairs_df[producer_name]):
        if (
            producer_name not in self.__producer_pairs_df
            or pair_key not in self.__producer_pairs_df[producer_name]
        ):
            # We don't have data from this producer yet,
            # or we don't have data for this pair_key
            # return False and 1000 for the full df
@@ -220,12 +220,12 @@ class DataProvider:
        # CHECK FOR MISSING CANDLES
        # Convert the timeframe to a timedelta for pandas
        timeframe_delta: Timedelta = to_timedelta(timeframe)
        local_last: Timestamp = existing_df.iloc[-1]['date']  # We want the last date from our copy
        local_last: Timestamp = existing_df.iloc[-1]["date"]  # We want the last date from our copy
        # We want the first date from the incoming
        incoming_first: Timestamp = dataframe.iloc[0]['date']
        incoming_first: Timestamp = dataframe.iloc[0]["date"]

        # Remove existing candles that are newer than the incoming first candle
        existing_df1 = existing_df[existing_df['date'] < incoming_first]
        existing_df1 = existing_df[existing_df["date"] < incoming_first]

        candle_difference = (incoming_first - local_last) / timeframe_delta

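candle_difference is the gap between the held and incoming data measured in candles; 1.0 means contiguous, anything larger means candles are missing. A worked example with invented timestamps:

import pandas as pd

local_last = pd.Timestamp("2024-01-01 10:00", tz="UTC")      # last candle we hold
incoming_first = pd.Timestamp("2024-01-01 13:00", tz="UTC")  # first incoming candle
print((incoming_first - local_last) / pd.to_timedelta("1h"))  # 3.0 -> 11:00 and 12:00 are missing
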
@@ -243,13 +243,13 @@ class DataProvider:

        # Everything is good, we appended
        self._replace_external_df(
            pair,
            appended_df,
            last_analyzed=last_analyzed,
            timeframe=timeframe,
            candle_type=candle_type,
            producer_name=producer_name
        )
            pair,
            appended_df,
            last_analyzed=last_analyzed,
            timeframe=timeframe,
            candle_type=candle_type,
            producer_name=producer_name,
        )
        return (True, 0)

    def get_producer_df(
@@ -257,7 +257,7 @@ class DataProvider:
        pair: str,
        timeframe: Optional[str] = None,
        candle_type: Optional[CandleType] = None,
        producer_name: str = "default"
        producer_name: str = "default",
    ) -> Tuple[DataFrame, datetime]:
        """
        Get the pair data from producers.
@@ -292,64 +292,64 @@ class DataProvider:
        """
        self._pairlists = pairlists

    def historic_ohlcv(
        self,
        pair: str,
        timeframe: str,
        candle_type: str = ''
    ) -> DataFrame:
    def historic_ohlcv(self, pair: str, timeframe: str, candle_type: str = "") -> DataFrame:
        """
        Get stored historical candle (OHLCV) data
        :param pair: pair to get the data for
        :param timeframe: timeframe to get data for
        :param candle_type: '', mark, index, premiumIndex, or funding_rate
        """
        _candle_type = CandleType.from_string(
            candle_type) if candle_type != '' else self._config['candle_type_def']
        _candle_type = (
            CandleType.from_string(candle_type)
            if candle_type != ""
            else self._config["candle_type_def"]
        )
        saved_pair: PairWithTimeframe = (pair, str(timeframe), _candle_type)
        if saved_pair not in self.__cached_pairs_backtesting:
            timerange = TimeRange.parse_timerange(None if self._config.get(
                'timerange') is None else str(self._config.get('timerange')))
            timerange = TimeRange.parse_timerange(
                None
                if self._config.get("timerange") is None
                else str(self._config.get("timerange"))
            )

            startup_candles = self.get_required_startup(str(timeframe))
            tf_seconds = timeframe_to_seconds(str(timeframe))
            timerange.subtract_start(tf_seconds * startup_candles)

            logger.info(f"Loading data for {pair} {timeframe} "
                        f"from {timerange.start_fmt} to {timerange.stop_fmt}")
            logger.info(
                f"Loading data for {pair} {timeframe} "
                f"from {timerange.start_fmt} to {timerange.stop_fmt}"
            )

            self.__cached_pairs_backtesting[saved_pair] = load_pair_history(
                pair=pair,
                timeframe=timeframe,
                datadir=self._config['datadir'],
                datadir=self._config["datadir"],
                timerange=timerange,
                data_format=self._config['dataformat_ohlcv'],
                data_format=self._config["dataformat_ohlcv"],
                candle_type=_candle_type,

            )
        return self.__cached_pairs_backtesting[saved_pair].copy()

    def get_required_startup(self, timeframe: str) -> int:
        freqai_config = self._config.get('freqai', {})
        if not freqai_config.get('enabled', False):
            return self._config.get('startup_candle_count', 0)
        freqai_config = self._config.get("freqai", {})
        if not freqai_config.get("enabled", False):
            return self._config.get("startup_candle_count", 0)
        else:
            startup_candles = self._config.get('startup_candle_count', 0)
            indicator_periods = freqai_config['feature_parameters']['indicator_periods_candles']
            startup_candles = self._config.get("startup_candle_count", 0)
            indicator_periods = freqai_config["feature_parameters"]["indicator_periods_candles"]
            # make sure the startupcandles is at least the set maximum indicator periods
            self._config['startup_candle_count'] = max(startup_candles, max(indicator_periods))
            self._config["startup_candle_count"] = max(startup_candles, max(indicator_periods))
            tf_seconds = timeframe_to_seconds(timeframe)
            train_candles = freqai_config['train_period_days'] * 86400 / tf_seconds
            total_candles = int(self._config['startup_candle_count'] + train_candles)
            train_candles = freqai_config["train_period_days"] * 86400 / tf_seconds
            total_candles = int(self._config["startup_candle_count"] + train_candles)
            logger.info(
                f'Increasing startup_candle_count for freqai on {timeframe} to {total_candles}')
                f"Increasing startup_candle_count for freqai on {timeframe} to {total_candles}"
            )
            return total_candles
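A worked example of the arithmetic above (all values made up): with a 1h timeframe, 30 training days contribute 30 * 86400 / 3600 = 720 candles on top of the startup requirement.

startup_candle_count = 400
indicator_periods_candles = [10, 20, 300]
train_period_days = 30
tf_seconds = 3600  # 1h

startup = max(startup_candle_count, max(indicator_periods_candles))  # 400
train_candles = train_period_days * 86400 / tf_seconds               # 720.0
print(int(startup + train_candles))                                  # 1120
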
    def get_pair_dataframe(
        self,
        pair: str,
        timeframe: Optional[str] = None,
        candle_type: str = ''
        self, pair: str, timeframe: Optional[str] = None, candle_type: str = ""
    ) -> DataFrame:
        """
        Return pair candle (OHLCV) data, either live or cached historical -- depending
@@ -366,13 +366,13 @@ class DataProvider:
            data = self.ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
        else:
            # Get historical OHLCV data (cached on disk).
            timeframe = timeframe or self._config['timeframe']
            timeframe = timeframe or self._config["timeframe"]
            data = self.historic_ohlcv(pair=pair, timeframe=timeframe, candle_type=candle_type)
            # Cut date to timeframe-specific date.
            # This is necessary to prevent lookahead bias in callbacks through informative pairs.
            if self.__slice_date:
                cutoff_date = timeframe_to_prev_date(timeframe, self.__slice_date)
                data = data.loc[data['date'] < cutoff_date]
                data = data.loc[data["date"] < cutoff_date]
        if len(data) == 0:
            logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).")
        return data
@@ -387,7 +387,7 @@ class DataProvider:
         combination.
         Returns empty dataframe and Epoch 0 (1970-01-01) if no dataframe was cached.
         """
-        pair_key = (pair, timeframe, self._config.get('candle_type_def', CandleType.SPOT))
+        pair_key = (pair, timeframe, self._config.get("candle_type_def", CandleType.SPOT))
         if pair_key in self.__cached_pairs:
             if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
                 df, date = self.__cached_pairs[pair_key]

@@ -395,7 +395,7 @@ class DataProvider:
                 df, date = self.__cached_pairs[pair_key]
                 if self.__slice_index is not None:
                     max_index = self.__slice_index
-                    df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES):max_index]
+                    df = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES) : max_index]
             return df, date
         else:
             return (DataFrame(), datetime.fromtimestamp(0, tz=timezone.utc))
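The slicing above returns a window of at most `MAX_DATAFRAME_CANDLES` rows ending just before the current backtest position. A standalone sketch (the constant's value here is assumed for illustration):

```python
import pandas as pd

MAX_DATAFRAME_CANDLES = 1000  # assumed; the real value lives in freqtrade's constants

df = pd.DataFrame({"close": range(1500)})
max_index = 1200  # simulated backtest position

window = df.iloc[max(0, max_index - MAX_DATAFRAME_CANDLES) : max_index]
print(window.index[0], window.index[-1], len(window))  # 200 1199 1000
```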
@@ -406,7 +406,7 @@ class DataProvider:
         Get runmode of the bot
         can be "live", "dry-run", "backtest", "edgecli", "hyperopt" or "other".
         """
-        return RunMode(self._config.get('runmode', RunMode.OTHER))
+        return RunMode(self._config.get("runmode", RunMode.OTHER))

     def current_whitelist(self) -> List[str]:
         """

@@ -434,9 +434,11 @@ class DataProvider:

     # Exchange functions

-    def refresh(self,
-                pairlist: ListPairsWithTimeframes,
-                helping_pairs: Optional[ListPairsWithTimeframes] = None) -> None:
+    def refresh(
+        self,
+        pairlist: ListPairsWithTimeframes,
+        helping_pairs: Optional[ListPairsWithTimeframes] = None,
+    ) -> None:
         """
         Refresh data, called with each cycle
         """

@@ -456,11 +458,7 @@ class DataProvider:
         return list(self._exchange._klines.keys())

     def ohlcv(
-        self,
-        pair: str,
-        timeframe: Optional[str] = None,
-        copy: bool = True,
-        candle_type: str = ''
+        self, pair: str, timeframe: Optional[str] = None, copy: bool = True, candle_type: str = ""
     ) -> DataFrame:
         """
         Get candle (OHLCV) data for the given pair as DataFrame

@@ -474,11 +472,13 @@ class DataProvider:
         if self._exchange is None:
             raise OperationalException(NO_EXCHANGE_EXCEPTION)
         if self.runmode in (RunMode.DRY_RUN, RunMode.LIVE):
-            _candle_type = CandleType.from_string(
-                candle_type) if candle_type != '' else self._config['candle_type_def']
+            _candle_type = (
+                CandleType.from_string(candle_type)
+                if candle_type != ""
+                else self._config["candle_type_def"]
+            )
             return self._exchange.klines(
-                (pair, timeframe or self._config['timeframe'], _candle_type),
-                copy=copy
+                (pair, timeframe or self._config["timeframe"], _candle_type), copy=copy
             )
         else:
             return DataFrame()
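The rewritten conditional keeps the original fallback semantics: an empty `candle_type` string defers to the configured default. A minimal sketch assuming `freqtrade.enums.CandleType`:

```python
from freqtrade.enums import CandleType

config = {"candle_type_def": CandleType.SPOT}  # minimal assumed config

for candle_type in ("", "futures"):
    _candle_type = (
        CandleType.from_string(candle_type)
        if candle_type != ""
        else config["candle_type_def"]
    )
    print(repr(candle_type), "->", _candle_type)
```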
@@ -8,8 +8,11 @@ from tabulate import tabulate

 from freqtrade.configuration import TimeRange
 from freqtrade.constants import Config
-from freqtrade.data.btanalysis import (get_latest_backtest_filename, load_backtest_data,
-                                       load_backtest_stats)
+from freqtrade.data.btanalysis import (
+    get_latest_backtest_filename,
+    load_backtest_data,
+    load_backtest_stats,
+)
 from freqtrade.exceptions import OperationalException


@@ -18,9 +21,10 @@ logger = logging.getLogger(__name__)

 def _load_backtest_analysis_data(backtest_dir: Path, name: str):
     if backtest_dir.is_dir():
-        scpf = Path(backtest_dir,
-                    Path(get_latest_backtest_filename(backtest_dir)).stem + "_" + name + ".pkl"
-                    )
+        scpf = Path(
+            backtest_dir,
+            Path(get_latest_backtest_filename(backtest_dir)).stem + "_" + name + ".pkl",
+        )
     else:
         scpf = Path(backtest_dir.parent / f"{backtest_dir.stem}_{name}.pkl")

@@ -53,7 +57,8 @@ def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_cand
         for pair in pairlist:
             if pair in signal_candles[strategy_name]:
                 analysed_trades_dict[strategy_name][pair] = _analyze_candles_and_indicators(
-                    pair, trades, signal_candles[strategy_name][pair])
+                    pair, trades, signal_candles[strategy_name][pair]
+                )
     except Exception as e:
         print(f"Cannot process entry/exit reasons for {strategy_name}: ", e)

@@ -64,28 +69,28 @@ def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles:
     buyf = signal_candles

     if len(buyf) > 0:
-        buyf = buyf.set_index('date', drop=False)
-        trades_red = trades.loc[trades['pair'] == pair].copy()
+        buyf = buyf.set_index("date", drop=False)
+        trades_red = trades.loc[trades["pair"] == pair].copy()

         trades_inds = pd.DataFrame()

         if trades_red.shape[0] > 0 and buyf.shape[0] > 0:
             for t, v in trades_red.open_date.items():
-                allinds = buyf.loc[(buyf['date'] < v)]
+                allinds = buyf.loc[(buyf["date"] < v)]
                 if allinds.shape[0] > 0:
                     tmp_inds = allinds.iloc[[-1]]

-                    trades_red.loc[t, 'signal_date'] = tmp_inds['date'].values[0]
-                    trades_red.loc[t, 'enter_reason'] = trades_red.loc[t, 'enter_tag']
-                    tmp_inds.index.rename('signal_date', inplace=True)
+                    trades_red.loc[t, "signal_date"] = tmp_inds["date"].values[0]
+                    trades_red.loc[t, "enter_reason"] = trades_red.loc[t, "enter_tag"]
+                    tmp_inds.index.rename("signal_date", inplace=True)
                     trades_inds = pd.concat([trades_inds, tmp_inds])

-        if 'signal_date' in trades_red:
-            trades_red['signal_date'] = pd.to_datetime(trades_red['signal_date'], utc=True)
-            trades_red.set_index('signal_date', inplace=True)
+        if "signal_date" in trades_red:
+            trades_red["signal_date"] = pd.to_datetime(trades_red["signal_date"], utc=True)
+            trades_red.set_index("signal_date", inplace=True)

             try:
-                trades_red = pd.merge(trades_red, trades_inds, on='signal_date', how='outer')
+                trades_red = pd.merge(trades_red, trades_inds, on="signal_date", how="outer")
             except Exception as e:
                 raise e
         return trades_red
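The loop above pairs each trade with the most recent signal candle dated strictly before the trade's open date. A toy sketch of that matching, with assumed data shapes:

```python
import pandas as pd

candles = pd.DataFrame(
    {
        "date": pd.to_datetime(["2024-01-01 10:00", "2024-01-01 11:00"], utc=True),
        "rsi": [28, 35],
    }
)
trades = pd.DataFrame({"open_date": pd.to_datetime(["2024-01-01 11:03"], utc=True)})

for t, open_date in trades["open_date"].items():
    earlier = candles.loc[candles["date"] < open_date]
    if not earlier.empty:
        signal_candle = earlier.iloc[[-1]]  # the 11:00 candle triggered this entry
        print(signal_candle["rsi"].values[0])  # 35
```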
@@ -93,138 +98,166 @@ def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles:
         return pd.DataFrame()


-def _do_group_table_output(bigdf, glist, csv_path: Path, to_csv=False, ):
+def _do_group_table_output(
+    bigdf,
+    glist,
+    csv_path: Path,
+    to_csv=False,
+):
     for g in glist:
         # 0: summary wins/losses grouped by enter tag
         if g == "0":
-            group_mask = ['enter_reason']
-            wins = bigdf.loc[bigdf['profit_abs'] >= 0] \
-                .groupby(group_mask) \
-                .agg({'profit_abs': ['sum']})
+            group_mask = ["enter_reason"]
+            wins = (
+                bigdf.loc[bigdf["profit_abs"] >= 0].groupby(group_mask).agg({"profit_abs": ["sum"]})
+            )

-            wins.columns = ['profit_abs_wins']
-            loss = bigdf.loc[bigdf['profit_abs'] < 0] \
-                .groupby(group_mask) \
-                .agg({'profit_abs': ['sum']})
-            loss.columns = ['profit_abs_loss']
+            wins.columns = ["profit_abs_wins"]
+            loss = (
+                bigdf.loc[bigdf["profit_abs"] < 0].groupby(group_mask).agg({"profit_abs": ["sum"]})
+            )
+            loss.columns = ["profit_abs_loss"]

-            new = bigdf.groupby(group_mask).agg({'profit_abs': [
-                'count',
-                lambda x: sum(x > 0),
-                lambda x: sum(x <= 0)]})
+            new = bigdf.groupby(group_mask).agg(
+                {"profit_abs": ["count", lambda x: sum(x > 0), lambda x: sum(x <= 0)]}
+            )
             new = pd.concat([new, wins, loss], axis=1).fillna(0)

-            new['profit_tot'] = new['profit_abs_wins'] - abs(new['profit_abs_loss'])
-            new['wl_ratio_pct'] = (new.iloc[:, 1] / new.iloc[:, 0] * 100).fillna(0)
-            new['avg_win'] = (new['profit_abs_wins'] / new.iloc[:, 1]).fillna(0)
-            new['avg_loss'] = (new['profit_abs_loss'] / new.iloc[:, 2]).fillna(0)
+            new["profit_tot"] = new["profit_abs_wins"] - abs(new["profit_abs_loss"])
+            new["wl_ratio_pct"] = (new.iloc[:, 1] / new.iloc[:, 0] * 100).fillna(0)
+            new["avg_win"] = (new["profit_abs_wins"] / new.iloc[:, 1]).fillna(0)
+            new["avg_loss"] = (new["profit_abs_loss"] / new.iloc[:, 2]).fillna(0)

-            new['exp_ratio'] = (
-                (
-                    (1 + (new['avg_win'] / abs(new['avg_loss']))) * (new['wl_ratio_pct'] / 100)
-                ) - 1).fillna(0)
+            new["exp_ratio"] = (
+                ((1 + (new["avg_win"] / abs(new["avg_loss"]))) * (new["wl_ratio_pct"] / 100)) - 1
+            ).fillna(0)

-            new.columns = ['total_num_buys', 'wins', 'losses',
-                           'profit_abs_wins', 'profit_abs_loss',
-                           'profit_tot', 'wl_ratio_pct',
-                           'avg_win', 'avg_loss', 'exp_ratio']
+            new.columns = [
+                "total_num_buys",
+                "wins",
+                "losses",
+                "profit_abs_wins",
+                "profit_abs_loss",
+                "profit_tot",
+                "wl_ratio_pct",
+                "avg_win",
+                "avg_loss",
+                "exp_ratio",
+            ]

-            sortcols = ['total_num_buys']
+            sortcols = ["total_num_buys"]

-            _print_table(new, sortcols, show_index=True, name="Group 0:",
-                         to_csv=to_csv, csv_path=csv_path)
+            _print_table(
+                new, sortcols, show_index=True, name="Group 0:", to_csv=to_csv, csv_path=csv_path
+            )

         else:
-            agg_mask = {'profit_abs': ['count', 'sum', 'median', 'mean'],
-                        'profit_ratio': ['median', 'mean', 'sum']}
-            agg_cols = ['num_buys', 'profit_abs_sum', 'profit_abs_median',
-                        'profit_abs_mean', 'median_profit_pct', 'mean_profit_pct',
-                        'total_profit_pct']
-            sortcols = ['profit_abs_sum', 'enter_reason']
+            agg_mask = {
+                "profit_abs": ["count", "sum", "median", "mean"],
+                "profit_ratio": ["median", "mean", "sum"],
+            }
+            agg_cols = [
+                "num_buys",
+                "profit_abs_sum",
+                "profit_abs_median",
+                "profit_abs_mean",
+                "median_profit_pct",
+                "mean_profit_pct",
+                "total_profit_pct",
+            ]
+            sortcols = ["profit_abs_sum", "enter_reason"]

             # 1: profit summaries grouped by enter_tag
             if g == "1":
-                group_mask = ['enter_reason']
+                group_mask = ["enter_reason"]

             # 2: profit summaries grouped by enter_tag and exit_tag
             if g == "2":
-                group_mask = ['enter_reason', 'exit_reason']
+                group_mask = ["enter_reason", "exit_reason"]

             # 3: profit summaries grouped by pair and enter_tag
             if g == "3":
-                group_mask = ['pair', 'enter_reason']
+                group_mask = ["pair", "enter_reason"]

             # 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
             if g == "4":
-                group_mask = ['pair', 'enter_reason', 'exit_reason']
+                group_mask = ["pair", "enter_reason", "exit_reason"]

             # 5: profit summaries grouped by exit_tag
             if g == "5":
-                group_mask = ['exit_reason']
-                sortcols = ['exit_reason']
+                group_mask = ["exit_reason"]
+                sortcols = ["exit_reason"]

             if group_mask:
                 new = bigdf.groupby(group_mask).agg(agg_mask).reset_index()
                 new.columns = group_mask + agg_cols
-                new['median_profit_pct'] = new['median_profit_pct'] * 100
-                new['mean_profit_pct'] = new['mean_profit_pct'] * 100
-                new['total_profit_pct'] = new['total_profit_pct'] * 100
+                new["median_profit_pct"] = new["median_profit_pct"] * 100
+                new["mean_profit_pct"] = new["mean_profit_pct"] * 100
+                new["total_profit_pct"] = new["total_profit_pct"] * 100

-                _print_table(new, sortcols, name=f"Group {g}:",
-                             to_csv=to_csv, csv_path=csv_path)
+                _print_table(new, sortcols, name=f"Group {g}:", to_csv=to_csv, csv_path=csv_path)
             else:
                 logger.warning("Invalid group mask specified.")
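The `exp_ratio` computed above is the usual expectancy ratio, `(1 + avg_win / |avg_loss|) * win_rate - 1`. Worked numbers (assumed, for illustration):

```python
avg_win = 30.0  # average winning trade in stake currency (assumed)
avg_loss = -20.0  # average losing trade (assumed)
wl_ratio_pct = 40.0  # 40% win rate (assumed)

exp_ratio = (1 + avg_win / abs(avg_loss)) * (wl_ratio_pct / 100) - 1
print(exp_ratio)  # 0.0 -> this entry tag is exactly break-even per trade
```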
-def _do_rejected_signals_output(rejected_signals_df: pd.DataFrame,
-                                to_csv: bool = False, csv_path=None) -> None:
-    cols = ['pair', 'date', 'enter_tag']
-    sortcols = ['date', 'pair', 'enter_tag']
-    _print_table(rejected_signals_df[cols],
-                 sortcols,
-                 show_index=False,
-                 name="Rejected Signals:",
-                 to_csv=to_csv,
-                 csv_path=csv_path)
+def _do_rejected_signals_output(
+    rejected_signals_df: pd.DataFrame, to_csv: bool = False, csv_path=None
+) -> None:
+    cols = ["pair", "date", "enter_tag"]
+    sortcols = ["date", "pair", "enter_tag"]
+    _print_table(
+        rejected_signals_df[cols],
+        sortcols,
+        show_index=False,
+        name="Rejected Signals:",
+        to_csv=to_csv,
+        csv_path=csv_path,
+    )


-def _select_rows_within_dates(df, timerange=None, df_date_col: str = 'date'):
+def _select_rows_within_dates(df, timerange=None, df_date_col: str = "date"):
     if timerange:
-        if timerange.starttype == 'date':
+        if timerange.starttype == "date":
             df = df.loc[(df[df_date_col] >= timerange.startdt)]
-        if timerange.stoptype == 'date':
+        if timerange.stoptype == "date":
             df = df.loc[(df[df_date_col] < timerange.stopdt)]
     return df


 def _select_rows_by_tags(df, enter_reason_list, exit_reason_list):
     if enter_reason_list and "all" not in enter_reason_list:
-        df = df.loc[(df['enter_reason'].isin(enter_reason_list))]
+        df = df.loc[(df["enter_reason"].isin(enter_reason_list))]

     if exit_reason_list and "all" not in exit_reason_list:
-        df = df.loc[(df['exit_reason'].isin(exit_reason_list))]
+        df = df.loc[(df["exit_reason"].isin(exit_reason_list))]
     return df


-def prepare_results(analysed_trades, stratname,
-                    enter_reason_list, exit_reason_list,
-                    timerange=None):
+def prepare_results(
+    analysed_trades, stratname, enter_reason_list, exit_reason_list, timerange=None
+):
     res_df = pd.DataFrame()
     for pair, trades in analysed_trades[stratname].items():
-        if (trades.shape[0] > 0):
-            trades.dropna(subset=['close_date'], inplace=True)
+        if trades.shape[0] > 0:
+            trades.dropna(subset=["close_date"], inplace=True)
             res_df = pd.concat([res_df, trades], ignore_index=True)

     res_df = _select_rows_within_dates(res_df, timerange)

-    if res_df is not None and res_df.shape[0] > 0 and ('enter_reason' in res_df.columns):
+    if res_df is not None and res_df.shape[0] > 0 and ("enter_reason" in res_df.columns):
         res_df = _select_rows_by_tags(res_df, enter_reason_list, exit_reason_list)

     return res_df


-def print_results(res_df: pd.DataFrame, analysis_groups: List[str], indicator_list: List[str],
-                  csv_path: Path, rejected_signals=None, to_csv=False):
+def print_results(
+    res_df: pd.DataFrame,
+    analysis_groups: List[str],
+    indicator_list: List[str],
+    csv_path: Path,
+    rejected_signals=None,
+    to_csv=False,
+):
     if res_df.shape[0] > 0:
         if analysis_groups:
             _do_group_table_output(res_df, analysis_groups, to_csv=to_csv, csv_path=csv_path)

@@ -237,30 +270,31 @@ def print_results(res_df: pd.DataFrame, analysis_groups: List[str], indicator_li

         # NB this can be large for big dataframes!
         if "all" in indicator_list:
-            _print_table(res_df,
-                         show_index=False,
-                         name="Indicators:",
-                         to_csv=to_csv,
-                         csv_path=csv_path)
+            _print_table(
+                res_df, show_index=False, name="Indicators:", to_csv=to_csv, csv_path=csv_path
+            )
         elif indicator_list is not None and indicator_list:
             available_inds = []
             for ind in indicator_list:
                 if ind in res_df:
                     available_inds.append(ind)
             ilist = ["pair", "enter_reason", "exit_reason"] + available_inds
-            _print_table(res_df[ilist],
-                         sortcols=['exit_reason'],
-                         show_index=False,
-                         name="Indicators:",
-                         to_csv=to_csv,
-                         csv_path=csv_path)
+            _print_table(
+                res_df[ilist],
+                sortcols=["exit_reason"],
+                show_index=False,
+                name="Indicators:",
+                to_csv=to_csv,
+                csv_path=csv_path,
+            )
     else:
         print("\nNo trades to show")


-def _print_table(df: pd.DataFrame, sortcols=None, *, show_index=False, name=None,
-                 to_csv=False, csv_path: Path):
-    if (sortcols is not None):
+def _print_table(
+    df: pd.DataFrame, sortcols=None, *, show_index=False, name=None, to_csv=False, csv_path: Path
+):
+    if sortcols is not None:
         data = df.sort_values(sortcols)
     else:
         data = df

@@ -273,60 +307,64 @@ def _print_table(df: pd.DataFrame, sortcols=None, *, show_index=False, name=None
     if name is not None:
         print(name)

-    print(
-        tabulate(
-            data,
-            headers='keys',
-            tablefmt='psql',
-            showindex=show_index
-        )
-    )
+    print(tabulate(data, headers="keys", tablefmt="psql", showindex=show_index))
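`_print_table` now funnels everything through a single `tabulate` call; a minimal standalone sketch of that call shape:

```python
import pandas as pd
from tabulate import tabulate

df = pd.DataFrame({"enter_reason": ["rsi_cross"], "profit_abs": [12.5]})
# Column names become headers; "psql" draws postgres-style borders.
print(tabulate(df, headers="keys", tablefmt="psql", showindex=False))
```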
 def process_entry_exit_reasons(config: Config):
     try:
-        analysis_groups = config.get('analysis_groups', [])
-        enter_reason_list = config.get('enter_reason_list', ["all"])
-        exit_reason_list = config.get('exit_reason_list', ["all"])
-        indicator_list = config.get('indicator_list', [])
-        do_rejected = config.get('analysis_rejected', False)
-        to_csv = config.get('analysis_to_csv', False)
-        csv_path = Path(config.get('analysis_csv_path', config['exportfilename']))
+        analysis_groups = config.get("analysis_groups", [])
+        enter_reason_list = config.get("enter_reason_list", ["all"])
+        exit_reason_list = config.get("exit_reason_list", ["all"])
+        indicator_list = config.get("indicator_list", [])
+        do_rejected = config.get("analysis_rejected", False)
+        to_csv = config.get("analysis_to_csv", False)
+        csv_path = Path(config.get("analysis_csv_path", config["exportfilename"]))
         if to_csv and not csv_path.is_dir():
             raise OperationalException(f"Specified directory {csv_path} does not exist.")

-        timerange = TimeRange.parse_timerange(None if config.get(
-            'timerange') is None else str(config.get('timerange')))
+        timerange = TimeRange.parse_timerange(
+            None if config.get("timerange") is None else str(config.get("timerange"))
+        )

-        backtest_stats = load_backtest_stats(config['exportfilename'])
+        backtest_stats = load_backtest_stats(config["exportfilename"])

-        for strategy_name, results in backtest_stats['strategy'].items():
-            trades = load_backtest_data(config['exportfilename'], strategy_name)
+        for strategy_name, results in backtest_stats["strategy"].items():
+            trades = load_backtest_data(config["exportfilename"], strategy_name)

             if trades is not None and not trades.empty:
-                signal_candles = _load_signal_candles(config['exportfilename'])
+                signal_candles = _load_signal_candles(config["exportfilename"])

                 rej_df = None
                 if do_rejected:
-                    rejected_signals_dict = _load_rejected_signals(config['exportfilename'])
-                    rej_df = prepare_results(rejected_signals_dict, strategy_name,
-                                             enter_reason_list, exit_reason_list,
-                                             timerange=timerange)
+                    rejected_signals_dict = _load_rejected_signals(config["exportfilename"])
+                    rej_df = prepare_results(
+                        rejected_signals_dict,
+                        strategy_name,
+                        enter_reason_list,
+                        exit_reason_list,
+                        timerange=timerange,
+                    )

                 analysed_trades_dict = _process_candles_and_indicators(
-                    config['exchange']['pair_whitelist'], strategy_name,
-                    trades, signal_candles)
+                    config["exchange"]["pair_whitelist"], strategy_name, trades, signal_candles
+                )

-                res_df = prepare_results(analysed_trades_dict, strategy_name,
-                                         enter_reason_list, exit_reason_list,
-                                         timerange=timerange)
+                res_df = prepare_results(
+                    analysed_trades_dict,
+                    strategy_name,
+                    enter_reason_list,
+                    exit_reason_list,
+                    timerange=timerange,
+                )

-                print_results(res_df,
-                              analysis_groups,
-                              indicator_list,
-                              rejected_signals=rej_df,
-                              to_csv=to_csv,
-                              csv_path=csv_path)
+                print_results(
+                    res_df,
+                    analysis_groups,
+                    indicator_list,
+                    rejected_signals=rej_df,
+                    to_csv=to_csv,
+                    csv_path=csv_path,
+                )

     except ValueError as e:
         raise OperationalException(e) from e
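Since `process_entry_exit_reasons` only reads flat config keys, it can be driven by a plain dict. The sketch below lists the keys read above; the values are assumed examples (in practice the `freqtrade backtesting-analysis` subcommand fills them in):

```python
from pathlib import Path

# Keys mirror the config.get(...) calls above; values are illustrative only.
config = {
    "analysis_groups": ["0", "2"],
    "enter_reason_list": ["all"],
    "exit_reason_list": ["all"],
    "indicator_list": ["rsi"],
    "analysis_rejected": False,
    "analysis_to_csv": False,
    "exportfilename": Path("user_data/backtest_results"),
}
```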
@@ -5,8 +5,17 @@ Includes:
 * load data for a pair (or a list of pairs) from disk
 * download data from exchange and store to disk
 """

 # flake8: noqa: F401
 from .datahandlers import get_datahandler
-from .history_utils import (convert_trades_to_ohlcv, download_data_main, get_timerange, load_data,
-                            load_pair_history, refresh_backtest_ohlcv_data,
-                            refresh_backtest_trades_data, refresh_data, validate_backtest_data)
+from .history_utils import (
+    convert_trades_to_ohlcv,
+    download_data_main,
+    get_timerange,
+    load_data,
+    load_pair_history,
+    refresh_backtest_ohlcv_data,
+    refresh_backtest_trades_data,
+    refresh_data,
+    validate_backtest_data,
+)
@@ -14,11 +14,11 @@ logger = logging.getLogger(__name__)


 class FeatherDataHandler(IDataHandler):

     _columns = DEFAULT_DATAFRAME_COLUMNS

     def ohlcv_store(
-            self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType) -> None:
+        self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
+    ) -> None:
         """
         Store data in json format "values".
         format looks as follows:

@@ -33,11 +33,12 @@ class FeatherDataHandler(IDataHandler):
         self.create_dir_if_needed(filename)

         data.reset_index(drop=True).loc[:, self._columns].to_feather(
-            filename, compression_level=9, compression='lz4')
+            filename, compression_level=9, compression="lz4"
+        )

-    def _ohlcv_load(self, pair: str, timeframe: str,
-                    timerange: Optional[TimeRange], candle_type: CandleType
-                    ) -> DataFrame:
+    def _ohlcv_load(
+        self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
+    ) -> DataFrame:
         """
         Internal method used to load data for one pair from disk.
         Implements the loading and conversion to a Pandas dataframe.

@@ -50,28 +51,31 @@ class FeatherDataHandler(IDataHandler):
         :param candle_type: Any of the enum CandleType (must match trading mode!)
         :return: DataFrame with ohlcv data, or empty DataFrame
         """
-        filename = self._pair_data_filename(
-            self._datadir, pair, timeframe, candle_type=candle_type)
+        filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type=candle_type)
         if not filename.exists():
             # Fallback mode for 1M files
             filename = self._pair_data_filename(
-                self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True)
+                self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True
+            )
             if not filename.exists():
                 return DataFrame(columns=self._columns)

         pairdata = read_feather(filename)
         pairdata.columns = self._columns
-        pairdata = pairdata.astype(dtype={'open': 'float', 'high': 'float',
-                                          'low': 'float', 'close': 'float', 'volume': 'float'})
-        pairdata['date'] = to_datetime(pairdata['date'], unit='ms', utc=True)
+        pairdata = pairdata.astype(
+            dtype={
+                "open": "float",
+                "high": "float",
+                "low": "float",
+                "close": "float",
+                "volume": "float",
+            }
+        )
+        pairdata["date"] = to_datetime(pairdata["date"], unit="ms", utc=True)
         return pairdata
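The store/load pair above amounts to a plain feather round-trip. A minimal sketch using the same compression settings (requires pandas with pyarrow installed):

```python
import pandas as pd

df = pd.DataFrame(
    {
        "date": pd.to_datetime(["2024-01-01 00:00"], utc=True),
        "open": [42000.0],
        "high": [42100.0],
        "low": [41900.0],
        "close": [42050.0],
        "volume": [10.5],
    }
)
df.to_feather("ohlcv.feather", compression_level=9, compression="lz4")
print(pd.read_feather("ohlcv.feather").dtypes["close"])  # float64
```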
     def ohlcv_append(
-        self,
-        pair: str,
-        timeframe: str,
-        data: DataFrame,
-        candle_type: CandleType
+        self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
     ) -> None:
         """
         Append data to existing data structures

@@ -92,7 +96,7 @@ class FeatherDataHandler(IDataHandler):
         """
         filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
         self.create_dir_if_needed(filename)
-        data.reset_index(drop=True).to_feather(filename, compression_level=9, compression='lz4')
+        data.reset_index(drop=True).to_feather(filename, compression_level=9, compression="lz4")

     def trades_append(self, pair: str, data: DataFrame):
         """

@@ -104,7 +108,7 @@ class FeatherDataHandler(IDataHandler):
         raise NotImplementedError()

     def _trades_load(
-            self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
+        self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
     ) -> DataFrame:
         """
         Load a pair from file, either .json.gz or .json