mirror of
https://github.com/freqtrade/freqtrade.git
synced 2024-11-12 19:23:55 +00:00
Merge pull request #10863 from freqtrade/new_release
Some checks failed
Update Docker Hub Description / dockerHubDescription (push) Has been cancelled
New release 2024.10
This commit is contained in:
commit 87c5668b14

23 .github/workflows/ci.yml (vendored)
@@ -25,7 +25,7 @@ jobs:
     strategy:
       matrix:
         os: [ "ubuntu-20.04", "ubuntu-22.04", "ubuntu-24.04" ]
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.10", "3.11", "3.12"]
 
     steps:
     - uses: actions/checkout@v4
@@ -68,11 +68,17 @@ jobs:
         python build_helpers/freqtrade_client_version_align.py
 
     - name: Tests
+      if: (!(runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-22.04'))
       run: |
         pytest --random-order
 
+    - name: Tests with Coveralls
+      if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-22.04')
+      run: |
+        pytest --random-order --cov=freqtrade --cov=freqtrade_client --cov-config=.coveragerc
+
     - name: Coveralls
-      if: (runner.os == 'Linux' && matrix.python-version == '3.10' && matrix.os == 'ubuntu-22.04')
+      if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-22.04')
       env:
         # Coveralls token. Not used as secret due to github not providing secrets to forked repositories
        COVERALLS_REPO_TOKEN: 6D1m0xupS3FgutfuGao8keFf9Hc0FpIXu
@@ -138,11 +144,8 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ "macos-12", "macos-13", "macos-14" ]
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
-        exclude:
-          - os: "macos-14"
-            python-version: "3.9"
+        os: [ "macos-13", "macos-14", "macos-15" ]
+        python-version: ["3.10", "3.11", "3.12"]
 
     steps:
     - uses: actions/checkout@v4
@@ -263,7 +266,7 @@ jobs:
     strategy:
       matrix:
         os: [ windows-latest ]
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.10", "3.11", "3.12"]
 
     steps:
     - uses: actions/checkout@v4
@@ -537,12 +540,12 @@ jobs:
 
     - name: Publish to PyPI (Test)
-      uses: pypa/gh-action-pypi-publish@v1.10.2
+      uses: pypa/gh-action-pypi-publish@v1.10.3
       with:
         repository-url: https://test.pypi.org/legacy/
 
     - name: Publish to PyPI
-      uses: pypa/gh-action-pypi-publish@v1.10.2
+      uses: pypa/gh-action-pypi-publish@v1.10.3
 
 
   deploy-docker:
@@ -9,17 +9,17 @@ repos:
         # stages: [push]
 
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: "v1.11.2"
+    rev: "v1.13.0"
     hooks:
       - id: mypy
         exclude: build_helpers
         additional_dependencies:
          - types-cachetools==5.5.0.20240820
          - types-filelock==3.2.7
-         - types-requests==2.32.0.20240914
+         - types-requests==2.32.0.20241016
          - types-tabulate==0.9.0.20240106
-         - types-python-dateutil==2.9.0.20240906
-         - SQLAlchemy==2.0.35
+         - types-python-dateutil==2.9.0.20241003
+         - SQLAlchemy==2.0.36
         # stages: [push]
 
   - repo: https://github.com/pycqa/isort
@@ -31,13 +31,13 @@ repos:
 
   - repo: https://github.com/charliermarsh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.6.7'
+    rev: 'v0.7.1'
     hooks:
       - id: ruff
       - id: ruff-format
 
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v5.0.0
     hooks:
       - id: end-of-file-fixer
        exclude: |
@@ -1,4 +1,4 @@
-FROM python:3.12.6-slim-bookworm as base
+FROM python:3.12.7-slim-bookworm as base
 
 # Setup env
 ENV LANG C.UTF-8
@@ -61,7 +61,7 @@ Please find the complete documentation on the [freqtrade website](https://www.fr
 
 ## Features
 
-- [x] **Based on Python 3.9+**: For botting on any operating system - Windows, macOS and Linux.
+- [x] **Based on Python 3.10+**: For botting on any operating system - Windows, macOS and Linux.
 - [x] **Persistence**: Persistence is achieved through sqlite.
 - [x] **Dry-run**: Run the bot without paying money.
 - [x] **Backtesting**: Run a simulation of your buy/sell strategy.
@@ -218,7 +218,7 @@ To run this bot we recommend you a cloud instance with a minimum of:
 
 ### Software requirements
 
-- [Python >= 3.9](http://docs.python-guide.org/en/latest/starting/installation/)
+- [Python >= 3.10](http://docs.python-guide.org/en/latest/starting/installation/)
 - [pip](https://pip.pypa.io/en/stable/installing/)
 - [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
 - [TA-Lib](https://ta-lib.github.io/ta-lib-python/)
Binary file not shown.
Binary file not shown.
@@ -579,57 +579,6 @@
         ]
       }
     },
-    "protections": {
-      "description": "Configuration for various protections.",
-      "type": "array",
-      "items": {
-        "type": "object",
-        "properties": {
-          "method": {
-            "description": "Method used for the protection.",
-            "type": "string",
-            "enum": [
-              "CooldownPeriod",
-              "LowProfitPairs",
-              "MaxDrawdown",
-              "StoplossGuard"
-            ]
-          },
-          "stop_duration": {
-            "description": "Duration to lock the pair after a protection is triggered, in minutes.",
-            "type": "number",
-            "minimum": 0.0
-          },
-          "stop_duration_candles": {
-            "description": "Duration to lock the pair after a protection is triggered, in number of candles.",
-            "type": "number",
-            "minimum": 0
-          },
-          "unlock_at": {
-            "description": "Time when trading will be unlocked regularly. Format: HH:MM",
-            "type": "string"
-          },
-          "trade_limit": {
-            "description": "Minimum number of trades required during lookback period.",
-            "type": "number",
-            "minimum": 1
-          },
-          "lookback_period": {
-            "description": "Period to look back for protection checks, in minutes.",
-            "type": "number",
-            "minimum": 1
-          },
-          "lookback_period_candles": {
-            "description": "Period to look back for protection checks, in number of candles.",
-            "type": "number",
-            "minimum": 1
-          }
-        },
-        "required": [
-          "method"
-        ]
-      }
-    },
     "telegram": {
       "description": "Telegram settings.",
       "type": "object",
@@ -1434,6 +1383,11 @@
       "type": "string",
       "default": "example"
     },
+    "wait_for_training_iteration_on_reload": {
+      "description": "Wait for the next training iteration to complete after /reload or ctrl+c.",
+      "type": "boolean",
+      "default": true
+    },
     "feature_parameters": {
       "description": "The parameters used to engineer the feature set",
       "type": "object",
@@ -229,7 +229,6 @@ Mandatory parameters are marked as **Required**, which means that they are requi
 | | **Plugins**
 | `edge.*` | Please refer to [edge configuration document](edge.md) for detailed explanation of all possible configuration options.
 | `pairlists` | Define one or more pairlists to be used. [More information](plugins.md#pairlists-and-pairlist-handlers). <br>*Defaults to `StaticPairList`.* <br> **Datatype:** List of Dicts
-| `protections` | Define one or more protections to be used. [More information](plugins.md#protections). <br> **Datatype:** List of Dicts
 | | **Telegram**
 | `telegram.enabled` | Enable the usage of Telegram. <br> **Datatype:** Boolean
 | `telegram.token` | Your Telegram bot token. Only required if `telegram.enabled` is `true`. <br>**Keep it in secret, do not disclose publicly.** <br> **Datatype:** String
@@ -75,7 +75,10 @@ Webhook terminology changed from "sell" to "exit", and from "buy" to "entry", re
 * `webhooksellfill`, `webhookexitfill` -> `exit_fill`
 * `webhooksellcancel`, `webhookexitcancel` -> `exit_cancel`
 
 ## Removal of `populate_any_indicators`
 
 version 2023.3 saw the removal of `populate_any_indicators` in favor of split methods for feature engineering and targets. Please read the [migration document](strategy_migration.md#freqai-strategy) for full details.
 
+## Removal of `protections` from configuration
+
+Setting protections from the configuration via `"protections": [],` has been removed in 2024.10, after having raised deprecation warnings for over 3 years.
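For readers migrating off the removed configuration key, the documented replacement is to define protections inside the strategy itself ("protections as property"). The sketch below only uses parameter names that appear in the schema removed above; the class name and the concrete values are illustrative, not a recommendation:

```python
from freqtrade.strategy import IStrategy


class MyProtectedStrategy(IStrategy):
    # Hypothetical strategy showing protections defined as a strategy property
    # instead of the removed "protections" configuration key.
    # (populate_indicators / populate_entry_trend / populate_exit_trend omitted)
    timeframe = "5m"
    stoploss = -0.10

    @property
    def protections(self):
        return [
            # Pause a pair for a few candles after each closed trade.
            {"method": "CooldownPeriod", "stop_duration_candles": 2},
            # Lock a pair after repeated stoplosses within the lookback window.
            {
                "method": "StoplossGuard",
                "lookback_period_candles": 24,
                "trade_limit": 4,
                "stop_duration_candles": 12,
            },
        ]
```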
@@ -116,7 +116,7 @@ A similar setup can also be taken for Pycharm - using `freqtrade` as module name
 ![Pycharm debug configuration](assets/pycharm_debug.png)
 
 !!! Note "Startup directory"
-    This assumes that you have the repository checked out, and the editor is started at the repository root level (so setup.py is at the top level of your repository).
+    This assumes that you have the repository checked out, and the editor is started at the repository root level (so pyproject.toml is at the top level of your repository).
 
 ## ErrorHandling
@@ -241,7 +241,6 @@ No protection should use datetime directly, but use the provided `date_now` vari
 
 !!! Tip "Writing a new Protection"
     Best copy one of the existing Protections to have a good example.
     Don't forget to register your protection in `constants.py` under the variable `AVAILABLE_PROTECTIONS` - otherwise it will not be selectable.
 
 #### Implementation of a new protection
@@ -22,6 +22,7 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
 | `write_metrics_to_disk` | Collect train timings, inference timings and cpu usage in json file. <br> **Datatype:** Boolean. <br> Default: `False`
 | `data_kitchen_thread_count` | <br> Designate the number of threads you want to use for data processing (outlier methods, normalization, etc.). This has no impact on the number of threads used for training. If user does not set it (default), FreqAI will use max number of threads - 2 (leaving 1 physical core available for Freqtrade bot and FreqUI) <br> **Datatype:** Positive integer.
 | `activate_tensorboard` | <br> Indicate whether or not to activate tensorboard for the tensorboard enabled modules (currently Reinforcment Learning, XGBoost, Catboost, and PyTorch). Tensorboard needs Torch installed, which means you will need the torch/RL docker image or you need to answer "yes" to the install question about whether or not you wish to install Torch. <br> **Datatype:** Boolean. <br> Default: `True`.
+| `wait_for_training_iteration_on_reload` | <br> When using /reload or ctrl-c, wait for the current training iteration to finish before completing graceful shutdown. If set to `False`, FreqAI will break the current training iteration, allowing you to shutdown gracefully more quickly, but you will lose your current training iteration. <br> **Datatype:** Boolean. <br> Default: `True`.
 
 ### Feature parameters
@@ -445,7 +445,6 @@ While this strategy is most likely too simple to provide consistent profit, it s
 
 Whether you are using `.range` functionality or the alternatives above, you should try to use space ranges as small as possible since this will improve CPU/RAM usage.
 
 ## Optimizing protections
 
 Freqtrade can also optimize protections. How you optimize protections is up to you, and the following should be considered as example only.
@@ -589,14 +588,15 @@ Currently, the following loss functions are builtin:
 
 * `ShortTradeDurHyperOptLoss` - (default legacy Freqtrade hyperoptimization loss function) - Mostly for short trade duration and avoiding losses.
 * `OnlyProfitHyperOptLoss` - takes only amount of profit into consideration.
-* `SharpeHyperOptLoss` - optimizes Sharpe Ratio calculated on trade returns relative to standard deviation.
-* `SharpeHyperOptLossDaily` - optimizes Sharpe Ratio calculated on **daily** trade returns relative to standard deviation.
-* `SortinoHyperOptLoss` - optimizes Sortino Ratio calculated on trade returns relative to **downside** standard deviation.
+* `SharpeHyperOptLoss` - Optimizes Sharpe Ratio calculated on trade returns relative to standard deviation.
+* `SharpeHyperOptLossDaily` - Optimizes Sharpe Ratio calculated on **daily** trade returns relative to standard deviation.
+* `SortinoHyperOptLoss` - Optimizes Sortino Ratio calculated on trade returns relative to **downside** standard deviation.
 * `SortinoHyperOptLossDaily` - optimizes Sortino Ratio calculated on **daily** trade returns relative to **downside** standard deviation.
 * `MaxDrawDownHyperOptLoss` - Optimizes Maximum absolute drawdown.
 * `MaxDrawDownRelativeHyperOptLoss` - Optimizes both maximum absolute drawdown while also adjusting for maximum relative drawdown.
 * `CalmarHyperOptLoss` - Optimizes Calmar Ratio calculated on trade returns relative to max drawdown.
 * `ProfitDrawDownHyperOptLoss` - Optimizes by max Profit & min Drawdown objective. `DRAWDOWN_MULT` variable within the hyperoptloss file can be adjusted to be stricter or more flexible on drawdown purposes.
 * `MultiMetricHyperOptLoss` - Optimizes by several key metrics to achieve balanced performance. The primary focus is on maximizing Profit and minimizing Drawdown, while also considering additional metrics such as Profit Factor, Expectancy Ratio and Winrate. Moreover, it applies a penalty for epochs with a low number of trades, encouraging strategies with adequate trade frequency.
 
 Creation of a custom loss function is covered in the [Advanced Hyperopt](advanced-hyperopt.md) part of the documentation.
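To make the idea of a loss function concrete, here is a rough sketch of a custom loss class following the `IHyperOptLoss` pattern referenced above. The import path and signature follow the advanced-hyperopt documentation as I understand it, and the metric itself (negative total profit) is purely illustrative:

```python
from datetime import datetime

from pandas import DataFrame

from freqtrade.optimize.hyperopt import IHyperOptLoss


class NegativeProfitHyperOptLoss(IHyperOptLoss):
    """
    Illustrative example only: hyperopt minimises the returned value,
    so returning the negative total profit favours more profitable epochs.
    """

    @staticmethod
    def hyperopt_loss_function(
        results: DataFrame,
        trade_count: int,
        min_date: datetime,
        max_date: datetime,
        *args,
        **kwargs,
    ) -> float:
        # `results` is the backtest trade DataFrame; `profit_abs` holds the
        # absolute profit of each closed trade.
        return -results["profit_abs"].sum()
```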
@@ -1,24 +1,16 @@
 ## Protections
 
-!!! Warning "Beta feature"
-    This feature is still in it's testing phase. Should you notice something you think is wrong please let us know via Discord or via Github Issue.
-
 Protections will protect your strategy from unexpected events and market conditions by temporarily stop trading for either one pair, or for all pairs.
 All protection end times are rounded up to the next candle to avoid sudden, unexpected intra-candle buys.
 
-!!! Note
+!!! Tip "Usage tips"
     Not all Protections will work for all strategies, and parameters will need to be tuned for your strategy to improve performance.
 
-!!! Tip
     Each Protection can be configured multiple times with different parameters, to allow different levels of protection (short-term / long-term).
 
 !!! Note "Backtesting"
     Protections are supported by backtesting and hyperopt, but must be explicitly enabled by using the `--enable-protections` flag.
 
-!!! Warning "Setting protections from the configuration"
-    Setting protections from the configuration via `"protections": [],` key should be considered deprecated and will be removed in a future version.
-    It is also no longer guaranteed that your protections apply to the strategy in cases where the strategy defines [protections as property](hyperopt.md#optimizing-protections).
-
 ### Available Protections
 
 * [`StoplossGuard`](#stoploss-guard) Stop trading if a certain amount of stoploss occurred within a certain time window.
@@ -85,7 +85,7 @@ To run this bot we recommend you a linux cloud instance with a minimum of:
 
 Alternatively
 
-- Python 3.9+
+- Python 3.10+
 - pip (pip3)
 - git
 - TA-Lib
@@ -24,7 +24,7 @@ The easiest way to install and run Freqtrade is to clone the bot Github reposito
 The `stable` branch contains the code of the last release (done usually once per month on an approximately one week old snapshot of the `develop` branch to prevent packaging bugs, so potentially it's more stable).
 
 !!! Note
-    Python3.9 or higher and the corresponding `pip` are assumed to be available. The install-script will warn you and stop if that's not the case. `git` is also needed to clone the Freqtrade repository.
+    Python3.10 or higher and the corresponding `pip` are assumed to be available. The install-script will warn you and stop if that's not the case. `git` is also needed to clone the Freqtrade repository.
     Also, python headers (`python<yourversion>-dev` / `python<yourversion>-devel`) must be available for the installation to complete successfully.
 
 !!! Warning "Up-to-date clock"
@@ -42,7 +42,7 @@ These requirements apply to both [Script Installation](#script-installation) and
 
 ### Install guide
 
-* [Python >= 3.9](http://docs.python-guide.org/en/latest/starting/installation/)
+* [Python >= 3.10](http://docs.python-guide.org/en/latest/starting/installation/)
 * [pip](https://pip.pypa.io/en/stable/installing/)
 * [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
 * [virtualenv](https://virtualenv.pypa.io/en/stable/installation.html) (Recommended)
@@ -54,7 +54,7 @@ We've included/collected install instructions for Ubuntu, MacOS, and Windows. Th
 OS Specific steps are listed first, the common section below is necessary for all systems.
 
 !!! Note
-    Python3.9 or higher and the corresponding pip are assumed to be available.
+    Python3.10 or higher and the corresponding pip are assumed to be available.
 
 === "Debian/Ubuntu"
     #### Install necessary dependencies
@@ -69,7 +69,7 @@ OS Specific steps are listed first, the common section below is necessary for al
 
 === "RaspberryPi/Raspbian"
     The following assumes the latest [Raspbian Buster lite image](https://www.raspberrypi.org/downloads/raspbian/).
-    This image comes with python3.9 preinstalled, making it easy to get freqtrade up and running.
+    This image comes with python3.11 preinstalled, making it easy to get freqtrade up and running.
 
     Tested using a Raspberry Pi 3 with the Raspbian Buster lite image, all updates applied.
@@ -169,7 +169,7 @@ You can as well update, configure and reset the codebase of your bot with `./scr
 ** --install **
 
 With this option, the script will install the bot and most dependencies:
-You will need to have git and python3.9+ installed beforehand for this to work.
+You will need to have git and python3.10+ installed beforehand for this to work.
 
 * Mandatory software as: `ta-lib`
 * Setup your virtualenv under `.venv/`
@@ -1,7 +1,7 @@
 markdown==3.7
 mkdocs==1.6.1
-mkdocs-material==9.5.36
+mkdocs-material==9.5.42
 mdx_truly_sane_lists==1.3
-pymdown-extensions==10.10.1
+pymdown-extensions==10.11.2
 jinja2==3.1.4
 mike==2.1.3
196 docs/strategy-101.md (Normal file)
@@ -0,0 +1,196 @@
# Freqtrade Strategies 101: A Quick Start for Strategy Development

For the purposes of this quick start, we are assuming you are familiar with the basics of trading, and have read the
[Freqtrade basics](bot-basics.md) page.

## Required Knowledge

A strategy in Freqtrade is a Python class that defines the logic for buying and selling cryptocurrency `assets`.

Assets are defined as `pairs`, which represent the `coin` and the `stake`. The coin is the asset you are trading using another currency as the stake.

Data is supplied by the exchange in the form of `candles`, which are made up of six values: `date`, `open`, `high`, `low`, `close` and `volume`.

`Technical analysis` functions analyse the candle data using various computational and statistical formulae, and produce secondary values called `indicators`.

Indicators are analysed on the asset pair candles to generate `signals`.

Signals are turned into `orders` on a cryptocurrency `exchange`, i.e. `trades`.

We use the terms `entry` and `exit` instead of `buying` and `selling` because Freqtrade supports both `long` and `short` trades.

- **long**: You buy the coin based on a stake, e.g. buying the coin BTC using USDT as your stake, and you make a profit by selling the coin at a higher rate than you paid for. In long trades, profits are made by the coin value going up versus the stake.
- **short**: You borrow capital from the exchange in the form of the coin, and you pay back the stake value of the coin later. In short trades profits are made by the coin value going down versus the stake (you pay the loan off at a lower rate).

Whilst Freqtrade supports spot and futures markets for certain exchanges, for simplicity we will focus on spot (long) trades only.

## Structure of a Basic Strategy

### Main dataframe

Freqtrade strategies use a tabular data structure with rows and columns known as a `dataframe` to generate signals to enter and exit trades.

Each pair in your configured pairlist has its own dataframe. Dataframes are indexed by the `date` column, e.g. `2024-06-31 12:00`.

The next 5 columns represent the `open`, `high`, `low`, `close` and `volume` (OHLCV) data.

### Populate indicator values

The `populate_indicators` function adds columns to the dataframe that represent the technical analysis indicator values.

Examples of common indicators include Relative Strength Index, Bollinger Bands, Money Flow Index, Moving Average, and Average True Range.

Columns are added to the dataframe by calling technical analysis functions, e.g. ta-lib's RSI function `ta.RSI()`, and assigning them to a column name, e.g. `rsi`

```python
dataframe['rsi'] = ta.RSI(dataframe)
```

??? Hint "Technical Analysis libraries"
    Different libraries work in different ways to generate indicator values. Please check the documentation of each library to understand
    how to integrate it into your strategy. You can also check the [Freqtrade example strategies](https://github.com/freqtrade/freqtrade-strategies) to give you ideas.

### Populate entry signals

The `populate_entry_trend` function defines conditions for an entry signal.

The dataframe column `enter_long` is added to the dataframe, and when a value of `1` is in this column, Freqtrade sees an entry signal.

??? Hint "Shorting"
    To enter short trades, use the `enter_short` column.

### Populate exit signals

The `populate_exit_trend` function defines conditions for an exit signal.

The dataframe column `exit_long` is added to the dataframe, and when a value of `1` is in this column, Freqtrade sees an exit signal.

??? Hint "Shorting"
    To exit short trades, use the `exit_short` column.

## A simple strategy

Here is a minimal example of a Freqtrade strategy:

```python
from freqtrade.strategy import IStrategy
from pandas import DataFrame
import talib.abstract as ta


class MyStrategy(IStrategy):

    # set the initial stoploss to -10%
    stoploss = -0.10

    # exit profitable positions at any time when the profit is greater than 1%
    minimal_roi = {"0": 0.01}

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # generate values for technical analysis indicators
        dataframe['rsi'] = ta.RSI(dataframe, timeperiod=14)

        return dataframe

    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # generate entry signals based on indicator values
        dataframe.loc[
            (dataframe['rsi'] < 30),
            'enter_long'] = 1

        return dataframe

    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # generate exit signals based on indicator values
        dataframe.loc[
            (dataframe['rsi'] > 70),
            'exit_long'] = 1

        return dataframe
```

## Making trades

When a signal is found (a `1` in an entry or exit column), Freqtrade will attempt to make an order, i.e. a `trade` or `position`.

Each new trade position takes up a `slot`. Slots represent the maximum number of concurrent new trades that can be opened.

The number of slots is defined by the `max_open_trades` [configuration](configuration.md) option.

However, there can be a range of scenarios where generating a signal does not always create a trade order. These include:

- not enough remaining stake to buy an asset, or funds in your wallet to sell an asset (including any fees)
- not enough remaining free slots for a new trade to be opened (the number of positions you have open equals the `max_open_trades` option)
- there is already an open trade for a pair (Freqtrade cannot stack positions - however it can [adjust existing positions](strategy-callbacks.md#adjust-trade-position))
- if an entry and exit signal is present on the same candle, they are considered as [colliding](strategy-customization.md#colliding-signals), and no order will be raised
- the strategy actively rejects the trade order due to logic you specify by using one of the relevant [entry](strategy-callbacks.md#trade-entry-buy-order-confirmation) or [exit](strategy-callbacks.md#trade-exit-sell-order-confirmation) callbacks

Read through the [strategy customization](strategy-customization.md) documentation for more details.

## Backtesting and forward testing

Strategy development can be a long and frustrating process, as turning our human "gut instincts" into a working computer-controlled
("algo") strategy is not always straightforward.

Therefore a strategy should be tested to verify that it is going to work as intended.

Freqtrade has two testing modes:

- **backtesting**: using historical data that you [download from an exchange](data-download.md), backtesting is a quick way to assess performance of a strategy. However, it can be very easy to distort results so a strategy will look a lot more profitable than it really is. Check the [backtesting documentation](backtesting.md) for more information.
- **dry run**: often referred to as _forward testing_, dry runs use real time data from the exchange. Any signals that would result in trades are tracked as normal by Freqtrade, but no trades are opened on the exchange itself. Forward testing runs in real time, so whilst it takes longer to get results it is a much more reliable indicator of **potential** performance than backtesting.

Dry runs are enabled by setting `dry_run` to true in your [configuration](configuration.md#using-dry-run-mode).

!!! Warning "Backtests can be very inaccurate"
    There are many reasons why backtest results may not match reality. Please check the [backtesting assumptions](backtesting.md#assumptions-made-by-backtesting) and [common strategy mistakes](strategy-customization.md#common-mistakes-when-developing-strategies) documentation.
    Some websites that list and rank Freqtrade strategies show impressive backtest results. Do not assume these results are achievable or realistic.

??? Hint "Useful commands"
    Freqtrade includes two useful commands to check for basic flaws in strategies: [lookahead-analysis](lookahead-analysis.md) and [recursive-analysis](recursive-analysis.md).

### Assessing backtesting and dry run results

Always dry run your strategy after backtesting it to see if backtesting and dry run results are sufficiently similar.

If there is any significant difference, verify that your entry and exit signals are consistent and appear on the same candles between the two modes. However, there will always be differences between dry runs and backtests:

- Backtesting assumes all orders fill. In dry runs this might not be the case if using limit orders or there is no volume on the exchange.
- Following an entry signal on candle close, backtesting assumes trades enter at the next candle's open price (unless you have custom pricing callbacks in your strategy). In dry runs, there is often a delay between signals and trades opening.
  This is because when new candles come in on your main timeframe, e.g. every 5 minutes, it takes time for Freqtrade to analyse all pair dataframes. Therefore, Freqtrade will attempt to open trades a few seconds (ideally as small a delay as possible)
  after candle open.
- As entry rates in dry runs might not match backtesting, this means profit calculations will also differ. Therefore, it is normal if ROI, stoploss, trailing stoploss and callback exits are not identical.
- The more computational "lag" you have between new candles coming in, your signals being raised and trades being opened, the greater the price unpredictability. Make sure your computer is powerful enough to process the data for the number
  of pairs you have in your pairlist within a reasonable time. Freqtrade will warn you in the logs if there are significant data processing delays.

## Controlling or monitoring a running bot

Once your bot is running in dry or live mode, Freqtrade has five mechanisms to control or monitor a running bot:

- **[FreqUI](freq-ui.md)**: The easiest to get started with, FreqUI is a web interface to see and control current activity of your bot.
- **[Telegram](telegram-usage.md)**: On mobile devices, Telegram integration is available to get alerts about your bot activity and to control certain aspects.
- **[FTUI](https://github.com/freqtrade/ftui)**: FTUI is a terminal (command line) interface to Freqtrade, and allows monitoring of a running bot only.
- **[REST API](rest-api.md)**: The REST API allows programmers to develop their own tools to interact with a Freqtrade bot.
- **[Webhooks](webhook-config.md)**: Freqtrade can send information to other services, e.g. discord, by webhooks.

### Logs

Freqtrade generates extensive debugging logs to help you understand what's happening. Please familiarise yourself with the information and error messages you might see in your bot logs.

## Final Thoughts

Algo trading is difficult, and most public strategies are not good performers due to the time and effort to make a strategy work profitably in multiple scenarios.

Therefore, taking public strategies and using backtests as a way to assess performance is often problematic. However, Freqtrade provides useful ways to help you make decisions and do your due diligence.

There are many different ways to achieve profitability, and there is no one single tip, trick or config option that will fix a poorly performing strategy.

Freqtrade is an open source platform with a large and helpful community - make sure to visit our [discord channel](https://discord.gg/p7nuUNVfP7) to discuss your strategy with others!

As always, only invest what you are willing to lose.

## Conclusion

Developing a strategy in Freqtrade involves defining entry and exit signals based on technical indicators. By following the structure and methods outlined above, you can create and test your own trading strategies.

Common questions and answers are available on our [FAQ](faq.md).

To continue, refer to the more in-depth [Freqtrade strategy customization documentation](strategy-customization.md).
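The Shorting hints in the page above mention the `enter_short` / `exit_short` columns without showing them in context. A hedged sketch of a short-side variant of the minimal strategy (assuming a futures-enabled exchange and market configuration; the class name and thresholds are illustrative only) could look like this:

```python
from freqtrade.strategy import IStrategy
from pandas import DataFrame
import talib.abstract as ta


class MyShortStrategy(IStrategy):

    # shorting must be explicitly enabled and supported by the exchange/market
    can_short = True

    stoploss = -0.10
    minimal_roi = {"0": 0.01}

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe['rsi'] = ta.RSI(dataframe, timeperiod=14)
        return dataframe

    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # enter a short position when RSI is overbought
        dataframe.loc[(dataframe['rsi'] > 70), 'enter_short'] = 1
        return dataframe

    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # exit the short position once RSI has fallen back
        dataframe.loc[(dataframe['rsi'] < 30), 'exit_short'] = 1
        return dataframe
```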
@@ -975,7 +975,7 @@ class AwesomeStrategy(IStrategy):
             pair == "BTC/USDT"
             and entry_tag == "long_sma200"
             and side == "long"
-            and (current_time - timedelta(minutes=10)) > trade.open_date_utc
+            and (current_time - timedelta(minutes=10)) <= trade.open_date_utc
         ):
             # just cancel the order if it has been filled more than half of the amount
             if order.filled > order.remaining:
@@ -2,52 +2,93 @@
 
 This page explains how to customize your strategies, add new indicators and set up trading rules.
 
-Please familiarize yourself with [Freqtrade basics](bot-basics.md) first, which provides overall info on how the bot operates.
+If you haven't already, please familiarize yourself with:
+
+- the [Freqtrade strategy 101](freqtrade-101.md), which provides a quick start to strategy development
+- the [Freqtrade bot basics](bot-basics.md), which provides overall info on how the bot operates
 
 ## Develop your own strategy
 
 The bot includes a default strategy file.
 
 Also, several other strategies are available in the [strategy repository](https://github.com/freqtrade/freqtrade-strategies).
 
 You will however most likely have your own idea for a strategy.
-This document intends to help you convert your strategy idea into your own strategy.
-
-To get started, use `freqtrade new-strategy --strategy AwesomeStrategy` (you can obviously use your own naming for your strategy).
-This will create a new strategy file from a template, which will be located under `user_data/strategies/AwesomeStrategy.py`.
+This document intends to help you convert your ideas into a working strategy.
+
+### Generating a strategy template
+
+To get started, you can use the command:
+
+```bash
+freqtrade new-strategy --strategy AwesomeStrategy
+```
+
+This will create a new strategy called `AwesomeStrategy` from a template, which will be located using the filename `user_data/strategies/AwesomeStrategy.py`.
 
 !!! Note
-    This is just a template file, which will most likely not be profitable out of the box.
+    There is a difference between the *name* of the strategy and the filename. In most commands, Freqtrade uses the *name* of the strategy, *not the filename*.
+
+!!! Note
+    The `new-strategy` command generates starting examples which will not be profitable out of the box.
 
 ??? Hint "Different template levels"
-    `freqtrade new-strategy` has an additional parameter, `--template`, which controls the amount of pre-build information you get in the created strategy. Use `--template minimal` to get an empty strategy without any indicator examples, or `--template advanced` to get a template with most callbacks defined.
+    `freqtrade new-strategy` has an additional parameter, `--template`, which controls the amount of pre-build information you get in the created strategy. Use `--template minimal` to get an empty strategy without any indicator examples, or `--template advanced` to get a template with more complicated features defined.
 
 ### Anatomy of a strategy
 
-A strategy file contains all the information needed to build a good strategy:
+A strategy file contains all the information needed to build the strategy logic:
 
+- Candle data in OHLCV format
 - Indicators
-- Entry strategy rules
-- Exit strategy rules
-- Minimal ROI recommended
-- Stoploss strongly recommended
+- Entry logic
+    - Signals
+- Exit logic
+    - Signals
+- Minimal ROI
+    - Callbacks ("custom functions")
+- Stoploss
+    - Fixed/absolute
+    - Trailing
+    - Callbacks ("custom functions")
+- Pricing [optional]
+- Position adjustment [optional]
 
-The bot also include a sample strategy called `SampleStrategy` you can update: `user_data/strategies/sample_strategy.py`.
-You can test it with the parameter: `--strategy SampleStrategy`
+The bot includes a sample strategy called `SampleStrategy` that you can use as a basis: `user_data/strategies/sample_strategy.py`.
+You can test it with the parameter: `--strategy SampleStrategy`. Remember that you use the strategy class name, not the filename.
 
 Additionally, there is an attribute called `INTERFACE_VERSION`, which defines the version of the strategy interface the bot should use.
 The current version is 3 - which is also the default when it's not set explicitly in the strategy.
 
-Future versions will require this to be set.
+You may see older strategies set to interface version 2, and these will need to be updated to v3 terminology as future versions will require this to be set.
+
+Starting the bot in dry or live mode is accomplished using the `trade` command:
+
+```bash
+freqtrade trade --strategy AwesomeStrategy
+```
+
+### Bot modes
+
+Freqtrade strategies can be processed by the Freqtrade bot in 5 main modes:
+
+- backtesting
+- hyperopting
+- dry ("forward testing")
+- live
+- FreqAI (not covered here)
+
+Check the [configuration documentation](configuration.md) about how to set the bot to dry or live mode.
+
+**Always use dry mode when testing as this gives you an idea of how your strategy will work in reality without risking capital.**
 
 ## Diving in deeper
 **For the following section we will use the [user_data/strategies/sample_strategy.py](https://github.com/freqtrade/freqtrade/blob/develop/freqtrade/templates/sample_strategy.py)
 file as reference.**
 
 !!! Note "Strategies and Backtesting"
-    To avoid problems and unexpected differences between Backtesting and dry/live modes, please be aware
+    To avoid problems and unexpected differences between backtesting and dry/live modes, please be aware
     that during backtesting the full time range is passed to the `populate_*()` methods at once.
     It is therefore best to use vectorized operations (across the whole dataframe, not loops) and
     avoid index referencing (`df.iloc[-1]`), but instead use `df.shift()` to get to the previous candle.
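As a small illustration of the `df.shift()` advice in the note above (assuming an `rsi` column has already been created in `populate_indicators()`), a vectorised "RSI crosses above 30" entry condition could be written as follows; the thresholds are illustrative only:

```python
# Vectorised "RSI crosses above 30" check: each row is compared with the
# previous candle via shift(1) instead of iterating over rows.
dataframe.loc[
    (
        (dataframe['rsi'] > 30) &
        (dataframe['rsi'].shift(1) <= 30) &
        (dataframe['volume'] > 0)
    ),
    'enter_long'] = 1
```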
@@ -57,14 +98,22 @@ file as reference.**
     needs to take care to avoid having the strategy utilize data from the future.
     Some common patterns for this are listed in the [Common Mistakes](#common-mistakes-when-developing-strategies) section of this document.
 
+??? Hint "Lookahead and recursive analysis"
+    Freqtrade includes two helpful commands to help assess common lookahead (using future data) and
+    recursive bias (variance in indicator values) issues. Before running a strategy in dry or live mode,
+    you should always use these commands first. Please check the relevant documentation for
+    [lookahead](lookahead-analysis.md) and [recursive](recursive-analysis.md) analysis.
+
 ### Dataframe
 
 Freqtrade uses [pandas](https://pandas.pydata.org/) to store/provide the candlestick (OHLCV) data.
-Pandas is a great library developed for processing large amounts of data.
+Pandas is a great library developed for processing large amounts of data in tabular format.
 
-Each row in a dataframe corresponds to one candle on a chart, with the latest candle always being the last in the dataframe (sorted by date).
+Each row in a dataframe corresponds to one candle on a chart, with the latest complete candle always being the last in the dataframe (sorted by date).
 
-``` output
+If we were to look at the first few rows of the main dataframe using the pandas `head()` function, we would see:
+
+```output
 > dataframe.head()
                        date      open      high       low     close     volume
 0 2021-11-09 23:25:00+00:00  67279.67  67321.84  67255.01  67300.97   44.62253
@@ -74,20 +123,16 @@ Each row in a dataframe corresponds to one candle on a chart, with the latest ca
 4 2021-11-09 23:45:00+00:00  67160.48  67160.48  66901.26  66943.37  111.39292
 ```
 
-Pandas provides fast ways to calculate metrics. To benefit from this speed, it's advised to not use loops, but use vectorized methods instead.
-
-Vectorized operations perform calculations across the whole range of data and are therefore, compared to looping through each row, a lot faster when calculating indicators.
-
-As a dataframe is a table, simple python comparisons like the following will not work
+A dataframe is a table where columns are not single values, but a series of data values. As such, simple python comparisons like the following will not work:
 
 ``` python
 if dataframe['rsi'] > 30:
     dataframe['enter_long'] = 1
 ```
 
-The above section will fail with `The truth value of a Series is ambiguous. [...]`.
+The above section will fail with `The truth value of a Series is ambiguous [...]`.
 
-This must instead be written in a pandas-compatible way, so the operation is performed across the whole dataframe.
+This must instead be written in a pandas-compatible way, so the operation is performed across the whole dataframe, i.e. `vectorisation`.
 
 ``` python
 dataframe.loc[
@@ -97,13 +142,38 @@ This must instead be written in a pandas-compatible way, so the operation is per
 
 With this section, you have a new column in your dataframe, which has `1` assigned whenever RSI is above 30.
 
+Freqtrade uses this new column as an entry signal, where it is assumed that a trade will subsequently open on the next open candle.
+
+Pandas provides fast ways to calculate metrics, i.e. "vectorisation". To benefit from this speed, it is advised to not use loops, but use vectorized methods instead.
+
+Vectorized operations perform calculations across the whole range of data and are therefore, compared to looping through each row, a lot faster when calculating indicators.
+
+??? Hint "Signals vs Trades"
+    - Signals are generated from indicators at candle close, and are intentions to enter a trade.
+    - Trades are orders that are executed (on the exchange in live mode) where a trade will then open as close to next candle open as possible.
+
+!!! Warning "Trade order assumptions"
+    In backtesting, signals are generated on candle close. Trades are then initiated immediately on next candle open.
+
+    In dry and live, this may be delayed due to all pair dataframes needing to be analysed first, then trade processing
+    for each of those pairs happens. This means that in dry/live you need to be mindful of having as low a computation
+    delay as possible, usually by running a low number of pairs and having a CPU with a good clock speed.
+
+#### Why can't I see "real time" candle data?
+
+Freqtrade does not store incomplete/unfinished candles in the dataframe.
+
+The use of incomplete data for making strategy decisions is called "repainting" and you might see other platforms allow this.
+
+Freqtrade does not. Only complete/finished candle data is available in the dataframe.
+
 ### Customize Indicators
 
-Buy and sell signals need indicators. You can add more indicators by extending the list contained in the method `populate_indicators()` from your strategy file.
+Entry and exit signals need indicators. You can add more indicators by extending the list contained in the method `populate_indicators()` from your strategy file.
 
 You should only add the indicators used in either `populate_entry_trend()`, `populate_exit_trend()`, or to populate another indicator, otherwise performance may suffer.
 
-It's important to always return the dataframe without removing/modifying the columns `"open", "high", "low", "close", "volume"`, otherwise these fields would contain something unexpected.
+It's important to always return the dataframe from these three functions without removing/modifying the columns `"open", "high", "low", "close", "volume"`, otherwise these fields would contain something unexpected.
 
 Sample:
@ -124,7 +194,7 @@ def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame
|
|||
stoch = ta.STOCHF(dataframe)
|
||||
dataframe['fastd'] = stoch['fastd']
|
||||
dataframe['fastk'] = stoch['fastk']
|
||||
dataframe['blower'] = ta.BBANDS(dataframe, nbdevup=2, nbdevdn=2)['lowerband']
|
||||
dataframe['bb_lower'] = ta.BBANDS(dataframe, nbdevup=2, nbdevdn=2)['lowerband']
|
||||
dataframe['sma'] = ta.SMA(dataframe, timeperiod=40)
|
||||
dataframe['tema'] = ta.TEMA(dataframe, timeperiod=9)
|
||||
dataframe['mfi'] = ta.MFI(dataframe)
|
||||
|
@ -145,6 +215,8 @@ def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame
|
|||
dataframe['plus_di'] = ta.PLUS_DI(dataframe)
|
||||
dataframe['minus_dm'] = ta.MINUS_DM(dataframe)
|
||||
dataframe['minus_di'] = ta.MINUS_DI(dataframe)
|
||||
|
||||
# remember to always return the dataframe
|
||||
return dataframe
|
||||
```
|
||||
|
||||
|
@ -164,11 +236,13 @@ Additional technical libraries can be installed as necessary, or custom indicato
|
|||
|
||||
### Strategy startup period
|
||||
|
||||
Most indicators have an instable startup period, in which they are either not available (NaN), or the calculation is incorrect. This can lead to inconsistencies, since Freqtrade does not know how long this instable period should be.
|
||||
Some indicators have an unstable startup period in which there isn't enough candle data to calculate any values (NaN), or the calculation is incorrect. This can lead to inconsistencies, since Freqtrade does not know how long this unstable period is and uses whatever indicator values are in the dataframe.
|
||||
|
||||
To account for this, the strategy can be assigned the `startup_candle_count` attribute.
|
||||
|
||||
This should be set to the maximum number of candles that the strategy requires to calculate stable indicators. In the case where a user includes higher timeframes with informative pairs, the `startup_candle_count` does not necessarily change. The value is the maximum period (in candles) that any of the informatives timeframes need to compute stable indicators.
|
||||
|
||||
You can use [recursive-analysis](recursive-analysis.md) to check and find the correct `startup_candle_count` to be used.
|
||||
You can use [recursive-analysis](recursive-analysis.md) to check and find the correct `startup_candle_count` to be used. When recursive analysis shows a variance of 0%, then you can be sure that you have enough startup candle data.
|
||||
|
||||
In this example strategy, this should be set to 400 (`startup_candle_count = 400`), since the minimum needed history for ema100 calculation to make sure the value is correct is 400 candles.
|
||||
|
||||
|
@ -195,19 +269,22 @@ Let's try to backtest 1 month (January 2019) of 5m candles using an example stra
|
|||
freqtrade backtesting --timerange 20190101-20190201 --timeframe 5m
|
||||
```
|
||||
|
||||
Assuming `startup_candle_count` is set to 400, backtesting knows it needs 400 candles to generate valid buy signals. It will load data from `20190101 - (400 * 5m)` - which is ~2018-12-30 11:40:00.
|
||||
If this data is available, indicators will be calculated with this extended timerange. The instable startup period (up to 2019-01-01 00:00:00) will then be removed before starting backtesting.
|
||||
Assuming `startup_candle_count` is set to 400, backtesting knows it needs 400 candles to generate valid entry signals. It will load data from `20190101 - (400 * 5m)` - which is ~2018-12-30 11:40:00.
|
||||
|
||||
!!! Note
|
||||
If data for the startup period is not available, then the timerange will be adjusted to account for this startup period - so Backtesting would start at 2019-01-02 09:20:00.
|
||||
If this data is available, indicators will be calculated with this extended timerange. The unstable startup period (up to 2019-01-01 00:00:00) will then be removed before backtesting is carried out.
|
||||
|
||||
!!! Note "Unavailable startup candle data"
|
||||
If data for the startup period is not available, then the timerange will be adjusted to account for this startup period. In our example, backtesting would then start from 2019-01-02 09:20:00.
|
||||
|
||||
### Entry signal rules
|
||||
|
||||
Edit the method `populate_entry_trend()` in your strategy file to update your entry strategy.
|
||||
|
||||
It's important to always return the dataframe without removing/modifying the columns `"open", "high", "low", "close", "volume"`, otherwise these fields would contain something unexpected.
|
||||
It's important to always return the dataframe without removing/modifying the columns `"open", "high", "low", "close", "volume"`, otherwise these fields would contain something unexpected. The strategy may then produce invalid values, or cease to work entirely.
|
||||
|
||||
This method will also define a new column, `"enter_long"` (`"enter_short"` for shorts), which needs to contain 1 for entries, and 0 for "no action". `enter_long` is a mandatory column that must be set even if the strategy is shorting only.
|
||||
This method will also define a new column, `"enter_long"` (`"enter_short"` for shorts), which needs to contain `1` for entries, and `0` for "no action". `enter_long` is a mandatory column that must be set even if the strategy is shorting only.
|
||||
|
||||
You can name your entry signals by using the `"enter_tag"` column, which can help debug and assess your strategy later.
|
||||
|
||||
Sample from `user_data/strategies/sample_strategy.py`:
|
||||
|
||||
|
@ -232,12 +309,15 @@ def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFram
|
|||
```
|
||||
|
||||
??? Note "Enter short trades"
|
||||
Short-entries can be created by setting `enter_short` (corresponds to `enter_long` for long trades).
|
||||
Short entries can be created by setting `enter_short` (corresponds to `enter_long` for long trades).
|
||||
The `enter_tag` column remains identical.
|
||||
Short-trades need to be supported by your exchange and market configuration!
|
||||
Please make sure to set [`can_short`]() appropriately on your strategy if you intend to short.
|
||||
Shorting needs to be supported by your exchange and market configuration!
|
||||
Also, make sure you set [`can_short`](#can-short) appropriately on your strategy if you intend to short.
|
||||
|
||||
```python
|
||||
# allow both long and short trades
|
||||
can_short = True
|
||||
|
||||
def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
||||
dataframe.loc[
|
||||
(
|
||||
|
@ -261,17 +341,21 @@ def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFram
|
|||
```
|
||||
|
||||
!!! Note
|
||||
Buying requires sellers to buy from - therefore volume needs to be > 0 (`dataframe['volume'] > 0`) to make sure that the bot does not buy/sell in no-activity periods.
|
||||
Buying requires sellers to buy from. Therefore volume needs to be > 0 (`dataframe['volume'] > 0`) to make sure that the bot does not buy/sell in no-activity periods.
|
||||
|
||||
### Exit signal rules
|
||||
|
||||
Edit the method `populate_exit_trend()` into your strategy file to update your exit strategy.
|
||||
|
||||
The exit-signal can be suppressed by setting `use_exit_signal` to false in the configuration or strategy.
|
||||
|
||||
`use_exit_signal` will not influence [signal collision rules](#colliding-signals) - which will still apply and can prevent entries.
|
||||
|
||||
It's important to always return the dataframe without removing/modifying the columns `"open", "high", "low", "close", "volume"`, otherwise these fields would contain something unexpected.
|
||||
It's important to always return the dataframe without removing/modifying the columns `"open", "high", "low", "close", "volume"`, otherwise these fields would contain something unexpected. The strategy may then produce invalid values, or cease to work entirely.
|
||||
|
||||
This method will also define a new column, `"exit_long"` (`"exit_short"` for shorts), which needs to contain 1 for exits, and 0 for "no action".
|
||||
This method will also define a new column, `"exit_long"` (`"exit_short"` for shorts), which needs to contain `1` for exits, and `0` for "no action".
|
||||
|
||||
You can name your exit signals by using the `"exit_tag"` column, which can help debug and assess your strategy later.
|
||||
|
||||
Sample from `user_data/strategies/sample_strategy.py`:
|
||||
|
||||
|
@ -295,11 +379,15 @@ def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame
|
|||
```
|
||||
|
||||
??? Note "Exit short trades"
|
||||
Short-exits can be created by setting `exit_short` (corresponds to `exit_long`).
|
||||
Short exits can be created by setting `exit_short` (corresponds to `exit_long`).
|
||||
The `exit_tag` column remains identical.
|
||||
Short-trades need to be supported by your exchange and market configuration!
|
||||
Shorting needs to be supported by your exchange and market configuration!
|
||||
Also, make sure you set [`can_short`](#can-short) appropriately on your strategy if you intend to short.
|
||||
|
||||
```python
|
||||
# allow both long and short trades
|
||||
can_short = True
|
||||
|
||||
def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
|
||||
dataframe.loc[
|
||||
(
|
||||
|
@ -322,9 +410,9 @@ def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame
|
|||
|
||||
### Minimal ROI
|
||||
|
||||
This dict defines the minimal Return On Investment (ROI) a trade should reach before exiting, independent from the exit signal.
|
||||
The `minimal_roi` strategy variable defines the minimal Return On Investment (ROI) a trade should reach before exiting, independent from the exit signal.
|
||||
|
||||
It is of the following format, with the dict key (left side of the colon) being the minutes passed since the trade opened, and the value (right side of the colon) being the percentage.
|
||||
It is of the following format, i.e. a python `dict`, with the dict key (left side of the colon) being the minutes passed since the trade opened, and the value (right side of the colon) being the percentage.
|
||||
|
||||
```python
|
||||
minimal_roi = {
|
||||
|
@ -344,14 +432,19 @@ The above configuration would therefore mean:
|
|||
|
||||
The calculation does include fees.
|
||||
|
||||
#### Disabling minimal ROI
|
||||
|
||||
To disable ROI completely, set it to an empty dictionary:
|
||||
|
||||
```python
|
||||
minimal_roi = {}
|
||||
```
|
||||
|
||||
#### Using calculations in minimal ROI
|
||||
|
||||
To use times based on candle duration (timeframe), the following snippet can be handy.
|
||||
This will allow you to change the timeframe for the strategy, and ROI times will still be set as candles (e.g. after 3 candles ...)
|
||||
|
||||
This will allow you to change the timeframe for the strategy, but the minimal ROI times will still be set as candles, e.g. after 3 candles.
|
||||
|
||||
``` python
|
||||
from freqtrade.exchange import timeframe_to_minutes
|
||||
|
@ -368,9 +461,9 @@ class AwesomeStrategy(IStrategy):
|
|||
```
|
||||
|
||||
??? info "Orders that don't fill immediately"
|
||||
`minimal_roi` will take the `trade.open_date` as reference, which is the time the trade was initialized / the first order for this trade was placed.
|
||||
This will also hold true for limit orders that don't fill immediately (usually in combination with "off-spot" prices through `custom_entry_price()`), as well as for cases where the initial order is replaced through `adjust_entry_price()`.
|
||||
The time used will still be from the initial `trade.open_date` (when the initial order was first placed), not from the newly placed order date.
|
||||
`minimal_roi` will take the `trade.open_date` as reference, which is the time the trade was initialized, i.e. when the first order for this trade was placed.
|
||||
This will also hold true for limit orders that don't fill immediately (usually in combination with "off-spot" prices through `custom_entry_price()`), as well as for cases where the initial order price is replaced through `adjust_entry_price()`.
|
||||
The time used will still be from the initial `trade.open_date` (when the initial order was first placed), not from the newly placed or adjusted order date.
|
||||
|
||||
### Stoploss
|
||||
|
||||
|
@ -386,35 +479,44 @@ For the full documentation on stoploss features, look at the dedicated [stoploss
|
|||
|
||||
### Timeframe
|
||||
|
||||
This is the set of candles the bot should download and use for the analysis.
|
||||
This is the periodicity of candles the bot should use in the strategy.
|
||||
|
||||
Common values are `"1m"`, `"5m"`, `"15m"`, `"1h"`; however, all values supported by your exchange should work.
|
||||
|
||||
Please note that the same entry/exit signals may work well with one timeframe, but not with the others.
|
||||
Please note that the same entry/exit signals may work well with one timeframe, but not with others.
|
||||
|
||||
This setting is accessible within the strategy methods as the `self.timeframe` attribute.
|
||||
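A minimal sketch of setting the timeframe and reading it back (the strategy and indicator names are illustrative):

```python
from pandas import DataFrame
from freqtrade.strategy import IStrategy


class AwesomeStrategy(IStrategy):
    # The bot will download and analyze 5-minute candles for this strategy
    timeframe = "5m"

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # The effective value (including a possible config override) is self.timeframe
        dataframe["sma20"] = dataframe["close"].rolling(20).mean()
        return dataframe
```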
|
||||
### Can short
|
||||
|
||||
To use short signals in futures markets, you will have to let us know to do so by setting `can_short=True`.
|
||||
To use short signals in futures markets, you will have to set `can_short = True`.
|
||||
|
||||
Strategies which enable this will fail to load on spot markets.
|
||||
Disabling of this will have short signals ignored (also in futures markets).
|
||||
|
||||
If you have `1` values in the `enter_short` column to raise short signals, setting `can_short = False` (which is the default) will mean that these short signals are ignored, even if you have specified futures markets in your configuration.
|
||||
|
||||
### Metadata dict
|
||||
|
||||
The metadata-dict (available for `populate_entry_trend`, `populate_exit_trend`, `populate_indicators`) contains additional information.
|
||||
Currently this is `pair`, which can be accessed using `metadata['pair']` - and will return a pair in the format `XRP/BTC`.
|
||||
The `metadata` dict (available for `populate_entry_trend`, `populate_exit_trend`, `populate_indicators`) contains additional information.
|
||||
Currently this is `pair`, which can be accessed using `metadata['pair']`, and will return a pair in the format `XRP/BTC` (or `XRP/BTC:BTC` for futures markets).
|
||||
|
||||
The Metadata-dict should not be modified and does not persist information across multiple calls.
|
||||
Instead, have a look at the [Storing information](strategy-advanced.md#storing-information-persistent) section.
|
||||
The metadata dict should not be modified and does not persist information across multiple functions in your strategy.
|
||||
|
||||
Instead, please check the [Storing information](strategy-advanced.md#storing-information-persistent) section.
|
||||
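A small sketch of read-only use of the dict - the logger setup is an assumption, and the dict itself is never modified:

```python
import logging

logger = logging.getLogger(__name__)


def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    # Purely informational use of the metadata dict
    logger.info(f"Populating indicators for {metadata['pair']} on {self.timeframe}")
    return dataframe
```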
|
||||
--8<-- "includes/strategy-imports.md"
|
||||
|
||||
## Strategy file loading
|
||||
|
||||
By default, freqtrade will attempt to load strategies from all `.py` files within `user_data/strategies`.
|
||||
By default, freqtrade will attempt to load strategies from all `.py` files within the `userdir` (default `user_data/strategies`).
|
||||
|
||||
Assuming your strategy is called `AwesomeStrategy`, stored in the file `user_data/strategies/AwesomeStrategy.py`, then you can start freqtrade with `freqtrade trade --strategy AwesomeStrategy`.
|
||||
Note that we're using the class-name, not the file name.
|
||||
Assuming your strategy is called `AwesomeStrategy`, stored in the file `user_data/strategies/AwesomeStrategy.py`, then you can start freqtrade in dry (or live, depending on your configuration) mode with:
|
||||
|
||||
```bash
|
||||
freqtrade trade --strategy AwesomeStrategy
|
||||
```
|
||||
|
||||
Note that we're using the class name, not the file name.
|
||||
|
||||
You can use `freqtrade list-strategies` to see a list of all strategies Freqtrade is able to load (all strategies in the correct folder).
|
||||
It will also include a "status" field, highlighting potential problems.
|
||||
|
@ -426,9 +528,11 @@ It will also include a "status" field, highlighting potential problems.
|
|||
|
||||
### Get data for non-tradeable pairs
|
||||
|
||||
Data for additional, informative pairs (reference pairs) can be beneficial for some strategies.
|
||||
Data for additional, informative pairs (reference pairs) can be beneficial for some strategies to see data on a wider timeframe.
|
||||
|
||||
OHLCV data for these pairs will be downloaded as part of the regular whitelist refresh process and is available via `DataProvider` just like any other pair (see below).
|
||||
These parts will **not** be traded unless they are also specified in the pair whitelist, or have been selected by Dynamic Whitelisting.
|
||||
|
||||
These pairs will **not** be traded unless they are also specified in the pair whitelist, or have been selected by Dynamic Whitelisting, e.g. `VolumePairlist`.
|
||||
|
||||
The pairs need to be specified as tuples in the format `("pair", "timeframe")`, with pair as the first and timeframe as the second argument.
|
||||
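For example, a sketch of the `informative_pairs()` callback returning such tuples (the pairs chosen here are arbitrary):

```python
def informative_pairs(self):
    # Request daily candles for two reference pairs in addition to the whitelist
    return [
        ("ETH/USDT", "1d"),
        ("BTC/USDT", "1d"),
    ]
```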
|
||||
|
@ -468,10 +572,13 @@ A full sample can be found [in the DataProvider section](#complete-data-provider
|
|||
|
||||
### Informative pairs decorator (`@informative()`)
|
||||
|
||||
In most common case it is possible to easily define informative pairs by using a decorator. All decorated `populate_indicators_*` methods run in isolation,
|
||||
not having access to data from other informative pairs, in the end all informative dataframes are merged and passed to main `populate_indicators()` method.
|
||||
When hyperopting, use of hyperoptable parameter `.value` attribute is not supported. Please use `.range` attribute. See [optimizing an indicator parameter](hyperopt.md#optimizing-an-indicator-parameter)
|
||||
for more information.
|
||||
To easily define informative pairs, use the `@informative` decorator. All decorated `populate_indicators_*` methods run in isolation,
|
||||
and do not have access to data from other informative pairs. However, all informative dataframes for each pair are merged and passed to the main `populate_indicators()` method.
|
||||
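A short sketch of how such a decorated method might look - the indicator and column names are assumptions, and `ta` refers to `talib.abstract`:

```python
import talib.abstract as ta
from pandas import DataFrame
from freqtrade.strategy import IStrategy, informative


class AwesomeStrategy(IStrategy):
    timeframe = "5m"

    @informative("1h")
    def populate_indicators_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        # Runs on 1h candles of the traded pair; after merging, the column is
        # available in the main dataframe as e.g. "rsi_1h"
        dataframe["rsi"] = ta.RSI(dataframe, timeperiod=14)
        return dataframe
```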
|
||||
!!! Note
|
||||
Do not use the `@informative` decorator if you need to use data from one informative pair when generating another informative pair. Instead, define informative pairs manually as described [in the DataProvider section](#complete-data-provider-sample).
|
||||
|
||||
When hyperopting, use of the hyperoptable parameter `.value` attribute is not supported. Please use the `.range` attribute. See [optimizing an indicator parameter](hyperopt.md#optimizing-an-indicator-parameter) for more information.
|
||||
|
||||
??? info "Full documentation"
|
||||
``` python
|
||||
|
@ -568,10 +675,6 @@ for more information.
|
|||
|
||||
```
|
||||
|
||||
!!! Note
|
||||
Do not use `@informative` decorator if you need to use data of one informative pair when generating another informative pair. Instead, define informative pairs
|
||||
manually as described [in the DataProvider section](#complete-data-provider-sample).
|
||||
|
||||
!!! Note
|
||||
Use string formatting when accessing informative dataframes of other pairs. This will allow easily changing stake currency in config without having to adjust strategy code.
|
||||
|
||||
|
@ -592,18 +695,15 @@ for more information.
|
|||
Alternatively column renaming may be used to remove stake currency from column names: `@informative('1h', 'BTC/{stake}', fmt='{base}_{column}_{timeframe}')`.
|
||||
|
||||
!!! Warning "Duplicate method names"
|
||||
Methods tagged with `@informative()` decorator must always have unique names! Re-using same name (for example when copy-pasting already defined informative method)
|
||||
will overwrite previously defined method and not produce any errors due to limitations of Python programming language. In such cases you will find that indicators
|
||||
created in earlier-defined methods are not available in the dataframe. Carefully review method names and make sure they are unique!
|
||||
    Methods tagged with the `@informative()` decorator must always have unique names! Reusing the same name (for example when copy-pasting already defined informative methods) will overwrite previously defined methods and not produce any errors due to limitations of the Python programming language. In such cases you will find that indicators created in methods higher up in the strategy file are not available in the dataframe. Carefully review method names and make sure they are unique!
|
||||
|
||||
### *merge_informative_pair()*
|
||||
|
||||
This method helps you merge an informative pair to a regular dataframe without lookahead bias.
|
||||
It's there to help you merge the dataframe in a safe and consistent way.
|
||||
This method helps you merge an informative pair to the regular main dataframe safely and consistently, without lookahead bias.
|
||||
|
||||
Options:
|
||||
|
||||
- Rename the columns for you to create unique columns
|
||||
- Rename the columns to create unique columns
|
||||
- Merge the dataframe without lookahead bias
|
||||
- Forward-fill (optional)
|
||||
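A minimal sketch of a call, assuming a hand-picked informative pair and indicator:

```python
import talib.abstract as ta
from freqtrade.strategy import merge_informative_pair


def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    inf_tf = "1h"
    informative = self.dp.get_pair_dataframe(pair="BTC/USDT", timeframe=inf_tf)
    informative["rsi"] = ta.RSI(informative, timeperiod=14)
    # Columns are renamed (e.g. "rsi_1h") and merged without lookahead bias
    dataframe = merge_informative_pair(dataframe, informative, self.timeframe, inf_tf, ffill=True)
    return dataframe
```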
|
||||
|
@ -654,20 +754,20 @@ All columns of the informative dataframe will be available on the returning data
|
|||
```
|
||||
|
||||
!!! Warning "Informative timeframe < timeframe"
|
||||
Using informative timeframes smaller than the dataframe timeframe is not recommended with this method, as it will not use any of the additional information this would provide.
|
||||
To use the more detailed information properly, more advanced methods should be applied (which are out of scope for freqtrade documentation, as it'll depend on the respective need).
|
||||
Using informative timeframes smaller than the main dataframe timeframe is not recommended with this method, as it will not use any of the additional information this would provide.
|
||||
To use the more detailed information properly, more advanced methods should be applied (which are out of scope for this documentation).
|
||||
|
||||
## Additional data (DataProvider)
|
||||
|
||||
The strategy provides access to the `DataProvider`. This allows you to get additional data to use in your strategy.
|
||||
|
||||
All methods return `None` in case of failure (do not raise an exception).
|
||||
All methods return `None` in case of failure, i.e. failures do not raise an exception.
|
||||
|
||||
Please always check the mode of operation to select the correct method to get data (samples see below).
|
||||
Please always check the mode of operation to select the correct method to get data (see below for examples).
|
||||
|
||||
!!! Warning "Hyperopt"
|
||||
Dataprovider is available during hyperopt, however it can only be used in `populate_indicators()` within a strategy.
|
||||
It is not available in `populate_buy()` and `populate_sell()` methods, nor in `populate_indicators()`, if this method located in the hyperopt file.
|
||||
!!! Warning "Hyperopt Limitations"
|
||||
The DataProvider is available during hyperopt, however it can only be used in `populate_indicators()` **within a strategy**, not within a hyperopt class file.
|
||||
It is also not available in `populate_entry_trend()` and `populate_exit_trend()` methods.
|
||||
|
||||
### Possible options for DataProvider
|
||||
|
||||
|
@ -693,31 +793,31 @@ for pair, timeframe in self.dp.available_pairs:
|
|||
|
||||
### *current_whitelist()*
|
||||
|
||||
Imagine you've developed a strategy that trades the `5m` timeframe using signals generated from a `1d` timeframe on the top 10 volume pairs by volume.
|
||||
Imagine you've developed a strategy that trades the `5m` timeframe using signals generated from a `1d` timeframe on the top 10 exchange pairs by volume.
|
||||
|
||||
The strategy might look something like this:
|
||||
The strategy logic might look something like this:
|
||||
|
||||
*Scan through the top 10 pairs by volume using the `VolumePairList` every 5 minutes and use a 14 day RSI to buy and sell.*
|
||||
*Scan through the top 10 pairs by volume using the `VolumePairList` every 5 minutes and use a 14 day RSI to enter and exit.*
|
||||
|
||||
Due to the limited available data, it's very difficult to resample `5m` candles into daily candles for use in a 14 day RSI. Most exchanges limit us to just 500-1000 candles which effectively gives us around 1.74 daily candles. We need 14 days at least!
|
||||
Due to the limited available data, it's very difficult to resample `5m` candles into daily candles for use in a 14 day RSI. Most exchanges limit users to just 500-1000 candles which effectively gives us around 1.74 daily candles. We need 14 days at least!
|
||||
|
||||
Since we can't resample the data we will have to use an informative pair; and since the whitelist will be dynamic we don't know which pair(s) to use.
|
||||
Since we can't resample the data we will have to use an informative pair, and since the whitelist will be dynamic we don't know which pair(s) to use! We have a problem!
|
||||
|
||||
This is where calling `self.dp.current_whitelist()` comes in handy.
|
||||
This is where calling `self.dp.current_whitelist()` comes in handy to retrieve only those pairs in the whitelist.
|
||||
|
||||
```python
|
||||
def informative_pairs(self):
|
||||
|
||||
# get access to all pairs available in whitelist.
|
||||
pairs = self.dp.current_whitelist()
|
||||
# Assign tf to each pair so they can be downloaded and cached for strategy.
|
||||
# Assign timeframe to each pair so they can be downloaded and cached for strategy.
|
||||
informative_pairs = [(pair, '1d') for pair in pairs]
|
||||
return informative_pairs
|
||||
```
|
||||
|
||||
??? Note "Plotting with current_whitelist"
|
||||
Current whitelist is not supported for `plot-dataframe`, as this command is usually used by providing an explicit pairlist - and would therefore make the return values of this method misleading.
|
||||
It's also not supported for freqUI visualization in [webserver mode](utils.md#webserver-mode) - as the configuration for webserver mode doesn't require a pairlist to be set.
|
||||
Current whitelist is not supported for `plot-dataframe`, as this command is usually used by providing an explicit pairlist and would therefore make the return values of this method misleading.
|
||||
It's also not supported for FreqUI visualization in [webserver mode](utils.md#webserver-mode), as the configuration for webserver mode doesn't require a pairlist to be set.
|
||||
|
||||
### *get_pair_dataframe(pair, timeframe)*
|
||||
|
||||
|
@ -758,7 +858,7 @@ if self.dp.runmode.value in ('live', 'dry_run'):
|
|||
dataframe['best_ask'] = ob['asks'][0][0]
|
||||
```
|
||||
|
||||
The orderbook structure is aligned with the order structure from [ccxt](https://github.com/ccxt/ccxt/wiki/Manual#order-book-structure), so the result will look as follows:
|
||||
The orderbook structure is aligned with the order structure from [ccxt](https://github.com/ccxt/ccxt/wiki/Manual#order-book-structure), so the result will be formatted as follows:
|
||||
|
||||
``` js
|
||||
{
|
||||
|
@ -776,7 +876,7 @@ The orderbook structure is aligned with the order structure from [ccxt](https://
|
|||
}
|
||||
```
|
||||
|
||||
Therefore, using `ob['bids'][0][0]` as demonstrated above will result in using the best bid price. `ob['bids'][0][1]` would look at the amount at this orderbook position.
|
||||
Therefore, using `ob['bids'][0][0]` as demonstrated above will use the best bid price. `ob['bids'][0][1]` would look at the amount at this orderbook position.
|
||||
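For example, a sketch computing the spread from these values, guarded by a runmode check as in the snippet above:

```python
if self.dp.runmode.value in ("live", "dry_run"):
    ob = self.dp.orderbook(metadata["pair"], 1)
    best_bid = ob["bids"][0][0]
    best_ask = ob["asks"][0][0]
    # Relative spread between best ask and best bid
    dataframe["spread"] = (best_ask - best_bid) / best_bid
```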
|
||||
!!! Warning "Warning about backtesting"
|
||||
The order book is not part of the historic data which means backtesting and hyperopt will not work correctly if this method is used, as the method will return up-to-date values.
|
||||
|
@ -793,12 +893,12 @@ if self.dp.runmode.value in ('live', 'dry_run'):
|
|||
|
||||
!!! Warning
|
||||
Although the ticker data structure is a part of the ccxt Unified Interface, the values returned by this method can
|
||||
vary for different exchanges. For instance, many exchanges do not return `vwap` values, some exchanges
|
||||
does not always fills in the `last` field (so it can be None), etc. So you need to carefully verify the ticker
|
||||
vary for different exchanges. For instance, many exchanges do not return `vwap` values, and some exchanges
|
||||
do not always fill in the `last` field (so it can be None), etc. So you need to carefully verify the ticker
|
||||
data returned from the exchange and add appropriate error handling / defaults.
|
||||
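A sketch of the kind of defensive access this suggests - the fallback to `close` is an assumption, since field availability differs per exchange:

```python
if self.dp.runmode.value in ("live", "dry_run"):
    ticker = self.dp.ticker(metadata["pair"])
    # 'last' may be None or missing on some exchanges - fall back to 'close'
    last_price = ticker.get("last") or ticker.get("close")
    if last_price is not None:
        dataframe["last_price"] = last_price
```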
|
||||
!!! Warning "Warning about backtesting"
|
||||
This method will always return up-to-date values - so usage during backtesting / hyperopt without runmode checks will lead to wrong results.
|
||||
This method will always return up-to-date / real-time values. As such, usage during backtesting / hyperopt without runmode checks will lead to wrong results, e.g. your whole dataframe will contain the same single value in all rows.
|
||||
|
||||
### Send Notification
|
||||
|
||||
|
@ -817,7 +917,7 @@ Notifications will only be sent in trading modes (Live/Dry-run) - so this method
|
|||
!!! Warning "Spamming"
|
||||
    You can spam yourself quite badly by setting `always_send=True` in this method. Use this with great care and only in conditions you know will not happen throughout a candle to avoid a message every 5 seconds.
|
||||
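A sketch of a guarded call - the message text is illustrative:

```python
def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    if self.dp.runmode.value in ("live", "dry_run"):
        # By default, identical messages are not repeated; always_send=True overrides this
        self.dp.send_msg(f"Analyzed exit signals for {metadata['pair']}")
    return dataframe
```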
|
||||
### Complete Data-provider sample
|
||||
### Complete DataProvider sample
|
||||
|
||||
```python
|
||||
from freqtrade.strategy import IStrategy, merge_informative_pair
|
||||
|
@ -884,14 +984,14 @@ class SampleStrategy(IStrategy):
|
|||
|
||||
## Additional data (Wallets)
|
||||
|
||||
The strategy provides access to the `wallets` object. This contains the current balances on the exchange.
|
||||
The strategy provides access to the `wallets` object. This contains the current balances of your wallets/accounts on the exchange.
|
||||
|
||||
!!! Note "Backtesting / Hyperopt"
|
||||
Wallets behaves differently depending on the function it's called.
|
||||
Wallets behaves differently depending on the function from which it is called.
|
||||
Within `populate_*()` methods, it'll return the full wallet as configured.
|
||||
Within [callbacks](strategy-callbacks.md), you'll get the wallet state corresponding to the actual simulated wallet at that point in the simulation process.
|
||||
|
||||
Please always check if `wallets` is available to avoid failures during backtesting.
|
||||
Always check if `wallets` is available to avoid failures during backtesting.
|
||||
|
||||
``` python
|
||||
if self.wallets:
|
||||
|
@ -910,15 +1010,15 @@ if self.wallets:
|
|||
|
||||
## Additional data (Trades)
|
||||
|
||||
A history of Trades can be retrieved in the strategy by querying the database.
|
||||
A history of trades can be retrieved in the strategy by querying the database.
|
||||
|
||||
At the top of the file, import Trade.
|
||||
At the top of the file, import the required object:
|
||||
|
||||
```python
|
||||
from freqtrade.persistence import Trade
|
||||
```
|
||||
|
||||
The following example queries for the current pair and trades from today, however other filters can easily be added.
|
||||
The following example queries trades from today for the current pair (`metadata['pair']`). Other filters can easily be added.
|
||||
|
||||
``` python
|
||||
trades = Trade.get_trades_proxy(pair=metadata['pair'],
|
||||
|
@ -936,7 +1036,9 @@ For a full list of available methods, please consult the [Trade object](trade-ob
|
|||
|
||||
## Prevent trades from happening for a specific pair
|
||||
|
||||
Freqtrade locks pairs automatically for the current candle (until that candle is over) when a pair is sold, preventing an immediate re-buy of that pair.
|
||||
Freqtrade locks pairs automatically for the current candle (until that candle is over) when a pair exits, preventing an immediate re-entry of that pair.
|
||||
|
||||
This is to prevent "waterfalls" of many and frequent trades within a single candle.
|
||||
|
||||
Locked pairs will show the message `Pair <pair> is currently locked.`.
|
||||
|
||||
|
@ -947,7 +1049,7 @@ Sometimes it may be desired to lock a pair after certain events happen (e.g. mul
|
|||
Freqtrade has an easy method to do this from within the strategy, by calling `self.lock_pair(pair, until, [reason])`.
|
||||
`until` must be a datetime object in the future, after which trading will be re-enabled for that pair, while `reason` is an optional string detailing why the pair was locked.
|
||||
|
||||
Locks can also be lifted manually, by calling `self.unlock_pair(pair)` or `self.unlock_reason(<reason>)` - providing reason the pair was locked with.
|
||||
Locks can also be lifted manually, by calling `self.unlock_pair(pair)` or `self.unlock_reason(<reason>)`, providing the reason the pair was locked with.
|
||||
`self.unlock_reason(<reason>)` will unlock all pairs currently locked with the provided reason.
|
||||
|
||||
To verify if a pair is currently locked, use `self.is_pair_locked(pair)`.
|
||||
|
@ -956,7 +1058,7 @@ To verify if a pair is currently locked, use `self.is_pair_locked(pair)`.
|
|||
    Locked pairs will always be rounded up to the next candle. So assuming a `5m` timeframe, a lock with `until` set to 10:18 will lock the pair until the candle from 10:15-10:20 is finished.
|
||||
|
||||
!!! Warning
|
||||
Manually locking pairs is not available during backtesting, only locks via Protections are allowed.
|
||||
Manually locking pairs is not available during backtesting. Only locks via Protections are allowed.
|
||||
|
||||
#### Pair locking example
|
||||
|
||||
|
@ -966,7 +1068,7 @@ from datetime import timedelta, datetime, timezone
|
|||
# Put the above lines at the top of the strategy file, next to all the other imports
|
||||
# --------
|
||||
|
||||
# Within populate indicators (or populate_buy):
|
||||
# Within populate indicators (or populate_entry_trend):
|
||||
if self.config['runmode'].value in ('live', 'dry_run'):
|
||||
# fetch closed trades for the last 2 days
|
||||
trades = Trade.get_trades_proxy(
|
||||
|
@ -979,9 +1081,9 @@ if self.config['runmode'].value in ('live', 'dry_run'):
|
|||
self.lock_pair(metadata['pair'], until=datetime.now(timezone.utc) + timedelta(hours=12))
|
||||
```
|
||||
|
||||
## Print created dataframe
|
||||
## Print the main dataframe
|
||||
|
||||
To inspect the created dataframe, you can issue a print-statement in either `populate_entry_trend()` or `populate_exit_trend()`.
|
||||
To inspect the current main dataframe, you can issue a print-statement in either `populate_entry_trend()` or `populate_exit_trend()`.
|
||||
You may also want to print the pair so it's clear what data is currently shown.
|
||||
|
||||
``` python
|
||||
|
@ -1001,29 +1103,30 @@ def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFram
|
|||
return dataframe
|
||||
```
|
||||
|
||||
Printing more than a few rows is also possible (simply use `print(dataframe)` instead of `print(dataframe.tail())`), however not recommended, as that will be very verbose (~500 lines per pair every 5 seconds).
|
||||
Printing more than a few rows is also possible by using `print(dataframe)` instead of `print(dataframe.tail())`. However, this is not recommended, as it can result in a lot of output (~500 lines per pair every 5 seconds).
|
||||
|
||||
## Common mistakes when developing strategies
|
||||
|
||||
### Peeking into the future while backtesting
|
||||
### Looking into the future while backtesting
|
||||
|
||||
Backtesting analyzes the whole time-range at once for performance reasons. Because of this, strategy authors need to make sure that strategies do not look-ahead into the future.
|
||||
This is a common pain-point, which can cause huge differences between backtesting and dry/live run methods, since they all use data which is not available during dry/live runs, so these strategies will perform well during backtesting, but will fail / perform badly in real conditions.
|
||||
Backtesting analyzes the whole dataframe timerange at once for performance reasons. Because of this, strategy authors need to make sure that strategies do not look ahead into the future, i.e. use data that would not be available in dry or live mode.
|
||||
|
||||
The following lists some common patterns which should be avoided to prevent frustration:
|
||||
This is a common pain-point, which can cause huge differences between backtesting and dry/live runs. Strategies that look into the future will perform well during backtesting, often with incredible profits or win rates, but will fail or perform badly in real conditions.
|
||||
|
||||
The following list contains some common patterns which should be avoided to prevent frustration:
|
||||
|
||||
- don't use `shift(-1)` or other negative values. This uses data from the future in backtesting, which is not available in dry or live modes.
|
||||
- don't use `.iloc[-1]` or any other absolute position in the dataframe within `populate_` functions, as this will be different between dry-run and backtesting. Absolute `iloc` indexing is safe to use in callbacks however - see [Strategy Callbacks](strategy-callbacks.md).
|
||||
- don't use `dataframe['volume'].mean()`. This uses the full DataFrame for backtesting, including data from the future. Use `dataframe['volume'].rolling(<window>).mean()` instead
|
||||
- don't use `.resample('1h')`. This uses the left border of the interval, so moves data from an hour to the start of the hour. Use `.resample('1h', label='right')` instead.
|
||||
- don't use functions that use all dataframe or column values, e.g. `dataframe['mean_volume'] = dataframe['volume'].mean()`. As backtesting uses the full dataframe, at any point in the dataframe, the `'mean_volume'` series would include data from the future. Use rolling() calculations instead, e.g. `dataframe['volume'].rolling(<window>).mean()`.
|
||||
- don't use `.resample('1h')`. This uses the left border of the period interval, so moves data from an hour boundary to the start of the hour. Use `.resample('1h', label='right')` instead.
|
||||
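As a short illustration of the points above, a sketch contrasting a leaking calculation with its safe counterpart:

```python
# Lookahead: the mean is computed over the full dataframe, including future candles
dataframe["mean_volume"] = dataframe["volume"].mean()
# Safe: only the current and the previous 19 candles are used
dataframe["mean_volume"] = dataframe["volume"].rolling(20).mean()

# Lookahead: shift(-1) pulls the next candle's close into the current row
dataframe["close_next"] = dataframe["close"].shift(-1)
# Safe: shift(1) references the previous, already closed candle
dataframe["close_prev"] = dataframe["close"].shift(1)
```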
|
||||
!!! Tip "Identifying problems"
|
||||
You may also want to check the 2 helper commands [lookahead-analysis](lookahead-analysis.md) and [recursive-analysis](recursive-analysis.md), which can each help you figure out problems with your strategy in different ways.
|
||||
Please treat them as what they are - helpers to identify most common problems. A negative result of each does not guarantee that there's none of the above errors included.
|
||||
You should always use the two helper commands [lookahead-analysis](lookahead-analysis.md) and [recursive-analysis](recursive-analysis.md), which can each help you figure out problems with your strategy in different ways.
|
||||
    Please treat them as what they are - helpers to identify the most common problems. A negative result from each does not guarantee that none of the above errors are present.
|
||||
|
||||
### Colliding signals
|
||||
|
||||
When conflicting signals collide (e.g. both `'enter_long'` and `'exit_long'` are 1), freqtrade will do nothing and ignore the entry signal. This will avoid trades that enter, and exit immediately. Obviously, this can potentially lead to missed entries.
|
||||
When conflicting signals collide (e.g. both `'enter_long'` and `'exit_long'` are set to `1`), freqtrade will do nothing and ignore the entry signal. This will avoid trades that enter, and exit immediately. Obviously, this can potentially lead to missed entries.
|
||||
|
||||
The following rules apply, and entry signals will be ignored if more than one of the 3 signals is set:
|
||||
|
||||
|
@ -1032,11 +1135,11 @@ The following rules apply, and entry signals will be ignored if more than one of
|
|||
|
||||
## Further strategy ideas
|
||||
|
||||
To get additional Ideas for strategies, head over to the [strategy repository](https://github.com/freqtrade/freqtrade-strategies). Feel free to use them as they are - but results will depend on the current market situation, pairs used etc. - therefore please backtest the strategy for your exchange/desired pairs first, evaluate carefully, use at your own risk.
|
||||
Feel free to use any of them as inspiration for your own strategies.
|
||||
We're happy to accept Pull Requests containing new Strategies to that repo.
|
||||
To get additional ideas for strategies, head over to the [strategy repository](https://github.com/freqtrade/freqtrade-strategies). Feel free to use them as examples, but results will depend on the current market situation, pairs used, etc. Therefore, these strategies should be considered only for learning purposes, not real world trading. Please backtest the strategy for your exchange/desired pairs first, then dry run to evaluate carefully, and use at your own risk.
|
||||
|
||||
## Next step
|
||||
Feel free to use any of them as inspiration for your own strategies. We're happy to accept Pull Requests containing new strategies to the repository.
|
||||
|
||||
## Next steps
|
||||
|
||||
Now that you have a perfect strategy, you probably want to backtest it.
|
||||
Your next step is to learn [How to use the Backtesting](backtesting.md).
|
||||
Your next step is to learn [how to use backtesting](backtesting.md).
|
||||
|
|
|
@ -231,7 +231,7 @@ Once all positions are sold, run `/stop` to completely stop the bot.
|
|||
`/reload_config` resets "max_open_trades" to the value set in the configuration and resets this command.
|
||||
|
||||
!!! Warning
|
||||
The stop-buy signal is ONLY active while the bot is running, and is not persisted anyway, so restarting the bot will cause this to reset.
|
||||
The stop-buy signal is ONLY active while the bot is running, and is not persisted anyway, so restarting the bot will cause this to reset.
|
||||
|
||||
### /status
|
||||
|
||||
|
|
|
@ -216,6 +216,45 @@ Example: Search dedicated strategy path.
|
|||
freqtrade list-strategies --strategy-path ~/.freqtrade/strategies/
|
||||
```
|
||||
|
||||
## List Hyperopt-Loss functions
|
||||
|
||||
Use the `list-hyperoptloss` subcommand to see all hyperopt loss functions available.
|
||||
|
||||
It provides a quick list of all available loss functions in your environment.
|
||||
|
||||
This subcommand can be useful for finding problems in your environment with loading loss functions: modules with Hyperopt-Loss functions that contain errors and failed to load are printed in red (LOAD FAILED), while Hyperopt-Loss functions with duplicate names are printed in yellow (DUPLICATE NAME).
|
||||
|
||||
```
|
||||
usage: freqtrade list-hyperoptloss [-h] [-v] [--logfile FILE] [-V] [-c PATH]
|
||||
[-d PATH] [--userdir PATH]
|
||||
[--hyperopt-path PATH] [-1] [--no-color]
|
||||
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--hyperopt-path PATH Specify additional lookup path for Hyperopt Loss
|
||||
functions.
|
||||
-1, --one-column Print output in one column.
|
||||
--no-color Disable colorization of hyperopt results. May be
|
||||
useful if you are redirecting output to a file.
|
||||
|
||||
Common arguments:
|
||||
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
|
||||
--logfile FILE, --log-file FILE
|
||||
Log to the file specified. Special values are:
|
||||
'syslog', 'journald'. See the documentation for more
|
||||
details.
|
||||
-V, --version show program's version number and exit
|
||||
-c PATH, --config PATH
|
||||
Specify configuration file (default:
|
||||
`userdir/config.json` or `config.json` whichever
|
||||
exists). Multiple --config options may be used. Can be
|
||||
set to `-` to read config from stdin.
|
||||
-d PATH, --datadir PATH, --data-dir PATH
|
||||
Path to directory with historical backtesting data.
|
||||
--userdir PATH, --user-data-dir PATH
|
||||
Path to userdata directory.
|
||||
```
|
||||
|
||||
## List freqAI models
|
||||
|
||||
Use the `list-freqaimodels` subcommand to see all freqAI models available.
|
||||
|
|
|
@ -5,7 +5,7 @@ We **strongly** recommend that Windows users use [Docker](docker_quickstart.md)
|
|||
If that is not possible, try using the Windows Linux subsystem (WSL) - for which the Ubuntu instructions should work.
|
||||
Otherwise, please follow the instructions below.
|
||||
|
||||
All instructions assume that python 3.9+ is installed and available.
|
||||
All instructions assume that python 3.10+ is installed and available.
|
||||
|
||||
## Clone the git repository
|
||||
|
||||
|
@ -42,7 +42,7 @@ cd freqtrade
|
|||
|
||||
Install ta-lib according to the [ta-lib documentation](https://github.com/TA-Lib/ta-lib-python#windows).
|
||||
|
||||
As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the latest 3 Python versions (3.9, 3.10, 3.11 and 3.12) and for 64bit Windows.
|
||||
As compiling from source on windows has heavy dependencies (requires a partial visual studio installation), Freqtrade provides these dependencies (in the binary wheel format) for the latest 3 Python versions (3.10, 3.11 and 3.12) and for 64bit Windows.
|
||||
These wheels are also used by the CI running on Windows, and are therefore tested together with freqtrade.
|
||||
|
||||
Other versions must be downloaded from the above link.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
"""Freqtrade bot"""
|
||||
|
||||
__version__ = "2024.9"
|
||||
__version__ = "2024.10"
|
||||
|
||||
if "dev" in __version__:
|
||||
from pathlib import Path
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
__main__.py for Freqtrade
|
||||
To launch Freqtrade as a module
|
||||
|
||||
> python -m freqtrade (with Python >= 3.9)
|
||||
> python -m freqtrade (with Python >= 3.10)
|
||||
"""
|
||||
|
||||
from freqtrade import main
|
||||
|
|
|
@ -27,6 +27,7 @@ from freqtrade.commands.hyperopt_commands import start_hyperopt_list, start_hype
|
|||
from freqtrade.commands.list_commands import (
|
||||
start_list_exchanges,
|
||||
start_list_freqAI_models,
|
||||
start_list_hyperopt_loss_functions,
|
||||
start_list_markets,
|
||||
start_list_strategies,
|
||||
start_list_timeframes,
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
|
||||
|
@ -10,13 +9,15 @@ from freqtrade.exceptions import ConfigurationError, OperationalException
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str, Any]:
|
||||
def setup_analyze_configuration(args: dict[str, Any], method: RunMode) -> dict[str, Any]:
|
||||
"""
|
||||
Prepare the configuration for the entry/exit reason analysis module
|
||||
:param args: Cli args from Arguments()
|
||||
:param method: Bot running mode
|
||||
:return: Configuration
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
|
||||
config = setup_utils_configuration(args, method)
|
||||
|
||||
no_unlimited_runmodes = {
|
||||
|
@ -47,7 +48,7 @@ def setup_analyze_configuration(args: Dict[str, Any], method: RunMode) -> Dict[s
|
|||
return config
|
||||
|
||||
|
||||
def start_analysis_entries_exits(args: Dict[str, Any]) -> None:
|
||||
def start_analysis_entries_exits(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Start analysis script
|
||||
:param args: Cli args from Arguments()
|
||||
|
|
|
@ -5,7 +5,7 @@ This module contains the argument manager class
|
|||
from argparse import ArgumentParser, Namespace, _ArgumentGroup
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from freqtrade.commands.cli_options import AVAILABLE_CLI_OPTIONS
|
||||
from freqtrade.constants import DEFAULT_CONFIG
|
||||
|
@ -23,7 +23,7 @@ ARGS_STRATEGY = [
|
|||
|
||||
ARGS_TRADE = ["db_url", "sd_notify", "dry_run", "dry_run_wallet", "fee"]
|
||||
|
||||
ARGS_WEBSERVER: List[str] = []
|
||||
ARGS_WEBSERVER: list[str] = []
|
||||
|
||||
ARGS_COMMON_OPTIMIZE = [
|
||||
"timeframe",
|
||||
|
@ -258,6 +258,7 @@ NO_CONF_REQURIED = [
|
|||
"list-pairs",
|
||||
"list-strategies",
|
||||
"list-freqaimodels",
|
||||
"list-hyperoptloss",
|
||||
"list-data",
|
||||
"hyperopt-list",
|
||||
"hyperopt-show",
|
||||
|
@ -277,11 +278,11 @@ class Arguments:
|
|||
Arguments Class. Manage the arguments received by the cli
|
||||
"""
|
||||
|
||||
def __init__(self, args: Optional[List[str]]) -> None:
|
||||
def __init__(self, args: Optional[list[str]]) -> None:
|
||||
self.args = args
|
||||
self._parsed_arg: Optional[Namespace] = None
|
||||
|
||||
def get_parsed_arg(self) -> Dict[str, Any]:
|
||||
def get_parsed_arg(self) -> dict[str, Any]:
|
||||
"""
|
||||
Return the list of arguments
|
||||
:return: List[str] List of arguments
|
||||
|
@ -322,7 +323,7 @@ class Arguments:
|
|||
return parsed_arg
|
||||
|
||||
def _build_args(
|
||||
self, optionlist: List[str], parser: Union[ArgumentParser, _ArgumentGroup]
|
||||
self, optionlist: list[str], parser: Union[ArgumentParser, _ArgumentGroup]
|
||||
) -> None:
|
||||
for val in optionlist:
|
||||
opt = AVAILABLE_CLI_OPTIONS[val]
|
||||
|
@ -365,6 +366,7 @@ class Arguments:
|
|||
start_list_data,
|
||||
start_list_exchanges,
|
||||
start_list_freqAI_models,
|
||||
start_list_hyperopt_loss_functions,
|
||||
start_list_markets,
|
||||
start_list_strategies,
|
||||
start_list_timeframes,
|
||||
|
@ -566,6 +568,15 @@ class Arguments:
|
|||
list_strategies_cmd.set_defaults(func=start_list_strategies)
|
||||
self._build_args(optionlist=ARGS_LIST_STRATEGIES, parser=list_strategies_cmd)
|
||||
|
||||
# Add list-Hyperopt loss subcommand
|
||||
list_hyperopt_loss_cmd = subparsers.add_parser(
|
||||
"list-hyperoptloss",
|
||||
help="Print available hyperopt loss functions.",
|
||||
parents=[_common_parser],
|
||||
)
|
||||
list_hyperopt_loss_cmd.set_defaults(func=start_list_hyperopt_loss_functions)
|
||||
self._build_args(optionlist=ARGS_LIST_HYPEROPTS, parser=list_hyperopt_loss_cmd)
|
||||
|
||||
# Add list-freqAI Models subcommand
|
||||
list_freqaimodels_cmd = subparsers.add_parser(
|
||||
"list-freqaimodels",
|
||||
|
|
|
@ -1,261 +1,27 @@
|
|||
import logging
|
||||
import secrets
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List
|
||||
from typing import Any
|
||||
|
||||
from questionary import Separator, prompt
|
||||
|
||||
from freqtrade.configuration import sanitize_config
|
||||
from freqtrade.configuration.config_setup import setup_utils_configuration
|
||||
from freqtrade.configuration.detect_environment import running_in_docker
|
||||
from freqtrade.configuration.directory_operations import chown_user_directory
|
||||
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange import MAP_EXCHANGE_CHILDCLASS, available_exchanges
|
||||
from freqtrade.util import render_template
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def validate_is_int(val):
|
||||
try:
|
||||
_ = int(val)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def validate_is_float(val):
|
||||
try:
|
||||
_ = float(val)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def ask_user_overwrite(config_path: Path) -> bool:
|
||||
questions = [
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "overwrite",
|
||||
"message": f"File {config_path} already exists. Overwrite?",
|
||||
"default": False,
|
||||
},
|
||||
]
|
||||
answers = prompt(questions)
|
||||
return answers["overwrite"]
|
||||
|
||||
|
||||
def ask_user_config() -> Dict[str, Any]:
|
||||
"""
|
||||
Ask user a few questions to build the configuration.
|
||||
Interactive questions built using https://github.com/tmbo/questionary
|
||||
:returns: Dict with keys to put into template
|
||||
"""
|
||||
questions: List[Dict[str, Any]] = [
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "dry_run",
|
||||
"message": "Do you want to enable Dry-run (simulated trades)?",
|
||||
"default": True,
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "stake_currency",
|
||||
"message": "Please insert your stake currency:",
|
||||
"default": "USDT",
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "stake_amount",
|
||||
"message": f"Please insert your stake amount (Number or '{UNLIMITED_STAKE_AMOUNT}'):",
|
||||
"default": "unlimited",
|
||||
"validate": lambda val: val == UNLIMITED_STAKE_AMOUNT or validate_is_float(val),
|
||||
"filter": lambda val: (
|
||||
'"' + UNLIMITED_STAKE_AMOUNT + '"' if val == UNLIMITED_STAKE_AMOUNT else val
|
||||
),
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "max_open_trades",
|
||||
"message": "Please insert max_open_trades (Integer or -1 for unlimited open trades):",
|
||||
"default": "3",
|
||||
"validate": lambda val: validate_is_int(val),
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"name": "timeframe_in_config",
|
||||
"message": "Time",
|
||||
"choices": ["Have the strategy define timeframe.", "Override in configuration."],
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "timeframe",
|
||||
"message": "Please insert your desired timeframe (e.g. 5m):",
|
||||
"default": "5m",
|
||||
"when": lambda x: x["timeframe_in_config"] == "Override in configuration.",
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "fiat_display_currency",
|
||||
"message": (
|
||||
"Please insert your display Currency for reporting "
|
||||
"(leave empty to disable FIAT conversion):"
|
||||
),
|
||||
"default": "USD",
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"name": "exchange_name",
|
||||
"message": "Select exchange",
|
||||
"choices": [
|
||||
"binance",
|
||||
"binanceus",
|
||||
"bingx",
|
||||
"gate",
|
||||
"htx",
|
||||
"kraken",
|
||||
"kucoin",
|
||||
"okx",
|
||||
Separator("------------------"),
|
||||
"other",
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "trading_mode",
|
||||
"message": "Do you want to trade Perpetual Swaps (perpetual futures)?",
|
||||
"default": False,
|
||||
"filter": lambda val: "futures" if val else "spot",
|
||||
"when": lambda x: x["exchange_name"] in ["binance", "gate", "okx", "bybit"],
|
||||
},
|
||||
{
|
||||
"type": "autocomplete",
|
||||
"name": "exchange_name",
|
||||
"message": "Type your exchange name (Must be supported by ccxt)",
|
||||
"choices": available_exchanges(),
|
||||
"when": lambda x: x["exchange_name"] == "other",
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_key",
|
||||
"message": "Insert Exchange Key",
|
||||
"when": lambda x: not x["dry_run"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_secret",
|
||||
"message": "Insert Exchange Secret",
|
||||
"when": lambda x: not x["dry_run"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_key_password",
|
||||
"message": "Insert Exchange API Key password",
|
||||
"when": lambda x: not x["dry_run"] and x["exchange_name"] in ("kucoin", "okx"),
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "telegram",
|
||||
"message": "Do you want to enable Telegram?",
|
||||
"default": False,
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "telegram_token",
|
||||
"message": "Insert Telegram token",
|
||||
"when": lambda x: x["telegram"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "telegram_chat_id",
|
||||
"message": "Insert Telegram chat id",
|
||||
"when": lambda x: x["telegram"],
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "api_server",
|
||||
"message": "Do you want to enable the Rest API (includes FreqUI)?",
|
||||
"default": False,
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "api_server_listen_addr",
|
||||
"message": (
|
||||
"Insert Api server Listen Address (0.0.0.0 for docker, "
|
||||
"otherwise best left untouched)"
|
||||
),
|
||||
"default": "127.0.0.1" if not running_in_docker() else "0.0.0.0", # noqa: S104
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "api_server_username",
|
||||
"message": "Insert api-server username",
|
||||
"default": "freqtrader",
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "api_server_password",
|
||||
"message": "Insert api-server password",
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
]
|
||||
answers = prompt(questions)
|
||||
|
||||
if not answers:
|
||||
# Interrupted questionary sessions return an empty dict.
|
||||
raise OperationalException("User interrupted interactive questions.")
|
||||
# Ensure default is set for non-futures exchanges
|
||||
answers["trading_mode"] = answers.get("trading_mode", "spot")
|
||||
answers["margin_mode"] = "isolated" if answers.get("trading_mode") == "futures" else ""
|
||||
# Force JWT token to be a random string
|
||||
answers["api_server_jwt_key"] = secrets.token_hex()
|
||||
answers["api_server_ws_token"] = secrets.token_urlsafe(25)
|
||||
|
||||
return answers
|
||||
|
||||
|
||||
def deploy_new_config(config_path: Path, selections: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Applies selections to the template and writes the result to config_path
|
||||
:param config_path: Path object for new config file. Should not exist yet
|
||||
:param selections: Dict containing selections taken by the user.
|
||||
"""
|
||||
from jinja2.exceptions import TemplateNotFound
|
||||
|
||||
try:
|
||||
exchange_template = MAP_EXCHANGE_CHILDCLASS.get(
|
||||
selections["exchange_name"], selections["exchange_name"]
|
||||
)
|
||||
|
||||
selections["exchange"] = render_template(
|
||||
templatefile=f"subtemplates/exchange_{exchange_template}.j2", arguments=selections
|
||||
)
|
||||
except TemplateNotFound:
|
||||
selections["exchange"] = render_template(
|
||||
templatefile="subtemplates/exchange_generic.j2", arguments=selections
|
||||
)
|
||||
|
||||
config_text = render_template(templatefile="base_config.json.j2", arguments=selections)
|
||||
|
||||
logger.info(f"Writing config to `{config_path}`.")
|
||||
logger.info(
|
||||
"Please make sure to check the configuration contents and adjust settings to your needs."
|
||||
)
|
||||
|
||||
config_path.write_text(config_text)
|
||||
|
||||
|
||||
def start_new_config(args: Dict[str, Any]) -> None:
|
||||
def start_new_config(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Create a new strategy from a template
|
||||
Asking the user questions to fill out the template accordingly.
|
||||
"""
|
||||
|
||||
from freqtrade.configuration.deploy_config import (
|
||||
ask_user_config,
|
||||
ask_user_overwrite,
|
||||
deploy_new_config,
|
||||
)
|
||||
from freqtrade.configuration.directory_operations import chown_user_directory
|
||||
|
||||
config_path = Path(args["config"][0])
|
||||
chown_user_directory(config_path.parent)
|
||||
if config_path.exists():
|
||||
|
@ -271,7 +37,10 @@ def start_new_config(args: Dict[str, Any]) -> None:
|
|||
deploy_new_config(config_path, selections)
|
||||
|
||||
|
||||
def start_show_config(args: Dict[str, Any]) -> None:
|
||||
def start_show_config(args: dict[str, Any]) -> None:
|
||||
from freqtrade.configuration import sanitize_config
|
||||
from freqtrade.configuration.config_setup import setup_utils_configuration
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE, set_dry=False)
|
||||
|
||||
print("Your combined configuration is:")
|
||||
|
|
|
@ -1,24 +1,12 @@
|
|||
import logging
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.configuration import TimeRange, setup_utils_configuration
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT, DL_DATA_TIMEFRAMES, Config
|
||||
from freqtrade.data.converter import (
|
||||
convert_ohlcv_format,
|
||||
convert_trades_format,
|
||||
convert_trades_to_ohlcv,
|
||||
)
|
||||
from freqtrade.data.history import download_data_main
|
||||
from freqtrade.enums import CandleType, RunMode, TradingMode
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
from freqtrade.exchange import timeframe_to_minutes
|
||||
from freqtrade.misc import plural
|
||||
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
from freqtrade.util import print_rich_table
|
||||
from freqtrade.util.migrations import migrate_data
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -38,10 +26,13 @@ def _check_data_config_download_sanity(config: Config) -> None:
|
|||
)
|
||||
|
||||
|
||||
def start_download_data(args: Dict[str, Any]) -> None:
|
||||
def start_download_data(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Download data (former download_backtest_data.py script)
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.data.history import download_data_main
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
|
||||
|
||||
_check_data_config_download_sanity(config)
|
||||
|
@ -53,7 +44,11 @@ def start_download_data(args: Dict[str, Any]) -> None:
|
|||
sys.exit("SIGINT received, aborting ...")
|
||||
|
||||
|
||||
def start_convert_trades(args: Dict[str, Any]) -> None:
|
||||
def start_convert_trades(args: dict[str, Any]) -> None:
|
||||
from freqtrade.configuration import TimeRange, setup_utils_configuration
|
||||
from freqtrade.data.converter import convert_trades_to_ohlcv
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
|
||||
|
||||
timerange = TimeRange()
|
||||
|
@ -92,10 +87,14 @@ def start_convert_trades(args: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
|
||||
def start_convert_data(args: dict[str, Any], ohlcv: bool = True) -> None:
|
||||
"""
|
||||
Convert data from one format to another
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.data.converter import convert_ohlcv_format, convert_trades_format
|
||||
from freqtrade.util.migrations import migrate_data
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
if ohlcv:
|
||||
migrate_data(config)
|
||||
|
@ -114,10 +113,13 @@ def start_convert_data(args: Dict[str, Any], ohlcv: bool = True) -> None:
|
|||
)
|
||||
|
||||
|
||||
def start_list_data(args: Dict[str, Any]) -> None:
|
||||
def start_list_data(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
List available OHLCV data
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.exchange import timeframe_to_minutes
|
||||
from freqtrade.util import print_rich_table
|
||||
|
||||
if args["trades"]:
|
||||
start_list_trades_data(args)
|
||||
|
@ -177,10 +179,13 @@ def start_list_data(args: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def start_list_trades_data(args: Dict[str, Any]) -> None:
|
||||
def start_list_trades_data(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
List available Trades data
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.misc import plural
|
||||
from freqtrade.util import print_rich_table
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
|
|
|
@ -1,18 +1,17 @@
|
|||
import logging
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import func, select
|
||||
|
||||
from freqtrade.configuration.config_setup import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def start_convert_db(args: Dict[str, Any]) -> None:
|
||||
def start_convert_db(args: dict[str, Any]) -> None:
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.orm import make_transient
|
||||
|
||||
from freqtrade.configuration.config_setup import setup_utils_configuration
|
||||
from freqtrade.persistence import Order, Trade, init_db
|
||||
from freqtrade.persistence.migrations import set_sequence_ids
|
||||
from freqtrade.persistence.pairlock import PairLock
|
||||
|
|
|
@ -1,16 +1,11 @@
|
|||
import logging
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, Tuple
|
||||
from typing import Any
|
||||
|
||||
import requests
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.configuration.directory_operations import copy_sample_files, create_userdata_dir
|
||||
from freqtrade.constants import USERPATH_STRATEGIES
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.util import render_template, render_template_with_fallback
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -20,12 +15,14 @@ logger = logging.getLogger(__name__)
|
|||
req_timeout = 30
|
||||
|
||||
|
||||
def start_create_userdir(args: Dict[str, Any]) -> None:
|
||||
def start_create_userdir(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Create "user_data" directory to contain user data strategies, hyperopt, ...)
|
||||
:param args: Cli args from Arguments()
|
||||
:return: None
|
||||
"""
|
||||
from freqtrade.configuration.directory_operations import copy_sample_files, create_userdata_dir
|
||||
|
||||
if "user_data_dir" in args and args["user_data_dir"]:
|
||||
userdir = create_userdata_dir(args["user_data_dir"], create_dir=True)
|
||||
copy_sample_files(userdir, overwrite=args["reset"])
|
||||
|
@ -38,6 +35,8 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
|
|||
"""
|
||||
Deploy new strategy from template to strategy_path
|
||||
"""
|
||||
from freqtrade.util import render_template, render_template_with_fallback
|
||||
|
||||
fallback = "full"
|
||||
attributes = render_template_with_fallback(
|
||||
templatefile=f"strategy_subtemplates/strategy_attributes_{subtemplate}.j2",
|
||||
|
@ -81,7 +80,9 @@ def deploy_new_strategy(strategy_name: str, strategy_path: Path, subtemplate: st
|
|||
strategy_path.write_text(strategy_text)
|
||||
|
||||
|
||||
def start_new_strategy(args: Dict[str, Any]) -> None:
|
||||
def start_new_strategy(args: dict[str, Any]) -> None:
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
if "strategy" in args and args["strategy"]:
|
||||
|
@ -98,80 +99,14 @@ def start_new_strategy(args: Dict[str, Any]) -> None:
|
|||
raise ConfigurationError("`new-strategy` requires --strategy to be set.")
|
||||
|
||||
|
||||
def clean_ui_subdir(directory: Path):
|
||||
if directory.is_dir():
|
||||
logger.info("Removing UI directory content.")
|
||||
def start_install_ui(args: dict[str, Any]) -> None:
|
||||
from freqtrade.commands.deploy_ui import (
|
||||
clean_ui_subdir,
|
||||
download_and_install_ui,
|
||||
get_ui_download_url,
|
||||
read_ui_version,
|
||||
)
|
||||
|
||||
for p in reversed(list(directory.glob("**/*"))): # iterate contents from leaves to root
|
||||
if p.name in (".gitkeep", "fallback_file.html"):
|
||||
continue
|
||||
if p.is_file():
|
||||
p.unlink()
|
||||
elif p.is_dir():
|
||||
p.rmdir()
|
||||
|
||||
|
||||
def read_ui_version(dest_folder: Path) -> Optional[str]:
|
||||
file = dest_folder / ".uiversion"
|
||||
if not file.is_file():
|
||||
return None
|
||||
|
||||
with file.open("r") as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def download_and_install_ui(dest_folder: Path, dl_url: str, version: str):
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
logger.info(f"Downloading {dl_url}")
|
||||
resp = requests.get(dl_url, timeout=req_timeout).content
|
||||
dest_folder.mkdir(parents=True, exist_ok=True)
|
||||
with ZipFile(BytesIO(resp)) as zf:
|
||||
for fn in zf.filelist:
|
||||
with zf.open(fn) as x:
|
||||
destfile = dest_folder / fn.filename
|
||||
if fn.is_dir():
|
||||
destfile.mkdir(exist_ok=True)
|
||||
else:
|
||||
destfile.write_bytes(x.read())
|
||||
with (dest_folder / ".uiversion").open("w") as f:
|
||||
f.write(version)
|
||||
|
||||
|
||||
def get_ui_download_url(version: Optional[str] = None) -> Tuple[str, str]:
|
||||
base_url = "https://api.github.com/repos/freqtrade/frequi/"
|
||||
# Get base UI Repo path
|
||||
|
||||
resp = requests.get(f"{base_url}releases", timeout=req_timeout)
|
||||
resp.raise_for_status()
|
||||
r = resp.json()
|
||||
|
||||
if version:
|
||||
tmp = [x for x in r if x["name"] == version]
|
||||
if tmp:
|
||||
latest_version = tmp[0]["name"]
|
||||
assets = tmp[0].get("assets", [])
|
||||
else:
|
||||
raise ValueError("UI-Version not found.")
|
||||
else:
|
||||
latest_version = r[0]["name"]
|
||||
assets = r[0].get("assets", [])
|
||||
dl_url = ""
|
||||
if assets and len(assets) > 0:
|
||||
dl_url = assets[0]["browser_download_url"]
|
||||
|
||||
# URL not found - try assets url
|
||||
if not dl_url:
|
||||
assets = r[0]["assets_url"]
|
||||
resp = requests.get(assets, timeout=req_timeout)
|
||||
r = resp.json()
|
||||
dl_url = r[0]["browser_download_url"]
|
||||
|
||||
return dl_url, latest_version
|
||||
|
||||
|
||||
def start_install_ui(args: Dict[str, Any]) -> None:
|
||||
dest_folder = Path(__file__).parents[1] / "rpc/api_server/ui/installed/"
|
||||
# First make sure the assets are removed.
|
||||
dl_url, latest_version = get_ui_download_url(args.get("ui_version"))
|
||||
|
|
freqtrade/commands/deploy_ui.py (new file, 84 lines)
@@ -0,0 +1,84 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Timeout for requests
|
||||
req_timeout = 30
|
||||
|
||||
|
||||
def clean_ui_subdir(directory: Path):
|
||||
if directory.is_dir():
|
||||
logger.info("Removing UI directory content.")
|
||||
|
||||
for p in reversed(list(directory.glob("**/*"))): # iterate contents from leaves to root
|
||||
if p.name in (".gitkeep", "fallback_file.html"):
|
||||
continue
|
||||
if p.is_file():
|
||||
p.unlink()
|
||||
elif p.is_dir():
|
||||
p.rmdir()
|
||||
|
||||
|
||||
def read_ui_version(dest_folder: Path) -> Optional[str]:
|
||||
file = dest_folder / ".uiversion"
|
||||
if not file.is_file():
|
||||
return None
|
||||
|
||||
with file.open("r") as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def download_and_install_ui(dest_folder: Path, dl_url: str, version: str):
|
||||
from io import BytesIO
|
||||
from zipfile import ZipFile
|
||||
|
||||
logger.info(f"Downloading {dl_url}")
|
||||
resp = requests.get(dl_url, timeout=req_timeout).content
|
||||
dest_folder.mkdir(parents=True, exist_ok=True)
|
||||
with ZipFile(BytesIO(resp)) as zf:
|
||||
for fn in zf.filelist:
|
||||
with zf.open(fn) as x:
|
||||
destfile = dest_folder / fn.filename
|
||||
if fn.is_dir():
|
||||
destfile.mkdir(exist_ok=True)
|
||||
else:
|
||||
destfile.write_bytes(x.read())
|
||||
with (dest_folder / ".uiversion").open("w") as f:
|
||||
f.write(version)
|
||||
|
||||
|
||||
def get_ui_download_url(version: Optional[str] = None) -> tuple[str, str]:
|
||||
base_url = "https://api.github.com/repos/freqtrade/frequi/"
|
||||
# Get base UI Repo path
|
||||
|
||||
resp = requests.get(f"{base_url}releases", timeout=req_timeout)
|
||||
resp.raise_for_status()
|
||||
r = resp.json()
|
||||
|
||||
if version:
|
||||
tmp = [x for x in r if x["name"] == version]
|
||||
if tmp:
|
||||
latest_version = tmp[0]["name"]
|
||||
assets = tmp[0].get("assets", [])
|
||||
else:
|
||||
raise ValueError("UI-Version not found.")
|
||||
else:
|
||||
latest_version = r[0]["name"]
|
||||
assets = r[0].get("assets", [])
|
||||
dl_url = ""
|
||||
if assets and len(assets) > 0:
|
||||
dl_url = assets[0]["browser_download_url"]
|
||||
|
||||
# URL not found - try assets url
|
||||
if not dl_url:
|
||||
assets = r[0]["assets_url"]
|
||||
resp = requests.get(assets, timeout=req_timeout)
|
||||
r = resp.json()
|
||||
dl_url = r[0]["browser_download_url"]
|
||||
|
||||
return dl_url, latest_version
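Reviewer note: the new freqtrade/commands/deploy_ui.py module above bundles the FreqUI installation helpers that start_install_ui (earlier in deploy_commands.py) now imports lazily. A hedged sketch of how the pieces chain together; the destination path and the version check are illustrative, the function names and signatures are taken from the file above.

    from pathlib import Path

    from freqtrade.commands.deploy_ui import (
        clean_ui_subdir,
        download_and_install_ui,
        get_ui_download_url,
        read_ui_version,
    )

    dest_folder = Path("user_data/ui_installed")  # illustrative target directory
    dl_url, latest_version = get_ui_download_url(None)  # None -> latest FreqUI release
    if read_ui_version(dest_folder) != latest_version:
        clean_ui_subdir(dest_folder)  # wipe old assets, keeping .gitkeep / fallback_file.html
        download_and_install_ui(dest_folder, dl_url, latest_version)  # unzip and write .uiversion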
|
|
@@ -1,21 +1,20 @@
import logging
from operator import itemgetter
from typing import Any, Dict
from typing import Any

from freqtrade.configuration import setup_utils_configuration
from freqtrade.data.btanalysis import get_latest_hyperopt_file
from freqtrade.enums import RunMode
from freqtrade.exceptions import OperationalException
from freqtrade.optimize.optimize_reports import show_backtest_result


logger = logging.getLogger(__name__)

||||
|
||||
def start_hyperopt_list(args: Dict[str, Any]) -> None:
|
||||
def start_hyperopt_list(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
List hyperopt epochs previously evaluated
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.data.btanalysis import get_latest_hyperopt_file
|
||||
from freqtrade.optimize.hyperopt_output import HyperoptOutput
|
||||
from freqtrade.optimize.hyperopt_tools import HyperoptTools
|
||||
|
||||
|
@ -57,11 +56,14 @@ def start_hyperopt_list(args: Dict[str, Any]) -> None:
|
|||
HyperoptTools.export_csv_file(config, epochs, export_csv)
|
||||
|
||||
|
||||
def start_hyperopt_show(args: Dict[str, Any]) -> None:
|
||||
def start_hyperopt_show(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Show details of a hyperopt epoch previously evaluated
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.data.btanalysis import get_latest_hyperopt_file
|
||||
from freqtrade.optimize.hyperopt_tools import HyperoptTools
|
||||
from freqtrade.optimize.optimize_reports import show_backtest_result
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
|
|
|
@ -1,33 +1,29 @@
|
|||
import csv
|
||||
import logging
|
||||
import sys
|
||||
from typing import Any, Dict, List, Union
|
||||
from typing import Any, Union
|
||||
|
||||
import rapidjson
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.exchange import list_available_exchanges, market_is_active
|
||||
from freqtrade.ft_types import ValidExchangesType
|
||||
from freqtrade.misc import parse_db_uri_for_logging, plural
|
||||
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
||||
from freqtrade.util import print_rich_table
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def start_list_exchanges(args: Dict[str, Any]) -> None:
|
||||
def start_list_exchanges(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Print available exchanges
|
||||
:param args: Cli args from Arguments()
|
||||
:return: None
|
||||
"""
|
||||
available_exchanges: List[ValidExchangesType] = list_available_exchanges(
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
from freqtrade.exchange import list_available_exchanges
|
||||
|
||||
available_exchanges: list[ValidExchangesType] = list_available_exchanges(
|
||||
args["list_exchanges_all"]
|
||||
)
|
||||
|
||||
|
@ -85,9 +81,13 @@ def start_list_exchanges(args: Dict[str, Any]) -> None:
|
|||
console.print(table)
|
||||
|
||||
|
||||
def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
|
||||
def _print_objs_tabular(objs: list, print_colorized: bool) -> None:
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
names = [s["name"] for s in objs]
|
||||
objs_to_print: List[Dict[str, Union[Text, str]]] = [
|
||||
objs_to_print: list[dict[str, Union[Text, str]]] = [
|
||||
{
|
||||
"name": Text(s["name"] if s["name"] else "--"),
|
||||
"location": s["location_rel"],
|
||||
|
@ -125,10 +125,13 @@ def _print_objs_tabular(objs: List, print_colorized: bool) -> None:
|
|||
console.print(table)
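Reviewer note: _print_objs_tabular above renders its listing with rich, whose imports are now deferred into the function. A small self-contained sketch of the same pattern; the column names and sample row are illustrative, not from the diff.

    from rich.console import Console
    from rich.table import Table
    from rich.text import Text

    objs = [{"name": "SampleStrategy", "location_rel": "user_data/strategies/sample.py"}]

    table = Table(title="Strategies available")  # title is illustrative
    table.add_column("Name")
    table.add_column("Location")
    for s in objs:
        table.add_row(Text(s["name"]), s["location_rel"])

    Console().print(table)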
|
||||
|
||||
|
||||
def start_list_strategies(args: Dict[str, Any]) -> None:
|
||||
def start_list_strategies(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Print files with Strategy custom classes available in the directory
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.resolvers import StrategyResolver
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
strategy_objs = StrategyResolver.search_all_objects(
|
||||
|
@ -148,13 +151,15 @@ def start_list_strategies(args: Dict[str, Any]) -> None:
|
|||
_print_objs_tabular(strategy_objs, config.get("print_colorized", False))
|
||||
|
||||
|
||||
def start_list_freqAI_models(args: Dict[str, Any]) -> None:
|
||||
def start_list_freqAI_models(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Print files with FreqAI models custom classes available in the directory
|
||||
"""
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.resolvers.freqaimodel_resolver import FreqaiModelResolver
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
model_objs = FreqaiModelResolver.search_all_objects(config, not args["print_one_column"])
|
||||
# Sort alphabetically
|
||||
model_objs = sorted(model_objs, key=lambda x: x["name"])
|
||||
|
@ -164,10 +169,31 @@ def start_list_freqAI_models(args: Dict[str, Any]) -> None:
|
|||
_print_objs_tabular(model_objs, config.get("print_colorized", False))
|
||||
|
||||
|
||||
def start_list_timeframes(args: Dict[str, Any]) -> None:
|
||||
def start_list_hyperopt_loss_functions(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Print files with FreqAI models custom classes available in the directory
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
model_objs = HyperOptLossResolver.search_all_objects(config, not args["print_one_column"])
|
||||
# Sort alphabetically
|
||||
model_objs = sorted(model_objs, key=lambda x: x["name"])
|
||||
if args["print_one_column"]:
|
||||
print("\n".join([s["name"] for s in model_objs]))
|
||||
else:
|
||||
_print_objs_tabular(model_objs, config.get("print_colorized", False))
|
||||
|
||||
|
||||
def start_list_timeframes(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Print timeframes available on Exchange
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
|
||||
# Do not use timeframe set in the config
|
||||
config["timeframe"] = None
|
||||
|
@ -184,13 +210,19 @@ def start_list_timeframes(args: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
|
||||
def start_list_markets(args: dict[str, Any], pairs_only: bool = False) -> None:
|
||||
"""
|
||||
Print pairs/markets on the exchange
|
||||
:param args: Cli args from Arguments()
|
||||
:param pairs_only: if True print only pairs, otherwise print all instruments (markets)
|
||||
:return: None
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.exchange import market_is_active
|
||||
from freqtrade.misc import plural
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
from freqtrade.util import print_rich_table
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
|
||||
|
||||
# Init exchange
|
||||
|
@ -281,6 +313,8 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
|
|||
elif args.get("print_one_column", False):
|
||||
print("\n".join(pairs.keys()))
|
||||
elif args.get("list_pairs_print_json", False):
|
||||
import rapidjson
|
||||
|
||||
print(rapidjson.dumps(list(pairs.keys()), default=str))
|
||||
elif args.get("print_csv", False):
|
||||
writer = csv.DictWriter(sys.stdout, fieldnames=headers)
|
||||
|
@ -296,12 +330,14 @@ def start_list_markets(args: Dict[str, Any], pairs_only: bool = False) -> None:
|
|||
print(f"{summary_str}.")
|
||||
|
||||
|
||||
def start_show_trades(args: Dict[str, Any]) -> None:
|
||||
def start_show_trades(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Show trades
|
||||
"""
|
||||
import json
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.misc import parse_db_uri_for_logging
|
||||
from freqtrade.persistence import Trade, init_db
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
|
|
@ -1,23 +1,24 @@
|
|||
import logging
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from freqtrade import constants
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import ConfigurationError, OperationalException
|
||||
from freqtrade.util import fmt_coin
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_optimize_configuration(args: Dict[str, Any], method: RunMode) -> Dict[str, Any]:
|
||||
def setup_optimize_configuration(args: dict[str, Any], method: RunMode) -> dict[str, Any]:
|
||||
"""
|
||||
Prepare the configuration for the Hyperopt module
|
||||
:param args: Cli args from Arguments()
|
||||
:param method: Bot running mode
|
||||
:return: Configuration
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.util import fmt_coin
|
||||
|
||||
config = setup_utils_configuration(args, method)
|
||||
|
||||
no_unlimited_runmodes = {
|
||||
|
@ -41,7 +42,7 @@ def setup_optimize_configuration(args: Dict[str, Any], method: RunMode) -> Dict[
|
|||
return config
|
||||
|
||||
|
||||
def start_backtesting(args: Dict[str, Any]) -> None:
|
||||
def start_backtesting(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Start Backtesting script
|
||||
:param args: Cli args from Arguments()
|
||||
|
@ -60,10 +61,11 @@ def start_backtesting(args: Dict[str, Any]) -> None:
|
|||
backtesting.start()
|
||||
|
||||
|
||||
def start_backtesting_show(args: Dict[str, Any]) -> None:
|
||||
def start_backtesting_show(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Show previous backtest result
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
|
@ -76,7 +78,7 @@ def start_backtesting_show(args: Dict[str, Any]) -> None:
|
|||
show_sorted_pairlist(config, results)
|
||||
|
||||
|
||||
def start_hyperopt(args: Dict[str, Any]) -> None:
|
||||
def start_hyperopt(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Start hyperopt script
|
||||
:param args: Cli args from Arguments()
|
||||
|
@ -121,7 +123,7 @@ def start_hyperopt(args: Dict[str, Any]) -> None:
|
|||
# Same in Edge and Backtesting start() functions.
|
||||
|
||||
|
||||
def start_edge(args: Dict[str, Any]) -> None:
|
||||
def start_edge(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Start Edge script
|
||||
:param args: Cli args from Arguments()
|
||||
|
@ -138,24 +140,26 @@ def start_edge(args: Dict[str, Any]) -> None:
|
|||
edge_cli.start()
|
||||
|
||||
|
||||
def start_lookahead_analysis(args: Dict[str, Any]) -> None:
|
||||
def start_lookahead_analysis(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Start the backtest bias tester script
|
||||
:param args: Cli args from Arguments()
|
||||
:return: None
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.optimize.analysis.lookahead_helpers import LookaheadAnalysisSubFunctions
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
LookaheadAnalysisSubFunctions.start(config)
|
||||
|
||||
|
||||
def start_recursive_analysis(args: Dict[str, Any]) -> None:
|
||||
def start_recursive_analysis(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Start the backtest recursive tester script
|
||||
:param args: Cli args from Arguments()
|
||||
:return: None
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.optimize.analysis.recursive_helpers import RecursiveAnalysisSubFunctions
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
|
|
@ -1,22 +1,22 @@
|
|||
import logging
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
import rapidjson
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def start_test_pairlist(args: Dict[str, Any]) -> None:
|
||||
def start_test_pairlist(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Test Pairlist configuration
|
||||
"""
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.persistence import FtNoDBContext
|
||||
from freqtrade.plugins.pairlistmanager import PairListManager
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)
|
||||
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
|
||||
|
||||
def validate_plot_args(args: Dict[str, Any]) -> None:
|
||||
def validate_plot_args(args: dict[str, Any]) -> None:
|
||||
if not args.get("datadir") and not args.get("config"):
|
||||
raise ConfigurationError(
|
||||
"You need to specify either `--datadir` or `--config` "
|
||||
|
@ -13,11 +12,12 @@ def validate_plot_args(args: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def start_plot_dataframe(args: Dict[str, Any]) -> None:
|
||||
def start_plot_dataframe(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Entrypoint for dataframe plotting
|
||||
"""
|
||||
# Import here to avoid errors if plot-dependencies are not installed.
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.plot.plotting import load_and_plot_trades
|
||||
|
||||
validate_plot_args(args)
|
||||
|
@ -26,11 +26,12 @@ def start_plot_dataframe(args: Dict[str, Any]) -> None:
|
|||
load_and_plot_trades(config)
|
||||
|
||||
|
||||
def start_plot_profit(args: Dict[str, Any]) -> None:
|
||||
def start_plot_profit(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Entrypoint for plot_profit
|
||||
"""
|
||||
# Import here to avoid errors if plot-dependencies are not installed.
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.plot.plotting import plot_profit
|
||||
|
||||
validate_plot_args(args)
|
||||
|
|
|
@ -1,27 +1,22 @@
|
|||
import logging
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.enums import RunMode
|
||||
from freqtrade.resolvers import StrategyResolver
|
||||
from freqtrade.strategy.strategyupdater import StrategyUpdater
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def start_strategy_update(args: Dict[str, Any]) -> None:
|
||||
def start_strategy_update(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Start the strategy updating script
|
||||
:param args: Cli args from Arguments()
|
||||
:return: None
|
||||
"""
|
||||
|
||||
if sys.version_info == (3, 8): # pragma: no cover
|
||||
sys.exit("Freqtrade strategy updater requires Python version >= 3.9")
|
||||
from freqtrade.configuration import setup_utils_configuration
|
||||
from freqtrade.resolvers import StrategyResolver
|
||||
|
||||
config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)
|
||||
|
||||
|
@ -49,6 +44,8 @@ def start_strategy_update(args: Dict[str, Any]) -> None:
|
|||
|
||||
|
||||
def start_conversion(strategy_obj, config):
|
||||
from freqtrade.strategy.strategyupdater import StrategyUpdater
|
||||
|
||||
print(f"Conversion of {Path(strategy_obj['location']).name} started.")
|
||||
instance_strategy_updater = StrategyUpdater()
|
||||
start = time.perf_counter()
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
import logging
|
||||
import signal
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def start_trading(args: Dict[str, Any]) -> int:
|
||||
def start_trading(args: dict[str, Any]) -> int:
|
||||
"""
|
||||
Main entry point for trading mode
|
||||
"""
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.enums import RunMode
|
||||
|
||||
|
||||
def start_webserver(args: Dict[str, Any]) -> None:
|
||||
def start_webserver(args: dict[str, Any]) -> None:
|
||||
"""
|
||||
Main entry point for webserver mode
|
||||
"""
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
# flake8: noqa: F401
|
||||
|
||||
from freqtrade.configuration.asyncio_config import asyncio_setup
|
||||
from freqtrade.configuration.config_secrets import sanitize_config
|
||||
from freqtrade.configuration.config_setup import setup_utils_configuration
|
||||
from freqtrade.configuration.config_validation import validate_config_consistency
|
||||
|
|
|
@ -1,10 +1,8 @@
|
|||
# Required json-schema for user specified config
|
||||
from typing import Dict
|
||||
|
||||
from freqtrade.constants import (
|
||||
AVAILABLE_DATAHANDLERS,
|
||||
AVAILABLE_PAIRLISTS,
|
||||
AVAILABLE_PROTECTIONS,
|
||||
BACKTEST_BREAKDOWNS,
|
||||
DRY_RUN_WALLET,
|
||||
EXPORT_OPTIONS,
|
||||
|
@ -24,7 +22,7 @@ from freqtrade.constants import (
|
|||
from freqtrade.enums import RPCMessageType
|
||||
|
||||
|
||||
__MESSAGE_TYPE_DICT: Dict[str, Dict[str, str]] = {x: {"type": "object"} for x in RPCMessageType}
|
||||
__MESSAGE_TYPE_DICT: dict[str, dict[str, str]] = {x: {"type": "object"} for x in RPCMessageType}
|
||||
|
||||
__IN_STRATEGY = "\nUsually specified in the strategy and missing in the configuration."
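Reviewer note: the comprehension above expands every RPCMessageType member into a permissive "object" sub-schema. Roughly what it evaluates to - the member names below are examples only, the real enum defines more entries.

    # Illustrative expansion of the comprehension (abridged):
    __MESSAGE_TYPE_DICT = {
        "status": {"type": "object"},
        "warning": {"type": "object"},
        "entry": {"type": "object"},
        # ... one entry per RPCMessageType member
    }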
|
||||
|
||||
|
@ -449,60 +447,6 @@ CONF_SCHEMA = {
|
|||
"required": ["method"],
|
||||
},
|
||||
},
|
||||
"protections": {
|
||||
"description": "Configuration for various protections.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"method": {
|
||||
"description": "Method used for the protection.",
|
||||
"type": "string",
|
||||
"enum": AVAILABLE_PROTECTIONS,
|
||||
},
|
||||
"stop_duration": {
|
||||
"description": (
|
||||
"Duration to lock the pair after a protection is triggered, "
|
||||
"in minutes."
|
||||
),
|
||||
"type": "number",
|
||||
"minimum": 0.0,
|
||||
},
|
||||
"stop_duration_candles": {
|
||||
"description": (
|
||||
"Duration to lock the pair after a protection is triggered, in "
|
||||
"number of candles."
|
||||
),
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
},
|
||||
"unlock_at": {
|
||||
"description": (
|
||||
"Time when trading will be unlocked regularly. Format: HH:MM"
|
||||
),
|
||||
"type": "string",
|
||||
},
|
||||
"trade_limit": {
|
||||
"description": "Minimum number of trades required during lookback period.",
|
||||
"type": "number",
|
||||
"minimum": 1,
|
||||
},
|
||||
"lookback_period": {
|
||||
"description": "Period to look back for protection checks, in minutes.",
|
||||
"type": "number",
|
||||
"minimum": 1,
|
||||
},
|
||||
"lookback_period_candles": {
|
||||
"description": (
|
||||
"Period to look back for protection checks, in number " "of candles."
|
||||
),
|
||||
"type": "number",
|
||||
"minimum": 1,
|
||||
},
|
||||
},
|
||||
"required": ["method"],
|
||||
},
|
||||
},
|
||||
# RPC section
|
||||
"telegram": {
|
||||
"description": "Telegram settings.",
|
||||
|
@ -1051,6 +995,13 @@ CONF_SCHEMA = {
|
|||
"type": "string",
|
||||
"default": "example",
|
||||
},
|
||||
"wait_for_training_iteration_on_reload": {
|
||||
"description": (
|
||||
"Wait for the next training iteration to complete after /reload or ctrl+c."
|
||||
),
|
||||
"type": "boolean",
|
||||
"default": True,
|
||||
},
|
||||
"feature_parameters": {
|
||||
"description": "The parameters used to engineer the feature set",
|
||||
"type": "object",
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import logging
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.enums import RunMode
|
||||
|
||||
|
@ -11,8 +11,8 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
def setup_utils_configuration(
|
||||
args: Dict[str, Any], method: RunMode, *, set_dry: bool = True
|
||||
) -> Dict[str, Any]:
|
||||
args: dict[str, Any], method: RunMode, *, set_dry: bool = True
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
Prepare the configuration for utils subcommands
|
||||
:param args: Cli args from Arguments()
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
import logging
|
||||
from collections import Counter
|
||||
from copy import deepcopy
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from jsonschema import Draft4Validator, validators
|
||||
from jsonschema.exceptions import ValidationError, best_match
|
||||
|
@ -44,7 +43,7 @@ def _extend_validator(validator_class):
|
|||
FreqtradeValidator = _extend_validator(Draft4Validator)
|
||||
|
||||
|
||||
def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> Dict[str, Any]:
|
||||
def validate_config_schema(conf: dict[str, Any], preliminary: bool = False) -> dict[str, Any]:
|
||||
"""
|
||||
Validate the configuration follow the Config Schema
|
||||
:param conf: Config in JSON format
|
||||
|
@ -70,7 +69,7 @@ def validate_config_schema(conf: Dict[str, Any], preliminary: bool = False) -> D
|
|||
raise ValidationError(best_match(Draft4Validator(conf_schema).iter_errors(conf)).message)
|
||||
|
||||
|
||||
def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = False) -> None:
|
||||
def validate_config_consistency(conf: dict[str, Any], *, preliminary: bool = False) -> None:
|
||||
"""
|
||||
Validate the configuration consistency.
|
||||
Should be ran after loading both configuration and strategy,
|
||||
|
@ -84,7 +83,6 @@ def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = Fal
|
|||
_validate_price_config(conf)
|
||||
_validate_edge(conf)
|
||||
_validate_whitelist(conf)
|
||||
_validate_protections(conf)
|
||||
_validate_unlimited_amount(conf)
|
||||
_validate_ask_orderbook(conf)
|
||||
_validate_freqai_hyperopt(conf)
|
||||
|
@ -99,7 +97,7 @@ def validate_config_consistency(conf: Dict[str, Any], *, preliminary: bool = Fal
|
|||
validate_config_schema(conf, preliminary=preliminary)
|
||||
|
||||
|
||||
def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
|
||||
def _validate_unlimited_amount(conf: dict[str, Any]) -> None:
|
||||
"""
|
||||
If edge is disabled, either max_open_trades or stake_amount need to be set.
|
||||
:raise: ConfigurationError if config validation failed
|
||||
|
@ -112,7 +110,7 @@ def _validate_unlimited_amount(conf: Dict[str, Any]) -> None:
|
|||
raise ConfigurationError("`max_open_trades` and `stake_amount` cannot both be unlimited.")
|
||||
|
||||
|
||||
def _validate_price_config(conf: Dict[str, Any]) -> None:
|
||||
def _validate_price_config(conf: dict[str, Any]) -> None:
|
||||
"""
|
||||
When using market orders, price sides must be using the "other" side of the price
|
||||
"""
|
||||
|
@ -128,7 +126,7 @@ def _validate_price_config(conf: Dict[str, Any]) -> None:
|
|||
raise ConfigurationError('Market exit orders require exit_pricing.price_side = "other".')
|
||||
|
||||
|
||||
def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
|
||||
def _validate_trailing_stoploss(conf: dict[str, Any]) -> None:
|
||||
if conf.get("stoploss") == 0.0:
|
||||
raise ConfigurationError(
|
||||
"The config stoploss needs to be different from 0 to avoid problems with sell orders."
|
||||
|
@ -161,7 +159,7 @@ def _validate_trailing_stoploss(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _validate_edge(conf: Dict[str, Any]) -> None:
|
||||
def _validate_edge(conf: dict[str, Any]) -> None:
|
||||
"""
|
||||
Edge and Dynamic whitelist should not both be enabled, since edge overrides dynamic whitelists.
|
||||
"""
|
||||
|
@ -175,7 +173,7 @@ def _validate_edge(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _validate_whitelist(conf: Dict[str, Any]) -> None:
|
||||
def _validate_whitelist(conf: dict[str, Any]) -> None:
|
||||
"""
|
||||
Dynamic whitelist does not require pair_whitelist to be set - however StaticWhitelist does.
|
||||
"""
|
||||
|
@ -196,42 +194,7 @@ def _validate_whitelist(conf: Dict[str, Any]) -> None:
|
|||
raise ConfigurationError("StaticPairList requires pair_whitelist to be set.")
|
||||
|
||||
|
||||
def _validate_protections(conf: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Validate protection configuration validity
|
||||
"""
|
||||
|
||||
for prot in conf.get("protections", []):
|
||||
parsed_unlock_at = None
|
||||
if (config_unlock_at := prot.get("unlock_at")) is not None:
|
||||
try:
|
||||
parsed_unlock_at = datetime.strptime(config_unlock_at, "%H:%M")
|
||||
except ValueError:
|
||||
raise ConfigurationError(f"Invalid date format for unlock_at: {config_unlock_at}.")
|
||||
|
||||
if "stop_duration" in prot and "stop_duration_candles" in prot:
|
||||
raise ConfigurationError(
|
||||
"Protections must specify either `stop_duration` or `stop_duration_candles`.\n"
|
||||
f"Please fix the protection {prot.get('method')}."
|
||||
)
|
||||
|
||||
if "lookback_period" in prot and "lookback_period_candles" in prot:
|
||||
raise ConfigurationError(
|
||||
"Protections must specify either `lookback_period` or `lookback_period_candles`.\n"
|
||||
f"Please fix the protection {prot.get('method')}."
|
||||
)
|
||||
|
||||
if parsed_unlock_at is not None and (
|
||||
"stop_duration" in prot or "stop_duration_candles" in prot
|
||||
):
|
||||
raise ConfigurationError(
|
||||
"Protections must specify either `unlock_at`, `stop_duration` or "
|
||||
"`stop_duration_candles`.\n"
|
||||
f"Please fix the protection {prot.get('method')}."
|
||||
)
|
||||
|
||||
|
||||
def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
|
||||
def _validate_ask_orderbook(conf: dict[str, Any]) -> None:
|
||||
ask_strategy = conf.get("exit_pricing", {})
|
||||
ob_min = ask_strategy.get("order_book_min")
|
||||
ob_max = ask_strategy.get("order_book_max")
|
||||
|
@ -251,7 +214,7 @@ def _validate_ask_orderbook(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def validate_migrated_strategy_settings(conf: Dict[str, Any]) -> None:
|
||||
def validate_migrated_strategy_settings(conf: dict[str, Any]) -> None:
|
||||
_validate_time_in_force(conf)
|
||||
_validate_order_types(conf)
|
||||
_validate_unfilledtimeout(conf)
|
||||
|
@ -259,7 +222,7 @@ def validate_migrated_strategy_settings(conf: Dict[str, Any]) -> None:
|
|||
_strategy_settings(conf)
|
||||
|
||||
|
||||
def _validate_time_in_force(conf: Dict[str, Any]) -> None:
|
||||
def _validate_time_in_force(conf: dict[str, Any]) -> None:
|
||||
time_in_force = conf.get("order_time_in_force", {})
|
||||
if "buy" in time_in_force or "sell" in time_in_force:
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
|
@ -280,7 +243,7 @@ def _validate_time_in_force(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _validate_order_types(conf: Dict[str, Any]) -> None:
|
||||
def _validate_order_types(conf: dict[str, Any]) -> None:
|
||||
order_types = conf.get("order_types", {})
|
||||
old_order_types = [
|
||||
"buy",
|
||||
|
@ -315,7 +278,7 @@ def _validate_order_types(conf: Dict[str, Any]) -> None:
|
|||
process_deprecated_setting(conf, "order_types", o, "order_types", n)
|
||||
|
||||
|
||||
def _validate_unfilledtimeout(conf: Dict[str, Any]) -> None:
|
||||
def _validate_unfilledtimeout(conf: dict[str, Any]) -> None:
|
||||
unfilledtimeout = conf.get("unfilledtimeout", {})
|
||||
if any(x in unfilledtimeout for x in ["buy", "sell"]):
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
|
@ -334,7 +297,7 @@ def _validate_unfilledtimeout(conf: Dict[str, Any]) -> None:
|
|||
process_deprecated_setting(conf, "unfilledtimeout", o, "unfilledtimeout", n)
|
||||
|
||||
|
||||
def _validate_pricing_rules(conf: Dict[str, Any]) -> None:
|
||||
def _validate_pricing_rules(conf: dict[str, Any]) -> None:
|
||||
if conf.get("ask_strategy") or conf.get("bid_strategy"):
|
||||
if conf.get("trading_mode", TradingMode.SPOT) != TradingMode.SPOT:
|
||||
raise ConfigurationError("Please migrate your pricing settings to use the new wording.")
|
||||
|
@ -364,7 +327,7 @@ def _validate_pricing_rules(conf: Dict[str, Any]) -> None:
|
|||
del conf["ask_strategy"]
|
||||
|
||||
|
||||
def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
|
||||
def _validate_freqai_hyperopt(conf: dict[str, Any]) -> None:
|
||||
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
|
||||
analyze_per_epoch = conf.get("analyze_per_epoch", False)
|
||||
if analyze_per_epoch and freqai_enabled:
|
||||
|
@ -373,7 +336,7 @@ def _validate_freqai_hyperopt(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool) -> None:
|
||||
def _validate_freqai_include_timeframes(conf: dict[str, Any], preliminary: bool) -> None:
|
||||
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
|
||||
if freqai_enabled:
|
||||
main_tf = conf.get("timeframe", "5m")
|
||||
|
@ -404,7 +367,7 @@ def _validate_freqai_include_timeframes(conf: Dict[str, Any], preliminary: bool)
|
|||
)
|
||||
|
||||
|
||||
def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
|
||||
def _validate_freqai_backtest(conf: dict[str, Any]) -> None:
|
||||
if conf.get("runmode", RunMode.OTHER) == RunMode.BACKTEST:
|
||||
freqai_enabled = conf.get("freqai", {}).get("enabled", False)
|
||||
timerange = conf.get("timerange")
|
||||
|
@ -427,7 +390,7 @@ def _validate_freqai_backtest(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _validate_consumers(conf: Dict[str, Any]) -> None:
|
||||
def _validate_consumers(conf: dict[str, Any]) -> None:
|
||||
emc_conf = conf.get("external_message_consumer", {})
|
||||
if emc_conf.get("enabled", False):
|
||||
if len(emc_conf.get("producers", [])) < 1:
|
||||
|
@ -447,7 +410,7 @@ def _validate_consumers(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _validate_orderflow(conf: Dict[str, Any]) -> None:
|
||||
def _validate_orderflow(conf: dict[str, Any]) -> None:
|
||||
if conf.get("exchange", {}).get("use_public_trades"):
|
||||
if "orderflow" not in conf:
|
||||
raise ConfigurationError(
|
||||
|
@ -455,7 +418,7 @@ def _validate_orderflow(conf: Dict[str, Any]) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _strategy_settings(conf: Dict[str, Any]) -> None:
|
||||
def _strategy_settings(conf: dict[str, Any]) -> None:
|
||||
process_deprecated_setting(conf, None, "use_sell_signal", None, "use_exit_signal")
|
||||
process_deprecated_setting(conf, None, "sell_profit_only", None, "exit_profit_only")
|
||||
process_deprecated_setting(conf, None, "sell_profit_offset", None, "exit_profit_offset")
|
||||
|
|
|
@ -7,7 +7,7 @@ import logging
|
|||
import warnings
|
||||
from copy import deepcopy
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
from freqtrade import constants
|
||||
from freqtrade.configuration.deprecated_settings import process_temporary_deprecated_settings
|
||||
|
@ -37,7 +37,7 @@ class Configuration:
|
|||
Reuse this class for the bot, backtesting, hyperopt and every script that required configuration
|
||||
"""
|
||||
|
||||
def __init__(self, args: Dict[str, Any], runmode: Optional[RunMode] = None) -> None:
|
||||
def __init__(self, args: dict[str, Any], runmode: Optional[RunMode] = None) -> None:
|
||||
self.args = args
|
||||
self.config: Optional[Config] = None
|
||||
self.runmode = runmode
|
||||
|
@ -53,7 +53,7 @@ class Configuration:
|
|||
return self.config
|
||||
|
||||
@staticmethod
|
||||
def from_files(files: List[str]) -> Dict[str, Any]:
|
||||
def from_files(files: list[str]) -> dict[str, Any]:
|
||||
"""
|
||||
Iterate through the config files passed in, loading all of them
|
||||
and merging their contents.
|
||||
|
@ -68,7 +68,7 @@ class Configuration:
|
|||
c = Configuration({"config": files}, RunMode.OTHER)
|
||||
return c.get_config()
|
||||
|
||||
def load_config(self) -> Dict[str, Any]:
|
||||
def load_config(self) -> dict[str, Any]:
|
||||
"""
|
||||
Extract information for sys.argv and load the bot configuration
|
||||
:return: Configuration dictionary
|
||||
|
@ -421,7 +421,7 @@ class Configuration:
|
|||
]
|
||||
self._args_to_config_loop(config, configurations)
|
||||
|
||||
def _args_to_config_loop(self, config, configurations: List[Tuple[str, str]]) -> None:
|
||||
def _args_to_config_loop(self, config, configurations: list[tuple[str, str]]) -> None:
|
||||
for argname, logstring in configurations:
|
||||
self._args_to_config(config, argname=argname, logstring=logstring)
|
||||
|
||||
|
|
freqtrade/configuration/deploy_config.py (new file, 250 lines)
@@ -0,0 +1,250 @@
|
|||
import logging
|
||||
import secrets
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from questionary import Separator, prompt
|
||||
|
||||
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT
|
||||
from freqtrade.exceptions import OperationalException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def validate_is_int(val):
|
||||
try:
|
||||
_ = int(val)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def validate_is_float(val):
|
||||
try:
|
||||
_ = float(val)
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def ask_user_overwrite(config_path: Path) -> bool:
|
||||
questions = [
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "overwrite",
|
||||
"message": f"File {config_path} already exists. Overwrite?",
|
||||
"default": False,
|
||||
},
|
||||
]
|
||||
answers = prompt(questions)
|
||||
return answers["overwrite"]
|
||||
|
||||
|
||||
def ask_user_config() -> dict[str, Any]:
|
||||
"""
|
||||
Ask user a few questions to build the configuration.
|
||||
Interactive questions built using https://github.com/tmbo/questionary
|
||||
:returns: Dict with keys to put into template
|
||||
"""
|
||||
|
||||
from freqtrade.configuration.detect_environment import running_in_docker
|
||||
from freqtrade.exchange import available_exchanges
|
||||
|
||||
questions: list[dict[str, Any]] = [
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "dry_run",
|
||||
"message": "Do you want to enable Dry-run (simulated trades)?",
|
||||
"default": True,
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "stake_currency",
|
||||
"message": "Please insert your stake currency:",
|
||||
"default": "USDT",
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "stake_amount",
|
||||
"message": f"Please insert your stake amount (Number or '{UNLIMITED_STAKE_AMOUNT}'):",
|
||||
"default": "unlimited",
|
||||
"validate": lambda val: val == UNLIMITED_STAKE_AMOUNT or validate_is_float(val),
|
||||
"filter": lambda val: (
|
||||
'"' + UNLIMITED_STAKE_AMOUNT + '"' if val == UNLIMITED_STAKE_AMOUNT else val
|
||||
),
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "max_open_trades",
|
||||
"message": "Please insert max_open_trades (Integer or -1 for unlimited open trades):",
|
||||
"default": "3",
|
||||
"validate": lambda val: validate_is_int(val),
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"name": "timeframe_in_config",
|
||||
"message": "Time",
|
||||
"choices": ["Have the strategy define timeframe.", "Override in configuration."],
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "timeframe",
|
||||
"message": "Please insert your desired timeframe (e.g. 5m):",
|
||||
"default": "5m",
|
||||
"when": lambda x: x["timeframe_in_config"] == "Override in configuration.",
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "fiat_display_currency",
|
||||
"message": (
|
||||
"Please insert your display Currency for reporting "
|
||||
"(leave empty to disable FIAT conversion):"
|
||||
),
|
||||
"default": "USD",
|
||||
},
|
||||
{
|
||||
"type": "select",
|
||||
"name": "exchange_name",
|
||||
"message": "Select exchange",
|
||||
"choices": [
|
||||
"binance",
|
||||
"binanceus",
|
||||
"bingx",
|
||||
"gate",
|
||||
"htx",
|
||||
"kraken",
|
||||
"kucoin",
|
||||
"okx",
|
||||
Separator("------------------"),
|
||||
"other",
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "trading_mode",
|
||||
"message": "Do you want to trade Perpetual Swaps (perpetual futures)?",
|
||||
"default": False,
|
||||
"filter": lambda val: "futures" if val else "spot",
|
||||
"when": lambda x: x["exchange_name"] in ["binance", "gate", "okx", "bybit"],
|
||||
},
|
||||
{
|
||||
"type": "autocomplete",
|
||||
"name": "exchange_name",
|
||||
"message": "Type your exchange name (Must be supported by ccxt)",
|
||||
"choices": available_exchanges(),
|
||||
"when": lambda x: x["exchange_name"] == "other",
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_key",
|
||||
"message": "Insert Exchange Key",
|
||||
"when": lambda x: not x["dry_run"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_secret",
|
||||
"message": "Insert Exchange Secret",
|
||||
"when": lambda x: not x["dry_run"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "exchange_key_password",
|
||||
"message": "Insert Exchange API Key password",
|
||||
"when": lambda x: not x["dry_run"] and x["exchange_name"] in ("kucoin", "okx"),
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "telegram",
|
||||
"message": "Do you want to enable Telegram?",
|
||||
"default": False,
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "telegram_token",
|
||||
"message": "Insert Telegram token",
|
||||
"when": lambda x: x["telegram"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "telegram_chat_id",
|
||||
"message": "Insert Telegram chat id",
|
||||
"when": lambda x: x["telegram"],
|
||||
},
|
||||
{
|
||||
"type": "confirm",
|
||||
"name": "api_server",
|
||||
"message": "Do you want to enable the Rest API (includes FreqUI)?",
|
||||
"default": False,
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "api_server_listen_addr",
|
||||
"message": (
|
||||
"Insert Api server Listen Address (0.0.0.0 for docker, "
|
||||
"otherwise best left untouched)"
|
||||
),
|
||||
"default": "127.0.0.1" if not running_in_docker() else "0.0.0.0", # noqa: S104
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"name": "api_server_username",
|
||||
"message": "Insert api-server username",
|
||||
"default": "freqtrader",
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
{
|
||||
"type": "password",
|
||||
"name": "api_server_password",
|
||||
"message": "Insert api-server password",
|
||||
"when": lambda x: x["api_server"],
|
||||
},
|
||||
]
|
||||
answers = prompt(questions)
|
||||
|
||||
if not answers:
|
||||
# Interrupted questionary sessions return an empty dict.
|
||||
raise OperationalException("User interrupted interactive questions.")
|
||||
# Ensure default is set for non-futures exchanges
|
||||
answers["trading_mode"] = answers.get("trading_mode", "spot")
|
||||
answers["margin_mode"] = "isolated" if answers.get("trading_mode") == "futures" else ""
|
||||
# Force JWT token to be a random string
|
||||
answers["api_server_jwt_key"] = secrets.token_hex()
|
||||
answers["api_server_ws_token"] = secrets.token_urlsafe(25)
|
||||
|
||||
return answers
|
||||
|
||||
|
||||
def deploy_new_config(config_path: Path, selections: dict[str, Any]) -> None:
|
||||
"""
|
||||
Applies selections to the template and writes the result to config_path
|
||||
:param config_path: Path object for new config file. Should not exist yet
|
||||
:param selections: Dict containing selections taken by the user.
|
||||
"""
|
||||
from jinja2.exceptions import TemplateNotFound
|
||||
|
||||
from freqtrade.exchange import MAP_EXCHANGE_CHILDCLASS
|
||||
from freqtrade.util import render_template
|
||||
|
||||
try:
|
||||
exchange_template = MAP_EXCHANGE_CHILDCLASS.get(
|
||||
selections["exchange_name"], selections["exchange_name"]
|
||||
)
|
||||
|
||||
selections["exchange"] = render_template(
|
||||
templatefile=f"subtemplates/exchange_{exchange_template}.j2", arguments=selections
|
||||
)
|
||||
except TemplateNotFound:
|
||||
selections["exchange"] = render_template(
|
||||
templatefile="subtemplates/exchange_generic.j2", arguments=selections
|
||||
)
|
||||
|
||||
config_text = render_template(templatefile="base_config.json.j2", arguments=selections)
|
||||
|
||||
logger.info(f"Writing config to `{config_path}`.")
|
||||
logger.info(
|
||||
"Please make sure to check the configuration contents and adjust settings to your needs."
|
||||
)
|
||||
|
||||
config_path.write_text(config_text)
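Reviewer note: the new deploy_config.py module above carries the interactive helpers behind config creation. A hedged sketch of how the command layer is expected to drive them - the target path and the abort handling are illustrative, the three function names and signatures come from the file above.

    from pathlib import Path

    from freqtrade.configuration.deploy_config import (
        ask_user_config,
        ask_user_overwrite,
        deploy_new_config,
    )

    config_path = Path("user_data/config.json")  # illustrative target path
    if config_path.exists() and not ask_user_overwrite(config_path):
        raise SystemExit("Aborted: config exists and overwrite was declined.")

    selections = ask_user_config()               # interactive questionary session
    deploy_new_config(config_path, selections)   # renders base_config.json.j2 with the answers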
|
|
@ -177,4 +177,6 @@ def process_temporary_deprecated_settings(config: Config) -> None:
|
|||
)
|
||||
|
||||
if "protections" in config:
|
||||
logger.warning("DEPRECATED: Setting 'protections' in the configuration is deprecated.")
|
||||
raise ConfigurationError(
|
||||
"DEPRECATED: Setting 'protections' in the configuration is deprecated."
|
||||
)
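Reviewer note: since a "protections" key in the configuration now raises a ConfigurationError (and its schema and validation blocks are removed above), the equivalent settings belong in the strategy. A hedged migration sketch - the protection methods exist in AVAILABLE_PROTECTIONS above, but the parameter values are illustrative.

    from freqtrade.strategy import IStrategy


    class MyStrategy(IStrategy):
        # Moved out of the configuration file; values are illustrative only.
        protections = [
            {"method": "CooldownPeriod", "stop_duration_candles": 5},
            {
                "method": "MaxDrawdown",
                "lookback_period_candles": 48,
                "trade_limit": 20,
                "stop_duration_candles": 4,
            },
        ]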
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
import os
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.constants import ENV_VAR_PREFIX
|
||||
from freqtrade.misc import deep_merge_dicts
|
||||
|
@ -24,7 +24,7 @@ def _get_var_typed(val):
|
|||
return val
|
||||
|
||||
|
||||
def _flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str, Any]:
|
||||
def _flat_vars_to_nested_dict(env_dict: dict[str, Any], prefix: str) -> dict[str, Any]:
|
||||
"""
|
||||
Environment variables must be prefixed with FREQTRADE.
|
||||
FREQTRADE__{section}__{key}
|
||||
|
@ -33,7 +33,7 @@ def _flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str
|
|||
:return: Nested dict based on available and relevant variables.
|
||||
"""
|
||||
no_convert = ["CHAT_ID", "PASSWORD"]
|
||||
relevant_vars: Dict[str, Any] = {}
|
||||
relevant_vars: dict[str, Any] = {}
|
||||
|
||||
for env_var, val in sorted(env_dict.items()):
|
||||
if env_var.startswith(prefix):
|
||||
|
@ -51,7 +51,7 @@ def _flat_vars_to_nested_dict(env_dict: Dict[str, Any], prefix: str) -> Dict[str
|
|||
return relevant_vars
|
||||
|
||||
|
||||
def enironment_vars_to_dict() -> Dict[str, Any]:
|
||||
def enironment_vars_to_dict() -> dict[str, Any]:
|
||||
"""
|
||||
Read environment variables and return a nested dict for relevant variables
|
||||
Relevant variables must follow the FREQTRADE__{section}__{key} pattern
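Reviewer note: to illustrate the FREQTRADE__{section}__{key} convention handled by _flat_vars_to_nested_dict above, roughly how a few variables map into the nested dict (variable names and values are made up; numeric strings are converted by _get_var_typed, keys are lower-cased).

    # Environment:
    #   FREQTRADE__STAKE_AMOUNT=200
    #   FREQTRADE__EXCHANGE__NAME=binance
    #   FREQTRADE__EXCHANGE__KEY=abc123
    #
    # Expected nested result (roughly):
    {
        "stake_amount": 200,
        "exchange": {
            "name": "binance",
            "key": "abc123",
        },
    }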
|
||||
|
|
|
@ -7,7 +7,7 @@ import re
|
|||
import sys
|
||||
from copy import deepcopy
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Any, Optional
|
||||
|
||||
import rapidjson
|
||||
|
||||
|
@ -42,7 +42,7 @@ def log_config_error_range(path: str, errmsg: str) -> str:
|
|||
return ""
|
||||
|
||||
|
||||
def load_file(path: Path) -> Dict[str, Any]:
|
||||
def load_file(path: Path) -> dict[str, Any]:
|
||||
try:
|
||||
with path.open("r") as file:
|
||||
config = rapidjson.load(file, parse_mode=CONFIG_PARSE_MODE)
|
||||
|
@ -51,7 +51,7 @@ def load_file(path: Path) -> Dict[str, Any]:
|
|||
return config
|
||||
|
||||
|
||||
def load_config_file(path: str) -> Dict[str, Any]:
|
||||
def load_config_file(path: str) -> dict[str, Any]:
|
||||
"""
|
||||
Loads a config file from the given path
|
||||
:param path: path as str
|
||||
|
@ -78,8 +78,8 @@ def load_config_file(path: str) -> Dict[str, Any]:
|
|||
|
||||
|
||||
def load_from_files(
|
||||
files: List[str], base_path: Optional[Path] = None, level: int = 0
|
||||
) -> Dict[str, Any]:
|
||||
files: list[str], base_path: Optional[Path] = None, level: int = 0
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
Recursively load configuration files if specified.
|
||||
Sub-files are assumed to be relative to the initial config.
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
bot constants
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List, Literal, Optional, Tuple
|
||||
from typing import Any, Literal, Optional
|
||||
|
||||
from freqtrade.enums import CandleType, PriceType
|
||||
|
||||
|
@ -57,7 +57,6 @@ AVAILABLE_PAIRLISTS = [
|
|||
"SpreadFilter",
|
||||
"VolatilityFilter",
|
||||
]
|
||||
AVAILABLE_PROTECTIONS = ["CooldownPeriod", "LowProfitPairs", "MaxDrawdown", "StoplossGuard"]
|
||||
AVAILABLE_DATAHANDLERS = ["json", "jsongz", "hdf5", "feather", "parquet"]
|
||||
BACKTEST_BREAKDOWNS = ["day", "week", "month"]
|
||||
BACKTEST_CACHE_AGE = ["none", "day", "week", "month"]
|
||||
|
@ -188,14 +187,14 @@ CANCEL_REASON = {
|
|||
}
|
||||
|
||||
# List of pairs with their timeframes
|
||||
PairWithTimeframe = Tuple[str, str, CandleType]
|
||||
ListPairsWithTimeframes = List[PairWithTimeframe]
|
||||
PairWithTimeframe = tuple[str, str, CandleType]
|
||||
ListPairsWithTimeframes = list[PairWithTimeframe]
|
||||
|
||||
# Type for trades list
|
||||
TradeList = List[List]
|
||||
TradeList = list[list]
|
||||
# ticks, pair, timeframe, CandleType
|
||||
TickWithTimeframe = Tuple[str, str, CandleType, Optional[int], Optional[int]]
|
||||
ListTicksWithTimeframes = List[TickWithTimeframe]
|
||||
TickWithTimeframe = tuple[str, str, CandleType, Optional[int], Optional[int]]
|
||||
ListTicksWithTimeframes = list[TickWithTimeframe]
|
||||
|
||||
LongShort = Literal["long", "short"]
|
||||
EntryExit = Literal["entry", "exit"]
|
||||
|
@ -204,9 +203,9 @@ MakerTaker = Literal["maker", "taker"]
|
|||
BidAsk = Literal["bid", "ask"]
|
||||
OBLiteral = Literal["asks", "bids"]
|
||||
|
||||
Config = Dict[str, Any]
|
||||
Config = dict[str, Any]
|
||||
# Exchange part of the configuration.
|
||||
ExchangeConfig = Dict[str, Any]
|
||||
ExchangeConfig = dict[str, Any]
|
||||
IntOrInf = float
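Reviewer note: with the aliases above now built on the built-in generics, a hedged usage sketch; the helper function and config keys are illustrative, the alias and enum names come from the code above.

    from freqtrade.constants import Config, ListPairsWithTimeframes
    from freqtrade.enums import CandleType


    def pairs_to_refresh(config: Config) -> ListPairsWithTimeframes:
        # Illustrative helper: each entry is (pair, timeframe, candle type).
        return [
            (pair, config.get("timeframe", "5m"), CandleType.SPOT)
            for pair in config.get("exchange", {}).get("pair_whitelist", [])
        ]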
|
||||
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ import logging
|
|||
from copy import copy
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Literal, Optional, Union
|
||||
from typing import Any, Literal, Optional, Union
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
@ -137,7 +137,7 @@ def get_latest_hyperopt_file(
|
|||
return directory / get_latest_hyperopt_filename(directory)
|
||||
|
||||
|
||||
def load_backtest_metadata(filename: Union[Path, str]) -> Dict[str, Any]:
|
||||
def load_backtest_metadata(filename: Union[Path, str]) -> dict[str, Any]:
|
||||
"""
|
||||
Read metadata dictionary from backtest results file without reading and deserializing entire
|
||||
file.
|
||||
|
@ -176,7 +176,7 @@ def load_backtest_stats(filename: Union[Path, str]) -> BacktestResultType:
|
|||
return data
|
||||
|
||||
|
||||
def load_and_merge_backtest_result(strategy_name: str, filename: Path, results: Dict[str, Any]):
|
||||
def load_and_merge_backtest_result(strategy_name: str, filename: Path, results: dict[str, Any]):
|
||||
"""
|
||||
Load one strategy from multi-strategy result and merge it with results
|
||||
:param strategy_name: Name of the strategy contained in the result
|
||||
|
@ -195,12 +195,12 @@ def load_and_merge_backtest_result(strategy_name: str, filename: Path, results:
|
|||
break
|
||||
|
||||
|
||||
def _get_backtest_files(dirname: Path) -> List[Path]:
|
||||
def _get_backtest_files(dirname: Path) -> list[Path]:
|
||||
# Weird glob expression here avoids including .meta.json files.
|
||||
return list(reversed(sorted(dirname.glob("backtest-result-*-[0-9][0-9].json"))))
|
||||
|
||||
|
||||
def _extract_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
|
||||
def _extract_backtest_result(filename: Path) -> list[BacktestHistoryEntryType]:
|
||||
metadata = load_backtest_metadata(filename)
|
||||
return [
|
||||
{
|
||||
|
@ -220,14 +220,14 @@ def _extract_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
|
|||
]
|
||||
|
||||
|
||||
def get_backtest_result(filename: Path) -> List[BacktestHistoryEntryType]:
|
||||
def get_backtest_result(filename: Path) -> list[BacktestHistoryEntryType]:
|
||||
"""
|
||||
Get backtest result read from metadata file
|
||||
"""
|
||||
return _extract_backtest_result(filename)
|
||||
|
||||
|
||||
def get_backtest_resultlist(dirname: Path) -> List[BacktestHistoryEntryType]:
|
||||
def get_backtest_resultlist(dirname: Path) -> list[BacktestHistoryEntryType]:
|
||||
"""
|
||||
Get list of backtest results read from metadata files
|
||||
"""
|
||||
|
@ -244,12 +244,13 @@ def delete_backtest_result(file_abs: Path):
|
|||
"""
|
||||
# *.meta.json
|
||||
logger.info(f"Deleting backtest result file: {file_abs.name}")
|
||||
file_abs_meta = file_abs.with_suffix(".meta.json")
|
||||
file_abs.unlink()
|
||||
file_abs_meta.unlink()
|
||||
|
||||
for file in file_abs.parent.glob(f"{file_abs.stem}*"):
|
||||
logger.info(f"Deleting file: {file}")
|
||||
file.unlink()
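Reviewer note: the change above replaces two hard-coded unlink calls with a glob over everything sharing the result's stem, so sidecar files are removed too. A small illustration of what that pattern matches; the filenames are examples only.

    from pathlib import Path

    file_abs = Path("backtest_results/backtest-result-2024-10-26_12-00-00.json")
    # f"{file_abs.stem}*" matches the result itself plus any sidecar files, e.g.
    #   backtest-result-2024-10-26_12-00-00.json
    #   backtest-result-2024-10-26_12-00-00.meta.json
    for file in file_abs.parent.glob(f"{file_abs.stem}*"):
        file.unlink()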
|
||||
|
||||
|
||||
def update_backtest_metadata(filename: Path, strategy: str, content: Dict[str, Any]):
|
||||
def update_backtest_metadata(filename: Path, strategy: str, content: dict[str, Any]):
|
||||
"""
|
||||
Updates backtest metadata file with new content.
|
||||
:raises: ValueError if metadata file does not exist, or strategy is not in this file.
|
||||
|
@ -275,8 +276,8 @@ def get_backtest_market_change(filename: Path, include_ts: bool = True) -> pd.Da
|
|||
|
||||
|
||||
def find_existing_backtest_stats(
|
||||
dirname: Union[Path, str], run_ids: Dict[str, str], min_backtest_date: Optional[datetime] = None
|
||||
) -> Dict[str, Any]:
|
||||
dirname: Union[Path, str], run_ids: dict[str, str], min_backtest_date: Optional[datetime] = None
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
Find existing backtest stats that match specified run IDs and load them.
|
||||
:param dirname: pathlib.Path object, or string pointing to the file.
|
||||
|
@ -287,7 +288,7 @@ def find_existing_backtest_stats(
|
|||
# Copy so we can modify this dict without affecting parent scope.
|
||||
run_ids = copy(run_ids)
|
||||
dirname = Path(dirname)
|
||||
results: Dict[str, Any] = {
|
||||
results: dict[str, Any] = {
|
||||
"metadata": {},
|
||||
"strategy": {},
|
||||
"strategy_comparison": [],
|
||||
|
@ -438,7 +439,7 @@ def evaluate_result_multi(
|
|||
return df_final[df_final["open_trades"] > max_open_trades]
|
||||
|
||||
|
||||
def trade_list_to_dataframe(trades: Union[List[Trade], List[LocalTrade]]) -> pd.DataFrame:
|
||||
def trade_list_to_dataframe(trades: Union[list[Trade], list[LocalTrade]]) -> pd.DataFrame:
|
||||
"""
|
||||
Convert list of Trade objects to pandas Dataframe
|
||||
:param trades: List of trade objects
|
||||
|
|
|
@@ -3,7 +3,6 @@ Functions to convert data from one format to another
"""

import logging
-from typing import Dict

import numpy as np
import pandas as pd

@@ -158,8 +157,8 @@ def trim_dataframe(


def trim_dataframes(
-    preprocessed: Dict[str, DataFrame], timerange, startup_candles: int
-) -> Dict[str, DataFrame]:
+    preprocessed: dict[str, DataFrame], timerange, startup_candles: int
+) -> dict[str, DataFrame]:
    """
    Trim startup period from analyzed dataframes
    :param preprocessed: Dict of pair: dataframe

@@ -167,7 +166,7 @@ def trim_dataframes(
    :param startup_candles: Startup-candles that should be removed
    :return: Dict of trimmed dataframes
    """
-    processed: Dict[str, DataFrame] = {}
+    processed: dict[str, DataFrame] = {}

    for pair, df in preprocessed.items():
        trimed_df = trim_dataframe(df, timerange, startup_candles=startup_candles)
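`trim_dataframes` strips the indicator warm-up period from every pair's analyzed dataframe so backtests only ever see fully-formed indicator values. A rough sketch of the idea, assuming OHLCV frames with a `date` column and a plain start/stop window instead of freqtrade's `TimeRange` object:

```python
# Illustrative only - mirrors the intent of trim_dataframes(), not its implementation.
import pandas as pd


def trim_startup(
    preprocessed: dict[str, pd.DataFrame],
    start: pd.Timestamp,
    stop: pd.Timestamp,
    startup_candles: int,
) -> dict[str, pd.DataFrame]:
    trimmed: dict[str, pd.DataFrame] = {}
    for pair, df in preprocessed.items():
        df = df.iloc[startup_candles:]  # drop candles that only seeded the indicators
        df = df[(df["date"] >= start) & (df["date"] <= stop)]
        if not df.empty:
            trimmed[pair] = df
    return trimmed
```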
@@ -7,7 +7,6 @@ import time
import typing
from collections import OrderedDict
from datetime import datetime
-from typing import Tuple

import numpy as np
import pandas as pd

@@ -62,11 +61,11 @@ def _calculate_ohlcv_candle_start_and_end(df: pd.DataFrame, timeframe: str):


def populate_dataframe_with_trades(
-    cached_grouped_trades: OrderedDict[Tuple[datetime, datetime], pd.DataFrame],
+    cached_grouped_trades: OrderedDict[tuple[datetime, datetime], pd.DataFrame],
    config: Config,
    dataframe: pd.DataFrame,
    trades: pd.DataFrame,
-) -> Tuple[pd.DataFrame, OrderedDict[Tuple[datetime, datetime], pd.DataFrame]]:
+) -> tuple[pd.DataFrame, OrderedDict[tuple[datetime, datetime], pd.DataFrame]]:
    """
    Populates a dataframe with trades
    :param dataframe: Dataframe to populate
@@ -4,7 +4,6 @@ Functions to convert data from one format to another

import logging
from pathlib import Path
-from typing import Dict, List

import pandas as pd
from pandas import DataFrame, to_datetime

@@ -34,7 +33,7 @@ def trades_df_remove_duplicates(trades: pd.DataFrame) -> pd.DataFrame:
    return trades.drop_duplicates(subset=["timestamp", "id"])


-def trades_dict_to_list(trades: List[Dict]) -> TradeList:
+def trades_dict_to_list(trades: list[dict]) -> TradeList:
    """
    Convert fetch_trades result into a List (to be more memory efficient).
    :param trades: List of trades, as returned by ccxt.fetch_trades.

@@ -91,8 +90,8 @@ def trades_to_ohlcv(trades: DataFrame, timeframe: str) -> DataFrame:


def convert_trades_to_ohlcv(
-    pairs: List[str],
-    timeframes: List[str],
+    pairs: list[str],
+    timeframes: list[str],
    datadir: Path,
    timerange: TimeRange,
    erase: bool,
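`trades_dict_to_list` exists purely to save memory: ccxt returns every public trade as a dict, and the converter keeps only the columns freqtrade needs as a plain list per trade. A hedged sketch of that idea; the column selection below is an assumption for illustration (the real order comes from `DEFAULT_TRADES_COLUMNS`):

```python
# Sketch: shrink ccxt-style trade dicts into compact lists, one list per trade.
COLUMNS = ["timestamp", "id", "type", "side", "price", "amount", "cost"]  # assumed subset


def compact_trades(trades: list[dict]) -> list[list]:
    return [[trade.get(col) for col in COLUMNS] for trade in trades]


sample = [{"timestamp": 1700000000000, "id": "42", "type": None, "side": "buy",
           "price": 35000.0, "amount": 0.01, "cost": 350.0}]
print(compact_trades(sample))
```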
@ -8,7 +8,7 @@ Common Interface for bot and strategy to access data.
|
|||
import logging
|
||||
from collections import deque
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Optional
|
||||
|
||||
from pandas import DataFrame, Timedelta, Timestamp, to_timedelta
|
||||
|
||||
|
@ -48,15 +48,15 @@ class DataProvider:
|
|||
self._exchange = exchange
|
||||
self._pairlists = pairlists
|
||||
self.__rpc = rpc
|
||||
self.__cached_pairs: Dict[PairWithTimeframe, Tuple[DataFrame, datetime]] = {}
|
||||
self.__cached_pairs: dict[PairWithTimeframe, tuple[DataFrame, datetime]] = {}
|
||||
self.__slice_index: Optional[int] = None
|
||||
self.__slice_date: Optional[datetime] = None
|
||||
|
||||
self.__cached_pairs_backtesting: Dict[PairWithTimeframe, DataFrame] = {}
|
||||
self.__producer_pairs_df: Dict[
|
||||
str, Dict[PairWithTimeframe, Tuple[DataFrame, datetime]]
|
||||
self.__cached_pairs_backtesting: dict[PairWithTimeframe, DataFrame] = {}
|
||||
self.__producer_pairs_df: dict[
|
||||
str, dict[PairWithTimeframe, tuple[DataFrame, datetime]]
|
||||
] = {}
|
||||
self.__producer_pairs: Dict[str, List[str]] = {}
|
||||
self.__producer_pairs: dict[str, list[str]] = {}
|
||||
self._msg_queue: deque = deque()
|
||||
|
||||
self._default_candle_type = self._config.get("candle_type_def", CandleType.SPOT)
|
||||
|
@ -101,7 +101,7 @@ class DataProvider:
|
|||
self.__cached_pairs[pair_key] = (dataframe, datetime.now(timezone.utc))
|
||||
|
||||
# For multiple producers we will want to merge the pairlists instead of overwriting
|
||||
def _set_producer_pairs(self, pairlist: List[str], producer_name: str = "default"):
|
||||
def _set_producer_pairs(self, pairlist: list[str], producer_name: str = "default"):
|
||||
"""
|
||||
Set the pairs received to later be used.
|
||||
|
||||
|
@ -109,7 +109,7 @@ class DataProvider:
|
|||
"""
|
||||
self.__producer_pairs[producer_name] = pairlist
|
||||
|
||||
def get_producer_pairs(self, producer_name: str = "default") -> List[str]:
|
||||
def get_producer_pairs(self, producer_name: str = "default") -> list[str]:
|
||||
"""
|
||||
Get the pairs cached from the producer
|
||||
|
||||
|
@ -177,7 +177,7 @@ class DataProvider:
|
|||
timeframe: str,
|
||||
candle_type: CandleType,
|
||||
producer_name: str = "default",
|
||||
) -> Tuple[bool, int]:
|
||||
) -> tuple[bool, int]:
|
||||
"""
|
||||
Append a candle to the existing external dataframe. The incoming dataframe
|
||||
must have at least 1 candle.
|
||||
|
@ -258,7 +258,7 @@ class DataProvider:
|
|||
timeframe: Optional[str] = None,
|
||||
candle_type: Optional[CandleType] = None,
|
||||
producer_name: str = "default",
|
||||
) -> Tuple[DataFrame, datetime]:
|
||||
) -> tuple[DataFrame, datetime]:
|
||||
"""
|
||||
Get the pair data from producers.
|
||||
|
||||
|
@ -377,7 +377,7 @@ class DataProvider:
|
|||
logger.warning(f"No data found for ({pair}, {timeframe}, {candle_type}).")
|
||||
return data
|
||||
|
||||
def get_analyzed_dataframe(self, pair: str, timeframe: str) -> Tuple[DataFrame, datetime]:
|
||||
def get_analyzed_dataframe(self, pair: str, timeframe: str) -> tuple[DataFrame, datetime]:
|
||||
"""
|
||||
Retrieve the analyzed dataframe. Returns the full dataframe in trade mode (live / dry),
|
||||
and the last 1000 candles (up to the time evaluated at this moment) in all other modes.
|
||||
|
@ -408,7 +408,7 @@ class DataProvider:
|
|||
"""
|
||||
return RunMode(self._config.get("runmode", RunMode.OTHER))
|
||||
|
||||
def current_whitelist(self) -> List[str]:
|
||||
def current_whitelist(self) -> list[str]:
|
||||
"""
|
||||
fetch latest available whitelist.
|
||||
|
||||
|
@ -529,7 +529,7 @@ class DataProvider:
|
|||
)
|
||||
return trades_df
|
||||
|
||||
def market(self, pair: str) -> Optional[Dict[str, Any]]:
|
||||
def market(self, pair: str) -> Optional[dict[str, Any]]:
|
||||
"""
|
||||
Return market data for the pair
|
||||
:param pair: Pair to get the data for
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Dict, List
|
||||
|
||||
import joblib
|
||||
import pandas as pd
|
||||
|
@ -48,14 +47,14 @@ def _load_signal_candles(backtest_dir: Path):
|
|||
return _load_backtest_analysis_data(backtest_dir, "signals")
|
||||
|
||||
|
||||
def _load_exit_signal_candles(backtest_dir: Path) -> Dict[str, Dict[str, pd.DataFrame]]:
|
||||
def _load_exit_signal_candles(backtest_dir: Path) -> dict[str, dict[str, pd.DataFrame]]:
|
||||
return _load_backtest_analysis_data(backtest_dir, "exited")
|
||||
|
||||
|
||||
def _process_candles_and_indicators(
|
||||
pairlist, strategy_name, trades, signal_candles, date_col: str = "open_date"
|
||||
):
|
||||
analysed_trades_dict: Dict[str, Dict] = {strategy_name: {}}
|
||||
analysed_trades_dict: dict[str, dict] = {strategy_name: {}}
|
||||
|
||||
try:
|
||||
logger.info(f"Processing {strategy_name} : {len(pairlist)} pairs")
|
||||
|
@ -261,8 +260,8 @@ def prepare_results(
|
|||
def print_results(
|
||||
res_df: pd.DataFrame,
|
||||
exit_df: pd.DataFrame,
|
||||
analysis_groups: List[str],
|
||||
indicator_list: List[str],
|
||||
analysis_groups: list[str],
|
||||
indicator_list: list[str],
|
||||
entry_only: bool,
|
||||
exit_only: bool,
|
||||
csv_path: Path,
|
||||
|
@ -307,7 +306,7 @@ def print_results(
|
|||
def _merge_dfs(
|
||||
entry_df: pd.DataFrame,
|
||||
exit_df: pd.DataFrame,
|
||||
available_inds: List[str],
|
||||
available_inds: list[str],
|
||||
entry_only: bool,
|
||||
exit_only: bool,
|
||||
):
|
||||
|
@ -438,7 +437,7 @@ def _generate_dfs(
|
|||
pairlist: list,
|
||||
enter_reason_list: list,
|
||||
exit_reason_list: list,
|
||||
signal_candles: Dict,
|
||||
signal_candles: dict,
|
||||
strategy_name: str,
|
||||
timerange: TimeRange,
|
||||
trades: pd.DataFrame,
|
||||
|
|
|
@ -10,7 +10,7 @@ from abc import ABC, abstractmethod
|
|||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple, Type
|
||||
from typing import Optional
|
||||
|
||||
from pandas import DataFrame, to_datetime
|
||||
|
||||
|
@ -71,7 +71,7 @@ class IDataHandler(ABC):
|
|||
]
|
||||
|
||||
@classmethod
|
||||
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> List[str]:
|
||||
def ohlcv_get_pairs(cls, datadir: Path, timeframe: str, candle_type: CandleType) -> list[str]:
|
||||
"""
|
||||
Returns a list of all pairs with ohlcv data available in this datadir
|
||||
for the specified timeframe
|
||||
|
@ -107,7 +107,7 @@ class IDataHandler(ABC):
|
|||
|
||||
def ohlcv_data_min_max(
|
||||
self, pair: str, timeframe: str, candle_type: CandleType
|
||||
) -> Tuple[datetime, datetime, int]:
|
||||
) -> tuple[datetime, datetime, int]:
|
||||
"""
|
||||
Returns the min and max timestamp for the given pair and timeframe.
|
||||
:param pair: Pair to get min/max for
|
||||
|
@ -168,7 +168,7 @@ class IDataHandler(ABC):
|
|||
"""
|
||||
|
||||
@classmethod
|
||||
def trades_get_available_data(cls, datadir: Path, trading_mode: TradingMode) -> List[str]:
|
||||
def trades_get_available_data(cls, datadir: Path, trading_mode: TradingMode) -> list[str]:
|
||||
"""
|
||||
Returns a list of all pairs with ohlcv data available in this datadir
|
||||
:param datadir: Directory to search for ohlcv files
|
||||
|
@ -191,7 +191,7 @@ class IDataHandler(ABC):
|
|||
self,
|
||||
pair: str,
|
||||
trading_mode: TradingMode,
|
||||
) -> Tuple[datetime, datetime, int]:
|
||||
) -> tuple[datetime, datetime, int]:
|
||||
"""
|
||||
Returns the min and max timestamp for the given pair's trades data.
|
||||
:param pair: Pair to get min/max for
|
||||
|
@ -212,7 +212,7 @@ class IDataHandler(ABC):
|
|||
)
|
||||
|
||||
@classmethod
|
||||
def trades_get_pairs(cls, datadir: Path) -> List[str]:
|
||||
def trades_get_pairs(cls, datadir: Path) -> list[str]:
|
||||
"""
|
||||
Returns a list of all pairs for which trade data is available in this
|
||||
:param datadir: Directory to search for ohlcv files
|
||||
|
@ -532,7 +532,7 @@ class IDataHandler(ABC):
|
|||
Path(old_name).rename(new_name)
|
||||
|
||||
|
||||
def get_datahandlerclass(datatype: str) -> Type[IDataHandler]:
|
||||
def get_datahandlerclass(datatype: str) -> type[IDataHandler]:
|
||||
"""
|
||||
Get datahandler class.
|
||||
Could be done using Resolvers, but since this may be called often and resolvers
|
||||
|
|
|
@@ -2,7 +2,7 @@ import logging
import operator
from datetime import datetime, timedelta
from pathlib import Path
-from typing import Dict, List, Optional, Tuple
+from typing import Optional

from pandas import DataFrame, concat


@@ -77,7 +77,7 @@ def load_pair_history(
def load_data(
    datadir: Path,
    timeframe: str,
-    pairs: List[str],
+    pairs: list[str],
    *,
    timerange: Optional[TimeRange] = None,
    fill_up_missing: bool = True,

@@ -86,7 +86,7 @@ def load_data(
    data_format: str = "feather",
    candle_type: CandleType = CandleType.SPOT,
    user_futures_funding_rate: Optional[int] = None,
-) -> Dict[str, DataFrame]:
+) -> dict[str, DataFrame]:
    """
    Load ohlcv history data for a list of pairs.

@@ -101,7 +101,7 @@ def load_data(
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    :return: dict(<pair>:<Dataframe>)
    """
-    result: Dict[str, DataFrame] = {}
+    result: dict[str, DataFrame] = {}
    if startup_candles > 0 and timerange:
        logger.info(f"Using indicator startup period: {startup_candles} ...")

@@ -135,7 +135,7 @@ def refresh_data(
    *,
    datadir: Path,
    timeframe: str,
-    pairs: List[str],
+    pairs: list[str],
    exchange: Exchange,
    data_format: Optional[str] = None,
    timerange: Optional[TimeRange] = None,

@@ -172,7 +172,7 @@ def _load_cached_data_for_updating(
    data_handler: IDataHandler,
    candle_type: CandleType,
    prepend: bool = False,
-) -> Tuple[DataFrame, Optional[int], Optional[int]]:
+) -> tuple[DataFrame, Optional[int], Optional[int]]:
    """
    Load cached data to download more data.
    If timerange is passed in, checks whether data from an before the stored data will be

@@ -318,8 +318,8 @@ def _download_pair_history(

def refresh_backtest_ohlcv_data(
    exchange: Exchange,
-    pairs: List[str],
-    timeframes: List[str],
+    pairs: list[str],
+    timeframes: list[str],
    datadir: Path,
    trading_mode: str,
    timerange: Optional[TimeRange] = None,

@@ -327,7 +327,7 @@ def refresh_backtest_ohlcv_data(
    erase: bool = False,
    data_format: Optional[str] = None,
    prepend: bool = False,
-) -> List[str]:
+) -> list[str]:
    """
    Refresh stored ohlcv data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.

@@ -489,14 +489,14 @@ def _download_trades_history(

def refresh_backtest_trades_data(
    exchange: Exchange,
-    pairs: List[str],
+    pairs: list[str],
    datadir: Path,
    timerange: TimeRange,
    trading_mode: TradingMode,
    new_pairs_days: int = 30,
    erase: bool = False,
    data_format: str = "feather",
-) -> List[str]:
+) -> list[str]:
    """
    Refresh stored trades data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.

@@ -531,7 +531,7 @@ def refresh_backtest_trades_data(
    return pairs_not_available


-def get_timerange(data: Dict[str, DataFrame]) -> Tuple[datetime, datetime]:
+def get_timerange(data: dict[str, DataFrame]) -> tuple[datetime, datetime]:
    """
    Get the maximum common timerange for the given backtest data.

@@ -583,12 +583,12 @@ def download_data_main(config: Config) -> None:
        timerange = TimeRange.parse_timerange(f"{time_since}-")

    if "timerange" in config:
-        timerange = timerange.parse_timerange(config["timerange"])
+        timerange = TimeRange.parse_timerange(config["timerange"])

    # Remove stake-currency to skip checks which are not relevant for datadownload
    config["stake_currency"] = ""

-    pairs_not_available: List[str] = []
+    pairs_not_available: list[str] = []

    # Init exchange
    from freqtrade.resolvers.exchange_resolver import ExchangeResolver
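The download_data_main hunk carries one genuine fix besides the typing change: the configured timerange is now built via `TimeRange.parse_timerange(...)` on the class instead of being called through the `timerange` instance derived from `--days`. Both spellings happen to work for a static constructor, but going through the class makes it obvious that a brand-new object replaces the default. A generic sketch of the pattern, with `Window.parse` as a made-up stand-in:

```python
# Sketch: prefer calling alternative constructors on the class, not on an instance.
from dataclasses import dataclass


@dataclass
class Window:  # hypothetical stand-in for TimeRange
    start: int = 0
    stop: int = 0

    @staticmethod
    def parse(text: str) -> "Window":
        start_str, stop_str = text.split("-")
        return Window(int(start_str or 0), int(stop_str or 0))


window = Window.parse("20240101-")           # default derived from a day count
window = Window.parse("20240201-20240301")   # clearer than window.parse(...)
print(window)
```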
@@ -2,7 +2,6 @@ import logging
import math
from dataclasses import dataclass
from datetime import datetime
-from typing import Dict, Tuple

import numpy as np
import pandas as pd

@@ -11,7 +10,7 @@ import pandas as pd
logger = logging.getLogger(__name__)


-def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close") -> float:
+def calculate_market_change(data: dict[str, pd.DataFrame], column: str = "close") -> float:
    """
    Calculate market change based on "column".
    Calculation is done by taking the first non-null and the last non-null element of each column

@@ -32,7 +31,7 @@ def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close"


def combine_dataframes_by_column(
-    data: Dict[str, pd.DataFrame], column: str = "close"
+    data: dict[str, pd.DataFrame], column: str = "close"
) -> pd.DataFrame:
    """
    Combine multiple dataframes "column"

@@ -50,7 +49,7 @@ def combine_dataframes_by_column(


def combined_dataframes_with_rel_mean(
-    data: Dict[str, pd.DataFrame], fromdt: datetime, todt: datetime, column: str = "close"
+    data: dict[str, pd.DataFrame], fromdt: datetime, todt: datetime, column: str = "close"
) -> pd.DataFrame:
    """
    Combine multiple dataframes "column"

@@ -70,7 +69,7 @@ def combined_dataframes_with_rel_mean(


def combine_dataframes_with_mean(
-    data: Dict[str, pd.DataFrame], column: str = "close"
+    data: dict[str, pd.DataFrame], column: str = "close"
) -> pd.DataFrame:
    """
    Combine multiple dataframes "column"

@@ -222,7 +221,7 @@ def calculate_max_drawdown(
    )


-def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> Tuple[float, float]:
+def calculate_csum(trades: pd.DataFrame, starting_balance: float = 0) -> tuple[float, float]:
    """
    Calculate min/max cumsum of trades, to show if the wallet/stake amount ratio is sane
    :param trades: DataFrame containing trades (requires columns close_date and profit_percent)

@@ -255,15 +254,15 @@ def calculate_cagr(days_passed: int, starting_balance: float, final_balance: flo
    return (final_balance / starting_balance) ** (1 / (days_passed / 365)) - 1


-def calculate_expectancy(trades: pd.DataFrame) -> Tuple[float, float]:
+def calculate_expectancy(trades: pd.DataFrame) -> tuple[float, float]:
    """
    Calculate expectancy
    :param trades: DataFrame containing trades (requires columns close_date and profit_abs)
    :return: expectancy, expectancy_ratio
    """

-    expectancy = 0
-    expectancy_ratio = 100
+    expectancy = 0.0
+    expectancy_ratio = 100.0

    if len(trades) > 0:
        winning_trades = trades.loc[trades["profit_abs"] > 0]
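Turning the expectancy defaults into `0.0` / `100.0` keeps `calculate_expectancy` returning floats even for an empty trade list, matching its `tuple[float, float]` annotation. For reference, expectancy is win-rate times average win minus loss-rate times average loss; a compact sketch over a plain list of absolute profits (freqtrade computes the same quantities from the `profit_abs` column):

```python
# Rough sketch of the expectancy / expectancy-ratio calculation.
def expectancy(profits: list[float]) -> tuple[float, float]:
    expectancy_val = 0.0
    expectancy_ratio = 100.0  # defaults mirror the diff for the "no trades" case
    if profits:
        wins = [p for p in profits if p > 0]
        losses = [abs(p) for p in profits if p <= 0]
        winrate = len(wins) / len(profits)
        loserate = 1 - winrate
        avg_win = sum(wins) / len(wins) if wins else 0.0
        avg_loss = sum(losses) / len(losses) if losses else 0.0
        expectancy_val = winrate * avg_win - loserate * avg_loss
        if avg_loss > 0:
            expectancy_ratio = (avg_win / avg_loss) * winrate - loserate
    return expectancy_val, expectancy_ratio


print(expectancy([10.0, -5.0, 7.5, -2.5]))  # (2.5, 0.666...)
```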
@ -5,7 +5,7 @@ import logging
|
|||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import timedelta
|
||||
from typing import Any, Dict, List, NamedTuple
|
||||
from typing import Any, NamedTuple
|
||||
|
||||
import numpy as np
|
||||
import utils_find_1st as utf1st
|
||||
|
@ -44,7 +44,7 @@ class Edge:
|
|||
Author: https://github.com/mishaker
|
||||
"""
|
||||
|
||||
_cached_pairs: Dict[str, Any] = {} # Keeps a list of pairs
|
||||
_cached_pairs: dict[str, Any] = {} # Keeps a list of pairs
|
||||
|
||||
def __init__(self, config: Config, exchange, strategy) -> None:
|
||||
self.config = config
|
||||
|
@ -52,7 +52,7 @@ class Edge:
|
|||
self.strategy: IStrategy = strategy
|
||||
|
||||
self.edge_config = self.config.get("edge", {})
|
||||
self._cached_pairs: Dict[str, Any] = {} # Keeps a list of pairs
|
||||
self._cached_pairs: dict[str, Any] = {} # Keeps a list of pairs
|
||||
self._final_pairs: list = []
|
||||
|
||||
# checking max_open_trades. it should be -1 as with Edge
|
||||
|
@ -93,7 +93,7 @@ class Edge:
|
|||
except IndexError:
|
||||
self.fee = None
|
||||
|
||||
def calculate(self, pairs: List[str]) -> bool:
|
||||
def calculate(self, pairs: list[str]) -> bool:
|
||||
if self.fee is None and pairs:
|
||||
self.fee = self.exchange.get_fee(pairs[0])
|
||||
|
||||
|
@ -104,7 +104,7 @@ class Edge:
|
|||
):
|
||||
return False
|
||||
|
||||
data: Dict[str, Any] = {}
|
||||
data: dict[str, Any] = {}
|
||||
logger.info("Using stake_currency: %s ...", self.config["stake_currency"])
|
||||
logger.info("Using local backtesting data (using whitelist in given config) ...")
|
||||
|
||||
|
@ -231,7 +231,7 @@ class Edge:
|
|||
)
|
||||
return self.strategy.stoploss
|
||||
|
||||
def adjust(self, pairs: List[str]) -> list:
|
||||
def adjust(self, pairs: list[str]) -> list:
|
||||
"""
|
||||
Filters out and sorts "pairs" according to Edge calculated pairs
|
||||
"""
|
||||
|
@ -260,7 +260,7 @@ class Edge:
|
|||
|
||||
return self._final_pairs
|
||||
|
||||
def accepted_pairs(self) -> List[Dict[str, Any]]:
|
||||
def accepted_pairs(self) -> list[dict[str, Any]]:
|
||||
"""
|
||||
return a list of accepted pairs along with their winrate, expectancy and stoploss
|
||||
"""
|
||||
|
@ -322,7 +322,7 @@ class Edge:
|
|||
|
||||
return result
|
||||
|
||||
def _process_expectancy(self, results: DataFrame) -> Dict[str, Any]:
|
||||
def _process_expectancy(self, results: DataFrame) -> dict[str, Any]:
|
||||
"""
|
||||
This calculates WinRate, Required Risk Reward, Risk Reward and Expectancy of all pairs
|
||||
The calculation will be done per pair and per strategy.
|
||||
|
|
|
@@ -43,4 +43,5 @@ from freqtrade.exchange.hyperliquid import Hyperliquid
from freqtrade.exchange.idex import Idex
from freqtrade.exchange.kraken import Kraken
from freqtrade.exchange.kucoin import Kucoin
+from freqtrade.exchange.lbank import Lbank
from freqtrade.exchange.okx import Okx
@@ -3,7 +3,7 @@
import logging
from datetime import datetime, timezone
from pathlib import Path
-from typing import Dict, List, Optional, Tuple
+from typing import Optional

import ccxt


@@ -46,14 +46,14 @@ class Binance(Exchange):
        "ws_enabled": False,
    }

-    _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
+    _supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
        # TradingMode.SPOT always supported and not required in this list
        # (TradingMode.MARGIN, MarginMode.CROSS),
        # (TradingMode.FUTURES, MarginMode.CROSS),
        (TradingMode.FUTURES, MarginMode.ISOLATED)
    ]

-    def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
+    def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
        tickers = super().get_tickers(symbols=symbols, cached=cached)
        if self.trading_mode == TradingMode.FUTURES:
            # Binance's future result has no bid/ask values.

@@ -144,6 +144,29 @@ class Binance(Exchange):
        """
        return open_date.minute == 0 and open_date.second < 15

+    def fetch_funding_rates(
+        self, symbols: Optional[list[str]] = None
+    ) -> dict[str, dict[str, float]]:
+        """
+        Fetch funding rates for the given symbols.
+        :param symbols: List of symbols to fetch funding rates for
+        :return: Dict of funding rates for the given symbols
+        """
+        try:
+            if self.trading_mode == TradingMode.FUTURES:
+                rates = self._api.fetch_funding_rates(symbols)
+                return rates
+            return {}
+        except ccxt.DDoSProtection as e:
+            raise DDosProtection(e) from e
+        except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
+            raise TemporaryError(
+                f"Error in additional_exchange_init due to {e.__class__.__name__}. Message: {e}"
+            ) from e

+        except ccxt.BaseError as e:
+            raise OperationalException(e) from e

    def dry_run_liquidation_price(
        self,
        pair: str,

@@ -153,8 +176,7 @@ class Binance(Exchange):
        stake_amount: float,
        leverage: float,
        wallet_balance: float,  # Or margin balance
-        mm_ex_1: float = 0.0,  # (Binance) Cross only
-        upnl_ex_1: float = 0.0,  # (Binance) Cross only
+        open_trades: list,
    ) -> Optional[float]:
        """
        Important: Must be fetching data from cached values as this is used by backtesting!

@@ -172,6 +194,7 @@ class Binance(Exchange):
        :param wallet_balance: Amount of margin_mode in the wallet being used to trade
            Cross-Margin Mode: crossWalletBalance
            Isolated-Margin Mode: isolatedWalletBalance
+        :param open_trades: List of open trades in the same wallet

        # * Only required for Cross
        :param mm_ex_1: (TMM)

@@ -180,15 +203,41 @@ class Binance(Exchange):
        :param upnl_ex_1: (UPNL)
            Cross-Margin Mode: Unrealized PNL of all other contracts, excluding Contract 1.
            Isolated-Margin Mode: 0
-        :param other
        """

-        side_1 = -1 if is_short else 1
-        cross_vars = upnl_ex_1 - mm_ex_1 if self.margin_mode == MarginMode.CROSS else 0.0
+        cross_vars: float = 0.0

        # mm_ratio: Binance's formula specifies maintenance margin rate which is mm_ratio * 100%
        # maintenance_amt: (CUM) Maintenance Amount of position
        mm_ratio, maintenance_amt = self.get_maintenance_ratio_and_amt(pair, stake_amount)

+        if self.margin_mode == MarginMode.CROSS:
+            mm_ex_1: float = 0.0
+            upnl_ex_1: float = 0.0
+            pairs = [trade.pair for trade in open_trades]
+            if self._config["runmode"] in ("live", "dry_run"):
+                funding_rates = self.fetch_funding_rates(pairs)
+            for trade in open_trades:
+                if trade.pair == pair:
+                    # Only "other" trades are considered
+                    continue
+                if self._config["runmode"] in ("live", "dry_run"):
+                    mark_price = funding_rates[trade.pair]["markPrice"]
+                else:
+                    # Fall back to open rate for backtesting
+                    mark_price = trade.open_rate
+                mm_ratio1, maint_amnt1 = self.get_maintenance_ratio_and_amt(
+                    trade.pair, trade.stake_amount
+                )
+                maint_margin = trade.amount * mark_price * mm_ratio1 - maint_amnt1
+                mm_ex_1 += maint_margin

+                upnl_ex_1 += trade.amount * mark_price - trade.amount * trade.open_rate

+            cross_vars = upnl_ex_1 - mm_ex_1

+        side_1 = -1 if is_short else 1

        if maintenance_amt is None:
            raise OperationalException(
                "Parameter maintenance_amt is required by Binance.liquidation_price"

@@ -204,7 +253,7 @@ class Binance(Exchange):
                "Freqtrade only supports isolated futures for leverage trading"
            )

-    def load_leverage_tiers(self) -> Dict[str, List[Dict]]:
+    def load_leverage_tiers(self) -> dict[str, list[dict]]:
        if self.trading_mode == TradingMode.FUTURES:
            if self._config["dry_run"]:
                leverage_tiers_path = Path(__file__).parent / "binance_leverage_tiers.json"
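In cross-margin mode the loop above folds the maintenance margin and unrealized PNL of every *other* open position into `cross_vars`; in isolated mode `cross_vars` stays `0.0` and only the position itself matters. Below is a heavily simplified sketch of the isolated long case, following Binance's published liquidation formula (wallet balance plus maintenance amount minus position cost, divided by position size times maintenance rate minus position size). Treat it as an illustration, not freqtrade's exact implementation; the numbers are invented, and the maintenance ratio/amount would normally come from the leverage-tier table:

```python
# Illustrative isolated-margin liquidation price for a long futures position.
def isolated_liq_price_long(
    wallet_balance: float,
    amount: float,
    open_rate: float,
    mm_ratio: float,
    maintenance_amt: float,
) -> float:
    side_1 = 1        # long; a short would use -1
    cross_vars = 0.0  # isolated margin: other open trades are ignored
    return (
        (wallet_balance + cross_vars + maintenance_amt) - (side_1 * amount * open_rate)
    ) / ((amount * mm_ratio) - (side_1 * amount))


# 1 contract entered at 20_000 with 2_000 isolated margin and a 1% maintenance rate
print(isolated_liq_price_long(2_000.0, 1.0, 20_000.0, 0.01, 0.0))  # ~18_181.8
```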
File diff suppressed because it is too large
Load Diff
@@ -2,7 +2,7 @@

import logging
from datetime import datetime, timezone
-from typing import Dict, List, Optional
+from typing import Optional

from freqtrade.exchange import Exchange


@@ -17,8 +17,8 @@ class Bitpanda(Exchange):
    """

    def get_trades_for_order(
-        self, order_id: str, pair: str, since: datetime, params: Optional[Dict] = None
-    ) -> List:
+        self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
+    ) -> list:
        """
        Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
        The "since" argument passed in is coming from the database and is in UTC,
@@ -2,7 +2,7 @@

import logging
from datetime import datetime, timedelta
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Optional

import ccxt


@@ -36,6 +36,12 @@ class Bybit(Exchange):
        "order_time_in_force": ["GTC", "FOK", "IOC", "PO"],
        "ws_enabled": True,
        "trades_has_history": False,  # Endpoint doesn't support pagination
+        "exchange_has_overrides": {
+            # Bybit spot does not support fetch_order
+            # Unless the account is unified.
+            # TODO: Can be removed once bybit fully forces all accounts to unified mode.
+            "fetchOrder": False,
+        },
    }
    _ft_has_futures: FtHas = {
        "ohlcv_has_history": True,

@@ -51,16 +57,19 @@ class Bybit(Exchange):
            PriceType.MARK: "MarkPrice",
            PriceType.INDEX: "IndexPrice",
        },
+        "exchange_has_overrides": {
+            "fetchOrder": True,
+        },
    }

-    _supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
+    _supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
        # TradingMode.SPOT always supported and not required in this list
        # (TradingMode.FUTURES, MarginMode.CROSS),
        (TradingMode.FUTURES, MarginMode.ISOLATED)
    ]

    @property
-    def _ccxt_config(self) -> Dict:
+    def _ccxt_config(self) -> dict:
        # Parameters to add directly to ccxt sync/async initialization.
        # ccxt defaults to swap mode.
        config = {}

@@ -69,7 +78,7 @@ class Bybit(Exchange):
        config.update(super()._ccxt_config)
        return config

-    def market_is_future(self, market: Dict[str, Any]) -> bool:
+    def market_is_future(self, market: dict[str, Any]) -> bool:
        main = super().market_is_future(market)
        # For ByBit, we'll only support USDT markets for now.
        return main and market["settle"] == "USDT"

@@ -108,7 +117,7 @@ class Bybit(Exchange):
    def ohlcv_candle_limit(
        self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
    ) -> int:
-        if candle_type in (CandleType.FUNDING_RATE):
+        if candle_type == CandleType.FUNDING_RATE:
            return 200

        return super().ohlcv_candle_limit(timeframe, candle_type, since_ms)

@@ -126,7 +135,7 @@ class Bybit(Exchange):
        leverage: float,
        reduceOnly: bool,
        time_in_force: str = "GTC",
-    ) -> Dict:
+    ) -> dict:
        params = super()._get_params(
            side=side,
            ordertype=ordertype,

@@ -147,8 +156,7 @@ class Bybit(Exchange):
        stake_amount: float,
        leverage: float,
        wallet_balance: float,  # Or margin balance
-        mm_ex_1: float = 0.0,  # (Binance) Cross only
-        upnl_ex_1: float = 0.0,  # (Binance) Cross only
+        open_trades: list,
    ) -> Optional[float]:
        """
        Important: Must be fetching data from cached values as this is used by backtesting!

@@ -178,6 +186,7 @@ class Bybit(Exchange):
        :param wallet_balance: Amount of margin_mode in the wallet being used to trade
            Cross-Margin Mode: crossWalletBalance
            Isolated-Margin Mode: isolatedWalletBalance
+        :param open_trades: List of other open trades in the same wallet
        """

        market = self.markets[pair]

@@ -220,7 +229,7 @@ class Bybit(Exchange):
                logger.warning(f"Could not update funding fees for {pair}.")
        return 0.0

-    def fetch_orders(self, pair: str, since: datetime, params: Optional[Dict] = None) -> List[Dict]:
+    def fetch_orders(self, pair: str, since: datetime, params: Optional[dict] = None) -> list[dict]:
        """
        Fetch all orders for a pair "since"
        :param pair: Pair for the query

@@ -237,7 +246,7 @@ class Bybit(Exchange):

        return orders

-    def fetch_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
+    def fetch_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
        if self.exchange_has("fetchOrder"):
            # Set acknowledged to True to avoid ccxt exception
            params = {"acknowledged": True}

@@ -255,7 +264,7 @@ class Bybit(Exchange):
        return order

    @retrier
-    def get_leverage_tiers(self) -> Dict[str, List[Dict]]:
+    def get_leverage_tiers(self) -> dict[str, list[dict]]:
        """
        Cache leverage tiers for 1 day, since they are not expected to change often, and
        bybit requires pagination to fetch all tiers.
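Besides the typing updates and the `fetchOrder` overrides, the Bybit hunks fix a subtle conditional: `candle_type in (CandleType.FUNDING_RATE)` has no comma, so the parentheses do not build a tuple, and because the enum is string-based the `in` test silently degrades to substring matching. The `==` comparison states the intent directly. A small sketch of the pitfall, using a hypothetical stand-in enum:

```python
# Sketch of the `in (X)` pitfall with a str-based enum (stand-in for CandleType).
from enum import Enum


class Candle(str, Enum):
    SPOT = "spot"
    FUNDING_RATE = "funding_rate"
    RATE = "rate"  # hypothetical member, only here to expose the substring trap


print(Candle.FUNDING_RATE in (Candle.FUNDING_RATE))   # True, but via substring matching
print(Candle.RATE in (Candle.FUNDING_RATE))           # True as well - clearly unintended
print(Candle.RATE in (Candle.FUNDING_RATE,))          # False: a real one-element tuple
print(Candle.RATE == Candle.FUNDING_RATE)             # False: the fixed comparison
```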
@@ -2,7 +2,7 @@ import asyncio
import logging
import time
from functools import wraps
-from typing import Any, Callable, Dict, List, Optional, TypeVar, cast, overload
+from typing import Any, Callable, Optional, TypeVar, cast, overload

from freqtrade.constants import ExchangeConfig
from freqtrade.exceptions import DDosProtection, RetryableOrderError, TemporaryError

@@ -62,7 +62,7 @@ SUPPORTED_EXCHANGES = [
]

# either the main, or replacement methods (array) is required
-EXCHANGE_HAS_REQUIRED: Dict[str, List[str]] = {
+EXCHANGE_HAS_REQUIRED: dict[str, list[str]] = {
    # Required / private
    "fetchOrder": ["fetchOpenOrder", "fetchClosedOrder"],
    "fetchL2OrderBook": ["fetchTicker"],
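`EXCHANGE_HAS_REQUIRED` encodes, per its comment, that either the main ccxt method or the listed replacement methods must be available. A small hedged sketch of how such a table can be checked against a ccxt-style `exchange.has` mapping (the helper is illustrative, not freqtrade's validation code):

```python
# Illustrative capability check: the main method, or all of its replacements.
EXCHANGE_HAS_REQUIRED: dict[str, list[str]] = {
    "fetchOrder": ["fetchOpenOrder", "fetchClosedOrder"],
    "fetchL2OrderBook": ["fetchTicker"],
}


def missing_capabilities(has: dict[str, bool]) -> list[str]:
    missing = []
    for main, replacements in EXCHANGE_HAS_REQUIRED.items():
        if not has.get(main) and not all(has.get(r) for r in replacements):
            missing.append(main)
    return missing


print(missing_capabilities({
    "fetchOrder": False, "fetchOpenOrder": True,
    "fetchClosedOrder": True, "fetchL2OrderBook": True,
}))  # [] - fetchOrder is covered by its two replacements
```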
@ -7,11 +7,12 @@ import asyncio
|
|||
import inspect
|
||||
import logging
|
||||
import signal
|
||||
from collections.abc import Coroutine
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import floor, isnan
|
||||
from threading import Lock
|
||||
from typing import Any, Coroutine, Dict, List, Literal, Optional, Tuple, Union
|
||||
from typing import Any, Literal, Optional, Union
|
||||
|
||||
import ccxt
|
||||
import ccxt.pro as ccxt_pro
|
||||
|
@ -114,10 +115,10 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
class Exchange:
|
||||
# Parameters to add directly to buy/sell calls (like agreeing to trading agreement)
|
||||
_params: Dict = {}
|
||||
_params: dict = {}
|
||||
|
||||
# Additional parameters - added to the ccxt object
|
||||
_ccxt_params: Dict = {}
|
||||
_ccxt_params: dict = {}
|
||||
|
||||
# Dict to specify which options each exchange implements
|
||||
# This defines defaults, which can be selectively overridden by subclasses using _ft_has
|
||||
|
@ -160,7 +161,7 @@ class Exchange:
|
|||
_ft_has: FtHas = {}
|
||||
_ft_has_futures: FtHas = {}
|
||||
|
||||
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
|
||||
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
|
||||
# TradingMode.SPOT always supported and not required in this list
|
||||
]
|
||||
|
||||
|
@ -181,9 +182,9 @@ class Exchange:
|
|||
self._api_async: ccxt_pro.Exchange
|
||||
self._ws_async: ccxt_pro.Exchange = None
|
||||
self._exchange_ws: Optional[ExchangeWS] = None
|
||||
self._markets: Dict = {}
|
||||
self._trading_fees: Dict[str, Any] = {}
|
||||
self._leverage_tiers: Dict[str, List[Dict]] = {}
|
||||
self._markets: dict = {}
|
||||
self._trading_fees: dict[str, Any] = {}
|
||||
self._leverage_tiers: dict[str, list[dict]] = {}
|
||||
# Lock event loop. This is necessary to avoid race-conditions when using force* commands
|
||||
# Due to funding fee fetching.
|
||||
self._loop_lock = Lock()
|
||||
|
@ -193,7 +194,7 @@ class Exchange:
|
|||
self._config.update(config)
|
||||
|
||||
# Holds last candle refreshed time of each pair
|
||||
self._pairs_last_refresh_time: Dict[PairWithTimeframe, int] = {}
|
||||
self._pairs_last_refresh_time: dict[PairWithTimeframe, int] = {}
|
||||
# Timestamp of last markets refresh
|
||||
self._last_markets_refresh: int = 0
|
||||
|
||||
|
@ -208,19 +209,19 @@ class Exchange:
|
|||
self._entry_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=300)
|
||||
|
||||
# Holds candles
|
||||
self._klines: Dict[PairWithTimeframe, DataFrame] = {}
|
||||
self._expiring_candle_cache: Dict[Tuple[str, int], PeriodicCache] = {}
|
||||
self._klines: dict[PairWithTimeframe, DataFrame] = {}
|
||||
self._expiring_candle_cache: dict[tuple[str, int], PeriodicCache] = {}
|
||||
|
||||
# Holds public_trades
|
||||
self._trades: Dict[PairWithTimeframe, DataFrame] = {}
|
||||
self._trades: dict[PairWithTimeframe, DataFrame] = {}
|
||||
|
||||
# Holds all open sell orders for dry_run
|
||||
self._dry_run_open_orders: Dict[str, Any] = {}
|
||||
self._dry_run_open_orders: dict[str, Any] = {}
|
||||
|
||||
if config["dry_run"]:
|
||||
logger.info("Instance is running with dry_run enabled")
|
||||
logger.info(f"Using CCXT {ccxt.__version__}")
|
||||
exchange_conf: Dict[str, Any] = exchange_config if exchange_config else config["exchange"]
|
||||
exchange_conf: dict[str, Any] = exchange_config if exchange_config else config["exchange"]
|
||||
remove_exchange_credentials(exchange_conf, config.get("dry_run", False))
|
||||
self.log_responses = exchange_conf.get("log_responses", False)
|
||||
|
||||
|
@ -339,7 +340,7 @@ class Exchange:
|
|||
self.validate_freqai(config)
|
||||
|
||||
def _init_ccxt(
|
||||
self, exchange_config: Dict[str, Any], sync: bool, ccxt_kwargs: Dict[str, Any]
|
||||
self, exchange_config: dict[str, Any], sync: bool, ccxt_kwargs: dict[str, Any]
|
||||
) -> ccxt.Exchange:
|
||||
"""
|
||||
Initialize ccxt with given config and return valid ccxt instance.
|
||||
|
@ -390,7 +391,7 @@ class Exchange:
|
|||
return api
|
||||
|
||||
@property
|
||||
def _ccxt_config(self) -> Dict:
|
||||
def _ccxt_config(self) -> dict:
|
||||
# Parameters to add directly to ccxt sync/async initialization.
|
||||
if self.trading_mode == TradingMode.MARGIN:
|
||||
return {"options": {"defaultType": "margin"}}
|
||||
|
@ -410,11 +411,11 @@ class Exchange:
|
|||
return self._api.id
|
||||
|
||||
@property
|
||||
def timeframes(self) -> List[str]:
|
||||
def timeframes(self) -> list[str]:
|
||||
return list((self._api.timeframes or {}).keys())
|
||||
|
||||
@property
|
||||
def markets(self) -> Dict[str, Any]:
|
||||
def markets(self) -> dict[str, Any]:
|
||||
"""exchange ccxt markets"""
|
||||
if not self._markets:
|
||||
logger.info("Markets were not loaded. Loading them now..")
|
||||
|
@ -471,14 +472,14 @@ class Exchange:
|
|||
|
||||
def get_markets(
|
||||
self,
|
||||
base_currencies: Optional[List[str]] = None,
|
||||
quote_currencies: Optional[List[str]] = None,
|
||||
base_currencies: Optional[list[str]] = None,
|
||||
quote_currencies: Optional[list[str]] = None,
|
||||
spot_only: bool = False,
|
||||
margin_only: bool = False,
|
||||
futures_only: bool = False,
|
||||
tradable_only: bool = True,
|
||||
active_only: bool = False,
|
||||
) -> Dict[str, Any]:
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
Return exchange ccxt markets, filtered out by base currency and quote currency
|
||||
if this was requested in parameters.
|
||||
|
@ -503,7 +504,7 @@ class Exchange:
|
|||
markets = {k: v for k, v in markets.items() if market_is_active(v)}
|
||||
return markets
|
||||
|
||||
def get_quote_currencies(self) -> List[str]:
|
||||
def get_quote_currencies(self) -> list[str]:
|
||||
"""
|
||||
Return a list of supported quote currencies
|
||||
"""
|
||||
|
@ -518,19 +519,19 @@ class Exchange:
|
|||
"""Return a pair's base currency (base/quote:settlement)"""
|
||||
return self.markets.get(pair, {}).get("base", "")
|
||||
|
||||
def market_is_future(self, market: Dict[str, Any]) -> bool:
|
||||
def market_is_future(self, market: dict[str, Any]) -> bool:
|
||||
return (
|
||||
market.get(self._ft_has["ccxt_futures_name"], False) is True
|
||||
and market.get("linear", False) is True
|
||||
)
|
||||
|
||||
def market_is_spot(self, market: Dict[str, Any]) -> bool:
|
||||
def market_is_spot(self, market: dict[str, Any]) -> bool:
|
||||
return market.get("spot", False) is True
|
||||
|
||||
def market_is_margin(self, market: Dict[str, Any]) -> bool:
|
||||
def market_is_margin(self, market: dict[str, Any]) -> bool:
|
||||
return market.get("margin", False) is True
|
||||
|
||||
def market_is_tradable(self, market: Dict[str, Any]) -> bool:
|
||||
def market_is_tradable(self, market: dict[str, Any]) -> bool:
|
||||
"""
|
||||
Check if the market symbol is tradable by Freqtrade.
|
||||
Ensures that Configured mode aligns to
|
||||
|
@ -578,7 +579,7 @@ class Exchange:
|
|||
else:
|
||||
return 1
|
||||
|
||||
def _trades_contracts_to_amount(self, trades: List) -> List:
|
||||
def _trades_contracts_to_amount(self, trades: list) -> list:
|
||||
if len(trades) > 0 and "symbol" in trades[0]:
|
||||
contract_size = self.get_contract_size(trades[0]["symbol"])
|
||||
if contract_size != 1:
|
||||
|
@ -586,7 +587,7 @@ class Exchange:
|
|||
trade["amount"] = trade["amount"] * contract_size
|
||||
return trades
|
||||
|
||||
def _order_contracts_to_amount(self, order: Dict) -> Dict:
|
||||
def _order_contracts_to_amount(self, order: dict) -> dict:
|
||||
if "symbol" in order and order["symbol"] is not None:
|
||||
contract_size = self.get_contract_size(order["symbol"])
|
||||
if contract_size != 1:
|
||||
|
@ -620,7 +621,7 @@ class Exchange:
|
|||
if self._exchange_ws:
|
||||
self._exchange_ws.reset_connections()
|
||||
|
||||
async def _api_reload_markets(self, reload: bool = False) -> Dict[str, Any]:
|
||||
async def _api_reload_markets(self, reload: bool = False) -> dict[str, Any]:
|
||||
try:
|
||||
return await self._api_async.load_markets(reload=reload, params={})
|
||||
except ccxt.DDoSProtection as e:
|
||||
|
@ -632,7 +633,7 @@ class Exchange:
|
|||
except ccxt.BaseError as e:
|
||||
raise TemporaryError(e) from e
|
||||
|
||||
def _load_async_markets(self, reload: bool = False) -> Dict[str, Any]:
|
||||
def _load_async_markets(self, reload: bool = False) -> dict[str, Any]:
|
||||
try:
|
||||
markets = self.loop.run_until_complete(self._api_reload_markets(reload=reload))
|
||||
|
||||
|
@ -734,7 +735,7 @@ class Exchange:
|
|||
):
|
||||
raise ConfigurationError("Timeframes < 1m are currently not supported by Freqtrade.")
|
||||
|
||||
def validate_ordertypes(self, order_types: Dict) -> None:
|
||||
def validate_ordertypes(self, order_types: dict) -> None:
|
||||
"""
|
||||
Checks if order-types configured in strategy/config are supported
|
||||
"""
|
||||
|
@ -743,7 +744,7 @@ class Exchange:
|
|||
raise ConfigurationError(f"Exchange {self.name} does not support market orders.")
|
||||
self.validate_stop_ordertypes(order_types)
|
||||
|
||||
def validate_stop_ordertypes(self, order_types: Dict) -> None:
|
||||
def validate_stop_ordertypes(self, order_types: dict) -> None:
|
||||
"""
|
||||
Validate stoploss order types
|
||||
"""
|
||||
|
@ -762,7 +763,7 @@ class Exchange:
|
|||
f"On exchange stoploss price type is not supported for {self.name}."
|
||||
)
|
||||
|
||||
def validate_pricing(self, pricing: Dict) -> None:
|
||||
def validate_pricing(self, pricing: dict) -> None:
|
||||
if pricing.get("use_order_book", False) and not self.exchange_has("fetchL2OrderBook"):
|
||||
raise ConfigurationError(f"Orderbook not available for {self.name}.")
|
||||
if not pricing.get("use_order_book", False) and (
|
||||
|
@ -770,7 +771,7 @@ class Exchange:
|
|||
):
|
||||
raise ConfigurationError(f"Ticker pricing not available for {self.name}.")
|
||||
|
||||
def validate_order_time_in_force(self, order_time_in_force: Dict) -> None:
|
||||
def validate_order_time_in_force(self, order_time_in_force: dict) -> None:
|
||||
"""
|
||||
Checks if order time in force configured in strategy/config are supported
|
||||
"""
|
||||
|
@ -782,7 +783,7 @@ class Exchange:
|
|||
f"Time in force policies are not supported for {self.name} yet."
|
||||
)
|
||||
|
||||
def validate_orderflow(self, exchange: Dict) -> None:
|
||||
def validate_orderflow(self, exchange: dict) -> None:
|
||||
if exchange.get("use_public_trades", False) and (
|
||||
not self.exchange_has("fetchTrades") or not self._ft_has["trades_has_history"]
|
||||
):
|
||||
|
@ -1000,16 +1001,16 @@ class Exchange:
|
|||
amount: float,
|
||||
rate: float,
|
||||
leverage: float,
|
||||
params: Optional[Dict] = None,
|
||||
params: Optional[dict] = None,
|
||||
stop_loss: bool = False,
|
||||
) -> Dict[str, Any]:
|
||||
) -> dict[str, Any]:
|
||||
now = dt_now()
|
||||
order_id = f"dry_run_{side}_{pair}_{now.timestamp()}"
|
||||
# Rounding here must respect to contract sizes
|
||||
_amount = self._contracts_to_amount(
|
||||
pair, self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
|
||||
)
|
||||
dry_order: Dict[str, Any] = {
|
||||
dry_order: dict[str, Any] = {
|
||||
"id": order_id,
|
||||
"symbol": pair,
|
||||
"price": rate,
|
||||
|
@ -1071,9 +1072,9 @@ class Exchange:
|
|||
def add_dry_order_fee(
|
||||
self,
|
||||
pair: str,
|
||||
dry_order: Dict[str, Any],
|
||||
dry_order: dict[str, Any],
|
||||
taker_or_maker: MakerTaker,
|
||||
) -> Dict[str, Any]:
|
||||
) -> dict[str, Any]:
|
||||
fee = self.get_fee(pair, taker_or_maker=taker_or_maker)
|
||||
dry_order.update(
|
||||
{
|
||||
|
@ -1157,8 +1158,8 @@ class Exchange:
|
|||
return False
|
||||
|
||||
def check_dry_limit_order_filled(
|
||||
self, order: Dict[str, Any], immediate: bool = False, orderbook: Optional[OrderBook] = None
|
||||
) -> Dict[str, Any]:
|
||||
self, order: dict[str, Any], immediate: bool = False, orderbook: Optional[OrderBook] = None
|
||||
) -> dict[str, Any]:
|
||||
"""
|
||||
Check dry-run limit order fill and update fee (if it filled).
|
||||
"""
|
||||
|
@ -1185,7 +1186,7 @@ class Exchange:
|
|||
|
||||
return order
|
||||
|
||||
def fetch_dry_run_order(self, order_id) -> Dict[str, Any]:
|
||||
def fetch_dry_run_order(self, order_id) -> dict[str, Any]:
|
||||
"""
|
||||
Return dry-run order
|
||||
Only call if running in dry-run mode.
|
||||
|
@ -1221,7 +1222,7 @@ class Exchange:
|
|||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
) -> dict:
|
||||
params = self._params.copy()
|
||||
if time_in_force != "GTC" and ordertype != "market":
|
||||
params.update({"timeInForce": time_in_force.upper()})
|
||||
|
@@ -1229,10 +1230,10 @@ class Exchange:
            params.update({"reduceOnly": True})
        return params

-    def _order_needs_price(self, ordertype: str) -> bool:
+    def _order_needs_price(self, side: BuySell, ordertype: str) -> bool:
        return (
            ordertype != "market"
-            or self._api.options.get("createMarketBuyOrderRequiresPrice", False)
+            or (side == "buy" and self._api.options.get("createMarketBuyOrderRequiresPrice", False))
            or self._ft_has.get("marketOrderRequiresPrice", False)
        )


@@ -1247,7 +1248,7 @@ class Exchange:
        leverage: float,
        reduceOnly: bool = False,
        time_in_force: str = "GTC",
-    ) -> Dict:
+    ) -> dict:
        if self._config["dry_run"]:
            dry_order = self.create_dry_run_order(
                pair, ordertype, side, amount, self.price_to_precision(pair, rate), leverage

@@ -1259,7 +1260,7 @@ class Exchange:
        try:
            # Set the precision for amount and price(rate) as accepted by the exchange
            amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
-            needs_price = self._order_needs_price(ordertype)
+            needs_price = self._order_needs_price(side, ordertype)
            rate_for_order = self.price_to_precision(pair, rate) if needs_price else None

            if not reduceOnly:
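The `_order_needs_price` change makes the price requirement side-aware: ccxt's `createMarketBuyOrderRequiresPrice` option only applies to market *buy* orders (exchanges that take a quote amount instead of a base amount), so market sells on such exchanges no longer get a price attached. A stripped-down sketch of the decision; the option names follow ccxt, the helper itself is illustrative:

```python
# Sketch: limit orders always need a price; market orders only when the exchange
# demands it, and createMarketBuyOrderRequiresPrice concerns buys only.
def order_needs_price(side: str, ordertype: str, ccxt_options: dict, ft_has: dict) -> bool:
    return (
        ordertype != "market"
        or (side == "buy" and ccxt_options.get("createMarketBuyOrderRequiresPrice", False))
        or ft_has.get("marketOrderRequiresPrice", False)
    )


opts = {"createMarketBuyOrderRequiresPrice": True}
print(order_needs_price("buy", "market", opts, {}))   # True
print(order_needs_price("sell", "market", opts, {}))  # False - previously this was True too
```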
@ -1305,7 +1306,7 @@ class Exchange:
|
|||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def stoploss_adjust(self, stop_loss: float, order: Dict, side: str) -> bool:
|
||||
def stoploss_adjust(self, stop_loss: float, order: dict, side: str) -> bool:
|
||||
"""
|
||||
Verify stop_loss against stoploss-order value (limit or price)
|
||||
Returns True if adjustment is necessary.
|
||||
|
@ -1318,8 +1319,8 @@ class Exchange:
|
|||
or (side == "buy" and stop_loss < float(order[price_param]))
|
||||
)
|
||||
|
||||
def _get_stop_order_type(self, user_order_type) -> Tuple[str, str]:
|
||||
available_order_Types: Dict[str, str] = self._ft_has["stoploss_order_types"]
|
||||
def _get_stop_order_type(self, user_order_type) -> tuple[str, str]:
|
||||
available_order_Types: dict[str, str] = self._ft_has["stoploss_order_types"]
|
||||
|
||||
if user_order_type in available_order_Types.keys():
|
||||
ordertype = available_order_Types[user_order_type]
|
||||
|
@ -1329,7 +1330,7 @@ class Exchange:
|
|||
user_order_type = list(available_order_Types.keys())[0]
|
||||
return ordertype, user_order_type
|
||||
|
||||
def _get_stop_limit_rate(self, stop_price: float, order_types: Dict, side: str) -> float:
|
||||
def _get_stop_limit_rate(self, stop_price: float, order_types: dict, side: str) -> float:
|
||||
# Limit price threshold: As limit price should always be below stop-price
|
||||
limit_price_pct = order_types.get("stoploss_on_exchange_limit_ratio", 0.99)
|
||||
if side == "sell":
|
||||
|
@ -1351,7 +1352,7 @@ class Exchange:
|
|||
)
|
||||
return limit_rate
|
||||
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
|
||||
params = self._params.copy()
|
||||
# Verify if stopPrice works for your exchange, else configure stop_price_param
|
||||
params.update({self._ft_has["stop_price_param"]: stop_price})
|
||||
|
@ -1363,10 +1364,10 @@ class Exchange:
|
|||
pair: str,
|
||||
amount: float,
|
||||
stop_price: float,
|
||||
order_types: Dict,
|
||||
order_types: dict,
|
||||
side: BuySell,
|
||||
leverage: float,
|
||||
) -> Dict:
|
||||
) -> dict:
|
||||
"""
|
||||
creates a stoploss order.
|
||||
requires `_ft_has['stoploss_order_types']` to be set as a dict mapping limit and market
|
||||
|
@ -1459,7 +1460,7 @@ class Exchange:
|
|||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def fetch_order_emulated(self, order_id: str, pair: str, params: Dict) -> Dict:
|
||||
def fetch_order_emulated(self, order_id: str, pair: str, params: dict) -> dict:
|
||||
"""
|
||||
Emulated fetch_order if the exchange doesn't support fetch_order, but requires separate
|
||||
calls for open and closed orders.
|
||||
|
@ -1493,7 +1494,7 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
@retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
|
||||
def fetch_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
def fetch_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
|
||||
if self._config["dry_run"]:
|
||||
return self.fetch_dry_run_order(order_id)
|
||||
if params is None:
|
||||
|
@ -1522,12 +1523,12 @@ class Exchange:
|
|||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
|
||||
return self.fetch_order(order_id, pair, params)
|
||||
|
||||
def fetch_order_or_stoploss_order(
|
||||
self, order_id: str, pair: str, stoploss_order: bool = False
|
||||
) -> Dict:
|
||||
) -> dict:
|
||||
"""
|
||||
Simple wrapper calling either fetch_order or fetch_stoploss_order depending on
|
||||
the stoploss_order parameter
|
||||
|
@ -1539,7 +1540,7 @@ class Exchange:
|
|||
return self.fetch_stoploss_order(order_id, pair)
|
||||
return self.fetch_order(order_id, pair)
|
||||
|
||||
def check_order_canceled_empty(self, order: Dict) -> bool:
|
||||
def check_order_canceled_empty(self, order: dict) -> bool:
|
||||
"""
|
||||
Verify if an order has been cancelled without being partially filled
|
||||
:param order: Order dict as returned from fetch_order()
|
||||
|
@ -1548,7 +1549,7 @@ class Exchange:
|
|||
return order.get("status") in NON_OPEN_EXCHANGE_STATES and order.get("filled") == 0.0
|
||||
|
||||
@retrier
|
||||
def cancel_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
def cancel_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
|
||||
if self._config["dry_run"]:
|
||||
try:
|
||||
order = self.fetch_dry_run_order(order_id)
|
||||
|
@ -1577,8 +1578,8 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
def cancel_stoploss_order(
|
||||
self, order_id: str, pair: str, params: Optional[Dict] = None
|
||||
) -> Dict:
|
||||
self, order_id: str, pair: str, params: Optional[dict] = None
|
||||
) -> dict:
|
||||
return self.cancel_order(order_id, pair, params)
|
||||
|
||||
def is_cancel_order_result_suitable(self, corder) -> bool:
|
||||
|
@ -1588,7 +1589,7 @@ class Exchange:
|
|||
required = ("fee", "status", "amount")
|
||||
return all(corder.get(k, None) is not None for k in required)
|
||||
|
||||
def cancel_order_with_result(self, order_id: str, pair: str, amount: float) -> Dict:
|
||||
def cancel_order_with_result(self, order_id: str, pair: str, amount: float) -> dict:
|
||||
"""
|
||||
Cancel order returning a result.
|
||||
Creates a fake result if cancel order returns a non-usable result
|
||||
|
@ -1619,7 +1620,7 @@ class Exchange:
|
|||
|
||||
return order
|
||||
|
||||
def cancel_stoploss_order_with_result(self, order_id: str, pair: str, amount: float) -> Dict:
|
||||
def cancel_stoploss_order_with_result(self, order_id: str, pair: str, amount: float) -> dict:
|
||||
"""
|
||||
Cancel stoploss order returning a result.
|
||||
Creates a fake result if cancel order returns a non-usable result
|
||||
|
@ -1661,7 +1662,7 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
@retrier
|
||||
def fetch_positions(self, pair: Optional[str] = None) -> List[CcxtPosition]:
|
||||
def fetch_positions(self, pair: Optional[str] = None) -> list[CcxtPosition]:
|
||||
"""
|
||||
Fetch positions from the exchange.
|
||||
If no pair is given, all positions are returned.
|
||||
|
@ -1673,7 +1674,7 @@ class Exchange:
|
|||
symbols = []
|
||||
if pair:
|
||||
symbols.append(pair)
|
||||
positions: List[CcxtPosition] = self._api.fetch_positions(symbols)
|
||||
positions: list[CcxtPosition] = self._api.fetch_positions(symbols)
|
||||
self._log_exchange_response("fetch_positions", positions)
|
||||
return positions
|
||||
except ccxt.DDoSProtection as e:
|
||||
|
@ -1685,7 +1686,7 @@ class Exchange:
|
|||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> List[Dict]:
|
||||
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> list[dict]:
|
||||
orders = []
|
||||
if self.exchange_has("fetchClosedOrders"):
|
||||
orders = self._api.fetch_closed_orders(pair, since=since_ms)
|
||||
|
@ -1695,7 +1696,7 @@ class Exchange:
|
|||
return orders
|
||||
|
||||
@retrier(retries=0)
|
||||
def fetch_orders(self, pair: str, since: datetime, params: Optional[Dict] = None) -> List[Dict]:
|
||||
def fetch_orders(self, pair: str, since: datetime, params: Optional[dict] = None) -> list[dict]:
|
||||
"""
|
||||
Fetch all orders for a pair "since"
|
||||
:param pair: Pair for the query
|
||||
|
@ -1711,7 +1712,7 @@ class Exchange:
|
|||
if not params:
|
||||
params = {}
|
||||
try:
|
||||
orders: List[Dict] = self._api.fetch_orders(pair, since=since_ms, params=params)
|
||||
orders: list[dict] = self._api.fetch_orders(pair, since=since_ms, params=params)
|
||||
except ccxt.NotSupported:
|
||||
# Some exchanges don't support fetchOrders
|
||||
# attempt to fetch open and closed orders separately
|
||||
|
@ -1731,7 +1732,7 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
@retrier
|
||||
def fetch_trading_fees(self) -> Dict[str, Any]:
|
||||
def fetch_trading_fees(self) -> dict[str, Any]:
|
||||
"""
|
||||
Fetch user account trading fees
|
||||
Can be cached, should not update often.
|
||||
|
@ -1743,7 +1744,7 @@ class Exchange:
|
|||
):
|
||||
return {}
|
||||
try:
|
||||
trading_fees: Dict[str, Any] = self._api.fetch_trading_fees()
|
||||
trading_fees: dict[str, Any] = self._api.fetch_trading_fees()
|
||||
self._log_exchange_response("fetch_trading_fees", trading_fees)
|
||||
return trading_fees
|
||||
except ccxt.DDoSProtection as e:
|
||||
|
@ -1756,7 +1757,7 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
@retrier
|
||||
def fetch_bids_asks(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
|
||||
def fetch_bids_asks(self, symbols: Optional[list[str]] = None, cached: bool = False) -> dict:
|
||||
"""
|
||||
:param symbols: List of symbols to fetch
|
||||
:param cached: Allow cached result
|
||||
|
@ -1789,7 +1790,7 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
@retrier
|
||||
def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
|
||||
def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
|
||||
"""
|
||||
:param cached: Allow cached result
|
||||
:return: fetch_tickers result
|
||||
|
@ -1849,7 +1850,7 @@ class Exchange:
|
|||
|
||||
@staticmethod
|
||||
def get_next_limit_in_list(
|
||||
limit: int, limit_range: Optional[List[int]], range_required: bool = True
|
||||
limit: int, limit_range: Optional[list[int]], range_required: bool = True
|
||||
):
|
||||
"""
|
||||
Get next greater value in the list.
|
||||
|
@ -1890,7 +1891,7 @@ class Exchange:
|
|||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def _get_price_side(self, side: str, is_short: bool, conf_strategy: Dict) -> BidAsk:
|
||||
def _get_price_side(self, side: str, is_short: bool, conf_strategy: dict) -> BidAsk:
|
||||
price_side = conf_strategy["price_side"]
|
||||
|
||||
if price_side in ("same", "other"):
|
||||
|
@ -1962,7 +1963,7 @@ class Exchange:
|
|||
return rate
|
||||
|
||||
def _get_rate_from_ticker(
|
||||
self, side: EntryExit, ticker: Ticker, conf_strategy: Dict[str, Any], price_side: BidAsk
|
||||
self, side: EntryExit, ticker: Ticker, conf_strategy: dict[str, Any], price_side: BidAsk
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
Get rate from ticker.
|
||||
|
@ -2008,7 +2009,7 @@ class Exchange:
|
|||
)
|
||||
return rate
|
||||
|
||||
def get_rates(self, pair: str, refresh: bool, is_short: bool) -> Tuple[float, float]:
|
||||
def get_rates(self, pair: str, refresh: bool, is_short: bool) -> tuple[float, float]:
|
||||
entry_rate = None
|
||||
exit_rate = None
|
||||
if not refresh:
|
||||
|
@ -2042,8 +2043,8 @@ class Exchange:
|
|||
|
||||
@retrier
|
||||
def get_trades_for_order(
|
||||
self, order_id: str, pair: str, since: datetime, params: Optional[Dict] = None
|
||||
) -> List:
|
||||
self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
|
||||
) -> list:
|
||||
"""
|
||||
Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
|
||||
The "since" argument passed in is coming from the database and is in UTC,
|
||||
|
@ -2089,7 +2090,7 @@ class Exchange:
|
|||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
|
||||
def get_order_id_conditional(self, order: dict[str, Any]) -> str:
|
||||
return order["id"]
|
||||
|
||||
@retrier
|
||||
|
@ -2138,7 +2139,7 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
@staticmethod
|
||||
def order_has_fee(order: Dict) -> bool:
|
||||
def order_has_fee(order: dict) -> bool:
|
||||
"""
|
||||
Verifies if the passed in order dict has the needed keys to extract fees,
|
||||
and that these keys (currency, cost) are not empty.
|
||||
|
@ -2156,7 +2157,7 @@ class Exchange:
|
|||
)
|
||||
|
||||
def calculate_fee_rate(
|
||||
self, fee: Dict, symbol: str, cost: float, amount: float
|
||||
self, fee: dict, symbol: str, cost: float, amount: float
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
Calculate fee rate if it's not given by the exchange.
|
||||
|
@ -2196,8 +2197,8 @@ class Exchange:
|
|||
return round((fee_cost * fee_to_quote_rate) / cost, 8)
|
||||
|
||||
def extract_cost_curr_rate(
|
||||
self, fee: Dict, symbol: str, cost: float, amount: float
|
||||
) -> Tuple[float, str, Optional[float]]:
|
||||
self, fee: dict, symbol: str, cost: float, amount: float
|
||||
) -> tuple[float, str, Optional[float]]:
|
||||
"""
|
||||
Extract tuple of cost, currency, rate.
|
||||
Requires order_has_fee to run first!
|
||||
|
@ -2277,7 +2278,7 @@ class Exchange:
|
|||
for since in range(since_ms, until_ms or dt_ts(), one_call)
|
||||
]
|
||||
|
||||
data: List = []
|
||||
data: list = []
|
||||
# Chunk requests into batches of 100 to avoid overwhelming ccxt Throttling
|
||||
for input_coro in chunks(input_coroutines, 100):
|
||||
results = await asyncio.gather(*input_coro, return_exceptions=True)
|
||||
|
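The loop above gathers download coroutines in chunks of 100 so a single asyncio.gather never floods ccxt's throttler. A generic, self-contained sketch of that chunked-gather pattern (not freqtrade-specific):

    import asyncio
    from collections.abc import Coroutine
    from typing import Any


    def chunks(lst: list, n: int) -> list[list]:
        """Split a list into successive n-sized chunks."""
        return [lst[i:i + n] for i in range(0, len(lst), n)]


    async def gather_in_batches(coros: list[Coroutine[Any, Any, Any]], batch: int = 100) -> list:
        """Await coroutines in fixed-size batches, keeping per-call exceptions."""
        results: list = []
        for group in chunks(coros, batch):
            # return_exceptions=True lets a single failing download be handled
            # without cancelling the rest of the batch.
            results.extend(await asyncio.gather(*group, return_exceptions=True))
        return results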
@ -2371,11 +2372,11 @@ class Exchange:
|
|||
|
||||
def _build_ohlcv_dl_jobs(
|
||||
self, pair_list: ListPairsWithTimeframes, since_ms: Optional[int], cache: bool
|
||||
) -> Tuple[List[Coroutine], List[PairWithTimeframe]]:
|
||||
) -> tuple[list[Coroutine], list[PairWithTimeframe]]:
|
||||
"""
|
||||
Build Coroutines to execute as part of refresh_latest_ohlcv
|
||||
"""
|
||||
input_coroutines: List[Coroutine[Any, Any, OHLCVResponse]] = []
|
||||
input_coroutines: list[Coroutine[Any, Any, OHLCVResponse]] = []
|
||||
cached_pairs = []
|
||||
for pair, timeframe, candle_type in set(pair_list):
|
||||
if timeframe not in self.timeframes and candle_type in (
|
||||
|
@ -2411,7 +2412,7 @@ class Exchange:
|
|||
pair: str,
|
||||
timeframe: str,
|
||||
c_type: CandleType,
|
||||
ticks: List[List],
|
||||
ticks: list[list],
|
||||
cache: bool,
|
||||
drop_incomplete: bool,
|
||||
) -> DataFrame:
|
||||
|
@ -2450,7 +2451,7 @@ class Exchange:
|
|||
since_ms: Optional[int] = None,
|
||||
cache: bool = True,
|
||||
drop_incomplete: Optional[bool] = None,
|
||||
) -> Dict[PairWithTimeframe, DataFrame]:
|
||||
) -> dict[PairWithTimeframe, DataFrame]:
|
||||
"""
|
||||
Refresh in-memory OHLCV asynchronously and set `_klines` with the result
|
||||
Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
|
||||
|
@ -2499,8 +2500,8 @@ class Exchange:
|
|||
return results_df
|
||||
|
||||
def refresh_ohlcv_with_cache(
|
||||
self, pairs: List[PairWithTimeframe], since_ms: int
|
||||
) -> Dict[PairWithTimeframe, DataFrame]:
|
||||
self, pairs: list[PairWithTimeframe], since_ms: int
|
||||
) -> dict[PairWithTimeframe, DataFrame]:
|
||||
"""
|
||||
Refresh ohlcv data for all pairs in needed_pairs if necessary.
|
||||
Caches data with expiring per timeframe.
|
||||
|
@ -2618,7 +2619,7 @@ class Exchange:
|
|||
timeframe: str,
|
||||
limit: int,
|
||||
since_ms: Optional[int] = None,
|
||||
) -> List[List]:
|
||||
) -> list[list]:
|
||||
"""
|
||||
Fetch funding rate history - used to selectively override this by subclasses.
|
||||
"""
|
||||
|
@ -2652,7 +2653,7 @@ class Exchange:
|
|||
pair: str,
|
||||
timeframe: str,
|
||||
c_type: CandleType,
|
||||
ticks: List[List],
|
||||
ticks: list[list],
|
||||
cache: bool,
|
||||
first_required_candle_date: int,
|
||||
) -> DataFrame:
|
||||
|
@ -2676,13 +2677,13 @@ class Exchange:
|
|||
|
||||
async def _build_trades_dl_jobs(
|
||||
self, pairwt: PairWithTimeframe, data_handler, cache: bool
|
||||
) -> Tuple[PairWithTimeframe, Optional[DataFrame]]:
|
||||
) -> tuple[PairWithTimeframe, Optional[DataFrame]]:
|
||||
"""
|
||||
Build coroutines to refresh trades (they're then called through asyncio.gather)
|
||||
"""
|
||||
pair, timeframe, candle_type = pairwt
|
||||
since_ms = None
|
||||
new_ticks: List = []
|
||||
new_ticks: list = []
|
||||
all_stored_ticks_df = DataFrame(columns=DEFAULT_TRADES_COLUMNS + ["date"])
|
||||
first_candle_ms = self.needed_candle_for_trades_ms(timeframe, candle_type)
|
||||
# refresh, if
|
||||
|
@ -2767,7 +2768,7 @@ class Exchange:
|
|||
pair_list: ListPairsWithTimeframes,
|
||||
*,
|
||||
cache: bool = True,
|
||||
) -> Dict[PairWithTimeframe, DataFrame]:
|
||||
) -> dict[PairWithTimeframe, DataFrame]:
|
||||
"""
|
||||
Refresh in-memory TRADES asynchronously and set `_trades` with the result
|
||||
Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
|
||||
|
@ -2821,7 +2822,7 @@ class Exchange:
|
|||
@retrier_async
|
||||
async def _async_fetch_trades(
|
||||
self, pair: str, since: Optional[int] = None, params: Optional[dict] = None
|
||||
) -> Tuple[List[List], Any]:
|
||||
) -> tuple[list[list], Any]:
|
||||
"""
|
||||
Asynchronously gets trade history using fetch_trades.
|
||||
Handles exchange errors, does one call to the exchange.
|
||||
|
@ -2867,7 +2868,7 @@ class Exchange:
|
|||
"""
|
||||
return True
|
||||
|
||||
def _get_trade_pagination_next_value(self, trades: List[Dict]):
|
||||
def _get_trade_pagination_next_value(self, trades: list[dict]):
|
||||
"""
|
||||
Extract pagination id for the next "from_id" value
|
||||
Applies only to fetch_trade_history by id.
|
||||
|
@ -2881,7 +2882,7 @@ class Exchange:
|
|||
|
||||
async def _async_get_trade_history_id(
|
||||
self, pair: str, until: int, since: Optional[int] = None, from_id: Optional[str] = None
|
||||
) -> Tuple[str, List[List]]:
|
||||
) -> tuple[str, list[list]]:
|
||||
"""
|
||||
Asynchronously gets trade history using fetch_trades
|
||||
use this when exchange uses id-based iteration (check `self._trades_pagination`)
|
||||
|
@ -2892,7 +2893,7 @@ class Exchange:
|
|||
returns tuple: (pair, trades-list)
|
||||
"""
|
||||
|
||||
trades: List[List] = []
|
||||
trades: list[list] = []
|
||||
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp
|
||||
# DEFAULT_TRADES_COLUMNS: 1 -> id
|
||||
has_overlap = self._ft_has.get("trades_pagination_overlap", True)
|
||||
|
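For exchanges that page trade history by id, the next request's from_id is derived from the last trade of the previous page. A minimal sketch, assuming each raw trade row follows the DEFAULT_TRADES_COLUMNS order hinted at in the comments above (timestamp first, id second); the real helper may read a dict field instead:

    from typing import Optional


    def next_from_id(trades: list[list]) -> Optional[str]:
        """Pagination id for the next fetch_trades call (sketch)."""
        if not trades:
            return None
        # Row layout assumed as [timestamp, id, ...]; the last row's id seeds the next page.
        return trades[-1][1]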
@ -2936,7 +2937,7 @@ class Exchange:
|
|||
|
||||
async def _async_get_trade_history_time(
|
||||
self, pair: str, until: int, since: Optional[int] = None
|
||||
) -> Tuple[str, List[List]]:
|
||||
) -> tuple[str, list[list]]:
|
||||
"""
|
||||
Asynchronously gets trade history using fetch_trades,
|
||||
when the exchange uses time-based iteration (check `self._trades_pagination`)
|
||||
|
@ -2946,7 +2947,7 @@ class Exchange:
|
|||
returns tuple: (pair, trades-list)
|
||||
"""
|
||||
|
||||
trades: List[List] = []
|
||||
trades: list[list] = []
|
||||
# DEFAULT_TRADES_COLUMNS: 0 -> timestamp
|
||||
# DEFAULT_TRADES_COLUMNS: 1 -> id
|
||||
while True:
|
||||
|
@ -2979,7 +2980,7 @@ class Exchange:
|
|||
since: Optional[int] = None,
|
||||
until: Optional[int] = None,
|
||||
from_id: Optional[str] = None,
|
||||
) -> Tuple[str, List[List]]:
|
||||
) -> tuple[str, list[list]]:
|
||||
"""
|
||||
Async wrapper handling downloading trades using either time or id based methods.
|
||||
"""
|
||||
|
@ -3010,7 +3011,7 @@ class Exchange:
|
|||
since: Optional[int] = None,
|
||||
until: Optional[int] = None,
|
||||
from_id: Optional[str] = None,
|
||||
) -> Tuple[str, List]:
|
||||
) -> tuple[str, list]:
|
||||
"""
|
||||
Get trade history data using asyncio.
|
||||
Handles all async work and returns the list of candles.
|
||||
|
@ -3070,7 +3071,7 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
@retrier
|
||||
def get_leverage_tiers(self) -> Dict[str, List[Dict]]:
|
||||
def get_leverage_tiers(self) -> dict[str, list[dict]]:
|
||||
try:
|
||||
return self._api.fetch_leverage_tiers()
|
||||
except ccxt.DDoSProtection as e:
|
||||
|
@ -3083,7 +3084,7 @@ class Exchange:
|
|||
raise OperationalException(e) from e
|
||||
|
||||
@retrier_async
|
||||
async def get_market_leverage_tiers(self, symbol: str) -> Tuple[str, List[Dict]]:
|
||||
async def get_market_leverage_tiers(self, symbol: str) -> tuple[str, list[dict]]:
|
||||
"""Leverage tiers per symbol"""
|
||||
try:
|
||||
tier = await self._api_async.fetch_market_leverage_tiers(symbol)
|
||||
|
@ -3098,7 +3099,7 @@ class Exchange:
|
|||
except ccxt.BaseError as e:
|
||||
raise OperationalException(e) from e
|
||||
|
||||
def load_leverage_tiers(self) -> Dict[str, List[Dict]]:
|
||||
def load_leverage_tiers(self) -> dict[str, list[dict]]:
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
if self.exchange_has("fetchLeverageTiers"):
|
||||
# Fetch all leverage tiers at once
|
||||
|
@ -3117,7 +3118,7 @@ class Exchange:
|
|||
)
|
||||
]
|
||||
|
||||
tiers: Dict[str, List[Dict]] = {}
|
||||
tiers: dict[str, list[dict]] = {}
|
||||
|
||||
tiers_cached = self.load_cached_leverage_tiers(self._config["stake_currency"])
|
||||
if tiers_cached:
|
||||
|
@ -3158,7 +3159,7 @@ class Exchange:
|
|||
return tiers
|
||||
return {}
|
||||
|
||||
def cache_leverage_tiers(self, tiers: Dict[str, List[Dict]], stake_currency: str) -> None:
|
||||
def cache_leverage_tiers(self, tiers: dict[str, list[dict]], stake_currency: str) -> None:
|
||||
filename = self._config["datadir"] / "futures" / f"leverage_tiers_{stake_currency}.json"
|
||||
if not filename.parent.is_dir():
|
||||
filename.parent.mkdir(parents=True)
|
||||
|
@ -3170,7 +3171,7 @@ class Exchange:
|
|||
|
||||
def load_cached_leverage_tiers(
|
||||
self, stake_currency: str, cache_time: Optional[timedelta] = None
|
||||
) -> Optional[Dict[str, List[Dict]]]:
|
||||
) -> Optional[dict[str, list[dict]]]:
|
||||
"""
|
||||
Load cached leverage tiers from disk
|
||||
:param cache_time: The maximum age of the cache before it is considered outdated
|
||||
|
@ -3188,7 +3189,7 @@ class Exchange:
|
|||
if updated_dt < datetime.now(timezone.utc) - cache_time:
|
||||
logger.info("Cached leverage tiers are outdated. Will update.")
|
||||
return None
|
||||
return tiers["data"]
|
||||
return tiers.get("data")
|
||||
except Exception:
|
||||
logger.exception("Error loading cached leverage tiers. Refreshing.")
|
||||
return None
|
||||
|
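The switch from tiers["data"] to tiers.get("data") makes cache loading tolerant of an older or partial cache file. A sketch of the cache-read path, assuming a JSON document with "updated" (POSIX timestamp) and "data" keys; the real file layout and parser may differ:

    import json
    from datetime import datetime, timedelta, timezone
    from pathlib import Path
    from typing import Any, Optional


    def load_cached_tiers(path: Path, max_age: timedelta) -> Optional[dict[str, Any]]:
        """Return cached leverage tiers, or None when missing or outdated (sketch)."""
        if not path.is_file():
            return None
        tiers = json.loads(path.read_text())
        updated = datetime.fromtimestamp(tiers.get("updated", 0), tz=timezone.utc)
        if updated < datetime.now(timezone.utc) - max_age:
            # Too old: force a refresh from the exchange.
            return None
        # .get() instead of ["data"]: an old cache without the key yields None
        # rather than raising KeyError.
        return tiers.get("data")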
@ -3205,7 +3206,7 @@ class Exchange:
|
|||
pair_tiers.append(self.parse_leverage_tier(tier))
|
||||
self._leverage_tiers[pair] = pair_tiers
|
||||
|
||||
def parse_leverage_tier(self, tier) -> Dict:
|
||||
def parse_leverage_tier(self, tier) -> dict:
|
||||
info = tier.get("info", {})
|
||||
return {
|
||||
"minNotional": tier["minNotional"],
|
||||
|
@ -3345,7 +3346,7 @@ class Exchange:
|
|||
pair: str,
|
||||
margin_mode: MarginMode,
|
||||
accept_fail: bool = False,
|
||||
params: Optional[Dict] = None,
|
||||
params: Optional[dict] = None,
|
||||
):
|
||||
"""
|
||||
Sets the margin mode on the exchange to cross or isolated for a specific pair
|
||||
|
@ -3532,8 +3533,7 @@ class Exchange:
|
|||
stake_amount: float,
|
||||
leverage: float,
|
||||
wallet_balance: float,
|
||||
mm_ex_1: float = 0.0, # (Binance) Cross only
|
||||
upnl_ex_1: float = 0.0, # (Binance) Cross only
|
||||
open_trades: Optional[list] = None,
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
Set's the margin mode on the exchange to cross or isolated for a specific pair
|
||||
|
@ -3555,8 +3555,7 @@ class Exchange:
|
|||
leverage=leverage,
|
||||
stake_amount=stake_amount,
|
||||
wallet_balance=wallet_balance,
|
||||
mm_ex_1=mm_ex_1,
|
||||
upnl_ex_1=upnl_ex_1,
|
||||
open_trades=open_trades or [],
|
||||
)
|
||||
else:
|
||||
positions = self.fetch_positions(pair)
|
||||
|
@ -3582,8 +3581,7 @@ class Exchange:
|
|||
stake_amount: float,
|
||||
leverage: float,
|
||||
wallet_balance: float, # Or margin balance
|
||||
mm_ex_1: float = 0.0, # (Binance) Cross only
|
||||
upnl_ex_1: float = 0.0, # (Binance) Cross only
|
||||
open_trades: list,
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
Important: Must be fetching data from cached values as this is used by backtesting!
|
||||
|
@ -3608,10 +3606,7 @@ class Exchange:
|
|||
:param wallet_balance: Amount of margin_mode in the wallet being used to trade
|
||||
Cross-Margin Mode: crossWalletBalance
|
||||
Isolated-Margin Mode: isolatedWalletBalance
|
||||
|
||||
# * Not required by Gate or OKX
|
||||
:param mm_ex_1:
|
||||
:param upnl_ex_1:
|
||||
:param open_trades: List of other open trades in the same wallet
|
||||
"""
|
||||
|
||||
market = self.markets[pair]
|
||||
|
@ -3638,7 +3633,7 @@ class Exchange:
|
|||
self,
|
||||
pair: str,
|
||||
notional_value: float,
|
||||
) -> Tuple[float, Optional[float]]:
|
||||
) -> tuple[float, Optional[float]]:
|
||||
"""
|
||||
Important: Must be fetching data from cached values as this is used by backtesting!
|
||||
:param pair: Market symbol
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
from typing import Dict, List, Optional, Tuple, TypedDict
|
||||
from typing import Optional, TypedDict
|
||||
|
||||
from freqtrade.enums import CandleType
|
||||
|
||||
|
||||
class FtHas(TypedDict, total=False):
|
||||
order_time_in_force: List[str]
|
||||
exchange_has_overrides: Dict[str, bool]
|
||||
order_time_in_force: list[str]
|
||||
exchange_has_overrides: dict[str, bool]
|
||||
marketOrderRequiresPrice: bool
|
||||
|
||||
# Stoploss on exchange
|
||||
|
@ -13,16 +13,16 @@ class FtHas(TypedDict, total=False):
|
|||
stop_price_param: str
|
||||
stop_price_prop: str
|
||||
stop_price_type_field: str
|
||||
stop_price_type_value_mapping: Dict
|
||||
stoploss_order_types: Dict[str, str]
|
||||
stop_price_type_value_mapping: dict
|
||||
stoploss_order_types: dict[str, str]
|
||||
# ohlcv
|
||||
ohlcv_params: Dict
|
||||
ohlcv_params: dict
|
||||
ohlcv_candle_limit: int
|
||||
ohlcv_has_history: bool
|
||||
ohlcv_partial_candle: bool
|
||||
ohlcv_require_since: bool
|
||||
ohlcv_volume_currency: str
|
||||
ohlcv_candle_limit_per_timeframe: Dict[str, int]
|
||||
ohlcv_candle_limit_per_timeframe: dict[str, int]
|
||||
# Tickers
|
||||
tickers_have_quoteVolume: bool
|
||||
tickers_have_percentage: bool
|
||||
|
@ -35,7 +35,7 @@ class FtHas(TypedDict, total=False):
|
|||
trades_has_history: bool
|
||||
trades_pagination_overlap: bool
|
||||
# Orderbook
|
||||
l2_limit_range: Optional[List[int]]
|
||||
l2_limit_range: Optional[list[int]]
|
||||
l2_limit_range_required: bool
|
||||
# Futures
|
||||
ccxt_futures_name: str # usually swap
|
||||
|
@ -44,7 +44,7 @@ class FtHas(TypedDict, total=False):
|
|||
funding_fee_timeframe: str
|
||||
floor_leverage: bool
|
||||
needs_trading_fees: bool
|
||||
order_props_in_contracts: List[str]
|
||||
order_props_in_contracts: list[str]
|
||||
|
||||
# Websocket control
|
||||
ws_enabled: bool
|
||||
|
@ -63,13 +63,13 @@ class Ticker(TypedDict):
|
|||
# Several more - only listing required.
|
||||
|
||||
|
||||
Tickers = Dict[str, Ticker]
|
||||
Tickers = dict[str, Ticker]
|
||||
|
||||
|
||||
class OrderBook(TypedDict):
|
||||
symbol: str
|
||||
bids: List[Tuple[float, float]]
|
||||
asks: List[Tuple[float, float]]
|
||||
bids: list[tuple[float, float]]
|
||||
asks: list[tuple[float, float]]
|
||||
timestamp: Optional[int]
|
||||
datetime: Optional[str]
|
||||
nonce: Optional[int]
|
||||
|
@ -81,7 +81,7 @@ class CcxtBalance(TypedDict):
|
|||
total: float
|
||||
|
||||
|
||||
CcxtBalances = Dict[str, CcxtBalance]
|
||||
CcxtBalances = dict[str, CcxtBalance]
|
||||
|
||||
|
||||
class CcxtPosition(TypedDict):
|
||||
|
@ -95,4 +95,4 @@ class CcxtPosition(TypedDict):
|
|||
|
||||
|
||||
# pair, timeframe, candleType, OHLCV, drop last?,
|
||||
OHLCVResponse = Tuple[str, str, CandleType, List, bool]
|
||||
OHLCVResponse = tuple[str, str, CandleType, list, bool]
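The recurring change throughout this diff replaces typing.Dict/List/Tuple/Type with the builtin generics standardised by PEP 585, which every Python version the project still targets supports. A tiny before/after sketch with illustrative names only:

    # Before: container aliases imported from typing.
    # from typing import Dict, List, Tuple
    # def get_fees() -> Dict[str, List[Tuple[str, float]]]: ...

    # After: builtin generics (PEP 585), no typing import needed for containers.
    def get_fees() -> dict[str, list[tuple[str, float]]]:
        """Illustrative only: per-market lists of (fee_name, rate) pairs."""
        return {"BTC/USDT": [("maker", 0.001), ("taker", 0.001)]}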
|
||||
|
|
|
@ -5,7 +5,7 @@ Exchange support utils
|
|||
import inspect
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import ceil, floor
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Optional
|
||||
|
||||
import ccxt
|
||||
from ccxt import (
|
||||
|
@ -39,14 +39,14 @@ def is_exchange_known_ccxt(
|
|||
return exchange_name in ccxt_exchanges(ccxt_module)
|
||||
|
||||
|
||||
def ccxt_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[str]:
|
||||
def ccxt_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> list[str]:
|
||||
"""
|
||||
Return the list of all exchanges known to ccxt
|
||||
"""
|
||||
return ccxt_module.exchanges if ccxt_module is not None else ccxt.exchanges
|
||||
|
||||
|
||||
def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[str]:
|
||||
def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> list[str]:
|
||||
"""
|
||||
Return exchanges available to the bot, i.e. non-bad exchanges in the ccxt list
|
||||
"""
|
||||
|
@ -54,7 +54,7 @@ def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> List[st
|
|||
return [x for x in exchanges if validate_exchange(x)[0]]
|
||||
|
||||
|
||||
def validate_exchange(exchange: str) -> Tuple[bool, str, Optional[ccxt.Exchange]]:
|
||||
def validate_exchange(exchange: str) -> tuple[bool, str, Optional[ccxt.Exchange]]:
|
||||
"""
|
||||
returns: can_use, reason, exchange_object
|
||||
with Reason including both missing and missing_opt
|
||||
|
@ -91,7 +91,7 @@ def validate_exchange(exchange: str) -> Tuple[bool, str, Optional[ccxt.Exchange]
|
|||
|
||||
|
||||
def _build_exchange_list_entry(
|
||||
exchange_name: str, exchangeClasses: Dict[str, Any]
|
||||
exchange_name: str, exchangeClasses: dict[str, Any]
|
||||
) -> ValidExchangesType:
|
||||
valid, comment, ex_mod = validate_exchange(exchange_name)
|
||||
result: ValidExchangesType = {
|
||||
|
@ -121,7 +121,7 @@ def _build_exchange_list_entry(
|
|||
return result
|
||||
|
||||
|
||||
def list_available_exchanges(all_exchanges: bool) -> List[ValidExchangesType]:
|
||||
def list_available_exchanges(all_exchanges: bool) -> list[ValidExchangesType]:
|
||||
"""
|
||||
:return: List of tuples with exchangename, valid, reason.
|
||||
"""
|
||||
|
@ -130,7 +130,7 @@ def list_available_exchanges(all_exchanges: bool) -> List[ValidExchangesType]:
|
|||
|
||||
subclassed = {e["name"].lower(): e for e in ExchangeResolver.search_all_objects({}, False)}
|
||||
|
||||
exchanges_valid: List[ValidExchangesType] = [
|
||||
exchanges_valid: list[ValidExchangesType] = [
|
||||
_build_exchange_list_entry(e, subclassed) for e in exchanges
|
||||
]
|
||||
|
||||
|
@ -155,7 +155,7 @@ def date_minus_candles(
|
|||
return new_date
|
||||
|
||||
|
||||
def market_is_active(market: Dict) -> bool:
|
||||
def market_is_active(market: dict) -> bool:
|
||||
"""
|
||||
Return True if the market is active.
|
||||
"""
|
||||
|
|
|
@ -4,7 +4,6 @@ import time
|
|||
from copy import deepcopy
|
||||
from functools import partial
|
||||
from threading import Thread
|
||||
from typing import Dict, Set
|
||||
|
||||
import ccxt
|
||||
|
||||
|
@ -22,12 +21,12 @@ class ExchangeWS:
|
|||
def __init__(self, config: Config, ccxt_object: ccxt.Exchange) -> None:
|
||||
self.config = config
|
||||
self.ccxt_object = ccxt_object
|
||||
self._background_tasks: Set[asyncio.Task] = set()
|
||||
self._background_tasks: set[asyncio.Task] = set()
|
||||
|
||||
self._klines_watching: Set[PairWithTimeframe] = set()
|
||||
self._klines_scheduled: Set[PairWithTimeframe] = set()
|
||||
self.klines_last_refresh: Dict[PairWithTimeframe, float] = {}
|
||||
self.klines_last_request: Dict[PairWithTimeframe, float] = {}
|
||||
self._klines_watching: set[PairWithTimeframe] = set()
|
||||
self._klines_scheduled: set[PairWithTimeframe] = set()
|
||||
self.klines_last_refresh: dict[PairWithTimeframe, float] = {}
|
||||
self.klines_last_request: dict[PairWithTimeframe, float] = {}
|
||||
self._thread = Thread(name="ccxt_ws", target=self._start_forever)
|
||||
self._thread.start()
|
||||
self.__cleanup_called = False
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Optional
|
||||
|
||||
from freqtrade.constants import BuySell
|
||||
from freqtrade.enums import MarginMode, PriceType, TradingMode
|
||||
|
@ -46,7 +46,7 @@ class Gate(Exchange):
|
|||
},
|
||||
}
|
||||
|
||||
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
|
||||
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
|
||||
# TradingMode.SPOT always supported and not required in this list
|
||||
# (TradingMode.MARGIN, MarginMode.CROSS),
|
||||
# (TradingMode.FUTURES, MarginMode.CROSS),
|
||||
|
@ -60,7 +60,7 @@ class Gate(Exchange):
|
|||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
) -> dict:
|
||||
params = super()._get_params(
|
||||
side=side,
|
||||
ordertype=ordertype,
|
||||
|
@ -74,8 +74,8 @@ class Gate(Exchange):
|
|||
return params
|
||||
|
||||
def get_trades_for_order(
|
||||
self, order_id: str, pair: str, since: datetime, params: Optional[Dict] = None
|
||||
) -> List:
|
||||
self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
|
||||
) -> list:
|
||||
trades = super().get_trades_for_order(order_id, pair, since, params)
|
||||
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
|
@ -99,10 +99,10 @@ class Gate(Exchange):
|
|||
}
|
||||
return trades
|
||||
|
||||
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
|
||||
def get_order_id_conditional(self, order: dict[str, Any]) -> str:
|
||||
return safe_value_fallback2(order, order, "id_stop", "id")
|
||||
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
|
||||
order = self.fetch_order(order_id=order_id, pair=pair, params={"stop": True})
|
||||
if order.get("status", "open") == "closed":
|
||||
# Places a real order - which we need to fetch explicitly.
|
||||
|
@ -120,6 +120,6 @@ class Gate(Exchange):
|
|||
return order
|
||||
|
||||
def cancel_stoploss_order(
|
||||
self, order_id: str, pair: str, params: Optional[Dict] = None
|
||||
) -> Dict:
|
||||
self, order_id: str, pair: str, params: Optional[dict] = None
|
||||
) -> dict:
|
||||
return self.cancel_order(order_id=order_id, pair=pair, params={"stop": True})
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
"""HTX exchange subclass"""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
from freqtrade.constants import BuySell
|
||||
from freqtrade.exchange import Exchange
|
||||
|
@ -32,7 +31,7 @@ class Htx(Exchange):
|
|||
"trades_has_history": False, # Endpoint doesn't have a "since" parameter
|
||||
}
|
||||
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
|
||||
params = self._params.copy()
|
||||
params.update(
|
||||
{
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
"""Hyperliquid exchange subclass"""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
from ccxt import SIGNIFICANT_DIGITS
|
||||
|
||||
from freqtrade.enums import TradingMode
|
||||
from freqtrade.exchange import Exchange
|
||||
|
@ -28,7 +25,7 @@ class Hyperliquid(Exchange):
|
|||
}
|
||||
|
||||
@property
|
||||
def _ccxt_config(self) -> Dict:
|
||||
def _ccxt_config(self) -> dict:
|
||||
# Parameters to add directly to ccxt sync/async initialization.
|
||||
# ccxt defaults to swap mode.
|
||||
config = {}
|
||||
|
@ -36,10 +33,3 @@ class Hyperliquid(Exchange):
|
|||
config.update({"options": {"defaultType": "spot"}})
|
||||
config.update(super()._ccxt_config)
|
||||
return config
|
||||
|
||||
@property
|
||||
def precision_mode_price(self) -> int:
|
||||
"""
|
||||
Override the default precision mode for price.
|
||||
"""
|
||||
return SIGNIFICANT_DIGITS
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Optional
|
||||
|
||||
import ccxt
|
||||
from pandas import DataFrame
|
||||
|
@ -19,7 +19,7 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
class Kraken(Exchange):
|
||||
_params: Dict = {"trading_agreement": "agree"}
|
||||
_params: dict = {"trading_agreement": "agree"}
|
||||
_ft_has: FtHas = {
|
||||
"stoploss_on_exchange": True,
|
||||
"stop_price_param": "stopLossPrice",
|
||||
|
@ -35,13 +35,13 @@ class Kraken(Exchange):
|
|||
"mark_ohlcv_timeframe": "4h",
|
||||
}
|
||||
|
||||
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
|
||||
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
|
||||
# TradingMode.SPOT always supported and not required in this list
|
||||
# (TradingMode.MARGIN, MarginMode.CROSS),
|
||||
# (TradingMode.FUTURES, MarginMode.CROSS)
|
||||
]
|
||||
|
||||
def market_is_tradable(self, market: Dict[str, Any]) -> bool:
|
||||
def market_is_tradable(self, market: dict[str, Any]) -> bool:
|
||||
"""
|
||||
Check if the market symbol is tradable by Freqtrade.
|
||||
Default checks + check if pair is darkpool pair.
|
||||
|
@ -50,7 +50,7 @@ class Kraken(Exchange):
|
|||
|
||||
return parent_check and market.get("darkpool", False) is False
|
||||
|
||||
def get_tickers(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Tickers:
|
||||
def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
|
||||
# Only fetch tickers for current stake currency
|
||||
# Otherwise the request for kraken becomes too large.
|
||||
symbols = list(self.get_markets(quote_currencies=[self._config["stake_currency"]]))
|
||||
|
@ -115,7 +115,7 @@ class Kraken(Exchange):
|
|||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
) -> dict:
|
||||
params = super()._get_params(
|
||||
side=side,
|
||||
ordertype=ordertype,
|
||||
|
@ -165,7 +165,7 @@ class Kraken(Exchange):
|
|||
|
||||
return fees if is_short else -fees
|
||||
|
||||
def _get_trade_pagination_next_value(self, trades: List[Dict]):
|
||||
def _get_trade_pagination_next_value(self, trades: list[dict]):
|
||||
"""
|
||||
Extract pagination id for the next "from_id" value
|
||||
Applies only to fetch_trade_history by id.
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
"""Kucoin exchange subclass."""
|
||||
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
from freqtrade.constants import BuySell
|
||||
from freqtrade.exchange import Exchange
|
||||
|
@ -32,7 +31,7 @@ class Kucoin(Exchange):
|
|||
"ohlcv_candle_limit": 1500,
|
||||
}
|
||||
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
|
||||
params = self._params.copy()
|
||||
params.update({"stopPrice": stop_price, "stop": "loss"})
|
||||
return params
|
||||
|
@ -48,7 +47,7 @@ class Kucoin(Exchange):
|
|||
leverage: float,
|
||||
reduceOnly: bool = False,
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
) -> dict:
|
||||
res = super().create_order(
|
||||
pair=pair,
|
||||
ordertype=ordertype,
|
||||
|
|
freqtrade/exchange/lbank.py (new file, 21 lines)
|
@ -0,0 +1,21 @@
|
|||
"""Lbank exchange subclass"""
|
||||
|
||||
import logging
|
||||
|
||||
from freqtrade.exchange import Exchange
|
||||
from freqtrade.exchange.exchange_types import FtHas
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Lbank(Exchange):
|
||||
"""
|
||||
Lbank exchange class. Contains adjustments needed for Freqtrade to work
|
||||
with this exchange.
|
||||
"""
|
||||
|
||||
_ft_has: FtHas = {
|
||||
"ohlcv_candle_limit": 1998, # lower than the allowed 2000 to avoid current_candle issue
|
||||
"trades_has_history": False,
|
||||
}
|
|
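The new Lbank subclass only tightens ohlcv_candle_limit and disables trade-history pagination; everything else comes from the base Exchange class. A hedged sketch of how such an exchange is selected in a bot configuration (the "exchange" section keys are the usual ones; credentials and the pair list are placeholders):

    config_fragment = {
        "exchange": {
            "name": "lbank",           # resolved to the Lbank subclass above
            "key": "<api-key>",        # placeholder
            "secret": "<api-secret>",  # placeholder
            "pair_whitelist": ["BTC/USDT"],
        }
    }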
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Optional
|
||||
|
||||
import ccxt
|
||||
|
||||
|
@ -48,7 +48,7 @@ class Okx(Exchange):
|
|||
"ws_enabled": True,
|
||||
}
|
||||
|
||||
_supported_trading_mode_margin_pairs: List[Tuple[TradingMode, MarginMode]] = [
|
||||
_supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
|
||||
# TradingMode.SPOT always supported and not required in this list
|
||||
# (TradingMode.MARGIN, MarginMode.CROSS),
|
||||
# (TradingMode.FUTURES, MarginMode.CROSS),
|
||||
|
@ -57,7 +57,7 @@ class Okx(Exchange):
|
|||
|
||||
net_only = True
|
||||
|
||||
_ccxt_params: Dict = {"options": {"brokerId": "ffb5405ad327SUDE"}}
|
||||
_ccxt_params: dict = {"options": {"brokerId": "ffb5405ad327SUDE"}}
|
||||
|
||||
def ohlcv_candle_limit(
|
||||
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
|
||||
|
@ -119,7 +119,7 @@ class Okx(Exchange):
|
|||
leverage: float,
|
||||
reduceOnly: bool,
|
||||
time_in_force: str = "GTC",
|
||||
) -> Dict:
|
||||
) -> dict:
|
||||
params = super()._get_params(
|
||||
side=side,
|
||||
ordertype=ordertype,
|
||||
|
@ -184,14 +184,14 @@ class Okx(Exchange):
|
|||
pair_tiers = self._leverage_tiers[pair]
|
||||
return pair_tiers[-1]["maxNotional"] / leverage
|
||||
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> Dict:
|
||||
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
|
||||
params = super()._get_stop_params(side, ordertype, stop_price)
|
||||
if self.trading_mode == TradingMode.FUTURES and self.margin_mode:
|
||||
params["tdMode"] = self.margin_mode.value
|
||||
params["posSide"] = self._get_posSide(side, True)
|
||||
return params
|
||||
|
||||
def _convert_stop_order(self, pair: str, order_id: str, order: Dict) -> Dict:
|
||||
def _convert_stop_order(self, pair: str, order_id: str, order: dict) -> dict:
|
||||
if (
|
||||
order.get("status", "open") == "closed"
|
||||
and (real_order_id := order.get("info", {}).get("ordId")) is not None
|
||||
|
@ -209,7 +209,7 @@ class Okx(Exchange):
|
|||
return order
|
||||
|
||||
@retrier(retries=API_RETRY_COUNT)
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[Dict] = None) -> Dict:
|
||||
def fetch_stoploss_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> dict:
|
||||
if self._config["dry_run"]:
|
||||
return self.fetch_dry_run_order(order_id)
|
||||
|
||||
|
@ -231,7 +231,7 @@ class Okx(Exchange):
|
|||
|
||||
return self._fetch_stop_order_fallback(order_id, pair)
|
||||
|
||||
def _fetch_stop_order_fallback(self, order_id: str, pair: str) -> Dict:
|
||||
def _fetch_stop_order_fallback(self, order_id: str, pair: str) -> dict:
|
||||
params2 = {"stop": True, "ordType": "conditional"}
|
||||
for method in (
|
||||
self._api.fetch_open_orders,
|
||||
|
@ -256,14 +256,14 @@ class Okx(Exchange):
|
|||
raise OperationalException(e) from e
|
||||
raise RetryableOrderError(f"StoplossOrder not found (pair: {pair} id: {order_id}).")
|
||||
|
||||
def get_order_id_conditional(self, order: Dict[str, Any]) -> str:
|
||||
def get_order_id_conditional(self, order: dict[str, Any]) -> str:
|
||||
if order.get("type", "") == "stop":
|
||||
return safe_value_fallback2(order, order, "id_stop", "id")
|
||||
return order["id"]
|
||||
|
||||
def cancel_stoploss_order(
|
||||
self, order_id: str, pair: str, params: Optional[Dict] = None
|
||||
) -> Dict:
|
||||
self, order_id: str, pair: str, params: Optional[dict] = None
|
||||
) -> dict:
|
||||
params1 = {"stop": True}
|
||||
# 'ordType': 'conditional'
|
||||
#
|
||||
|
@ -273,7 +273,7 @@ class Okx(Exchange):
|
|||
params=params1,
|
||||
)
|
||||
|
||||
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> List[Dict]:
|
||||
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> list[dict]:
|
||||
orders = []
|
||||
|
||||
orders = self._api.fetch_closed_orders(pair, since=since_ms)
|
||||
|
|
|
@ -2,7 +2,7 @@ import logging
|
|||
import random
|
||||
from abc import abstractmethod
|
||||
from enum import Enum
|
||||
from typing import List, Optional, Type, Union
|
||||
from typing import Optional, Union
|
||||
|
||||
import gymnasium as gym
|
||||
import numpy as np
|
||||
|
@ -89,7 +89,7 @@ class BaseEnvironment(gym.Env):
|
|||
self.fee = fee
|
||||
|
||||
# set here to default 5Ac, but all children envs can override this
|
||||
self.actions: Type[Enum] = BaseActions
|
||||
self.actions: type[Enum] = BaseActions
|
||||
self.tensorboard_metrics: dict = {}
|
||||
self.can_short: bool = can_short
|
||||
self.live: bool = live
|
||||
|
@ -163,7 +163,7 @@ class BaseEnvironment(gym.Env):
|
|||
Unique to the environment action count. Must be inherited.
|
||||
"""
|
||||
|
||||
def action_masks(self) -> List[bool]:
|
||||
def action_masks(self) -> list[bool]:
|
||||
return [self._is_valid(action.value) for action in self.actions]
|
||||
|
||||
def seed(self, seed: int = 1):
|
||||
|
@ -375,7 +375,7 @@ class BaseEnvironment(gym.Env):
|
|||
def current_price(self) -> float:
|
||||
return self.prices.iloc[self._current_tick].open
|
||||
|
||||
def get_actions(self) -> Type[Enum]:
|
||||
def get_actions(self) -> type[Enum]:
|
||||
"""
|
||||
Used by SubprocVecEnv to get actions from
|
||||
initialized env for tensorboard callback
|
||||
|
|
|
@ -4,7 +4,7 @@ import logging
|
|||
from abc import abstractmethod
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
|
||||
from typing import Any, Callable, Optional, Union
|
||||
|
||||
import gymnasium as gym
|
||||
import numpy as np
|
||||
|
@ -114,7 +114,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
training_filter=True,
|
||||
)
|
||||
|
||||
dd: Dict[str, Any] = dk.make_train_test_datasets(features_filtered, labels_filtered)
|
||||
dd: dict[str, Any] = dk.make_train_test_datasets(features_filtered, labels_filtered)
|
||||
self.df_raw = copy.deepcopy(dd["train_features"])
|
||||
dk.fit_labels() # FIXME useless for now, but just satiating append methods
|
||||
|
||||
|
@ -151,7 +151,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
|
||||
def set_train_and_eval_environments(
|
||||
self,
|
||||
data_dictionary: Dict[str, DataFrame],
|
||||
data_dictionary: dict[str, DataFrame],
|
||||
prices_train: DataFrame,
|
||||
prices_test: DataFrame,
|
||||
dk: FreqaiDataKitchen,
|
||||
|
@ -183,7 +183,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
actions = self.train_env.get_actions()
|
||||
self.tensorboard_callback = TensorboardCallback(verbose=1, actions=actions)
|
||||
|
||||
def pack_env_dict(self, pair: str) -> Dict[str, Any]:
|
||||
def pack_env_dict(self, pair: str) -> dict[str, Any]:
|
||||
"""
|
||||
Create dictionary of environment arguments
|
||||
"""
|
||||
|
@ -204,7 +204,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
return env_info
|
||||
|
||||
@abstractmethod
|
||||
def fit(self, data_dictionary: Dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
|
||||
def fit(self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
|
||||
"""
|
||||
Agent customizations and abstract Reinforcement Learning customizations
|
||||
go in here. Abstract method, so this function must be overridden by
|
||||
|
@ -212,7 +212,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
"""
|
||||
return
|
||||
|
||||
def get_state_info(self, pair: str) -> Tuple[float, float, int]:
|
||||
def get_state_info(self, pair: str) -> tuple[float, float, int]:
|
||||
"""
|
||||
State info during dry/live (not backtesting) which is fed back
|
||||
into the model.
|
||||
|
@ -250,7 +250,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
|
||||
def predict(
|
||||
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
|
||||
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
"""
|
||||
Filter the prediction features data and predict with it.
|
||||
:param unfiltered_dataframe: Full dataframe for the current backtest period.
|
||||
|
@ -303,7 +303,7 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
|
||||
def build_ohlc_price_dataframes(
|
||||
self, data_dictionary: dict, pair: str, dk: FreqaiDataKitchen
|
||||
) -> Tuple[DataFrame, DataFrame]:
|
||||
) -> tuple[DataFrame, DataFrame]:
|
||||
"""
|
||||
Builds the train prices and test prices for the environment.
|
||||
"""
|
||||
|
@ -482,13 +482,13 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
|
||||
|
||||
def make_env(
|
||||
MyRLEnv: Type[BaseEnvironment],
|
||||
MyRLEnv: type[BaseEnvironment],
|
||||
env_id: str,
|
||||
rank: int,
|
||||
seed: int,
|
||||
train_df: DataFrame,
|
||||
price: DataFrame,
|
||||
env_info: Dict[str, Any] = {},
|
||||
env_info: dict[str, Any] = {},
|
||||
) -> Callable:
|
||||
"""
|
||||
Utility function for multiprocessed env.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from time import time
|
||||
from typing import Any, Tuple
|
||||
from typing import Any
|
||||
|
||||
import numpy as np
|
||||
import numpy.typing as npt
|
||||
|
@ -86,7 +86,7 @@ class BaseClassifierModel(IFreqaiModel):
|
|||
|
||||
def predict(
|
||||
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
|
||||
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
"""
|
||||
Filter the prediction features data and predict with it.
|
||||
:param unfiltered_df: Full dataframe for the current backtest period.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from time import time
|
||||
from typing import Any, Dict, List, Tuple
|
||||
from typing import Any
|
||||
|
||||
import numpy as np
|
||||
import numpy.typing as npt
|
||||
|
@ -39,12 +39,12 @@ class BasePyTorchClassifier(BasePyTorchModel):
|
|||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.class_name_to_index = None
|
||||
self.index_to_class_name = None
|
||||
self.class_name_to_index = {}
|
||||
self.index_to_class_name = {}
|
||||
|
||||
def predict(
|
||||
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
|
||||
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
"""
|
||||
Filter the prediction features data and predict with it.
|
||||
:param dk: The datakitchen object
|
||||
|
@ -100,9 +100,9 @@ class BasePyTorchClassifier(BasePyTorchModel):
|
|||
|
||||
def encode_class_names(
|
||||
self,
|
||||
data_dictionary: Dict[str, pd.DataFrame],
|
||||
data_dictionary: dict[str, pd.DataFrame],
|
||||
dk: FreqaiDataKitchen,
|
||||
class_names: List[str],
|
||||
class_names: list[str],
|
||||
):
|
||||
"""
|
||||
encode class name, str -> int
|
||||
|
@ -119,7 +119,7 @@ class BasePyTorchClassifier(BasePyTorchModel):
|
|||
)
|
||||
|
||||
@staticmethod
|
||||
def assert_valid_class_names(target_column: pd.Series, class_names: List[str]):
|
||||
def assert_valid_class_names(target_column: pd.Series, class_names: list[str]):
|
||||
non_defined_labels = set(target_column) - set(class_names)
|
||||
if len(non_defined_labels) != 0:
|
||||
raise OperationalException(
|
||||
|
@ -127,7 +127,7 @@ class BasePyTorchClassifier(BasePyTorchModel):
|
|||
f"expecting labels: {class_names}",
|
||||
)
|
||||
|
||||
def decode_class_names(self, class_ints: torch.Tensor) -> List[str]:
|
||||
def decode_class_names(self, class_ints: torch.Tensor) -> list[str]:
|
||||
"""
|
||||
decode class name, int -> str
|
||||
"""
|
||||
|
@ -141,14 +141,14 @@ class BasePyTorchClassifier(BasePyTorchModel):
|
|||
|
||||
def convert_label_column_to_int(
|
||||
self,
|
||||
data_dictionary: Dict[str, pd.DataFrame],
|
||||
data_dictionary: dict[str, pd.DataFrame],
|
||||
dk: FreqaiDataKitchen,
|
||||
class_names: List[str],
|
||||
class_names: list[str],
|
||||
):
|
||||
self.init_class_names_to_index_mapping(class_names)
|
||||
self.encode_class_names(data_dictionary, dk, class_names)
|
||||
|
||||
def get_class_names(self) -> List[str]:
|
||||
def get_class_names(self) -> list[str]:
|
||||
if not self.class_names:
|
||||
raise ValueError(
|
||||
"self.class_names is empty, "
|
||||
|
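BasePyTorchClassifier keeps two mirrored mappings (class_name_to_index and index_to_class_name, now initialised to empty dicts instead of None) so string labels can be encoded for training and integer predictions decoded back. A standalone sketch of that encode/decode idea:

    class ClassNameCodec:
        """Map class names to integer indices and back (illustrative sketch)."""

        def __init__(self, class_names: list[str]) -> None:
            self.class_name_to_index = {name: i for i, name in enumerate(class_names)}
            self.index_to_class_name = {i: name for i, name in enumerate(class_names)}

        def encode(self, labels: list[str]) -> list[int]:
            return [self.class_name_to_index[label] for label in labels]

        def decode(self, indices: list[int]) -> list[str]:
            return [self.index_to_class_name[i] for i in indices]

    # Example: ClassNameCodec(["down", "up"]).decode([1, 0]) -> ["up", "down"]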
|
|
@ -20,7 +20,11 @@ class BasePyTorchModel(IFreqaiModel, ABC):
|
|||
def __init__(self, **kwargs):
|
||||
super().__init__(config=kwargs["config"])
|
||||
self.dd.model_type = "pytorch"
|
||||
self.device = "cuda" if torch.cuda.is_available() else "cpu"
|
||||
self.device = (
|
||||
"mps"
|
||||
if torch.backends.mps.is_available() and torch.backends.mps.is_built()
|
||||
else ("cuda" if torch.cuda.is_available() else "cpu")
|
||||
)
|
||||
test_size = self.freqai_info.get("data_split_parameters", {}).get("test_size")
|
||||
self.splits = ["train", "test"] if test_size != 0 else ["train"]
|
||||
self.window_size = self.freqai_info.get("conv_width", 1)
|
||||
|
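The hunk above extends device selection so Apple-silicon machines get the MPS backend before falling back to CUDA or CPU. The same logic as a standalone helper, assuming a PyTorch build recent enough to expose torch.backends.mps:

    import torch


    def select_torch_device() -> str:
        """Prefer Apple MPS, then CUDA, then CPU (mirrors the change above)."""
        if torch.backends.mps.is_available() and torch.backends.mps.is_built():
            return "mps"
        return "cuda" if torch.cuda.is_available() else "cpu"

    # Usage: model.to(select_torch_device())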
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from time import time
|
||||
from typing import Any, Tuple
|
||||
from typing import Any
|
||||
|
||||
import numpy as np
|
||||
import numpy.typing as npt
|
||||
|
@ -24,7 +24,7 @@ class BasePyTorchRegressor(BasePyTorchModel):
|
|||
|
||||
def predict(
|
||||
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
|
||||
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
"""
|
||||
Filter the prediction features data and predict with it.
|
||||
:param unfiltered_df: Full dataframe for the current backtest period.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from time import time
|
||||
from typing import Any, Tuple
|
||||
from typing import Any
|
||||
|
||||
import numpy as np
|
||||
import numpy.typing as npt
|
||||
|
@ -88,7 +88,7 @@ class BaseRegressionModel(IFreqaiModel):
|
|||
|
||||
def predict(
|
||||
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
|
||||
) -> Tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
) -> tuple[DataFrame, npt.NDArray[np.int_]]:
|
||||
"""
|
||||
Filter the prediction features data and predict with it.
|
||||
:param unfiltered_df: Full dataframe for the current backtest period.
|
||||
|
|
|
@ -7,7 +7,7 @@ import threading
|
|||
import warnings
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Tuple, TypedDict
|
||||
from typing import Any, TypedDict
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
@ -69,14 +69,14 @@ class FreqaiDataDrawer:
|
|||
self.config = config
|
||||
self.freqai_info = config.get("freqai", {})
|
||||
# dictionary holding all pair metadata necessary to load in from disk
|
||||
self.pair_dict: Dict[str, pair_info] = {}
|
||||
self.pair_dict: dict[str, pair_info] = {}
|
||||
# dictionary holding all actively inferenced models in memory given a model filename
|
||||
self.model_dictionary: Dict[str, Any] = {}
|
||||
self.model_dictionary: dict[str, Any] = {}
|
||||
# all additional metadata that we want to keep in ram
|
||||
self.meta_data_dictionary: Dict[str, Dict[str, Any]] = {}
|
||||
self.model_return_values: Dict[str, DataFrame] = {}
|
||||
self.historic_data: Dict[str, Dict[str, DataFrame]] = {}
|
||||
self.historic_predictions: Dict[str, DataFrame] = {}
|
||||
self.meta_data_dictionary: dict[str, dict[str, Any]] = {}
|
||||
self.model_return_values: dict[str, DataFrame] = {}
|
||||
self.historic_data: dict[str, dict[str, DataFrame]] = {}
|
||||
self.historic_predictions: dict[str, DataFrame] = {}
|
||||
self.full_path = full_path
|
||||
self.historic_predictions_path = Path(self.full_path / "historic_predictions.pkl")
|
||||
self.historic_predictions_bkp_path = Path(
|
||||
|
@ -87,14 +87,14 @@ class FreqaiDataDrawer:
|
|||
self.metric_tracker_path = Path(self.full_path / "metric_tracker.json")
|
||||
self.load_drawer_from_disk()
|
||||
self.load_historic_predictions_from_disk()
|
||||
self.metric_tracker: Dict[str, Dict[str, Dict[str, list]]] = {}
|
||||
self.metric_tracker: dict[str, dict[str, dict[str, list]]] = {}
|
||||
self.load_metric_tracker_from_disk()
|
||||
self.training_queue: Dict[str, int] = {}
|
||||
self.training_queue: dict[str, int] = {}
|
||||
self.history_lock = threading.Lock()
|
||||
self.save_lock = threading.Lock()
|
||||
self.pair_dict_lock = threading.Lock()
|
||||
self.metric_tracker_lock = threading.Lock()
|
||||
self.old_DBSCAN_eps: Dict[str, float] = {}
|
||||
self.old_DBSCAN_eps: dict[str, float] = {}
|
||||
self.empty_pair_dict: pair_info = {
|
||||
"model_filename": "",
|
||||
"trained_timestamp": 0,
|
||||
|
@ -228,7 +228,7 @@ class FreqaiDataDrawer:
|
|||
self.pair_dict, fp, default=self.np_encoder, number_mode=rapidjson.NM_NATIVE
|
||||
)
|
||||
|
||||
def save_global_metadata_to_disk(self, metadata: Dict[str, Any]):
|
||||
def save_global_metadata_to_disk(self, metadata: dict[str, Any]):
|
||||
"""
|
||||
Save global metadata json to disk
|
||||
"""
|
||||
|
@ -242,7 +242,7 @@ class FreqaiDataDrawer:
|
|||
if isinstance(obj, np.generic):
|
||||
return obj.item()
|
||||
|
||||
def get_pair_dict_info(self, pair: str) -> Tuple[str, int]:
|
||||
def get_pair_dict_info(self, pair: str) -> tuple[str, int]:
|
||||
"""
|
||||
Locate and load existing model metadata from persistent storage. If not located,
|
||||
create a new one and append the current pair to it and prepare it for its first
|
||||
|
@ -446,7 +446,7 @@ class FreqaiDataDrawer:
|
|||
|
||||
pattern = re.compile(r"sub-train-(\w+)_(\d{10})")
|
||||
|
||||
delete_dict: Dict[str, Any] = {}
|
||||
delete_dict: dict[str, Any] = {}
|
||||
|
||||
for directory in model_folders:
|
||||
result = pattern.match(str(directory.name))
|
||||
|
@ -704,7 +704,7 @@ class FreqaiDataDrawer:
|
|||
|
||||
def get_base_and_corr_dataframes(
|
||||
self, timerange: TimeRange, pair: str, dk: FreqaiDataKitchen
|
||||
) -> Tuple[Dict[Any, Any], Dict[Any, Any]]:
|
||||
) -> tuple[dict[Any, Any], dict[Any, Any]]:
|
||||
"""
|
||||
Searches through our historic_data in memory and returns the dataframes relevant
|
||||
to the present pair.
|
||||
|
@ -713,8 +713,8 @@ class FreqaiDataDrawer:
|
|||
:param metadata: dict = strategy furnished pair metadata
|
||||
"""
|
||||
with self.history_lock:
|
||||
corr_dataframes: Dict[Any, Any] = {}
|
||||
base_dataframes: Dict[Any, Any] = {}
|
||||
corr_dataframes: dict[Any, Any] = {}
|
||||
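ExchangeWS stores its scheduled watch tasks in a set[asyncio.Task] so the event loop keeps strong references to them until they finish. A generic, runnable sketch of that bookkeeping pattern (independent of the websocket wiring itself):

    import asyncio


    async def main() -> None:
        background_tasks: set[asyncio.Task] = set()

        async def watch(name: str) -> None:
            await asyncio.sleep(0.1)
            print(f"{name} finished")

        for i in range(3):
            task = asyncio.create_task(watch(f"pair-{i}"))
            background_tasks.add(task)
            # Drop the reference automatically once the task completes.
            task.add_done_callback(background_tasks.discard)

        await asyncio.gather(*background_tasks)


    asyncio.run(main())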
base_dataframes: dict[Any, Any] = {}
|
||||
historic_data = self.historic_data
|
||||
pairs = self.freqai_info["feature_parameters"].get("include_corr_pairlist", [])
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ import random
|
|||
import shutil
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Any, Optional
|
||||
|
||||
import numpy as np
|
||||
import numpy.typing as npt
|
||||
|
@ -64,15 +64,15 @@ class FreqaiDataKitchen:
|
|||
live: bool = False,
|
||||
pair: str = "",
|
||||
):
|
||||
self.data: Dict[str, Any] = {}
|
||||
self.data_dictionary: Dict[str, DataFrame] = {}
|
||||
self.data: dict[str, Any] = {}
|
||||
self.data_dictionary: dict[str, DataFrame] = {}
|
||||
self.config = config
|
||||
self.freqai_config: Dict[str, Any] = config["freqai"]
|
||||
self.freqai_config: dict[str, Any] = config["freqai"]
|
||||
self.full_df: DataFrame = DataFrame()
|
||||
self.append_df: DataFrame = DataFrame()
|
||||
self.data_path = Path()
|
||||
self.label_list: List = []
|
||||
self.training_features_list: List = []
|
||||
self.label_list: list = []
|
||||
self.training_features_list: list = []
|
||||
self.model_filename: str = ""
|
||||
self.backtesting_results_path = Path()
|
||||
self.backtest_predictions_folder: str = "backtesting_predictions"
|
||||
|
@ -104,9 +104,9 @@ class FreqaiDataKitchen:
|
|||
else:
|
||||
self.thread_count = self.freqai_config["data_kitchen_thread_count"]
|
||||
self.train_dates: DataFrame = pd.DataFrame()
|
||||
self.unique_classes: Dict[str, list] = {}
|
||||
self.unique_classes: dict[str, list] = {}
|
||||
self.unique_class_list: list = []
|
||||
self.backtest_live_models_data: Dict[str, Any] = {}
|
||||
self.backtest_live_models_data: dict[str, Any] = {}
|
||||
|
||||
def set_paths(
|
||||
self,
|
||||
|
@ -127,7 +127,7 @@ class FreqaiDataKitchen:
|
|||
|
||||
def make_train_test_datasets(
|
||||
self, filtered_dataframe: DataFrame, labels: DataFrame
|
||||
) -> Dict[Any, Any]:
|
||||
) -> dict[Any, Any]:
|
||||
"""
|
||||
Given the dataframe for the full history for training, split the data into
|
||||
training and test data according to user specified parameters in configuration
|
||||
|
@ -213,10 +213,10 @@ class FreqaiDataKitchen:
|
|||
def filter_features(
|
||||
self,
|
||||
unfiltered_df: DataFrame,
|
||||
training_feature_list: List,
|
||||
label_list: List = list(),
|
||||
training_feature_list: list,
|
||||
label_list: list = list(),
|
||||
training_filter: bool = True,
|
||||
) -> Tuple[DataFrame, DataFrame]:
|
||||
) -> tuple[DataFrame, DataFrame]:
|
||||
"""
|
||||
Filter the unfiltered dataframe to extract the user requested features/labels and properly
|
||||
remove all NaNs. Any row with a NaN is removed from training dataset or replaced with
|
||||
|
@ -306,7 +306,7 @@ class FreqaiDataKitchen:
|
|||
test_labels: DataFrame,
|
||||
train_weights: Any,
|
||||
test_weights: Any,
|
||||
) -> Dict:
|
||||
) -> dict:
|
||||
self.data_dictionary = {
|
||||
"train_features": train_df,
|
||||
"test_features": test_df,
|
||||
|
@ -321,7 +321,7 @@ class FreqaiDataKitchen:
|
|||
|
||||
def split_timerange(
|
||||
self, tr: str, train_split: int = 28, bt_split: float = 7
|
||||
) -> Tuple[list, list]:
|
||||
) -> tuple[list, list]:
|
||||
"""
|
||||
Function which takes a single time range (tr) and splits it
|
||||
into sub timeranges to train and backtest on based on user input
|
||||
|
@ -535,7 +535,7 @@ class FreqaiDataKitchen:
|
|||
|
||||
def check_if_new_training_required(
|
||||
self, trained_timestamp: int
|
||||
) -> Tuple[bool, TimeRange, TimeRange]:
|
||||
) -> tuple[bool, TimeRange, TimeRange]:
|
||||
time = datetime.now(tz=timezone.utc).timestamp()
|
||||
trained_timerange = TimeRange()
|
||||
data_load_timerange = TimeRange()
|
||||
|
@ -603,7 +603,7 @@ class FreqaiDataKitchen:
|
|||
|
||||
def extract_corr_pair_columns_from_populated_indicators(
|
||||
self, dataframe: DataFrame
|
||||
) -> Dict[str, DataFrame]:
|
||||
) -> dict[str, DataFrame]:
|
||||
"""
|
||||
Find the columns of the dataframe corresponding to the corr_pairlist, save them
|
||||
in a dictionary to be reused and attached to other pairs.
|
||||
|
@ -612,7 +612,7 @@ class FreqaiDataKitchen:
|
|||
:return: corr_dataframes, dictionary of dataframes to be attached
|
||||
to other pairs in same candle.
|
||||
"""
|
||||
corr_dataframes: Dict[str, DataFrame] = {}
|
||||
corr_dataframes: dict[str, DataFrame] = {}
|
||||
pairs = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
|
||||
|
||||
for pair in pairs:
|
||||
|
@ -628,7 +628,7 @@ class FreqaiDataKitchen:
|
|||
return corr_dataframes
|
||||
|
||||
def attach_corr_pair_columns(
|
||||
self, dataframe: DataFrame, corr_dataframes: Dict[str, DataFrame], current_pair: str
|
||||
self, dataframe: DataFrame, corr_dataframes: dict[str, DataFrame], current_pair: str
|
||||
) -> DataFrame:
|
||||
"""
|
||||
Attach the existing corr_pair dataframes to the current pair dataframe before training
|
||||
|
@ -731,7 +731,7 @@ class FreqaiDataKitchen:
|
|||
:param is_corr_pairs: bool = whether the pair is a corr pair or not
|
||||
:return: dataframe = populated dataframe
|
||||
"""
|
||||
tfs: List[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
|
||||
tfs: list[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
|
||||
|
||||
for tf in tfs:
|
||||
metadata = {"pair": pair, "tf": tf}
|
||||
|
@ -810,8 +810,8 @@ class FreqaiDataKitchen:
|
|||
f"{DOCS_LINK}/freqai-feature-engineering/"
|
||||
)
|
||||
|
||||
tfs: List[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
|
||||
pairs: List[str] = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
|
||||
tfs: list[str] = self.freqai_config["feature_parameters"].get("include_timeframes")
|
||||
pairs: list[str] = self.freqai_config["feature_parameters"].get("include_corr_pairlist", [])
|
||||
|
||||
for tf in tfs:
|
||||
if tf not in base_dataframes:
|
||||
|
@ -828,7 +828,7 @@ class FreqaiDataKitchen:
|
|||
else:
|
||||
dataframe = base_dataframes[self.config["timeframe"]].copy()
|
||||
|
||||
corr_pairs: List[str] = self.freqai_config["feature_parameters"].get(
|
||||
corr_pairs: list[str] = self.freqai_config["feature_parameters"].get(
|
||||
"include_corr_pairlist", []
|
||||
)
|
||||
dataframe = self.populate_features(
|
||||
|
@ -953,7 +953,7 @@ class FreqaiDataKitchen:
|
|||
Returns default FreqAI model path
|
||||
:param config: Configuration dictionary
|
||||
"""
|
||||
freqai_config: Dict[str, Any] = config["freqai"]
|
||||
freqai_config: dict[str, Any] = config["freqai"]
|
||||
return Path(config["user_data_dir"] / "models" / str(freqai_config.get("identifier")))
|
||||
|
||||
def remove_special_chars_from_feature_names(self, dataframe: pd.DataFrame) -> pd.DataFrame:
|
||||
|
@ -992,7 +992,7 @@ class FreqaiDataKitchen:
|
|||
return timerange
|
||||
|
||||
# deprecated functions
|
||||
def normalize_data(self, data_dictionary: Dict) -> Dict[Any, Any]:
|
||||
def normalize_data(self, data_dictionary: dict) -> dict[Any, Any]:
|
||||
"""
|
||||
Deprecation warning, migration assistance
|
||||
"""
|
||||
|
|
|
@@ -5,7 +5,7 @@ from abc import ABC, abstractmethod
from collections import deque
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Literal, Optional, Tuple
from typing import Any, Literal, Optional

import datasieve.transforms as ds
import numpy as np

@@ -59,11 +59,11 @@ class IFreqaiModel(ABC):
def __init__(self, config: Config) -> None:
self.config = config
self.assert_config(self.config)
self.freqai_info: Dict[str, Any] = config["freqai"]
self.data_split_parameters: Dict[str, Any] = config.get("freqai", {}).get(
self.freqai_info: dict[str, Any] = config["freqai"]
self.data_split_parameters: dict[str, Any] = config.get("freqai", {}).get(
"data_split_parameters", {}
)
self.model_training_parameters: Dict[str, Any] = config.get("freqai", {}).get(
self.model_training_parameters: dict[str, Any] = config.get("freqai", {}).get(
"model_training_parameters", {}
)
self.identifier: str = self.freqai_info.get("identifier", "no_id_provided")

@@ -80,14 +80,14 @@ class IFreqaiModel(ABC):
self.dd.current_candle = self.current_candle
self.scanning = False
self.ft_params = self.freqai_info["feature_parameters"]
self.corr_pairlist: List[str] = self.ft_params.get("include_corr_pairlist", [])
self.corr_pairlist: list[str] = self.ft_params.get("include_corr_pairlist", [])
self.keras: bool = self.freqai_info.get("keras", False)
if self.keras and self.ft_params.get("DI_threshold", 0):
self.ft_params["DI_threshold"] = 0
logger.warning("DI threshold is not configured for Keras models yet. Deactivating.")

self.CONV_WIDTH = self.freqai_info.get("conv_width", 1)
self.class_names: List[str] = [] # used in classification subclasses
self.class_names: list[str] = [] # used in classification subclasses
self.pair_it = 0
self.pair_it_train = 0
self.total_pairs = len(self.config.get("exchange", {}).get("pair_whitelist"))

@@ -99,13 +99,13 @@ class IFreqaiModel(ABC):
self.base_tf_seconds = timeframe_to_seconds(self.config["timeframe"])
self.continual_learning = self.freqai_info.get("continual_learning", False)
self.plot_features = self.ft_params.get("plot_feature_importances", 0)
self.corr_dataframes: Dict[str, DataFrame] = {}
self.corr_dataframes: dict[str, DataFrame] = {}
# get_corr_dataframes is controlling the caching of corr_dataframes
# for improved performance. Careful with this boolean.
self.get_corr_dataframes: bool = True
self._threads: List[threading.Thread] = []
self._threads: list[threading.Thread] = []
self._stop_event = threading.Event()
self.metadata: Dict[str, Any] = self.dd.load_global_metadata_from_disk()
self.metadata: dict[str, Any] = self.dd.load_global_metadata_from_disk()
self.data_provider: Optional[DataProvider] = None
self.max_system_threads = max(int(psutil.cpu_count() * 2 - 2), 1)
self.can_short = True # overridden in start() with strategy.can_short
@@ -185,6 +185,7 @@ class IFreqaiModel(ABC):
Callback for Subclasses to override to include logic for shutting down resources
when SIGINT is sent.
"""
self.dd.save_historic_predictions_to_disk()
return

def shutdown(self):

@@ -198,9 +199,16 @@ class IFreqaiModel(ABC):
self.data_provider = None
self._on_stop()

logger.info("Waiting on Training iteration")
for _thread in self._threads:
    _thread.join()
if self.freqai_info.get("wait_for_training_iteration_on_reload", True):
    logger.info("Waiting on Training iteration")
    for _thread in self._threads:
        _thread.join()
else:
    logger.warning(
        "Breaking current training iteration because "
        "you set wait_for_training_iteration_on_reload to "
        " False."
    )

def start_scanning(self, *args, **kwargs) -> None:
"""
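The shutdown() hunk above makes the wait for an in-flight training iteration optional: the thread join is now guarded by the wait_for_training_iteration_on_reload flag read from the freqai section of the configuration, defaulting to True. A sketch of how that flag could be set, written as a Python dict for illustration; only the flag name and "identifier" come from this diff, the remaining keys and values are assumptions:

# Hypothetical excerpt of the "freqai" configuration section.
freqai_section = {
    "identifier": "example-id",  # assumed value
    # New behaviour: when False, a reload no longer blocks on _thread.join()
    # and the warning branch shown in the hunk above runs instead.
    "wait_for_training_iteration_on_reload": False,
}
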
@@ -901,7 +909,7 @@ class IFreqaiModel(ABC):

return

def update_metadata(self, metadata: Dict[str, Any]):
def update_metadata(self, metadata: dict[str, Any]):
"""
Update global metadata and save the updated json file
:param metadata: new global metadata dict

@@ -954,7 +962,7 @@ class IFreqaiModel(ABC):
"""

@abstractmethod
def fit(self, data_dictionary: Dict[str, Any], dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
Most regressors use the same function names and arguments e.g. user
can drop in LGBMRegressor in place of CatBoostRegressor and all data

@@ -968,7 +976,7 @@ class IFreqaiModel(ABC):
@abstractmethod
def predict(
self, unfiltered_df: DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[DataFrame, NDArray[np.int_]]:
) -> tuple[DataFrame, NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

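The two abstract methods above define the contract that the prediction-model hunks below adapt to the builtin-generic annotations. A minimal sketch of a regressor-style fit() under the new signature; the train_features/train_labels keys, the module paths, and the class name are assumptions based on the surrounding code, not lines from this diff:

from typing import Any

from lightgbm import LGBMRegressor

from freqtrade.freqai.base_models.BaseRegressionModel import BaseRegressionModel
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen


class ExampleRegressor(BaseRegressionModel):
    def fit(self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs) -> Any:
        # Train on the features/labels prepared upstream by the data kitchen.
        X = data_dictionary["train_features"]
        y = data_dictionary["train_labels"]
        model = LGBMRegressor(**self.model_training_parameters)
        model.fit(X=X, y=y)
        return model
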
@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any

from catboost import CatBoostClassifier, Pool

@@ -21,7 +21,7 @@ class CatboostClassifier(BaseClassifierModel):
top level config.json file.
"""

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any

from catboost import CatBoostClassifier, Pool

@@ -22,7 +22,7 @@ class CatboostClassifierMultiTarget(BaseClassifierModel):
top level config.json file.
"""

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any

from catboost import CatBoostRegressor, Pool

@@ -21,7 +21,7 @@ class CatboostRegressor(BaseRegressionModel):
top level config.json file.
"""

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict
from typing import Any

from catboost import CatBoostRegressor, Pool

@@ -22,7 +22,7 @@ class CatboostRegressorMultiTarget(BaseRegressionModel):
top level config.json file.
"""

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any

from lightgbm import LGBMClassifier

@@ -20,7 +20,7 @@ class LightGBMClassifier(BaseClassifierModel):
top level config.json file.
"""

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any

from lightgbm import LGBMClassifier

@@ -21,7 +21,7 @@ class LightGBMClassifierMultiTarget(BaseClassifierModel):
top level config.json file.
"""

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any

from lightgbm import LGBMRegressor

@@ -20,7 +20,7 @@ class LightGBMRegressor(BaseRegressionModel):
top level config.json file.
"""

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any

from lightgbm import LGBMRegressor

@@ -21,7 +21,7 @@ class LightGBMRegressorMultiTarget(BaseRegressionModel):
top level config.json file.
"""

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,4 +1,4 @@
from typing import Any, Dict
from typing import Any

import torch

@@ -52,10 +52,10 @@ class PyTorchMLPClassifier(BasePyTorchClassifier):
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
self.model_kwargs: dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: dict[str, Any] = config.get("trainer_kwargs", {})

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -1,4 +1,4 @@
from typing import Any, Dict
from typing import Any

import torch

@@ -51,10 +51,10 @@ class PyTorchMLPRegressor(BasePyTorchRegressor):
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
self.model_kwargs: dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: dict[str, Any] = config.get("trainer_kwargs", {})

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

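Both PyTorch MLP hunks read learning_rate, model_kwargs and trainer_kwargs out of model_training_parameters with the defaults shown above. A sketch of that configuration section, written as a Python dict for consistency with the surrounding code; only the three key names and the 3e-4 default are taken from the diff, and the nested dicts are left empty rather than guessed:

# Hypothetical "model_training_parameters" section of the freqai configuration.
model_training_parameters = {
    "learning_rate": 3e-4,   # matches the default in the hunks above
    "model_kwargs": {},      # consumed when the network is built (assumption)
    "trainer_kwargs": {},    # consumed by the trainer setup (assumption)
}
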
@@ -1,4 +1,4 @@
from typing import Any, Dict, Tuple
from typing import Any

import numpy as np
import numpy.typing as npt

@@ -60,10 +60,10 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
super().__init__(**kwargs)
config = self.freqai_info.get("model_training_parameters", {})
self.learning_rate: float = config.get("learning_rate", 3e-4)
self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})
self.model_kwargs: dict[str, Any] = config.get("model_kwargs", {})
self.trainer_kwargs: dict[str, Any] = config.get("trainer_kwargs", {})

def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
def fit(self, data_dictionary: dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
"""
User sets up the training and test data to fit their desired model here
:param data_dictionary: the dictionary holding all data for train, test,

@@ -100,7 +100,7 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):

def predict(
self, unfiltered_df: pd.DataFrame, dk: FreqaiDataKitchen, **kwargs
) -> Tuple[pd.DataFrame, npt.NDArray[np.int_]]:
) -> tuple[pd.DataFrame, npt.NDArray[np.int_]]:
"""
Filter the prediction features data and predict with it.
:param unfiltered_df: Full dataframe for the current backtest period.

@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import Any, Dict, List, Optional, Type
from typing import Any, Optional

import torch as th
from stable_baselines3.common.callbacks import ProgressBarCallback

@@ -44,7 +44,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
take fine-tuned control over the data handling pipeline.
"""

def fit(self, data_dictionary: Dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
def fit(self, data_dictionary: dict[str, Any], dk: FreqaiDataKitchen, **kwargs):
"""
User customizable fit method
:param data_dictionary: dict = common data dictionary containing all train/test

@@ -77,7 +77,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
)
model = self.dd.model_dictionary[dk.pair]
model.set_env(self.train_env)
callbacks: List[Any] = [self.eval_callback, self.tensorboard_callback]
callbacks: list[Any] = [self.eval_callback, self.tensorboard_callback]
progressbar_callback: Optional[ProgressBarCallback] = None
if self.rl_config.get("progress_bar", False):
progressbar_callback = ProgressBarCallback()

@@ -101,7 +101,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):

return model

MyRLEnv: Type[BaseEnvironment]
MyRLEnv: type[BaseEnvironment]

class MyRLEnv(Base5ActionRLEnv): # type: ignore[no-redef]
"""

@@ -1,5 +1,5 @@
import logging
from typing import Any, Dict
from typing import Any

from pandas import DataFrame
from sb3_contrib.common.maskable.callbacks import MaskableEvalCallback

@@ -22,7 +22,7 @@ class ReinforcementLearner_multiproc(ReinforcementLearner):

def set_train_and_eval_environments(
self,
data_dictionary: Dict[str, Any],
data_dictionary: dict[str, Any],
prices_train: DataFrame,
prices_test: DataFrame,
dk: FreqaiDataKitchen,

Some files were not shown because too many files have changed in this diff.