Merge branch 'develop' into pr/Axel-CH/9267

Commit 42d75b0754
.github/workflows/ci.yml (vendored, 18 changed lines)
@@ -90,7 +90,7 @@ jobs:
     - name: Backtesting (multi)
       run: |
-        cp config_examples/config_bittrex.example.json config.json
+        cp tests/testdata/config.tests.json config.json
         freqtrade create-userdir --userdir user_data
         freqtrade new-strategy -s AwesomeStrategy
         freqtrade new-strategy -s AwesomeStrategyMin --template minimal
@@ -98,7 +98,7 @@ jobs:
     - name: Hyperopt
       run: |
-        cp config_examples/config_bittrex.example.json config.json
+        cp tests/testdata/config.tests.json config.json
         freqtrade create-userdir --userdir user_data
         freqtrade hyperopt --datadir tests/testdata -e 6 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
@@ -108,7 +108,7 @@ jobs:
     - name: Run Ruff
      run: |
-        ruff check --format=github .
+        ruff check --output-format=github .
     - name: Mypy
       run: |
@@ -200,14 +200,14 @@ jobs:
     - name: Backtesting
       run: |
-        cp config_examples/config_bittrex.example.json config.json
+        cp tests/testdata/config.tests.json config.json
         freqtrade create-userdir --userdir user_data
         freqtrade new-strategy -s AwesomeStrategyAdv --template advanced
         freqtrade backtesting --datadir tests/testdata --strategy AwesomeStrategyAdv
     - name: Hyperopt
       run: |
-        cp config_examples/config_bittrex.example.json config.json
+        cp tests/testdata/config.tests.json config.json
         freqtrade create-userdir --userdir user_data
         freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
@@ -217,7 +217,7 @@ jobs:
     - name: Run Ruff
       run: |
-        ruff check --format=github .
+        ruff check --output-format=github .
     - name: Mypy
       run: |
@@ -275,19 +275,19 @@ jobs:
     - name: Backtesting
       run: |
-        cp config_examples/config_bittrex.example.json config.json
+        cp tests/testdata/config.tests.json config.json
         freqtrade create-userdir --userdir user_data
         freqtrade backtesting --datadir tests/testdata --strategy SampleStrategy
     - name: Hyperopt
       run: |
-        cp config_examples/config_bittrex.example.json config.json
+        cp tests/testdata/config.tests.json config.json
         freqtrade create-userdir --userdir user_data
         freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
     - name: Run Ruff
       run: |
-        ruff check --format=github .
+        ruff check --output-format=github .
     - name: Mypy
       run: |
@@ -8,17 +8,17 @@ repos:
  #        stages: [push]

  - repo: https://github.com/pre-commit/mirrors-mypy
-   rev: "v1.5.1"
+   rev: "v1.7.0"
    hooks:
      - id: mypy
        exclude: build_helpers
        additional_dependencies:
-         - types-cachetools==5.3.0.6
+         - types-cachetools==5.3.0.7
          - types-filelock==3.2.7
-         - types-requests==2.31.0.7
+         - types-requests==2.31.0.10
          - types-tabulate==0.9.0.3
          - types-python-dateutil==2.8.19.14
-         - SQLAlchemy==2.0.21
+         - SQLAlchemy==2.0.23
  #        stages: [push]

  - repo: https://github.com/pycqa/isort
@@ -30,7 +30,7 @@ repos:
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    # Ruff version.
-   rev: 'v0.0.270'
+   rev: 'v0.1.1'
    hooks:
      - id: ruff
@@ -125,7 +125,7 @@ Exceptions:
Contributors may be given commit privileges. Preference will be given to those with:

-1. Past contributions to Freqtrade and other related open-source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Quantity and quality are considered.
+1. Past contributions to Freqtrade and other related open-source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Both quantity and quality are considered.
1. A coding style that the other core committers find simple, minimal, and clean.
1. Access to resources for cross-platform development and testing.
1. Time to devote to the project regularly.
@@ -1,4 +1,4 @@
-FROM python:3.11.5-slim-bullseye as base
+FROM python:3.11.6-slim-bookworm as base

# Setup env
ENV LANG C.UTF-8
@@ -28,7 +28,6 @@ hesitate to read the source code and understand the mechanism of this bot.
Please read the [exchange specific notes](docs/exchanges.md) to learn about eventual, special configurations needed for each exchange.

- [X] [Binance](https://www.binance.com/)
-- [X] [Bittrex](https://bittrex.com/)
- [X] [Gate.io](https://www.gate.io/ref/6266643)
- [X] [Huobi](http://huobi.com/)
- [X] [Kraken](https://kraken.com/)
build_helpers/TA_Lib-0.4.28-cp311-cp311-linux_armv7l.whl (new file, binary not shown)
build_helpers/TA_Lib-0.4.28-cp39-cp39-linux_armv7l.whl (new file, binary not shown)
@@ -54,7 +54,7 @@ docker tag freqtrade:$TAG_FREQAI_ARM ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker tag freqtrade:$TAG_FREQAI_RL_ARM ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM

# Run backtest
-docker run --rm -v $(pwd)/config_examples/config_bittrex.example.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
+docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3

if [ $? -ne 0 ]; then
    echo "failed running backtest"
@@ -67,7 +67,7 @@ docker tag freqtrade:$TAG_FREQAI ${CACHE_IMAGE}:$TAG_FREQAI
docker tag freqtrade:$TAG_FREQAI_RL ${CACHE_IMAGE}:$TAG_FREQAI_RL

# Run backtest
-docker run --rm -v $(pwd)/config_examples/config_bittrex.example.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
+docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3

if [ $? -ne 0 ]; then
    echo "failed running backtest"
build_helpers/pyarrow-14.0.1-cp311-cp311-linux_armv7l.whl (new file, binary not shown)
Binary file not shown.
@@ -1,6 +1,6 @@
{
    "max_open_trades": 3,
-   "stake_currency": "BTC",
+   "stake_currency": "USDT",
    "stake_amount": 0.05,
    "tradable_balance_ratio": 0.99,
    "fiat_display_currency": "USD",
@@ -36,21 +36,21 @@
    "ccxt_async_config": {
    },
    "pair_whitelist": [
-       "ALGO/BTC",
-       "ATOM/BTC",
-       "BAT/BTC",
-       "BCH/BTC",
-       "BRD/BTC",
-       "EOS/BTC",
-       "ETH/BTC",
-       "IOTA/BTC",
-       "LINK/BTC",
-       "LTC/BTC",
-       "NEO/BTC",
-       "NXS/BTC",
-       "XMR/BTC",
-       "XRP/BTC",
-       "XTZ/BTC"
+       "ALGO/USDT",
+       "ATOM/USDT",
+       "BAT/USDT",
+       "BCH/USDT",
+       "BRD/USDT",
+       "EOS/USDT",
+       "ETH/USDT",
+       "IOTA/USDT",
+       "LINK/USDT",
+       "LTC/USDT",
+       "NEO/USDT",
+       "NXS/USDT",
+       "XMR/USDT",
+       "XRP/USDT",
+       "XTZ/USDT"
    ],
    "pair_blacklist": [
        "BNB/.*"
@@ -1,4 +1,4 @@
-FROM python:3.9.16-slim-bullseye as base
+FROM python:3.11.6-slim-bookworm as base

# Setup env
ENV LANG C.UTF-8
@@ -11,12 +11,13 @@ ENV FT_APP_ENV="docker"
# Prepare environment
RUN mkdir /freqtrade \
  && apt-get update \
- && apt-get -y install sudo libatlas3-base curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
+ && apt-get -y install sudo libatlas3-base libopenblas-dev curl sqlite3 libhdf5-dev libutf8proc-dev libsnappy-dev \
  && apt-get clean \
  && useradd -u 1000 -G sudo -U -m ftuser \
  && chown ftuser:ftuser /freqtrade \
  # Allow sudoers
- && echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers
+ && echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers \
+ && pip install --upgrade pip

WORKDIR /freqtrade

@@ -25,20 +26,16 @@ FROM base as python-deps
RUN apt-get update \
  && apt-get -y install build-essential libssl-dev libffi-dev libgfortran5 pkg-config cmake gcc \
  && apt-get clean \
- && pip install --upgrade pip \
  && echo "[global]\nextra-index-url=https://www.piwheels.org/simple" > /etc/pip.conf

# Install TA-lib
COPY build_helpers/* /tmp/
RUN cd /tmp && /tmp/install_ta-lib.sh && rm -r /tmp/*ta-lib*
ENV LD_LIBRARY_PATH /usr/local/lib

# Install dependencies
COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
USER ftuser
RUN pip install --user --no-cache-dir numpy \
- && pip install --user /tmp/pyarrow-*.whl \
- && pip install --user TA-Lib==0.4.28 \
+ && pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib==0.4.28 \
  && pip install --user --no-cache-dir -r requirements.txt

# Copy dependencies to runtime-image
@@ -170,11 +170,11 @@ freqtrade backtesting --strategy AwesomeStrategy --dry-run-wallet 1000

Using a different on-disk historical candle (OHLCV) data source

-Assume you downloaded the history data from the Bittrex exchange and kept it in the `user_data/data/bittrex-20180101` directory.
+Assume you downloaded the history data from the Binance exchange and kept it in the `user_data/data/binance-20180101` directory.
You can then use this data for backtesting as follows:

```bash
-freqtrade backtesting --strategy AwesomeStrategy --datadir user_data/data/bittrex-20180101
+freqtrade backtesting --strategy AwesomeStrategy --datadir user_data/data/binance-20180101
```

---
@@ -594,7 +594,7 @@ creating trades on the exchange.

```json
"exchange": {
-   "name": "bittrex",
+   "name": "binance",
    "key": "key",
    "secret": "secret",
    ...
@@ -644,7 +644,7 @@ API Keys are usually only required for live trading (trading for real money, bot
```json
{
    "exchange": {
-       "name": "bittrex",
+       "name": "binance",
        "key": "af8ddd35195e9dc500b9a6f799f6f5c93d89193b",
        "secret": "08a9dc6db3d7b53e1acebd9275677f4b0a04f1a5",
        //"password": "", // Optional, not needed by all exchanges)
@@ -318,6 +318,7 @@ Additional tests / steps to complete:

* Check if balance shows correctly (*)
* Create market order (*)
* Create limit order (*)
* Cancel order (*)
* Complete trade (enter + exit) (*)
* Compare result calculation between exchange and bot
* Ensure fees are applied correctly (check the database against the exchange)
@@ -7,7 +7,7 @@ Low level feature engineering is performed in the user strategy within a set of
| Function | Description |
|---------------|-------------|
| `feature_engineering_expand_all()` | This optional function will automatically expand the defined features on the config defined `indicator_periods_candles`, `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`.
-| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `include_periods_candles`.
+| `feature_engineering_expand_basic()` | This optional function will automatically expand the defined features on the config defined `include_timeframes`, `include_shifted_candles`, and `include_corr_pairs`. Note: this function does *not* expand across `indicator_periods_candles`.
| `feature_engineering_standard()` | This optional function will be called once with the dataframe of the base timeframe. This is the final function to be called, which means that the dataframe entering this function will contain all the features and columns from the base asset created by the other `feature_engineering_expand` functions. This function is a good place to do custom exotic feature extractions (e.g. tsfresh). This function is also a good place for any feature that should not be auto-expanded upon (e.g., day of the week).
| `set_freqai_targets()` | Required function to set the targets for the model. All targets must be prepended with `&` to be recognized by the FreqAI internals.
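For illustration only, a minimal sketch of how two of the callbacks from that table might look inside a FreqAI strategy. The class name, feature columns and target are invented, and the exact callback signatures should be checked against the FreqAI documentation for your freqtrade version:

```python
from pandas import DataFrame

from freqtrade.strategy import IStrategy


class SketchFreqaiStrategy(IStrategy):
    # Expanded only across include_timeframes / include_shifted_candles /
    # include_corr_pairs, *not* across indicator_periods_candles (see table above).
    def feature_engineering_expand_basic(self, dataframe: DataFrame,
                                         metadata: dict, **kwargs) -> DataFrame:
        dataframe["%-pct-change"] = dataframe["close"].pct_change()
        dataframe["%-raw_volume"] = dataframe["volume"]
        return dataframe

    # Targets must be prefixed with "&" so FreqAI picks them up.
    def set_freqai_targets(self, dataframe: DataFrame,
                           metadata: dict, **kwargs) -> DataFrame:
        dataframe["&-target"] = dataframe["close"].shift(-4) / dataframe["close"] - 1
        return dataframe
```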
@@ -74,7 +74,6 @@ Mandatory parameters are marked as **Required** and have to be set in one of the
| | **Reinforcement Learning Parameters within the `freqai.rl_config` sub dictionary**
| `rl_config` | A dictionary containing the control parameters for a Reinforcement Learning model. <br> **Datatype:** Dictionary.
| `train_cycles` | Training time steps will be set based on the `train_cycles` * number of training data points. <br> **Datatype:** Integer.
| `cpu_count` | Number of processors to dedicate to the Reinforcement Learning training process. <br> **Datatype:** int.
| `max_trade_duration_candles`| Guides the agent training to keep trades below desired length. Example usage shown in `prediction_models/ReinforcementLearner.py` within the customizable `calculate_reward()` function. <br> **Datatype:** int.
| `model_type` | Model string from stable_baselines3 or SBcontrib. Available strings include: `'TRPO', 'ARS', 'RecurrentPPO', 'MaskablePPO', 'PPO', 'A2C', 'DQN'`. User should ensure that `model_training_parameters` match those available to the corresponding stable_baselines3 model by visiting their documentation. [PPO doc](https://stable-baselines3.readthedocs.io/en/master/modules/ppo.html) (external website) <br> **Datatype:** string.
| `policy_type` | One of the available policy types from stable_baselines3 <br> **Datatype:** string.
@@ -337,11 +337,15 @@ There are four parameter types each suited for different purposes.
* `CategoricalParameter` - defines a parameter with a predetermined number of choices.
* `BooleanParameter` - Shorthand for `CategoricalParameter([True, False])` - great for "enable" parameters.

-!!! Tip "Disabling parameter optimization"
-    Each parameter takes two boolean parameters:
-    * `load` - when set to `False` it will not load values configured in `buy_params` and `sell_params`.
-    * `optimize` - when set to `False` parameter will not be included in optimization process.
-    Use these parameters to quickly prototype various ideas.
+### Parameter options
+
+There are two parameter options that can help you to quickly test various ideas:
+
+* `optimize` - when set to `False`, the parameter will not be included in optimization process. (Default: True)
+* `load` - when set to `False`, results of a previous hyperopt run (in `buy_params` and `sell_params` either in your strategy or the JSON output file) will not be used as the starting value for subsequent hyperopts. The default value specified in the parameter will be used instead. (Default: True)
+
+!!! Tip "Effects of `load=False` on backtesting"
+    Be aware that setting the `load` option to `False` will mean backtesting will also use the default value specified in the parameter and *not* the value found through hyperoptimisation.

!!! Warning
    Hyperoptable parameters cannot be used in `populate_indicators` - as hyperopt does not recalculate indicators for each epoch, so the starting value would be used in this case.
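To make the `optimize` / `load` options above concrete, a small sketch of a hyperoptable parameter (the strategy and parameter names are invented for illustration):

```python
from freqtrade.strategy import IntParameter, IStrategy


class SketchStrategy(IStrategy):
    # Optimized normally: hyperopt searches 10..40 and a previous run's
    # buy_params value is loaded as the starting point.
    buy_rsi = IntParameter(10, 40, default=30, space="buy")

    # optimize=False: excluded from the hyperopt search space.
    # load=False: previous hyperopt results are ignored and default=14 is used,
    # also during backtesting (see the tip above).
    rsi_period = IntParameter(7, 28, default=14, space="buy",
                              optimize=False, load=False)
```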
@@ -40,7 +40,6 @@ Freqtrade is a free and open source crypto trading bot written in Python. It is
Please read the [exchange specific notes](exchanges.md) to learn about eventual, special configurations needed for each exchange.

- [X] [Binance](https://www.binance.com/)
-- [X] [Bittrex](https://bittrex.com/)
- [X] [Gate.io](https://www.gate.io/ref/6266643)
- [X] [Huobi](http://huobi.com/)
- [X] [Kraken](https://kraken.com/)
@@ -1,6 +1,6 @@
-markdown==3.4.4
+markdown==3.5.1
mkdocs==1.5.3
-mkdocs-material==9.4.2
+mkdocs-material==9.4.14
mdx_truly_sane_lists==1.3
-pymdown-extensions==10.3
+pymdown-extensions==10.5
jinja2==3.1.2
@@ -134,13 +134,16 @@ python3 scripts/rest_client.py --config rest_config.json <command> [optional par
| `reload_config` | Reloads the configuration file.
| `trades` | List last trades. Limited to 500 trades per call.
| `trade/<tradeid>` | Get specific trade.
-| `trade/<tradeid>` | DELETE - Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
-| `trade/<tradeid>/open-order` | DELETE - Cancel open order for this trade.
-| `trade/<tradeid>/reload` | GET - Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.
+| `trades/<tradeid>` | DELETE - Remove trade from the database. Tries to close open orders. Requires manual handling of this trade on the exchange.
+| `trades/<tradeid>/open-order` | DELETE - Cancel open order for this trade.
+| `trades/<tradeid>/reload` | GET - Reload a trade from the Exchange. Only works in live, and can potentially help recover a trade that was manually sold on the exchange.
| `show_config` | Shows part of the current configuration with relevant settings to operation.
| `logs` | Shows last log messages.
| `status` | Lists all open trades.
| `count` | Displays number of trades used and available.
| `entries [pair]` | Shows profit statistics for each enter tags for given pair (or all pairs if pair isn't given). Pair is optional.
| `exits [pair]` | Shows profit statistics for each exit reasons for given pair (or all pairs if pair isn't given). Pair is optional.
| `mix_tags [pair]` | Shows profit statistics for each combinations of enter tag + exit reasons for given pair (or all pairs if pair isn't given). Pair is optional.
| `locks` | Displays currently locked pairs.
| `delete_lock <lock_id>` | Deletes (disables) the lock by id.
| `profit` | Display a summary of your profit/loss from close trades and some stats about your performance.
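As a quick illustration of the renamed `trades/<tradeid>` endpoints, a hedged sketch using plain `requests`; the base URL, port and credentials are assumptions and must match your `api_server` configuration:

```python
import requests

BASE = "http://127.0.0.1:8080/api/v1"  # assumed default api_server address
AUTH = ("freqtrader", "password")      # assumed credentials from the config

# Unchanged: fetch a specific trade.
trade = requests.get(f"{BASE}/trade/3", auth=AUTH).json()

# The DELETE / open-order / reload endpoints now live under the plural path.
requests.delete(f"{BASE}/trades/3", auth=AUTH)
requests.delete(f"{BASE}/trades/3/open-order", auth=AUTH)
requests.get(f"{BASE}/trades/3/reload", auth=AUTH)
```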
@@ -760,9 +760,9 @@ The `position_adjustment_enable` strategy property enables the usage of `adjust_
For performance reasons, it's disabled by default and freqtrade will show a warning message on startup if enabled.
`adjust_trade_position()` can be used to perform additional orders, for example to manage risk with DCA (Dollar Cost Averaging) or to increase or decrease positions.

-`max_entry_position_adjustment` property is used to limit the number of additional buys per trade (on top of the first buy) that the bot can execute. By default, the value is -1 which means the bot have no limit on number of adjustment buys.
+`max_entry_position_adjustment` property is used to limit the number of additional entries per trade (on top of the first entry order) that the bot can execute. By default, the value is -1 which means the bot have no limit on number of adjustment entries.

-The strategy is expected to return a stake_amount (in stake currency) between `min_stake` and `max_stake` if and when an additional buy order should be made (position is increased).
+The strategy is expected to return a stake_amount (in stake currency) between `min_stake` and `max_stake` if and when an additional entry order should be made (position is increased -> buy order for long trades, sell order for short trades).
If there are not enough funds in the wallet (the return value is above `max_stake`) then the signal will be ignored.
Additional orders also result in additional fees and those orders don't count towards `max_open_trades`.
@@ -770,9 +770,11 @@ This callback is **not** called when there is an open order (either buy or sell)

`adjust_trade_position()` is called very frequently for the duration of a trade, so you must keep your implementation as performant as possible.

-Additional Buys are ignored once you have reached the maximum amount of extra buys that you have set on `max_entry_position_adjustment`, but the callback is called anyway looking for partial exits.
+Additional entries are ignored once you have reached the maximum amount of extra entries that you have set on `max_entry_position_adjustment`, but the callback is called anyway looking for partial exits.

-Position adjustments will always be applied in the direction of the trade, so a positive value will always increase your position (negative values will decrease your position), no matter if it's a long or short trade. Modifications to leverage are not possible, and the stake-amount is assumed to be before applying leverage.
+Position adjustments will always be applied in the direction of the trade, so a positive value will always increase your position (negative values will decrease your position), no matter if it's a long or short trade.
+
+Modifications to leverage are not possible, and the stake-amount returned is assumed to be before applying leverage.

!!! Note "About stake size"
    Using fixed stake size means it will be the amount used for the first order, just like without position adjustment.
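A minimal sketch of the callback this section documents, adding to the position once when the trade is 5% under water. The threshold and class name are invented, and the parameter list is abridged to the arguments actually used; see the strategy-callbacks documentation for the full signature:

```python
from datetime import datetime
from typing import Optional

from freqtrade.persistence import Trade
from freqtrade.strategy import IStrategy


class DcaSketchStrategy(IStrategy):
    position_adjustment_enable = True
    max_entry_position_adjustment = 1  # at most one extra entry per trade

    def adjust_trade_position(self, trade: Trade, current_time: datetime,
                              current_rate: float, current_profit: float,
                              min_stake: Optional[float], max_stake: float,
                              **kwargs) -> Optional[float]:
        # A positive return value increases the position in the trade direction
        # (buy for longs, sell for shorts); None leaves the position untouched.
        if current_profit < -0.05 and trade.nr_of_successful_entries == 1:
            return trade.stake_amount  # same stake again, before leverage
        return None
```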
@@ -173,7 +173,7 @@ You can use [recursive-analysis](recursive-analysis.md) to check and find the co
In this example strategy, this should be set to 400 (`startup_candle_count = 400`), since the minimum needed history for ema100 calculation to make sure the value is correct is 400 candles.

``` python
-dataframe['ema100'] = ta.EMA(dataframe, timeperiod=400)
+dataframe['ema100'] = ta.EMA(dataframe, timeperiod=100)
```

By letting the bot know how much history is needed, backtest trades can start at the specified timerange during backtesting and hyperopt.
@@ -486,17 +486,18 @@ for more information.

    :param timeframe: Informative timeframe. Must always be equal or higher than strategy timeframe.
    :param asset: Informative asset, for example BTC, BTC/USDT, ETH/BTC. Do not specify to use
-                 current pair.
+                 current pair. Also supports limited pair format strings (see below)
    :param fmt: Column format (str) or column formatter (callable(name, asset, timeframe)). When not
        specified, defaults to:
-       * {base}_{quote}_{column}_{timeframe} if asset is specified.
+       * {base}_{quote}_{column}_{timeframe} if asset is specified.
        * {column}_{timeframe} if asset is not specified.
-       Format string supports these format variables:
-       * {asset} - full name of the asset, for example 'BTC/USDT'.
+       Pair format supports these format variables:
        * {base} - base currency in lower case, for example 'eth'.
        * {BASE} - same as {base}, except in upper case.
        * {quote} - quote currency in lower case, for example 'usdt'.
        * {QUOTE} - same as {quote}, except in upper case.
+       Format string additionally supports this variables.
+       * {asset} - full name of the asset, for example 'BTC/USDT'.
        * {column} - name of dataframe column.
        * {timeframe} - timeframe of informative dataframe.
    :param ffill: ffill dataframe after merging informative pair.
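For illustration, a hedged sketch of the pair format strings described in this docstring, used with the `@informative` decorator; the strategy, indicator and pair choice are invented:

```python
import talib.abstract as ta
from pandas import DataFrame

from freqtrade.strategy import IStrategy, informative


class InformativeSketchStrategy(IStrategy):
    timeframe = "5m"

    # 'BTC/{quote}' resolves per pair, e.g. to BTC/USDT when trading ETH/USDT.
    # With the default format the columns arrive in the main dataframe as
    # btc_usdt_rsi_1h ({base}_{quote}_{column}_{timeframe}).
    @informative("1h", "BTC/{quote}")
    def populate_indicators_btc_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe["rsi"] = ta.RSI(dataframe, timeperiod=14)
        return dataframe
```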
@@ -1008,6 +1009,10 @@ The following lists some common patterns which should be avoided to prevent frus
- don't use `dataframe['volume'].mean()`. This uses the full DataFrame for backtesting, including data from the future. Use `dataframe['volume'].rolling(<window>).mean()` instead
- don't use `.resample('1h')`. This uses the left border of the interval, so moves data from an hour to the start of the hour. Use `.resample('1h', label='right')` instead.

+!!! Tip "Identifying problems"
+    You may also want to check the 2 helper commands [lookahead-analysis](lookahead-analysis.md) and [recursive-analysis](recursive-analysis.md), which can each help you figure out problems with your strategy in different ways.
+    Please treat them as what they are - helpers to identify most common problems. A negative result of each does not guarantee that there's none of the above errors included.
+
### Colliding signals

When conflicting signals collide (e.g. both `'enter_long'` and `'exit_long'` are 1), freqtrade will do nothing and ignore the entry signal. This will avoid trades that enter, and exit immediately. Obviously, this can potentially lead to missed entries.
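To make the two pitfalls above concrete, a self-contained pandas sketch on synthetic data (not taken from the freqtrade codebase):

```python
import numpy as np
import pandas as pd

# Synthetic 5m candles.
idx = pd.date_range("2024-01-01", periods=48, freq="5min")
df = pd.DataFrame({"close": np.linspace(100, 110, 48),
                   "volume": np.random.default_rng(0).uniform(1, 5, 48)}, index=idx)

# Lookahead: the mean is computed over the whole backtest range, including future candles.
df["vol_mean_bad"] = df["volume"].mean()
# Safe: only candles already known at each row.
df["vol_mean_ok"] = df["volume"].rolling(24).mean()

# Resampling: label='right' stamps the aggregate on the end of the hour,
# so earlier rows never see data from later in their own hour.
hourly_bad = df["close"].resample("1h").last()
hourly_ok = df["close"].resample("1h", label="right").last()
```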
@@ -570,7 +570,7 @@ def populate_any_indicators(
```

1. Features - Move to `feature_engineering_expand_all`
-2. Basic features, not expanded across `include_periods_candles` - move to`feature_engineering_expand_basic()`.
+2. Basic features, not expanded across `indicator_periods_candles` - move to`feature_engineering_expand_basic()`.
3. Standard features which should not be expanded - move to `feature_engineering_standard()`.
4. Targets - Move this part to `set_freqai_targets()`.
@@ -175,6 +175,7 @@ official commands. You can ask at any moment for help with `/help`.
| `/status` | Lists all open trades
| `/status <trade_id>` | Lists one or more specific trade. Separate multiple <trade_id> with a blank space.
| `/status table` | List all open trades in a table format. Pending buy orders are marked with an asterisk (*) Pending sell orders are marked with a double asterisk (**)
+| `/order <trade_id>` | Lists orders of one or more specific trade. Separate multiple <trade_id> with a blank space.
| `/trades [limit]` | List all recently closed trades in a table format.
| `/count` | Displays number of trades used and available
| `/locks` | Show currently locked pairs.
@@ -427,25 +427,33 @@ zb True missing opt: fetchMyTrades
Use the `list-timeframes` subcommand to see the list of timeframes available for the exchange.

```
-usage: freqtrade list-timeframes [-h] [-v] [--logfile FILE] [-V] [-c PATH] [-d PATH] [--userdir PATH] [--exchange EXCHANGE] [-1]
+usage: freqtrade list-timeframes [-h] [-v] [--logfile FILE] [-V] [-c PATH]
+                                 [-d PATH] [--userdir PATH]
+                                 [--exchange EXCHANGE] [-1]

-optional arguments:
+options:
  -h, --help            show this help message and exit
- --exchange EXCHANGE   Exchange name (default: `bittrex`). Only valid if no config is provided.
+ --exchange EXCHANGE   Exchange name. Only valid if no config is provided.
  -1, --one-column      Print output in one column.

Common arguments:
  -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
- --logfile FILE        Log to the file specified. Special values are: 'syslog', 'journald'. See the documentation for more details.
+ --logfile FILE, --log-file FILE
+                       Log to the file specified. Special values are:
+                       'syslog', 'journald'. See the documentation for more
+                       details.
  -V, --version         show program's version number and exit
  -c PATH, --config PATH
-                       Specify configuration file (default: `config.json`). Multiple --config options may be used. Can be set to `-`
-                       to read config from stdin.
- -d PATH, --datadir PATH
+                       Specify configuration file (default:
+                       `userdir/config.json` or `config.json` whichever
+                       exists). Multiple --config options may be used. Can be
+                       set to `-` to read config from stdin.
+ -d PATH, --datadir PATH, --data-dir PATH
                        Path to directory with historical backtesting data.
  --userdir PATH, --user-data-dir PATH
                        Path to userdata directory.

```

* Example: see the timeframes for the 'binance' exchange, set in the configuration file:
@@ -479,20 +487,17 @@ usage: freqtrade list-markets [-h] [-v] [--logfile FILE] [-V] [-c PATH]
                              [-d PATH] [--userdir PATH] [--exchange EXCHANGE]
                              [--print-list] [--print-json] [-1] [--print-csv]
                              [--base BASE_CURRENCY [BASE_CURRENCY ...]]
-                             [--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]] [-a]
-                             [--trading-mode {spot,margin,futures}]
-
+                             [--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]]
+                             [-a] [--trading-mode {spot,margin,futures}]
usage: freqtrade list-pairs [-h] [-v] [--logfile FILE] [-V] [-c PATH]
                            [-d PATH] [--userdir PATH] [--exchange EXCHANGE]
                            [--print-list] [--print-json] [-1] [--print-csv]
                            [--base BASE_CURRENCY [BASE_CURRENCY ...]]
                            [--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]] [-a]
                            [--trading-mode {spot,margin,futures}]

-optional arguments:
+options:
  -h, --help            show this help message and exit
- --exchange EXCHANGE   Exchange name (default: `bittrex`). Only valid if no
-                       config is provided.
+ --exchange EXCHANGE   Exchange name. Only valid if no config is provided.
  --print-list          Print list of pairs or market symbols. By default data
                        is printed in the tabular format.
  --print-json          Print list of pairs or market symbols in JSON format.
@@ -504,20 +509,22 @@ optional arguments:
                        Specify quote currency(-ies). Space-separated list.
  -a, --all             Print all pairs or market symbols. By default only
                        active ones are shown.
- --trading-mode {spot,margin,futures}
+ --trading-mode {spot,margin,futures}, --tradingmode {spot,margin,futures}
                        Select Trading mode

Common arguments:
  -v, --verbose         Verbose mode (-vv for more, -vvv to get all messages).
- --logfile FILE        Log to the file specified. Special values are:
+ --logfile FILE, --log-file FILE
+                       Log to the file specified. Special values are:
                        'syslog', 'journald'. See the documentation for more
                        details.
  -V, --version         show program's version number and exit
  -c PATH, --config PATH
-                       Specify configuration file (default: `config.json`).
-                       Multiple --config options may be used. Can be set to
-                       `-` to read config from stdin.
- -d PATH, --datadir PATH
+                       Specify configuration file (default:
+                       `userdir/config.json` or `config.json` whichever
+                       exists). Multiple --config options may be used. Can be
+                       set to `-` to read config from stdin.
+ -d PATH, --datadir PATH, --data-dir PATH
                        Path to directory with historical backtesting data.
  --userdir PATH, --user-data-dir PATH
                        Path to userdata directory.
@@ -532,7 +539,7 @@ Pairs/markets are sorted by its symbol string in the printed output.
### Examples

* Print the list of active pairs with quote currency USD on exchange, specified in the default
-configuration file (i.e. pairs on the "Bittrex" exchange) in JSON format:
+configuration file (i.e. pairs on the "Binance" exchange) in JSON format:

```
$ freqtrade list-pairs --quote USD --print-json
@@ -564,7 +571,7 @@ usage: freqtrade test-pairlist [-h] [--userdir PATH] [-v] [-c PATH]
                               [--quote QUOTE_CURRENCY [QUOTE_CURRENCY ...]]
                               [-1] [--print-json] [--exchange EXCHANGE]

-optional arguments:
+options:
  -h, --help            show this help message and exit
  --userdir PATH, --user-data-dir PATH
                        Path to userdata directory.
@@ -578,8 +585,7 @@ optional arguments:
                        Specify quote currency(-ies). Space-separated list.
  -1, --one-column      Print output in one column.
  --print-json          Print list of pairs or market symbols in JSON format.
- --exchange EXCHANGE   Exchange name (default: `bittrex`). Only valid if no
-                       config is provided.
+ --exchange EXCHANGE   Exchange name. Only valid if no config is provided.

```

@@ -302,6 +302,7 @@ You can configure this as follows:
```

The above represents the default (`exit_fill` and `entry_fill` are optional and will default to the above configuration) - modifications are obviously possible.
+To disable either of the two default values (`entry_fill` / `exit_fill`), you can assign them an empty array (`exit_fill: []`).

Available fields correspond to the fields for webhooks and are documented in the corresponding webhook sections.
@@ -1,5 +1,5 @@
""" Freqtrade bot """
-__version__ = '2023.10-dev'
+__version__ = '2023.11-dev'

if 'dev' in __version__:
    from pathlib import Path
@@ -108,7 +108,6 @@ def ask_user_config() -> Dict[str, Any]:
        "choices": [
            "binance",
            "binanceus",
-           "bittrex",
            "gate",
            "huobi",
            "kraken",
@@ -211,8 +211,9 @@ def prepare_results(analysed_trades, stratname,
                    timerange=None):
    res_df = pd.DataFrame()
    for pair, trades in analysed_trades[stratname].items():
-       trades.dropna(subset=['close_date'], inplace=True)
-       res_df = pd.concat([res_df, trades], ignore_index=True)
+       if (trades.shape[0] > 0):
+           trades.dropna(subset=['close_date'], inplace=True)
+           res_df = pd.concat([res_df, trades], ignore_index=True)

    res_df = _select_rows_within_dates(res_df, timerange)
@@ -123,10 +123,14 @@ class Binance(Exchange):

    def funding_fee_cutoff(self, open_date: datetime):
        """
+       Funding fees are only charged at full hours (usually every 4-8h).
+       Therefore a trade opening at 10:00:01 will not be charged a funding fee until the next hour.
+       On binance, this cutoff is 15s.
+       https://github.com/freqtrade/freqtrade/pull/5779#discussion_r740175931
        :param open_date: The open date for a trade
-       :return: The cutoff open time for when a funding fee is charged
+       :return: True if the date falls on a full hour, False otherwise
        """
-       return open_date.minute > 0 or (open_date.minute == 0 and open_date.second > 15)
+       return open_date.minute == 0 and open_date.second < 15

    def dry_run_liquidation_price(
        self,
(File diff suppressed because it is too large.)
@@ -7,7 +7,7 @@ import ccxt

from freqtrade.constants import BuySell
from freqtrade.enums import CandleType, MarginMode, PriceType, TradingMode
-from freqtrade.exceptions import DDosProtection, OperationalException, TemporaryError
+from freqtrade.exceptions import DDosProtection, ExchangeError, OperationalException, TemporaryError
from freqtrade.exchange import Exchange
from freqtrade.exchange.common import retrier
from freqtrade.util.datetime_helpers import dt_now, dt_ts
@@ -202,8 +202,11 @@ class Bybit(Exchange):
        """
        # Bybit does not provide "applied" funding fees per position.
        if self.trading_mode == TradingMode.FUTURES:
-           return self._fetch_and_calculate_funding_fees(
-               pair, amount, is_short, open_date)
+           try:
+               return self._fetch_and_calculate_funding_fees(
+                   pair, amount, is_short, open_date)
+           except ExchangeError:
+               logger.warning(f"Could not update funding fees for {pair}.")
        return 0.0

    def fetch_orders(self, pair: str, since: datetime, params: Optional[Dict] = None) -> List[Dict]:
|
@ -52,7 +52,6 @@ MAP_EXCHANGE_CHILDCLASS = {
|
|||
|
||||
SUPPORTED_EXCHANGES = [
|
||||
'binance',
|
||||
'bittrex',
|
||||
'gate',
|
||||
'huobi',
|
||||
'kraken',
|
||||
|
|
|
@@ -486,11 +486,14 @@ class Exchange:
        except ccxt.BaseError:
            logger.exception('Unable to initialize markets.')

-   def reload_markets(self) -> None:
+   def reload_markets(self, force: bool = False) -> None:
        """Reload markets both sync and async if refresh interval has passed """
        # Check whether markets have to be reloaded
-       if (self._last_markets_refresh > 0) and (
-               self._last_markets_refresh + self.markets_refresh_interval > dt_ts()):
+       if (
+           not force
+           and self._last_markets_refresh > 0
+           and (self._last_markets_refresh + self.markets_refresh_interval > dt_ts())
+       ):
            return None
        logger.debug("Performing scheduled market reload..")
        try:
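Illustrative call sites for the new `force` flag (assuming an already-initialized `exchange` object):

```python
exchange.reload_markets()            # honours markets_refresh_interval; may be a no-op
exchange.reload_markets(force=True)  # always reloads, e.g. after a BadSymbol error
```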
@@ -1228,16 +1231,16 @@ class Exchange:
            return order
        except ccxt.InsufficientFunds as e:
            raise InsufficientFundsError(
-               f'Insufficient funds to create {ordertype} sell order on market {pair}. '
-               f'Tried to sell amount {amount} at rate {limit_rate}. '
-               f'Message: {e}') from e
-       except ccxt.InvalidOrder as e:
+               f'Insufficient funds to create {ordertype} {side} order on market {pair}. '
+               f'Tried to {side} amount {amount} at rate {limit_rate} with '
+               f'stop-price {stop_price_norm}. Message: {e}') from e
+       except (ccxt.InvalidOrder, ccxt.BadRequest) as e:
            # Errors:
            # `Order would trigger immediately.`
            raise InvalidOrderException(
-               f'Could not create {ordertype} sell order on market {pair}. '
-               f'Tried to sell amount {amount} at rate {limit_rate}. '
-               f'Message: {e}') from e
+               f'Could not create {ordertype} {side} order on market {pair}. '
+               f'Tried to {side} amount {amount} at rate {limit_rate} with '
+               f'stop-price {stop_price_norm}. Message: {e}') from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
@@ -1496,8 +1499,9 @@ class Exchange:
    @retrier
    def fetch_bids_asks(self, symbols: Optional[List[str]] = None, cached: bool = False) -> Dict:
        """
+       :param symbols: List of symbols to fetch
        :param cached: Allow cached result
-       :return: fetch_tickers result
+       :return: fetch_bids_asks result
        """
        if not self.exchange_has('fetchBidsAsks'):
            return {}
@@ -1546,6 +1550,12 @@ class Exchange:
            raise OperationalException(
                f'Exchange {self._api.name} does not support fetching tickers in batch. '
                f'Message: {e}') from e
+       except ccxt.BadSymbol as e:
+           logger.warning(f"Could not load tickers due to {e.__class__.__name__}. Message: {e} ."
+                          "Reloading markets.")
+           self.reload_markets(True)
+           # Re-raise exception to repeat the call.
+           raise TemporaryError from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.NetworkError, ccxt.ExchangeError) as e:
@@ -1954,7 +1964,7 @@ class Exchange:

        results = await asyncio.gather(*input_coro, return_exceptions=True)
        for res in results:
-           if isinstance(res, Exception):
+           if isinstance(res, BaseException):
                logger.warning(f"Async code raised an exception: {repr(res)}")
                if raise_:
                    raise
@@ -2279,6 +2289,7 @@ class Exchange:

                        from_id = t[-1][1]
                else:
+                   logger.debug("Stopping as no more trades were returned.")
                    break
            except asyncio.CancelledError:
                logger.debug("Async operation Interrupted, breaking trades DL loop.")
@@ -2304,6 +2315,11 @@ class Exchange:
            try:
                t = await self._async_fetch_trades(pair, since=since)
                if t:
+                   # No more trades to download available at the exchange,
+                   # So we repeatedly get the same trade over and over again.
+                   if since == t[-1][0] and len(t) == 1:
+                       logger.debug("Stopping because no more trades are available.")
+                       break
                    since = t[-1][0]
                    trades.extend(t)
                    # Reached the end of the defined-download period
@@ -2312,6 +2328,7 @@ class Exchange:
                            f"Stopping because until was reached. {t[-1][0]} > {until}")
                        break
                else:
+                   logger.debug("Stopping as no more trades were returned.")
                    break
            except asyncio.CancelledError:
                logger.debug("Async operation Interrupted, breaking trades DL loop.")
@@ -2653,12 +2670,14 @@ class Exchange:
        """
        return 0.0

-   def funding_fee_cutoff(self, open_date: datetime):
+   def funding_fee_cutoff(self, open_date: datetime) -> bool:
        """
+       Funding fees are only charged at full hours (usually every 4-8h).
+       Therefore a trade opening at 10:00:01 will not be charged a funding fee until the next hour.
        :param open_date: The open date for a trade
-       :return: The cutoff open time for when a funding fee is charged
+       :return: True if the date falls on a full hour, False otherwise
        """
-       return open_date.minute > 0 or open_date.second > 0
+       return open_date.minute == 0 and open_date.second == 0

    @retrier
    def set_margin_mode(self, pair: str, margin_mode: MarginMode, accept_fail: bool = False,
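A quick sketch of the inverted return semantics, reproducing only the base-class logic shown above (Binance additionally allows a 15-second grace period):

```python
from datetime import datetime, timezone


def funding_fee_cutoff(open_date: datetime) -> bool:
    # True only when the trade opens exactly on the funding hour.
    return open_date.minute == 0 and open_date.second == 0


assert funding_fee_cutoff(datetime(2023, 11, 1, 8, 0, 0, tzinfo=timezone.utc)) is True
assert funding_fee_cutoff(datetime(2023, 11, 1, 8, 0, 1, tzinfo=timezone.utc)) is False
```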
@@ -2706,15 +2725,16 @@ class Exchange:
        """

        if self.funding_fee_cutoff(open_date):
-           open_date += timedelta(hours=1)
+           # Shift back to 1h candle to avoid missing funding fees
+           # Only really relevant for trades very close to the full hour
+           open_date = timeframe_to_prev_date('1h', open_date)
        timeframe = self._ft_has['mark_ohlcv_timeframe']
        timeframe_ff = self._ft_has.get('funding_fee_timeframe',
                                        self._ft_has['mark_ohlcv_timeframe'])

        if not close_date:
            close_date = datetime.now(timezone.utc)
-       open_timestamp = int(timeframe_to_prev_date(timeframe, open_date).timestamp()) * 1000
-       # close_timestamp = int(close_date.timestamp()) * 1000
+       since_ms = int(timeframe_to_prev_date(timeframe, open_date).timestamp()) * 1000

        mark_comb: PairWithTimeframe = (
            pair, timeframe, CandleType.from_string(self._ft_has["mark_ohlcv_price"]))
@@ -2722,7 +2742,7 @@ class Exchange:
        funding_comb: PairWithTimeframe = (pair, timeframe_ff, CandleType.FUNDING_RATE)
        candle_histories = self.refresh_latest_ohlcv(
            [mark_comb, funding_comb],
-           since_ms=open_timestamp,
+           since_ms=since_ms,
            cache=False,
            drop_incomplete=False,
        )
@@ -2733,8 +2753,7 @@ class Exchange:
        except KeyError:
            raise ExchangeError("Could not find funding rates.") from None

-       funding_mark_rates = self.combine_funding_and_mark(
-           funding_rates=funding_rates, mark_rates=mark_rates)
+       funding_mark_rates = self.combine_funding_and_mark(funding_rates, mark_rates)

        return self.calculate_funding_fees(
            funding_mark_rates,
@@ -2781,7 +2800,7 @@ class Exchange:
            amount: float,
            is_short: bool,
            open_date: datetime,
-           close_date: Optional[datetime] = None,
+           close_date: datetime,
            time_in_ratio: Optional[float] = None
    ) -> float:
        """
@@ -2797,8 +2816,8 @@ class Exchange:
        fees: float = 0

        if not df.empty:
-           df = df[(df['date'] >= open_date) & (df['date'] <= close_date)]
-           fees = sum(df['open_fund'] * df['open_mark'] * amount)
+           df1 = df[(df['date'] >= open_date) & (df['date'] <= close_date)]
+           fees = sum(df1['open_fund'] * df1['open_mark'] * amount)

        # Negate fees for longs as funding_fees expects it this way based on live endpoints.
        return fees if is_short else -fees
@@ -2813,17 +2832,19 @@ class Exchange:
        :param amount: Trade amount
        :param open_date: Open date of the trade
        :return: funding fee since open_date
        :raises: ExchangeError if something goes wrong.
        """
        if self.trading_mode == TradingMode.FUTURES:
-           if self._config['dry_run']:
-               funding_fees = self._fetch_and_calculate_funding_fees(
-                   pair, amount, is_short, open_date)
-           else:
-               funding_fees = self._get_funding_fees_from_exchange(pair, open_date)
-           return funding_fees
-       else:
-           return 0.0
+           try:
+               if self._config['dry_run']:
+                   funding_fees = self._fetch_and_calculate_funding_fees(
+                       pair, amount, is_short, open_date)
+               else:
+                   funding_fees = self._get_funding_fees_from_exchange(pair, open_date)
+               return funding_fees
+           except ExchangeError:
+               logger.warning(f"Could not update funding fees for {pair}.")
+
+       return 0.0

    def get_liquidation_price(
            self,
@@ -195,7 +195,7 @@ class Kraken(Exchange):
            amount: float,
            is_short: bool,
            open_date: datetime,
-           close_date: Optional[datetime] = None,
+           close_date: datetime,
            time_in_ratio: Optional[float] = None
    ) -> float:
        """
@@ -159,7 +159,7 @@ class BaseEnvironment(gym.Env):
        function is designed for tracking incremented objects,
        events, actions inside the training environment.
        For example, a user can call this to track the
-       frequency of occurence of an `is_valid` call in
+       frequency of occurrence of an `is_valid` call in
        their `calculate_reward()`:

        def calculate_reward(self, action: int) -> float:
@@ -296,8 +296,7 @@ class FreqaiDataDrawer:
                    f"for more than {len(dataframe.index)} candles.")

        df_concat = pd.concat([hist_preds, new_pred], ignore_index=True, keys=hist_preds.keys())
-       # remove last row because we will append that later in append_model_predictions()
-       df_concat = df_concat.iloc[:-1]

        # any missing values will get zeroed out so users can see the exact
        # downtime in FreqUI
        df_concat = df_concat.fillna(0)
@@ -27,6 +27,12 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
            ...
            "freqai": {
                ...
+               "conv_width": 30,  // PyTorchTransformer is based on windowing
+               "feature_parameters": {
+                   ...
+                   "include_shifted_candles": 0,  // which removes the need for shifted candles
+                   ...
+               },
                "model_training_parameters" : {
                    "learning_rate": 3e-4,
                    "trainer_kwargs": {
@@ -120,16 +126,16 @@ class PyTorchTransformerRegressor(BasePyTorchRegressor):
        # create empty torch tensor
        self.model.model.eval()
        yb = torch.empty(0).to(self.device)
-       if x.shape[1] > 1:
+       if x.shape[1] > self.window_size:
            ws = self.window_size
            for i in range(0, x.shape[1] - ws):
                xb = x[:, i:i + ws, :].to(self.device)
                y = self.model.model(xb)
-               yb = torch.cat((yb, y), dim=0)
+               yb = torch.cat((yb, y), dim=1)
        else:
            yb = self.model.model(x)

-       yb = yb.cpu().squeeze()
+       yb = yb.cpu().squeeze(0)
        pred_df = pd.DataFrame(yb.detach().numpy(), columns=dk.label_list)
        pred_df, _, _ = dk.label_pipeline.inverse_transform(pred_df)
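A self-contained toy sketch of the windowed-inference pattern the fix above targets; `toy_model`, the shapes and the window size are invented stand-ins for the strategy's transformer:

```python
import torch


def toy_model(xb: torch.Tensor) -> torch.Tensor:
    # One prediction per window, shaped (batch, 1, 1) so windows stack along dim=1.
    return xb.mean(dim=(1, 2)).reshape(xb.shape[0], 1, 1)


x = torch.randn(1, 40, 8)  # (batch=1, candles=40, features=8)
window_size = 30

if x.shape[1] > window_size:  # enough candles: slide a window over the time axis
    preds = [toy_model(x[:, i:i + window_size, :])
             for i in range(x.shape[1] - window_size)]
    yb = torch.cat(preds, dim=1)  # concatenate along the time axis, as in the fix
else:
    yb = toy_model(x)

print(yb.squeeze(0).shape)  # squeeze(0) drops only the batch dimension
```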
@@ -1,8 +1,9 @@
import logging
from pathlib import Path
-from typing import Any, Dict, Type
+from typing import Any, Dict, List, Optional, Type

import torch as th
+from stable_baselines3.common.callbacks import ProgressBarCallback

from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.RL.Base5ActionRLEnv import Actions, Base5ActionRLEnv, Positions
@@ -73,19 +74,27 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
                        'trained agent.')
            model = self.dd.model_dictionary[dk.pair]
            model.set_env(self.train_env)
+       callbacks: List[Any] = [self.eval_callback, self.tensorboard_callback]
+       progressbar_callback: Optional[ProgressBarCallback] = None
+       if self.rl_config.get('progress_bar', False):
+           progressbar_callback = ProgressBarCallback()
+           callbacks.insert(0, progressbar_callback)

-       model.learn(
-           total_timesteps=int(total_timesteps),
-           callback=[self.eval_callback, self.tensorboard_callback],
-           progress_bar=self.rl_config.get('progress_bar', False)
-       )
+       try:
+           model.learn(
+               total_timesteps=int(total_timesteps),
+               callback=callbacks,
+           )
+       finally:
+           if progressbar_callback:
+               progressbar_callback.on_training_end()

        if Path(dk.data_path / "best_model.zip").is_file():
            logger.info('Callback found a best model.')
            best_model = self.MODELCLASS.load(dk.data_path / "best_model")
            return best_model

-       logger.info('Couldnt find best model, using final model instead.')
+       logger.info("Couldn't find best model, using final model instead.")

        return model
@@ -3,7 +3,6 @@ from typing import Any, Dict, Type, Union

from stable_baselines3.common.callbacks import BaseCallback
from stable_baselines3.common.logger import HParam
-from stable_baselines3.common.vec_env import VecEnv

from freqtrade.freqai.RL.BaseEnvironment import BaseActions

@@ -13,13 +12,9 @@ class TensorboardCallback(BaseCallback):
    Custom callback for plotting additional values in tensorboard and
    episodic summary reports.
    """
-   # Override training_env type to fix type errors
-   training_env: Union[VecEnv, None] = None

    def __init__(self, verbose=1, actions: Type[Enum] = BaseActions):
        super().__init__(verbose)
        self.model: Any = None
-       self.logger: Any = None
        self.actions: Type[Enum] = actions

    def _on_training_start(self) -> None:
@@ -47,9 +42,13 @@ class TensorboardCallback(BaseCallback):
    def _on_step(self) -> bool:

        local_info = self.locals["infos"][0]
-       if self.training_env is None:
-           return True
-       tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]
+
+       if hasattr(self.training_env, 'envs'):
+           tensorboard_metrics = self.training_env.envs[0].unwrapped.tensorboard_metrics
+       else:
+           # For RL-multiproc - usage of [0] might need to be evaluated
+           tensorboard_metrics = self.training_env.get_attr("tensorboard_metrics")[0]

        for metric in local_info:
            if metric not in ["episode", "terminal_observation"]:
@@ -132,7 +132,7 @@ class FreqtradeBot(LoggingMixin):
        # TODO: This would be more efficient if scheduled in utc time, and performed at each
        # TODO: funding interval, specified by funding_fee_times on the exchange classes
        for time_slot in range(0, 24):
-           for minutes in [0, 15, 30, 45]:
+           for minutes in [1, 31]:
                t = str(time(time_slot, minutes, 2))
                self._schedule.every().day.at(t).do(update)
        self.last_process: Optional[datetime] = None
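Illustrative only: the minute marks above translate into the following daily schedule slots (48 instead of the previous 96):

```python
from datetime import time

slots = [str(time(hour, minutes, 2)) for hour in range(24) for minutes in [1, 31]]
print(len(slots), slots[:4])  # 48 ['00:01:02', '00:31:02', '01:01:02', '01:31:02']
```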
@@ -199,6 +199,7 @@ class FreqtradeBot(LoggingMixin):
        # Only update open orders on startup
        # This will update the database after the initial migration
        self.startup_update_open_orders()
+       self.update_funding_fees()

    def process(self) -> None:
        """
@@ -312,22 +313,19 @@ class FreqtradeBot(LoggingMixin):
        open_trades = Trade.get_open_trade_count()
        return max(0, self.config['max_open_trades'] - open_trades)

-   def update_funding_fees(self):
+   def update_funding_fees(self) -> None:
        if self.trading_mode == TradingMode.FUTURES:
-           trades = Trade.get_open_trades()
-           try:
-               for trade in trades:
-                   funding_fees = self.exchange.get_funding_fees(
+           trades: List[Trade] = Trade.get_open_trades()
+           for trade in trades:
+               trade.set_funding_fees(
+                   self.exchange.get_funding_fees(
                        pair=trade.pair,
                        amount=trade.amount,
                        is_short=trade.is_short,
-                       open_date=trade.date_last_filled_utc
-                   )
-                   trade.funding_fees = funding_fees
-           except ExchangeError:
-               logger.warning("Could not update funding fees for open trades.")
+                       open_date=trade.date_last_filled_utc)
+               )

-   def startup_backpopulate_precision(self):
+   def startup_backpopulate_precision(self) -> None:

        trades = Trade.get_trades([Trade.contract_size.is_(None)])
        for trade in trades:
@@ -374,17 +372,13 @@ class FreqtradeBot(LoggingMixin):
                    fo = order.to_ccxt_object()
                    fo['status'] = 'canceled'
                    self.handle_cancel_order(
-                       fo, order.order_id, order.trade,
-                       constants.CANCEL_REASON['TIMEOUT']
+                       fo, order, order.trade, constants.CANCEL_REASON['TIMEOUT']
                    )

                except ExchangeError as e:
                    logger.warning(f"Error updating Order {order.order_id} due to {e}")

-       if self.trading_mode == TradingMode.FUTURES:
-           self._schedule.run_pending()
-
    def update_trades_without_assigned_fees(self) -> None:
        """
        Update closed trades without close fees assigned.
@@ -745,6 +739,7 @@ class FreqtradeBot(LoggingMixin):
        :param pair: pair for which we want to create a LIMIT_BUY
        :param stake_amount: amount of stake-currency for the pair
        :return: True if a buy order is created, false if it fails.
+       :raise: DependencyException or it's subclasses like ExchangeError.
        """
        time_in_force = self.strategy.order_time_in_force['entry']
@@ -831,14 +826,15 @@ class FreqtradeBot(LoggingMixin):
        base_currency = self.exchange.get_pair_base_currency(pair)
        open_date = datetime.now(timezone.utc)

-       funding_fees = self.exchange.get_funding_fees(
-           pair=pair,
-           amount=amount + trade.amount if trade else amount,
-           is_short=is_short,
-           open_date=trade.date_last_filled_utc if trade else open_date
-       )
-
        # This is a new trade
        if trade is None:
+           funding_fees = 0.0
+           try:
+               funding_fees = self.exchange.get_funding_fees(
+                   pair=pair, amount=amount, is_short=is_short, open_date=open_date)
+           except ExchangeError:
+               logger.warning("Could not find funding fee.")
+
            trade = Trade(
                pair=pair,
@@ -874,6 +870,7 @@ class FreqtradeBot(LoggingMixin):
            trade.is_open = True
            trade.fee_open_currency = None
            trade.open_rate_requested = enter_limit_requested
+           trade.set_funding_fees(funding_fees)

            trade.orders.append(order_obj)
            trade.recalc_trade_from_orders()
@@ -1081,7 +1078,11 @@ class FreqtradeBot(LoggingMixin):
        trades_closed = 0
        for trade in trades:

-           if not trade.has_open_orders and not self.wallets.check_exit_amount(trade):
+           if (
+               not trade.has_open_orders
+               and not trade.stoploss_order_id
+               and not self.wallets.check_exit_amount(trade)
+           ):
                logger.warning(
                    f'Not enough {trade.safe_base_currency} in wallet to exit {trade}. '
                    'Trying to recover.')
@@ -1340,6 +1341,7 @@ class FreqtradeBot(LoggingMixin):
        :return: None
        """
        for trade in Trade.get_open_trades():
+           open_order: Order
            for open_order in trade.open_orders:
                try:
                    order = self.exchange.fetch_order(open_order.order_id, trade.pair)
@ -1360,22 +1362,23 @@ class FreqtradeBot(LoggingMixin):
|
|||
)
|
||||
):
|
||||
self.handle_cancel_order(
|
||||
order, open_order.order_id, trade, constants.CANCEL_REASON['TIMEOUT']
|
||||
order, open_order, trade, constants.CANCEL_REASON['TIMEOUT']
|
||||
)
|
||||
else:
|
||||
self.replace_order(order, open_order, trade)
|
||||
|
||||
def handle_cancel_order(self, order: Dict, order_id: str, trade: Trade, reason: str) -> None:
|
||||
def handle_cancel_order(self, order: Dict, order_obj: Order, trade: Trade, reason: str) -> None:
|
||||
"""
|
||||
Check if current analyzed order timed out and cancel if necessary.
|
||||
:param order: Order dict grabbed with exchange.fetch_order()
|
||||
:param order_obj: Order object from the database.
|
||||
:param trade: Trade object.
|
||||
:return: None
|
||||
"""
|
||||
if order['side'] == trade.entry_side:
|
||||
self.handle_cancel_enter(trade, order, order_id, reason)
|
||||
self.handle_cancel_enter(trade, order, order_obj, reason)
|
||||
else:
|
||||
canceled = self.handle_cancel_exit(trade, order, order_id, reason)
|
||||
canceled = self.handle_cancel_exit(trade, order, order_obj, reason)
|
||||
canceled_count = trade.get_canceled_exit_order_count()
|
||||
max_timeouts = self.config.get('unfilledtimeout', {}).get('exit_timeout_count', 0)
|
||||
if (canceled and max_timeouts > 0 and canceled_count >= max_timeouts):
|
||||
|
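Note (not part of the commit): the hunks above change handle_cancel_order and its callers to pass the database Order object instead of a bare order_id. A minimal, self-contained mimic of that dispatch, with hypothetical helper names, looks roughly like this:

# Hedged sketch only - stand-in classes, not freqtrade's; it shows the reworked call
# shape (exchange order dict + Order object) and the side-based entry/exit dispatch.
from typing import Dict


class _Order:
    def __init__(self, order_id: str) -> None:
        self.order_id = order_id


def handle_cancel_order(order: Dict, order_obj: _Order, entry_side: str, reason: str) -> str:
    # Entry-side orders go down the cancel-enter path, everything else down cancel-exit,
    # mirroring the if/else in the hunk above.
    if order['side'] == entry_side:
        return f"cancel-enter {order_obj.order_id} ({reason})"
    return f"cancel-exit {order_obj.order_id} ({reason})"


print(handle_cancel_order({'side': 'buy', 'status': 'open'}, _Order('abc123'), 'buy', 'TIMEOUT'))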
@ -1440,7 +1443,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
trade=trade, order=order_obj, pair=trade.pair,
|
||||
current_time=datetime.now(timezone.utc), proposed_rate=proposed_rate,
|
||||
current_order_rate=order_obj.safe_price, entry_tag=trade.enter_tag,
|
||||
side=trade.entry_side)
|
||||
side=trade.trade_direction)
|
||||
|
||||
replacing = True
|
||||
cancel_reason = constants.CANCEL_REASON['REPLACE']
|
||||
|
@ -1449,7 +1452,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
cancel_reason = constants.CANCEL_REASON['USER_CANCEL']
|
||||
if order_obj.price != adjusted_entry_price:
|
||||
# cancel existing order if new price is supplied or None
|
||||
res = self.handle_cancel_enter(trade, order, order_obj.order_id, cancel_reason,
|
||||
res = self.handle_cancel_enter(trade, order, order_obj, cancel_reason,
|
||||
replacing=replacing)
|
||||
if not res:
|
||||
self.replace_order_failed(
|
||||
|
@ -1457,15 +1460,21 @@ class FreqtradeBot(LoggingMixin):
|
|||
return
|
||||
if adjusted_entry_price:
|
||||
# place new order only if new price is supplied
|
||||
if not self.execute_entry(
|
||||
pair=trade.pair,
|
||||
stake_amount=(
|
||||
order_obj.safe_remaining * order_obj.safe_price / trade.leverage),
|
||||
price=adjusted_entry_price,
|
||||
trade=trade,
|
||||
is_short=trade.is_short,
|
||||
mode='replace',
|
||||
):
|
||||
try:
|
||||
if not self.execute_entry(
|
||||
pair=trade.pair,
|
||||
stake_amount=(
|
||||
order_obj.safe_remaining * order_obj.safe_price / trade.leverage),
|
||||
price=adjusted_entry_price,
|
||||
trade=trade,
|
||||
is_short=trade.is_short,
|
||||
mode='replace',
|
||||
):
|
||||
self.replace_order_failed(
|
||||
trade, f"Could not replace order for {trade}.")
|
||||
except DependencyException as exception:
|
||||
logger.warning(
|
||||
f'Unable to replace order for {trade.pair}: {exception}')
|
||||
self.replace_order_failed(trade, f"Could not replace order for {trade}.")
|
||||
|
||||
def cancel_all_open_orders(self) -> None:
|
||||
|
@ -1484,29 +1493,28 @@ class FreqtradeBot(LoggingMixin):
|
|||
|
||||
if order['side'] == trade.entry_side:
|
||||
self.handle_cancel_enter(
|
||||
trade, order, open_order.order_id, constants.CANCEL_REASON['ALL_CANCELLED']
|
||||
trade, order, open_order, constants.CANCEL_REASON['ALL_CANCELLED']
|
||||
)
|
||||
|
||||
elif order['side'] == trade.exit_side:
|
||||
self.handle_cancel_exit(
|
||||
trade, order, open_order.order_id, constants.CANCEL_REASON['ALL_CANCELLED']
|
||||
trade, order, open_order, constants.CANCEL_REASON['ALL_CANCELLED']
|
||||
)
|
||||
Trade.commit()
|
||||
|
||||
def handle_cancel_enter(
|
||||
self, trade: Trade, order: Dict, order_id: str,
|
||||
self, trade: Trade, order: Dict, order_obj: Order,
|
||||
reason: str, replacing: Optional[bool] = False
|
||||
) -> bool:
|
||||
"""
|
||||
entry cancel - cancel order
|
||||
:param order_obj: Order object from the database.
|
||||
:param replacing: Replacing order - prevent trade deletion.
|
||||
:return: True if trade was fully cancelled
|
||||
"""
|
||||
was_trade_fully_canceled = False
|
||||
order_id = order_obj.order_id
|
||||
side = trade.entry_side.capitalize()
|
||||
if not trade.has_open_orders:
|
||||
logger.warning(f"No open order for {trade}.")
|
||||
return False
|
||||
|
||||
if order['status'] not in constants.NON_OPEN_EXCHANGE_STATES:
|
||||
filled_val: float = order.get('filled', 0.0) or 0.0
|
||||
|
@ -1519,8 +1527,8 @@ class FreqtradeBot(LoggingMixin):
|
|||
f"Order {order_id} for {trade.pair} not cancelled, "
|
||||
f"as the filled amount of {filled_val} would result in an unexitable trade.")
|
||||
return False
|
||||
corder = self.exchange.cancel_order_with_result(order_id, trade.pair,
|
||||
trade.amount)
|
||||
corder = self.exchange.cancel_order_with_result(order_id, trade.pair, trade.amount)
|
||||
order_obj.ft_cancel_reason = reason
|
||||
# if replacing, retry fetching the order 3 times if the status is not what we need
|
||||
if replacing:
|
||||
retry_count = 0
|
||||
|
@ -1541,9 +1549,10 @@ class FreqtradeBot(LoggingMixin):
|
|||
else:
|
||||
# Order was cancelled already, so we can reuse the existing dict
|
||||
corder = order
|
||||
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
|
||||
if order_obj.ft_cancel_reason is None:
|
||||
order_obj.ft_cancel_reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
|
||||
|
||||
logger.info(f'{side} order {reason} for {trade}.')
|
||||
logger.info(f'{side} order {order_obj.ft_cancel_reason} for {trade}.')
|
||||
|
||||
# Using filled to determine the filled amount
|
||||
filled_amount = safe_value_fallback2(corder, order, 'filled', 'filled')
|
||||
|
@ -1556,7 +1565,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
if open_order_count < 1 and trade.nr_of_successful_entries == 0 and not replacing:
|
||||
logger.info(f'{side} order fully cancelled. Removing {trade} from database.')
|
||||
trade.delete()
|
||||
reason += f", {constants.CANCEL_REASON['FULLY_CANCELLED']}"
|
||||
order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['FULLY_CANCELLED']}"
|
||||
else:
|
||||
self.update_trade_state(trade, order_id, corder)
|
||||
logger.info(f'{side} Order timeout for {trade}.')
|
||||
|
@ -1566,21 +1575,21 @@ class FreqtradeBot(LoggingMixin):
|
|||
self.update_trade_state(trade, order_id, corder)
|
||||
|
||||
logger.info(f'Partial {trade.entry_side} order timeout for {trade}.')
|
||||
reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
|
||||
order_obj.ft_cancel_reason += f", {constants.CANCEL_REASON['PARTIALLY_FILLED']}"
|
||||
|
||||
self.wallets.update()
|
||||
self._notify_enter_cancel(trade, order_type=self.strategy.order_types['entry'],
|
||||
reason=reason)
|
||||
reason=order_obj.ft_cancel_reason)
|
||||
return was_trade_fully_canceled
|
||||
|
||||
def handle_cancel_exit(
|
||||
self, trade: Trade, order: Dict, order_id: str,
|
||||
reason: str
|
||||
self, trade: Trade, order: Dict, order_obj: Order, reason: str
|
||||
) -> bool:
|
||||
"""
|
||||
exit order cancel - cancel order and update trade
|
||||
:return: True if exit order was cancelled, false otherwise
|
||||
"""
|
||||
order_id = order_obj.order_id
|
||||
cancelled = False
|
||||
# Cancelled orders may have the status of 'canceled' or 'closed'
|
||||
if order['status'] not in constants.NON_OPEN_EXCHANGE_STATES:
|
||||
|
@ -1605,7 +1614,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
sub_trade=trade.amount != order['amount']
|
||||
)
|
||||
return False
|
||||
|
||||
order_obj.ft_cancel_reason = reason
|
||||
try:
|
||||
order = self.exchange.cancel_order_with_result(
|
||||
order['id'], trade.pair, trade.amount)
|
||||
|
@ -1624,19 +1633,22 @@ class FreqtradeBot(LoggingMixin):
|
|||
trade.exit_reason = exit_reason_prev
|
||||
cancelled = True
|
||||
else:
|
||||
reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
|
||||
if order_obj.ft_cancel_reason is None:
|
||||
order_obj.ft_cancel_reason = constants.CANCEL_REASON['CANCELLED_ON_EXCHANGE']
|
||||
trade.exit_reason = None
|
||||
|
||||
self.update_trade_state(trade, order['id'], order)
|
||||
|
||||
logger.info(f'{trade.exit_side.capitalize()} order {reason} for {trade}.')
|
||||
logger.info(
|
||||
f'{trade.exit_side.capitalize()} order {order_obj.ft_cancel_reason} for {trade}.')
|
||||
trade.close_rate = None
|
||||
trade.close_rate_requested = None
|
||||
|
||||
self._notify_exit_cancel(
|
||||
trade,
|
||||
order_type=self.strategy.order_types['exit'],
|
||||
reason=reason, order_id=order['id'], sub_trade=trade.amount != order['amount']
|
||||
reason=order_obj.ft_cancel_reason, order_id=order['id'],
|
||||
sub_trade=trade.amount != order['amount']
|
||||
)
|
||||
return cancelled
|
||||
|
||||
|
@@ -1688,15 +1700,13 @@ class FreqtradeBot(LoggingMixin):
:param exit_check: CheckTuple with signal and reason
:return: True if it succeeds False
"""
try:
trade.funding_fees = self.exchange.get_funding_fees(
trade.set_funding_fees(
self.exchange.get_funding_fees(
pair=trade.pair,
amount=trade.amount,
is_short=trade.is_short,
open_date=trade.date_last_filled_utc,
)
except ExchangeError:
logger.warning("Could not update funding fee.")
open_date=trade.date_last_filled_utc)
)

exit_type = 'exit'
exit_reason = exit_tag or exit_check.exit_reason

@@ -1919,7 +1929,7 @@ class FreqtradeBot(LoggingMixin):

if self.exchange.check_order_canceled_empty(order):
# Trade has been cancelled on exchange
# Handling of this will happen in check_handle_timedout.
# Handling of this will happen in handle_cancel_order.
return True

order_obj_or_none = trade.select_order_by_order_id(order_id)
@@ -8,15 +8,13 @@ logger = logging.getLogger(__name__)
def set_loggers(verbosity: int = 0, api_verbosity: str = 'info') -> None:
"""
Set the logging level for third party libraries
:param verbosity: Verbosity level. amount of `-v` passed to the command line
:return: None
"""

logging.getLogger('requests').setLevel(
logging.INFO if verbosity <= 1 else logging.DEBUG
)
logging.getLogger("urllib3").setLevel(
logging.INFO if verbosity <= 1 else logging.DEBUG
)
for logger_name in ('requests', 'urllib3', 'httpcore'):
logging.getLogger(logger_name).setLevel(
logging.INFO if verbosity <= 1 else logging.DEBUG
)
logging.getLogger('ccxt.base.exchange').setLevel(
logging.INFO if verbosity <= 2 else logging.DEBUG
)
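Note (not part of the commit): the hunk above folds the per-library setLevel calls into one loop and adds httpcore. The same pattern, generalized and standalone (logger names taken from the hunk, function name is my own):

import logging


def quiet_third_party(verbosity: int = 0) -> None:
    # One level rule applied to the noisy HTTP libraries, as in the loop above.
    level = logging.INFO if verbosity <= 1 else logging.DEBUG
    for name in ('requests', 'urllib3', 'httpcore'):
        logging.getLogger(name).setLevel(level)
    # ccxt stays quieter for longer: DEBUG only from the third -v, mirroring the diff.
    logging.getLogger('ccxt.base.exchange').setLevel(
        logging.INFO if verbosity <= 2 else logging.DEBUG)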
@@ -3,6 +3,7 @@ Various tool function for Freqtrade and scripts
"""
import gzip
import logging
from io import StringIO
from pathlib import Path
from typing import Any, Dict, Iterator, List, Mapping, Optional, TextIO, Union
from urllib.parse import urlparse

@@ -231,7 +232,7 @@ def json_to_dataframe(data: str) -> pd.DataFrame:
:param data: A JSON string
:returns: A pandas DataFrame from the JSON string
"""
dataframe = pd.read_json(data, orient='split')
dataframe = pd.read_json(StringIO(data), orient='split')
if 'date' in dataframe.columns:
dataframe['date'] = pd.to_datetime(dataframe['date'], unit='ms', utc=True)
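Note (not part of the commit): json_to_dataframe now wraps the payload in StringIO because recent pandas versions deprecate passing a literal JSON string to read_json. A minimal standalone sketch with made-up data:

from io import StringIO

import pandas as pd

payload = '{"columns":["date","close"],"index":[0],"data":[[1696118400000, 27650.1]]}'
# Wrapping in StringIO avoids the FutureWarning about literal JSON strings to read_json.
df = pd.read_json(StringIO(payload), orient='split')
df['date'] = pd.to_datetime(df['date'], unit='ms', utc=True)
print(df)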
@@ -94,8 +94,8 @@ class LookaheadAnalysis(BaseAnalysis):
# compare_df now comprises tuples with [1] having either 'self' or 'other'
if 'other' in col_name[1]:
continue
self_value = compare_df_row[col_idx]
other_value = compare_df_row[col_idx + 1]
self_value = compare_df_row.iloc[col_idx]
other_value = compare_df_row.iloc[col_idx + 1]

# output differences
if self_value != other_value:
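Note (not part of the commit): the hunk above switches to .iloc because plain integer indexing on a pandas row is label-based and deprecated for positional use. A tiny illustration with made-up column names:

import pandas as pd

row = pd.Series([1.0, 2.0], index=['close_self', 'close_other'])
# .iloc is the explicit positional accessor; row[0] would be treated as a label lookup.
self_value = row.iloc[0]
other_value = row.iloc[1]
print(self_value, other_value)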
@ -525,10 +525,10 @@ class Backtesting:
|
|||
# This should not be reached...
|
||||
return row[OPEN_IDX]
|
||||
|
||||
def _get_adjust_trade_entry_for_candle(self, trade: LocalTrade, row: Tuple
|
||||
) -> LocalTrade:
|
||||
def _get_adjust_trade_entry_for_candle(
|
||||
self, trade: LocalTrade, row: Tuple, current_time: datetime
|
||||
) -> LocalTrade:
|
||||
current_rate = row[OPEN_IDX]
|
||||
current_date = row[DATE_IDX].to_pydatetime()
|
||||
current_profit = trade.calc_profit_ratio(current_rate)
|
||||
min_stake = self.exchange.get_min_pair_stake_amount(trade.pair, current_rate, -0.1)
|
||||
max_stake = self.exchange.get_max_pair_stake_amount(trade.pair, current_rate)
|
||||
|
@ -536,7 +536,7 @@ class Backtesting:
|
|||
stake_amount = strategy_safe_wrapper(self.strategy.adjust_trade_position,
|
||||
default_retval=None, supress_error=True)(
|
||||
trade=trade, # type: ignore[arg-type]
|
||||
current_time=current_date, current_rate=current_rate,
|
||||
current_time=current_time, current_rate=current_rate,
|
||||
current_profit=current_profit, min_stake=min_stake,
|
||||
max_stake=min(max_stake, stake_available),
|
||||
current_entry_rate=current_rate, current_exit_rate=current_rate,
|
||||
|
@ -569,10 +569,10 @@ class Backtesting:
|
|||
# Remaining stake is too low to be sold.
|
||||
return trade
|
||||
exit_ = ExitCheckTuple(ExitType.PARTIAL_EXIT)
|
||||
pos_trade = self._get_exit_for_signal(trade, row, exit_, amount)
|
||||
pos_trade = self._get_exit_for_signal(trade, row, exit_, current_time, amount)
|
||||
if pos_trade is not None:
|
||||
order = pos_trade.orders[-1]
|
||||
if self._try_close_open_order(order, trade, current_date, row):
|
||||
if self._try_close_open_order(order, trade, current_time, row):
|
||||
trade.recalc_trade_from_orders()
|
||||
self.wallets.update()
|
||||
return pos_trade
|
||||
|
@ -615,11 +615,11 @@ class Backtesting:
|
|||
|
||||
def _get_exit_for_signal(
|
||||
self, trade: LocalTrade, row: Tuple, exit_: ExitCheckTuple,
|
||||
current_time: datetime,
|
||||
amount: Optional[float] = None) -> Optional[LocalTrade]:
|
||||
|
||||
exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
|
||||
if exit_.exit_flag:
|
||||
trade.close_date = exit_candle_time
|
||||
trade.close_date = current_time
|
||||
exit_reason = exit_.exit_reason
|
||||
amount_ = amount if amount is not None else trade.amount
|
||||
trade_dur = int((trade.close_date_utc - trade.open_date_utc).total_seconds() // 60)
|
||||
|
@ -647,10 +647,10 @@ class Backtesting:
|
|||
default_retval=close_rate)(
|
||||
pair=trade.pair,
|
||||
trade=trade, # type: ignore[arg-type]
|
||||
current_time=exit_candle_time,
|
||||
current_time=current_time,
|
||||
proposed_rate=close_rate, current_profit=current_profit,
|
||||
exit_tag=exit_reason)
|
||||
if rate != close_rate:
|
||||
if rate is not None and rate != close_rate:
|
||||
close_rate = price_to_precision(rate, trade.price_precision,
|
||||
self.precision_mode)
|
||||
# We can't place orders lower than current low.
|
||||
|
@ -673,7 +673,7 @@ class Backtesting:
|
|||
time_in_force=time_in_force,
|
||||
sell_reason=exit_reason, # deprecated
|
||||
exit_reason=exit_reason,
|
||||
current_time=exit_candle_time)):
|
||||
current_time=current_time)):
|
||||
return None
|
||||
|
||||
trade.exit_reason = exit_reason
|
||||
|
@ -714,21 +714,24 @@ class Backtesting:
|
|||
trade.orders.append(order)
|
||||
return trade
|
||||
|
||||
def _check_trade_exit(self, trade: LocalTrade, row: Tuple) -> Optional[LocalTrade]:
|
||||
exit_candle_time: datetime = row[DATE_IDX].to_pydatetime()
|
||||
def _check_trade_exit(
|
||||
self, trade: LocalTrade, row: Tuple, current_time: datetime
|
||||
) -> Optional[LocalTrade]:
|
||||
|
||||
if self.trading_mode == TradingMode.FUTURES:
|
||||
trade.funding_fees = self.exchange.calculate_funding_fees(
|
||||
self.futures_data[trade.pair],
|
||||
amount=trade.amount,
|
||||
is_short=trade.is_short,
|
||||
open_date=trade.date_last_filled_utc,
|
||||
close_date=exit_candle_time,
|
||||
trade.set_funding_fees(
|
||||
self.exchange.calculate_funding_fees(
|
||||
self.futures_data[trade.pair],
|
||||
amount=trade.amount,
|
||||
is_short=trade.is_short,
|
||||
open_date=trade.date_last_filled_utc,
|
||||
close_date=current_time
|
||||
)
|
||||
)
|
||||
|
||||
# Check if we need to adjust our current positions
|
||||
if self.strategy.position_adjustment_enable:
|
||||
trade = self._get_adjust_trade_entry_for_candle(trade, row)
|
||||
trade = self._get_adjust_trade_entry_for_candle(trade, row, current_time)
|
||||
|
||||
enter = row[SHORT_IDX] if trade.is_short else row[LONG_IDX]
|
||||
exit_sig = row[ESHORT_IDX] if trade.is_short else row[ELONG_IDX]
|
||||
|
@ -738,7 +741,7 @@ class Backtesting:
|
|||
low=row[LOW_IDX], high=row[HIGH_IDX]
|
||||
)
|
||||
for exit_ in exits:
|
||||
t = self._get_exit_for_signal(trade, row, exit_)
|
||||
t = self._get_exit_for_signal(trade, row, exit_, current_time)
|
||||
if t:
|
||||
return t
|
||||
return None
|
||||
|
@ -760,7 +763,7 @@ class Backtesting:
|
|||
) # default value is the open rate
|
||||
# We can't place orders higher than current high (otherwise it'd be a stop limit entry)
|
||||
# which freqtrade does not support in live.
|
||||
if new_rate != propose_rate:
|
||||
if new_rate is not None and new_rate != propose_rate:
|
||||
propose_rate = price_to_precision(new_rate, price_precision,
|
||||
self.precision_mode)
|
||||
if direction == "short":
|
||||
|
@ -1145,7 +1148,7 @@ class Backtesting:
|
|||
|
||||
# 4. Create exit orders (if any)
|
||||
if not trade.has_open_orders:
|
||||
self._check_trade_exit(trade, row) # Place exit order if necessary
|
||||
self._check_trade_exit(trade, row, current_time) # Place exit order if necessary
|
||||
|
||||
# 5. Process exit orders.
|
||||
order = trade.select_order(trade.exit_side, is_open=True)
|
||||
|
|
|
@@ -21,7 +21,7 @@ logger = logging.getLogger(__name__)

def _format_exception_message(space: str, ignore_missing_space: bool) -> None:
msg = (f"The '{space}' space is included into the hyperoptimization "
f"but no parameter for this space was not found in your Strategy. "
f"but no parameter for this space was found in your Strategy. "
)
if ignore_missing_space:
logger.warning(msg + "This space will be ignored.")
@@ -429,14 +429,18 @@ class HyperoptTools:
trials = trials.drop(columns=['Total profit'])

if print_colorized:
trials2 = trials.astype(str)
for i in range(len(trials)):
if trials.loc[i]['is_profit']:
for j in range(len(trials.loc[i]) - 3):
trials.iat[i, j] = f"{Fore.GREEN}{str(trials.loc[i][j])}{Fore.RESET}"
trials2.iat[i, j] = f"{Fore.GREEN}{str(trials.iloc[i, j])}{Fore.RESET}"
if trials.loc[i]['is_best'] and highlight_best:
for j in range(len(trials.loc[i]) - 3):
trials.iat[i, j] = f"{Style.BRIGHT}{str(trials.loc[i][j])}{Style.RESET_ALL}"

trials2.iat[i, j] = (
f"{Style.BRIGHT}{str(trials.iloc[i, j])}{Style.RESET_ALL}"
)
trials = trials2
del trials2
trials = trials.drop(columns=['is_initial_point', 'is_best', 'is_profit', 'is_random'])
if remove_header > 0:
table = tabulate.tabulate(
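Note (not part of the commit): the hunk above writes the ANSI-wrapped strings into a string-typed copy (trials2) instead of the original, partly numeric frame. A small illustration of that pattern with a toy frame (column names are made up):

import pandas as pd
from colorama import Fore

df = pd.DataFrame({'Objective': [1.25, -0.50], 'is_profit': [False, True]})
# Colourise on a str-typed copy; assigning ANSI strings into a numeric column would
# fight the column dtype, which is the situation the hunk above avoids.
styled = df.astype(str)
for i in range(len(df)):
    if df.loc[i, 'is_profit']:
        styled.iat[i, 0] = f"{Fore.GREEN}{df.iloc[i, 0]}{Fore.RESET}"
print(styled)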
@@ -219,8 +219,10 @@ def _get_resample_from_period(period: str) -> str:
raise ValueError(f"Period {period} is not supported.")


def generate_periodic_breakdown_stats(trade_list: List, period: str) -> List[Dict[str, Any]]:
results = DataFrame.from_records(trade_list)
def generate_periodic_breakdown_stats(
trade_list: Union[List, DataFrame], period: str) -> List[Dict[str, Any]]:

results = trade_list if not isinstance(trade_list, list) else DataFrame.from_records(trade_list)
if len(results) == 0:
return []
results['close_date'] = to_datetime(results['close_date'], utc=True)
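Note (not part of the commit): with the widened signature above, the breakdown stats accept either a list of trade dicts or an already-built DataFrame. A hypothetical call of both styles - the import path, the 'day' period and the column names are assumptions on my part, not confirmed by this diff:

from pandas import DataFrame

from freqtrade.optimize.optimize_reports import generate_periodic_breakdown_stats

trades = [
    {'close_date': '2023-10-01 12:00:00', 'profit_abs': 12.3, 'profit_ratio': 0.012},
    {'close_date': '2023-10-02 09:30:00', 'profit_abs': -4.1, 'profit_ratio': -0.004},
]

daily = generate_periodic_breakdown_stats(trades, 'day')                 # list of dicts
daily_df = generate_periodic_breakdown_stats(DataFrame(trades), 'day')   # pre-built frame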
@ -115,6 +115,7 @@ def migrate_trades_and_orders_table(
|
|||
# Futures Properties
|
||||
interest_rate = get_column_def(cols, 'interest_rate', '0.0')
|
||||
funding_fees = get_column_def(cols, 'funding_fees', '0.0')
|
||||
funding_fee_running = get_column_def(cols, 'funding_fee_running', 'null')
|
||||
max_stake_amount = get_column_def(cols, 'max_stake_amount', 'stake_amount')
|
||||
|
||||
# If ticker-interval existed use that, else null.
|
||||
|
@ -163,7 +164,7 @@ def migrate_trades_and_orders_table(
|
|||
max_rate, min_rate, exit_reason, exit_order_status, strategy, enter_tag,
|
||||
timeframe, open_trade_value, close_profit_abs,
|
||||
trading_mode, leverage, liquidation_price, is_short,
|
||||
interest_rate, funding_fees, realized_profit,
|
||||
interest_rate, funding_fees, funding_fee_running, realized_profit,
|
||||
amount_precision, price_precision, precision_mode, contract_size,
|
||||
max_stake_amount
|
||||
)
|
||||
|
@ -192,7 +193,8 @@ def migrate_trades_and_orders_table(
|
|||
{open_trade_value} open_trade_value, {close_profit_abs} close_profit_abs,
|
||||
{trading_mode} trading_mode, {leverage} leverage, {liquidation_price} liquidation_price,
|
||||
{is_short} is_short, {interest_rate} interest_rate,
|
||||
{funding_fees} funding_fees, {realized_profit} realized_profit,
|
||||
{funding_fees} funding_fees, {funding_fee_running} funding_fee_running,
|
||||
{realized_profit} realized_profit,
|
||||
{amount_precision} amount_precision, {price_precision} price_precision,
|
||||
{precision_mode} precision_mode, {contract_size} contract_size,
|
||||
{max_stake_amount} max_stake_amount
|
||||
|
@ -220,6 +222,7 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
|
|||
funding_fee = get_column_def(cols_order, 'funding_fee', '0.0')
|
||||
ft_amount = get_column_def(cols_order, 'ft_amount', 'coalesce(amount, 0.0)')
|
||||
ft_price = get_column_def(cols_order, 'ft_price', 'coalesce(price, 0.0)')
|
||||
ft_cancel_reason = get_column_def(cols_order, 'ft_cancel_reason', 'null')
|
||||
|
||||
# sqlite does not support literals for booleans
|
||||
with engine.begin() as connection:
|
||||
|
@ -227,13 +230,13 @@ def migrate_orders_table(engine, table_back_name: str, cols_order: List):
|
|||
insert into orders (id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
|
||||
status, symbol, order_type, side, price, amount, filled, average, remaining, cost,
|
||||
stop_price, order_date, order_filled_date, order_update_date, ft_fee_base, funding_fee,
|
||||
ft_amount, ft_price
|
||||
ft_amount, ft_price, ft_cancel_reason
|
||||
)
|
||||
select id, ft_trade_id, ft_order_side, ft_pair, ft_is_open, order_id,
|
||||
status, symbol, order_type, side, price, amount, filled, {average} average, remaining,
|
||||
cost, {stop_price} stop_price, order_date, order_filled_date,
|
||||
order_update_date, {ft_fee_base} ft_fee_base, {funding_fee} funding_fee,
|
||||
{ft_amount} ft_amount, {ft_price} ft_price
|
||||
{ft_amount} ft_amount, {ft_price} ft_price, {ft_cancel_reason} ft_cancel_reason
|
||||
from {table_back_name}
|
||||
"""))
|
||||
|
||||
|
@@ -328,8 +331,8 @@ def check_migrate(engine, decl_base, previous_tables) -> None:
# if ('orders' not in previous_tables
# or not has_column(cols_orders, 'funding_fee')):
migrating = False
# if not has_column(cols_orders, 'ft_price'):
if not has_column(cols_trades, 'is_stop_loss_trailing'):
# if not has_column(cols_orders, 'ft_cancel_reason'):
if not has_column(cols_trades, 'funding_fee_running'):
migrating = True
logger.info(f"Running database migration for trades - "
f"backup: {table_back_name}, {order_table_bak_name}")

@@ -68,6 +68,7 @@ class Order(ModelBase):
ft_is_open: Mapped[bool] = mapped_column(nullable=False, default=True, index=True)
ft_amount: Mapped[float] = mapped_column(Float(), nullable=False)
ft_price: Mapped[float] = mapped_column(Float(), nullable=False)
ft_cancel_reason: Mapped[str] = mapped_column(String(CUSTOM_TAG_MAX_LENGTH), nullable=True)

order_id: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
status: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

@@ -173,10 +174,6 @@ class Order(ModelBase):
self.ft_is_open = True
if self.status in NON_OPEN_EXCHANGE_STATES:
self.ft_is_open = False
if self.trade:
# Assign funding fee up to this point
# (represents the funding fee since the last order)
self.funding_fee = self.trade.funding_fees
if (order.get('filled', 0.0) or 0.0) > 0 and not self.order_filled_date:
self.order_filled_date = dt_from_ts(
safe_value_fallback(order, 'lastTradeTimestamp', default_value=dt_ts())

@@ -249,7 +246,8 @@ class Order(ModelBase):
self.ft_is_open = False
# Assign funding fees to Order.
# Assumes backtesting will use date_last_filled_utc to calculate future funding fees.
self.funding_fee = trade.funding_fees
self.funding_fee = trade.funding_fee_running
trade.funding_fee_running = 0.0

if (self.ft_order_side == trade.entry_side and self.price):
trade.open_rate = self.price

@@ -394,6 +392,9 @@ class LocalTrade:

# Futures properties
funding_fees: Optional[float] = None
# Used to keep running funding fees - between the last filled order and now
# Shall not be used for calculations!
funding_fee_running: Optional[float] = None

@property
def stoploss_or_liquidation(self) -> float:

@@ -564,6 +565,7 @@ class LocalTrade:
for key in kwargs:
setattr(self, key, kwargs[key])
self.recalc_open_trade_value()
self.orders = []
if self.trading_mode == TradingMode.MARGIN and self.interest_rate is None:
raise OperationalException(
f"{self.trading_mode.value} trading requires param interest_rate on trades")
@@ -689,6 +691,16 @@ class LocalTrade:
return
self.liquidation_price = liquidation_price

def set_funding_fees(self, funding_fee: float) -> None:
"""
Assign funding fees to Trade.
"""
if funding_fee is None:
return
self.funding_fee_running = funding_fee
prior_funding_fees = sum([o.funding_fee for o in self.orders if o.funding_fee])
self.funding_fees = prior_funding_fees + funding_fee

def __set_stop_loss(self, stop_loss: float, percent: float):
"""
Method used internally to set self.stop_loss.

@@ -769,6 +781,10 @@ class LocalTrade:
return

logger.info(f'Updating trade (id={self.id}) ...')
if order.ft_order_side != 'stoploss':
order.funding_fee = self.funding_fee_running
# Reset running funding fees
self.funding_fee_running = 0.0

if order.ft_order_side == self.entry_side:
# Update open rate and actual amount
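Note (not part of the commit): the two hunks above introduce a running funding-fee bucket next to the per-order fees. A hedged, self-contained mimic of that bookkeeping - stand-in classes, not freqtrade's models:

from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class _Order:
    funding_fee: Optional[float] = None


@dataclass
class _Trade:
    orders: List[_Order] = field(default_factory=list)
    funding_fees: float = 0.0
    funding_fee_running: float = 0.0

    def set_funding_fees(self, funding_fee: float) -> None:
        # Mirrors LocalTrade.set_funding_fees above: running fee plus fees already
        # attached to filled orders.
        self.funding_fee_running = funding_fee
        prior = sum(o.funding_fee for o in self.orders if o.funding_fee)
        self.funding_fees = prior + funding_fee

    def fill_order(self, order: _Order) -> None:
        # Mirrors the update_trade() hunk: the running fee is pinned to the order
        # that just filled, then reset for the next accrual window.
        order.funding_fee = self.funding_fee_running
        self.funding_fee_running = 0.0
        self.orders.append(order)


trade = _Trade()
trade.set_funding_fees(-0.8)       # fees accrued since the last fill
trade.fill_order(_Order())         # order fills; -0.8 sticks to that order
trade.set_funding_fees(-0.2)       # new accrual window starts
assert trade.funding_fees == -1.0  # prior per-order fees + running fee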
@ -1064,7 +1080,7 @@ class LocalTrade:
|
|||
price = avg_price if is_exit else tmp_price
|
||||
current_stake += price * tmp_amount * side
|
||||
|
||||
if current_amount > ZERO:
|
||||
if current_amount > ZERO and not is_exit:
|
||||
avg_price = current_stake / current_amount
|
||||
|
||||
if is_exit:
|
||||
|
@ -1077,7 +1093,10 @@ class LocalTrade:
|
|||
exit_amount = o.safe_amount_after_fee
|
||||
prof = self.calculate_profit(exit_rate, exit_amount, float(avg_price))
|
||||
close_profit_abs += prof.profit_abs
|
||||
close_profit = prof.profit_ratio
|
||||
if total_stake > 0:
|
||||
# This needs to be calculated based on the last occuring exit to be aligned
|
||||
# with realized_profit.
|
||||
close_profit = (close_profit_abs / total_stake) * self.leverage
|
||||
else:
|
||||
total_stake = total_stake + self._calc_open_trade_value(tmp_amount, price)
|
||||
max_stake_amount += (tmp_amount * price)
|
||||
|
@ -1313,6 +1332,98 @@ class LocalTrade:
|
|||
trade.adjust_stop_loss(trade.open_rate, desired_stoploss)
|
||||
logger.info(f"New stoploss: {trade.stop_loss}.")
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json_str: str) -> Self:
|
||||
"""
|
||||
Create a Trade instance from a json string.
|
||||
|
||||
Used for debugging purposes - please keep.
|
||||
:param json_str: json string to parse
|
||||
:return: Trade instance
|
||||
"""
|
||||
import rapidjson
|
||||
data = rapidjson.loads(json_str)
|
||||
trade = cls(
|
||||
__FROM_JSON=True,
|
||||
id=data["trade_id"],
|
||||
pair=data["pair"],
|
||||
base_currency=data["base_currency"],
|
||||
stake_currency=data["quote_currency"],
|
||||
is_open=data["is_open"],
|
||||
exchange=data["exchange"],
|
||||
amount=data["amount"],
|
||||
amount_requested=data["amount_requested"],
|
||||
stake_amount=data["stake_amount"],
|
||||
strategy=data["strategy"],
|
||||
enter_tag=data["enter_tag"],
|
||||
timeframe=data["timeframe"],
|
||||
fee_open=data["fee_open"],
|
||||
fee_open_cost=data["fee_open_cost"],
|
||||
fee_open_currency=data["fee_open_currency"],
|
||||
fee_close=data["fee_close"],
|
||||
fee_close_cost=data["fee_close_cost"],
|
||||
fee_close_currency=data["fee_close_currency"],
|
||||
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
|
||||
open_rate=data["open_rate"],
|
||||
open_rate_requested=data["open_rate_requested"],
|
||||
open_trade_value=data["open_trade_value"],
|
||||
close_date=(datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
|
||||
if data["close_timestamp"] else None),
|
||||
realized_profit=data["realized_profit"],
|
||||
close_rate=data["close_rate"],
|
||||
close_rate_requested=data["close_rate_requested"],
|
||||
close_profit=data["close_profit"],
|
||||
close_profit_abs=data["close_profit_abs"],
|
||||
exit_reason=data["exit_reason"],
|
||||
exit_order_status=data["exit_order_status"],
|
||||
stop_loss=data["stop_loss_abs"],
|
||||
stop_loss_pct=data["stop_loss_ratio"],
|
||||
stoploss_last_update=(
|
||||
datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
|
||||
tz=timezone.utc)
|
||||
if data["stoploss_last_update_timestamp"] else None),
|
||||
initial_stop_loss=data["initial_stop_loss_abs"],
|
||||
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
|
||||
min_rate=data["min_rate"],
|
||||
max_rate=data["max_rate"],
|
||||
leverage=data["leverage"],
|
||||
interest_rate=data["interest_rate"],
|
||||
liquidation_price=data["liquidation_price"],
|
||||
is_short=data["is_short"],
|
||||
trading_mode=data["trading_mode"],
|
||||
funding_fees=data["funding_fees"],
|
||||
amount_precision=data.get('amount_precision', None),
|
||||
price_precision=data.get('price_precision', None),
|
||||
precision_mode=data.get('precision_mode', None),
|
||||
contract_size=data.get('contract_size', None),
|
||||
)
|
||||
for order in data["orders"]:
|
||||
|
||||
order_obj = Order(
|
||||
amount=order["amount"],
|
||||
ft_amount=order["amount"],
|
||||
ft_order_side=order["ft_order_side"],
|
||||
ft_pair=order["pair"],
|
||||
ft_is_open=order["is_open"],
|
||||
order_id=order["order_id"],
|
||||
status=order["status"],
|
||||
average=order["average"],
|
||||
cost=order["cost"],
|
||||
filled=order["filled"],
|
||||
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT),
|
||||
order_filled_date=(datetime.fromtimestamp(
|
||||
order["order_filled_timestamp"] // 1000, tz=timezone.utc)
|
||||
if order["order_filled_timestamp"] else None),
|
||||
order_type=order["order_type"],
|
||||
price=order["price"],
|
||||
ft_price=order["price"],
|
||||
remaining=order["remaining"],
|
||||
funding_fee=order.get("funding_fee", None),
|
||||
)
|
||||
trade.orders.append(order_obj)
|
||||
|
||||
return trade
|
||||
|
||||
|
||||
class Trade(ModelBase, LocalTrade):
|
||||
"""
|
||||
|
@ -1413,6 +1524,8 @@ class Trade(ModelBase, LocalTrade):
|
|||
# Futures properties
|
||||
funding_fees: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True, default=None) # type: ignore
|
||||
funding_fee_running: Mapped[Optional[float]] = mapped_column(
|
||||
Float(), nullable=True, default=None) # type: ignore
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
from_json = kwargs.pop('__FROM_JSON', None)
|
||||
|
@ -1693,7 +1806,7 @@ class Trade(ModelBase, LocalTrade):
|
|||
.order_by(desc('profit_sum_abs'))
|
||||
).all()
|
||||
|
||||
return_list: List[Dict] = []
|
||||
resp: List[Dict] = []
|
||||
for id, enter_tag, exit_reason, profit, profit_abs, count in mix_tag_perf:
|
||||
enter_tag = enter_tag if enter_tag is not None else "Other"
|
||||
exit_reason = exit_reason if exit_reason is not None else "Other"
|
||||
|
@ -1701,24 +1814,25 @@ class Trade(ModelBase, LocalTrade):
|
|||
if (exit_reason is not None and enter_tag is not None):
|
||||
mix_tag = enter_tag + " " + exit_reason
|
||||
i = 0
|
||||
if not any(item["mix_tag"] == mix_tag for item in return_list):
|
||||
return_list.append({'mix_tag': mix_tag,
|
||||
'profit': profit,
|
||||
'profit_pct': round(profit * 100, 2),
|
||||
'profit_abs': profit_abs,
|
||||
'count': count})
|
||||
if not any(item["mix_tag"] == mix_tag for item in resp):
|
||||
resp.append({'mix_tag': mix_tag,
|
||||
'profit_ratio': profit,
|
||||
'profit_pct': round(profit * 100, 2),
|
||||
'profit_abs': profit_abs,
|
||||
'count': count})
|
||||
else:
|
||||
while i < len(return_list):
|
||||
if return_list[i]["mix_tag"] == mix_tag:
|
||||
return_list[i] = {
|
||||
while i < len(resp):
|
||||
if resp[i]["mix_tag"] == mix_tag:
|
||||
resp[i] = {
|
||||
'mix_tag': mix_tag,
|
||||
'profit': profit + return_list[i]["profit"],
|
||||
'profit_pct': round(profit + return_list[i]["profit"] * 100, 2),
|
||||
'profit_abs': profit_abs + return_list[i]["profit_abs"],
|
||||
'count': 1 + return_list[i]["count"]}
|
||||
'profit_ratio': profit + resp[i]["profit_ratio"],
|
||||
'profit_pct': round(profit + resp[i]["profit_ratio"] * 100, 2),
|
||||
'profit_abs': profit_abs + resp[i]["profit_abs"],
|
||||
'count': 1 + resp[i]["count"]
|
||||
}
|
||||
i += 1
|
||||
|
||||
return return_list
|
||||
return resp
|
||||
|
||||
@staticmethod
|
||||
def get_best_pair(start_date: datetime = datetime.fromtimestamp(0)):
|
||||
|
@ -1753,94 +1867,3 @@ class Trade(ModelBase, LocalTrade):
|
|||
Order.status == 'closed'
|
||||
)).scalar_one()
|
||||
return trading_volume
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json_str: str) -> Self:
|
||||
"""
|
||||
Create a Trade instance from a json string.
|
||||
|
||||
Used for debugging purposes - please keep.
|
||||
:param json_str: json string to parse
|
||||
:return: Trade instance
|
||||
"""
|
||||
import rapidjson
|
||||
data = rapidjson.loads(json_str)
|
||||
trade = cls(
|
||||
__FROM_JSON=True,
|
||||
id=data["trade_id"],
|
||||
pair=data["pair"],
|
||||
base_currency=data["base_currency"],
|
||||
stake_currency=data["quote_currency"],
|
||||
is_open=data["is_open"],
|
||||
exchange=data["exchange"],
|
||||
amount=data["amount"],
|
||||
amount_requested=data["amount_requested"],
|
||||
stake_amount=data["stake_amount"],
|
||||
strategy=data["strategy"],
|
||||
enter_tag=data["enter_tag"],
|
||||
timeframe=data["timeframe"],
|
||||
fee_open=data["fee_open"],
|
||||
fee_open_cost=data["fee_open_cost"],
|
||||
fee_open_currency=data["fee_open_currency"],
|
||||
fee_close=data["fee_close"],
|
||||
fee_close_cost=data["fee_close_cost"],
|
||||
fee_close_currency=data["fee_close_currency"],
|
||||
open_date=datetime.fromtimestamp(data["open_timestamp"] // 1000, tz=timezone.utc),
|
||||
open_rate=data["open_rate"],
|
||||
open_rate_requested=data["open_rate_requested"],
|
||||
open_trade_value=data["open_trade_value"],
|
||||
close_date=(datetime.fromtimestamp(data["close_timestamp"] // 1000, tz=timezone.utc)
|
||||
if data["close_timestamp"] else None),
|
||||
realized_profit=data["realized_profit"],
|
||||
close_rate=data["close_rate"],
|
||||
close_rate_requested=data["close_rate_requested"],
|
||||
close_profit=data["close_profit"],
|
||||
close_profit_abs=data["close_profit_abs"],
|
||||
exit_reason=data["exit_reason"],
|
||||
exit_order_status=data["exit_order_status"],
|
||||
stop_loss=data["stop_loss_abs"],
|
||||
stop_loss_pct=data["stop_loss_ratio"],
|
||||
stoploss_last_update=(
|
||||
datetime.fromtimestamp(data["stoploss_last_update_timestamp"] // 1000,
|
||||
tz=timezone.utc)
|
||||
if data["stoploss_last_update_timestamp"] else None),
|
||||
initial_stop_loss=data["initial_stop_loss_abs"],
|
||||
initial_stop_loss_pct=data["initial_stop_loss_ratio"],
|
||||
min_rate=data["min_rate"],
|
||||
max_rate=data["max_rate"],
|
||||
leverage=data["leverage"],
|
||||
interest_rate=data["interest_rate"],
|
||||
liquidation_price=data["liquidation_price"],
|
||||
is_short=data["is_short"],
|
||||
trading_mode=data["trading_mode"],
|
||||
funding_fees=data["funding_fees"],
|
||||
amount_precision=data.get('amount_precision', None),
|
||||
price_precision=data.get('price_precision', None),
|
||||
precision_mode=data.get('precision_mode', None),
|
||||
contract_size=data.get('contract_size', None),
|
||||
)
|
||||
for order in data["orders"]:
|
||||
|
||||
order_obj = Order(
|
||||
amount=order["amount"],
|
||||
ft_amount=order["amount"],
|
||||
ft_order_side=order["ft_order_side"],
|
||||
ft_pair=order["pair"],
|
||||
ft_is_open=order["is_open"],
|
||||
order_id=order["order_id"],
|
||||
status=order["status"],
|
||||
average=order["average"],
|
||||
cost=order["cost"],
|
||||
filled=order["filled"],
|
||||
order_date=datetime.strptime(order["order_date"], DATETIME_PRINT_FORMAT),
|
||||
order_filled_date=(datetime.fromtimestamp(
|
||||
order["order_filled_timestamp"] // 1000, tz=timezone.utc)
|
||||
if order["order_filled_timestamp"] else None),
|
||||
order_type=order["order_type"],
|
||||
price=order["price"],
|
||||
ft_price=order["price"],
|
||||
remaining=order["remaining"],
|
||||
)
|
||||
trade.orders.append(order_obj)
|
||||
|
||||
return trade
|
||||
|
|
|
@ -21,6 +21,7 @@ from freqtrade.misc import pair_to_filename
|
|||
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
|
||||
from freqtrade.resolvers import ExchangeResolver, StrategyResolver
|
||||
from freqtrade.strategy import IStrategy
|
||||
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -636,7 +637,7 @@ def load_and_plot_trades(config: Config):
|
|||
exchange = ExchangeResolver.load_exchange(config)
|
||||
IStrategy.dp = DataProvider(config, exchange)
|
||||
strategy.ft_bot_start()
|
||||
strategy.bot_loop_start(datetime.now(timezone.utc))
|
||||
strategy_safe_wrapper(strategy.bot_loop_start)(current_time=datetime.now(timezone.utc))
|
||||
plot_elements = init_plotscript(config, list(exchange.markets), strategy.startup_candle_count)
|
||||
timerange = plot_elements['timerange']
|
||||
trades = plot_elements['trades']
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
from datetime import date, datetime
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, RootModel, SerializeAsAny
|
||||
from pydantic import BaseModel, RootModel, SerializeAsAny
|
||||
|
||||
from freqtrade.constants import DATETIME_PRINT_FORMAT, IntOrInf
|
||||
from freqtrade.constants import IntOrInf
|
||||
from freqtrade.enums import MarginMode, OrderTypeValues, SignalDirection, TradingMode
|
||||
from freqtrade.types import ValidExchangesType
|
||||
|
||||
|
@ -95,15 +95,30 @@ class Count(BaseModel):
|
|||
total_stake: float
|
||||
|
||||
|
||||
class PerformanceEntry(BaseModel):
|
||||
pair: str
|
||||
profit: float
|
||||
class __BaseStatsModel(BaseModel):
|
||||
profit_ratio: float
|
||||
profit_pct: float
|
||||
profit_abs: float
|
||||
count: int
|
||||
|
||||
|
||||
class Entry(__BaseStatsModel):
|
||||
enter_tag: str
|
||||
|
||||
|
||||
class Exit(__BaseStatsModel):
|
||||
exit_reason: str
|
||||
|
||||
|
||||
class MixTag(__BaseStatsModel):
|
||||
mix_tag: str
|
||||
|
||||
|
||||
class PerformanceEntry(__BaseStatsModel):
|
||||
pair: str
|
||||
profit: float
|
||||
|
||||
|
||||
class Profit(BaseModel):
|
||||
profit_closed_coin: float
|
||||
profit_closed_percent_mean: float
|
||||
|
@ -483,11 +498,6 @@ class PairHistory(BaseModel):
|
|||
data_start: str
|
||||
data_stop: str
|
||||
data_stop_ts: int
|
||||
# TODO[pydantic]: The following keys were removed: `json_encoders`.
|
||||
# Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
|
||||
model_config = ConfigDict(json_encoders={
|
||||
datetime: lambda v: v.strftime(DATETIME_PRINT_FORMAT),
|
||||
})
|
||||
|
||||
|
||||
class BacktestFreqAIInputs(BaseModel):
|
||||
|
|
|
@ -12,15 +12,15 @@ from freqtrade.exceptions import OperationalException
|
|||
from freqtrade.rpc import RPC
|
||||
from freqtrade.rpc.api_server.api_schemas import (AvailablePairs, Balances, BlacklistPayload,
|
||||
BlacklistResponse, Count, DailyWeeklyMonthly,
|
||||
DeleteLockRequest, DeleteTrade,
|
||||
ExchangeListResponse, ForceEnterPayload,
|
||||
DeleteLockRequest, DeleteTrade, Entry,
|
||||
ExchangeListResponse, Exit, ForceEnterPayload,
|
||||
ForceEnterResponse, ForceExitPayload,
|
||||
FreqAIModelListResponse, Health, Locks, Logs,
|
||||
OpenTradeSchema, PairHistory, PerformanceEntry,
|
||||
Ping, PlotConfig, Profit, ResultMsg, ShowConfig,
|
||||
Stats, StatusMsg, StrategyListResponse,
|
||||
StrategyResponse, SysInfo, Version,
|
||||
WhitelistResponse)
|
||||
MixTag, OpenTradeSchema, PairHistory,
|
||||
PerformanceEntry, Ping, PlotConfig, Profit,
|
||||
ResultMsg, ShowConfig, Stats, StatusMsg,
|
||||
StrategyListResponse, StrategyResponse, SysInfo,
|
||||
Version, WhitelistResponse)
|
||||
from freqtrade.rpc.api_server.deps import get_config, get_exchange, get_rpc, get_rpc_optional
|
||||
from freqtrade.rpc.rpc import RPCException
|
||||
|
||||
|
@@ -52,7 +52,8 @@ logger = logging.getLogger(__name__)
# 2.31: new /backtest/history/ delete endpoint
# 2.32: new /backtest/history/ patch endpoint
# 2.33: Additional weekly/monthly metrics
API_VERSION = 2.33
# 2.34: new entries/exits/mix_tags endpoints
API_VERSION = 2.34

# Public API, requires no auth.
router_public = APIRouter()

@@ -83,6 +84,21 @@ def count(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_count()


@router.get('/entries', response_model=List[Entry], tags=['info'])
def entries(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_enter_tag_performance(pair)


@router.get('/exits', response_model=List[Exit], tags=['info'])
def exits(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_exit_reason_performance(pair)


@router.get('/mix_tags', response_model=List[MixTag], tags=['info'])
def mix_tags(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_mix_tag_performance(pair)


@router.get('/performance', response_model=List[PerformanceEntry], tags=['info'])
def performance(rpc: RPC = Depends(get_rpc)):
return rpc._rpc_performance()
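Note (not part of the commit): a hypothetical client call against the three endpoints added above (API 2.34). The server address and credentials are placeholders, and basic auth against the configured api_server user is an assumption on my part:

import requests

BASE = "http://127.0.0.1:8080/api/v1"
auth = ("freqtrader", "change_me")  # placeholder credentials

for endpoint in ("entries", "exits", "mix_tags"):
    # 'pair' is optional; omit it to get stats across all pairs.
    resp = requests.get(f"{BASE}/{endpoint}", params={"pair": "BTC/USDT"}, auth=auth)
    resp.raise_for_status()
    # Each item carries profit_ratio, profit_pct, profit_abs and count plus its tag field.
    print(endpoint, resp.json())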
@ -56,7 +56,7 @@ def get_exchange(config=Depends(get_config)):
|
|||
if not (exchange := ApiBG.exchanges.get(exchange_key)):
|
||||
from freqtrade.resolvers import ExchangeResolver
|
||||
exchange = ExchangeResolver.load_exchange(
|
||||
config, load_leverage_tiers=False)
|
||||
config, validate=False, load_leverage_tiers=False)
|
||||
ApiBG.exchanges[exchange_key] = exchange
|
||||
return exchange
|
||||
|
||||
|
|
|
@ -31,12 +31,11 @@ class Discord(Webhook):
|
|||
|
||||
def send_msg(self, msg) -> None:
|
||||
|
||||
if msg['type'].value in self._config['discord']:
|
||||
if (fields := self._config['discord'].get(msg['type'].value)):
|
||||
logger.info(f"Sending discord message: {msg}")
|
||||
|
||||
msg['strategy'] = self.strategy
|
||||
msg['timeframe'] = self.timeframe
|
||||
fields = self._config['discord'].get(msg['type'].value)
|
||||
color = 0x0000FF
|
||||
if msg['type'] in (RPCMessageType.EXIT, RPCMessageType.EXIT_FILL):
|
||||
profit_ratio = msg.get('profit_ratio')
|
||||
|
|
|
@ -27,6 +27,7 @@ coingecko_mapping = {
|
|||
'usdt': 'tether',
|
||||
'busd': 'binance-usd',
|
||||
'tusd': 'true-usd',
|
||||
'usdc': 'usd-coin',
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -795,14 +795,14 @@ class RPC:
|
|||
|
||||
if order['side'] == trade.entry_side:
|
||||
fully_canceled = self._freqtrade.handle_cancel_enter(
|
||||
trade, order, oo.order_id, CANCEL_REASON['FORCE_EXIT'])
|
||||
trade, order, oo, CANCEL_REASON['FORCE_EXIT'])
|
||||
trade_entry_cancelation_res['cancel_state'] = fully_canceled
|
||||
trade_entry_cancelation_registry.append(trade_entry_cancelation_res)
|
||||
|
||||
if order['side'] == trade.exit_side:
|
||||
# Cancel order - so it is placed anew with a fresh price.
|
||||
self._freqtrade.handle_cancel_exit(
|
||||
trade, order, oo.order_id, CANCEL_REASON['FORCE_EXIT'])
|
||||
trade, order, oo, CANCEL_REASON['FORCE_EXIT'])
|
||||
|
||||
if all(tocr['cancel_state'] is False for tocr in trade_entry_cancelation_registry):
|
||||
if trade.has_open_orders:
|
||||
|
@ -955,7 +955,7 @@ class RPC:
|
|||
logger.info(f"Cannot query order for {trade} due to {e}.", exc_info=True)
|
||||
raise RPCException("Order not found.")
|
||||
self._freqtrade.handle_cancel_order(
|
||||
order, open_order.order_id, trade, CANCEL_REASON['USER_CANCEL'])
|
||||
order, open_order, trade, CANCEL_REASON['USER_CANCEL'])
|
||||
Trade.commit()
|
||||
|
||||
def _rpc_delete(self, trade_id: int) -> Dict[str, Union[str, int]]:
|
||||
|
|
|
@ -223,7 +223,8 @@ class Telegram(RPCHandler):
|
|||
CommandHandler('health', self._health),
|
||||
CommandHandler('help', self._help),
|
||||
CommandHandler('version', self._version),
|
||||
CommandHandler('marketdir', self._changemarketdir)
|
||||
CommandHandler('marketdir', self._changemarketdir),
|
||||
CommandHandler('order', self._order),
|
||||
]
|
||||
callbacks = [
|
||||
CallbackQueryHandler(self._status_table, pattern='update_status_table'),
|
||||
|
@ -240,7 +241,7 @@ class Telegram(RPCHandler):
|
|||
CallbackQueryHandler(self._mix_tag_performance, pattern='update_mix_tag_performance'),
|
||||
CallbackQueryHandler(self._count, pattern='update_count'),
|
||||
CallbackQueryHandler(self._force_exit_inline, pattern=r"force_exit__\S+"),
|
||||
CallbackQueryHandler(self._force_enter_inline, pattern=r"\S+\/\S+"),
|
||||
CallbackQueryHandler(self._force_enter_inline, pattern=r"force_enter__\S+"),
|
||||
]
|
||||
for handle in handles:
|
||||
self._app.add_handler(handle)
|
||||
|
@ -555,6 +556,47 @@ class Telegram(RPCHandler):
|
|||
|
||||
return lines_detail
|
||||
|
||||
@authorized_only
|
||||
async def _order(self, update: Update, context: CallbackContext) -> None:
|
||||
"""
|
||||
Handler for /order.
|
||||
Returns the orders of the trade
|
||||
:param bot: telegram bot
|
||||
:param update: message update
|
||||
:return: None
|
||||
"""
|
||||
|
||||
trade_ids = []
|
||||
if context.args and len(context.args) > 0:
|
||||
trade_ids = [int(i) for i in context.args if i.isnumeric()]
|
||||
|
||||
results = self._rpc._rpc_trade_status(trade_ids=trade_ids)
|
||||
for r in results:
|
||||
lines = [
|
||||
"*Order List for Trade #*`{trade_id}`"
|
||||
]
|
||||
|
||||
lines_detail = self._prepare_order_details(
|
||||
r['orders'], r['quote_currency'], r['is_open'])
|
||||
lines.extend(lines_detail if lines_detail else "")
|
||||
await self.__send_order_msg(lines, r)
|
||||
|
||||
async def __send_order_msg(self, lines: List[str], r: Dict[str, Any]) -> None:
|
||||
"""
|
||||
Send status message.
|
||||
"""
|
||||
msg = ''
|
||||
|
||||
for line in lines:
|
||||
if line:
|
||||
if (len(msg) + len(line) + 1) < MAX_MESSAGE_LENGTH:
|
||||
msg += line + '\n'
|
||||
else:
|
||||
await self._send_msg(msg.format(**r))
|
||||
msg = "*Order List for Trade #*`{trade_id}` - continued\n" + line + '\n'
|
||||
|
||||
await self._send_msg(msg.format(**r))
|
||||
|
||||
@authorized_only
|
||||
async def _status(self, update: Update, context: CallbackContext) -> None:
|
||||
"""
|
||||
|
@ -652,9 +694,6 @@ class Telegram(RPCHandler):
|
|||
"*Open Order:* `{open_orders}`"
|
||||
+ ("- `{exit_order_status}`" if r['exit_order_status'] else ""))
|
||||
|
||||
lines_detail = self._prepare_order_details(
|
||||
r['orders'], r['quote_currency'], r['is_open'])
|
||||
lines.extend(lines_detail if lines_detail else "")
|
||||
await self.__send_status_msg(lines, r)
|
||||
|
||||
async def __send_status_msg(self, lines: List[str], r: Dict[str, Any]) -> None:
|
||||
|
@ -1149,12 +1188,19 @@ class Telegram(RPCHandler):
|
|||
async def _force_enter_inline(self, update: Update, _: CallbackContext) -> None:
|
||||
if update.callback_query:
|
||||
query = update.callback_query
|
||||
if query.data and '_||_' in query.data:
|
||||
pair, side = query.data.split('_||_')
|
||||
order_side = SignalDirection(side)
|
||||
await query.answer()
|
||||
await query.edit_message_text(text=f"Manually entering {order_side} for {pair}")
|
||||
await self._force_enter_action(pair, None, order_side)
|
||||
if query.data and '__' in query.data:
|
||||
# Input data is "force_enter__<pair|cancel>_<side>"
|
||||
payload = query.data.split("__")[1]
|
||||
if payload == 'cancel':
|
||||
await query.answer()
|
||||
await query.edit_message_text(text="Force enter canceled.")
|
||||
return
|
||||
if payload and '_||_' in payload:
|
||||
pair, side = payload.split('_||_')
|
||||
order_side = SignalDirection(side)
|
||||
await query.answer()
|
||||
await query.edit_message_text(text=f"Manually entering {order_side} for {pair}")
|
||||
await self._force_enter_action(pair, None, order_side)
|
||||
|
||||
@staticmethod
|
||||
def _layout_inline_keyboard(
|
||||
|
@ -1183,12 +1229,14 @@ class Telegram(RPCHandler):
|
|||
else:
|
||||
whitelist = self._rpc._rpc_whitelist()['whitelist']
|
||||
pair_buttons = [
|
||||
InlineKeyboardButton(text=pair, callback_data=f"{pair}_||_{order_side}")
|
||||
for pair in sorted(whitelist)
|
||||
InlineKeyboardButton(
|
||||
text=pair, callback_data=f"force_enter__{pair}_||_{order_side}"
|
||||
) for pair in sorted(whitelist)
|
||||
]
|
||||
buttons_aligned = self._layout_inline_keyboard(pair_buttons)
|
||||
|
||||
buttons_aligned.append([InlineKeyboardButton(text='Cancel', callback_data='cancel')])
|
||||
buttons_aligned.append([InlineKeyboardButton(text='Cancel',
|
||||
callback_data='force_enter__cancel')])
|
||||
await self._send_msg(msg="Which pair?",
|
||||
keyboard=buttons_aligned,
|
||||
query=update.callback_query)
|
||||
|
@ -1369,7 +1417,7 @@ class Telegram(RPCHandler):
|
|||
stat_line = (
|
||||
f"{i+1}.\t <code>{trade['mix_tag']}\t"
|
||||
f"{round_coin_value(trade['profit_abs'], self._config['stake_currency'])} "
|
||||
f"({trade['profit']:.2%}) "
|
||||
f"({trade['profit_ratio']:.2%}) "
|
||||
f"({trade['count']})</code>\n")
|
||||
|
||||
if len(output + stat_line) >= MAX_MESSAGE_LENGTH:
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from dataclasses import dataclass
|
||||
from typing import Any, Callable, Optional, Union
|
||||
from typing import Any, Callable, Dict, Optional, Union
|
||||
|
||||
from pandas import DataFrame
|
||||
|
||||
|
@ -38,17 +38,18 @@ def informative(timeframe: str, asset: str = '',
|
|||
|
||||
:param timeframe: Informative timeframe. Must always be equal or higher than strategy timeframe.
|
||||
:param asset: Informative asset, for example BTC, BTC/USDT, ETH/BTC. Do not specify to use
|
||||
current pair.
|
||||
current pair. Also supports limited pair format strings (see below)
|
||||
:param fmt: Column format (str) or column formatter (callable(name, asset, timeframe)). When not
|
||||
specified, defaults to:
|
||||
* {base}_{quote}_{column}_{timeframe} if asset is specified.
|
||||
* {column}_{timeframe} if asset is not specified.
|
||||
Format string supports these format variables:
|
||||
* {asset} - full name of the asset, for example 'BTC/USDT'.
|
||||
Pair format supports these format variables:
|
||||
* {base} - base currency in lower case, for example 'eth'.
|
||||
* {BASE} - same as {base}, except in upper case.
|
||||
* {quote} - quote currency in lower case, for example 'usdt'.
|
||||
* {QUOTE} - same as {quote}, except in upper case.
|
||||
Format string additionally supports this variables.
|
||||
* {asset} - full name of the asset, for example 'BTC/USDT'.
|
||||
* {column} - name of dataframe column.
|
||||
* {timeframe} - timeframe of informative dataframe.
|
||||
:param ffill: ffill dataframe after merging informative pair.
|
||||
|
@@ -68,9 +69,25 @@ def informative(timeframe: str, asset: str = '',
return decorator


def _format_pair_name(config, pair: str) -> str:
return pair.format(stake_currency=config['stake_currency'],
stake=config['stake_currency']).upper()
def __get_pair_formats(market: Optional[Dict[str, Any]]) -> Dict[str, str]:
if not market:
return {}
base = market['base']
quote = market['quote']
return {
'base': base.lower(),
'BASE': base.upper(),
'quote': quote.lower(),
'QUOTE': quote.upper(),
}


def _format_pair_name(config, pair: str, market: Optional[Dict[str, Any]] = None) -> str:
return pair.format(
stake_currency=config['stake_currency'],
stake=config['stake_currency'],
**__get_pair_formats(market),
).upper()


def _create_and_merge_informative_pair(strategy, dataframe: DataFrame, metadata: dict,
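Note (not part of the commit): an illustrative strategy fragment using the {base}/{quote} placeholders that the decorator now resolves through __get_pair_formats()/_format_pair_name(). The strategy body, values and the choice of '{base}/BTC' are my own; the referenced market must exist on the exchange:

from pandas import DataFrame

from freqtrade.strategy import IStrategy, informative


class InformativePairFormatExample(IStrategy):
    timeframe = '5m'
    minimal_roi = {"0": 0.10}
    stoploss = -0.10

    # '{base}/BTC' is expanded per whitelisted pair, e.g. ETH/USDT -> ETH/BTC;
    # with the default format, the column below would merge back as 'eth_btc_sma50_1h'.
    @informative('1h', '{base}/BTC')
    def populate_indicators_base_btc_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe['sma50'] = dataframe['close'].rolling(50).mean()
        return dataframe

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe

    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe

    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe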
@ -85,7 +102,8 @@ def _create_and_merge_informative_pair(strategy, dataframe: DataFrame, metadata:
|
|||
|
||||
if asset:
|
||||
# Insert stake currency if needed.
|
||||
asset = _format_pair_name(config, asset)
|
||||
market1 = strategy.dp.market(metadata['pair'])
|
||||
asset = _format_pair_name(config, asset, market1)
|
||||
else:
|
||||
# Not specifying an asset will define informative dataframe for current pair.
|
||||
asset = metadata['pair']
|
||||
|
@ -93,8 +111,6 @@ def _create_and_merge_informative_pair(strategy, dataframe: DataFrame, metadata:
|
|||
market = strategy.dp.market(asset)
|
||||
if market is None:
|
||||
raise OperationalException(f'Market {asset} is not available.')
|
||||
base = market['base']
|
||||
quote = market['quote']
|
||||
|
||||
# Default format. This optimizes for the common case: informative pairs using same stake
|
||||
# currency. When quote currency matches stake currency, column name will omit base currency.
|
||||
|
@ -117,10 +133,7 @@ def _create_and_merge_informative_pair(strategy, dataframe: DataFrame, metadata:
|
|||
formatter = fmt.format # A default string formatter.
|
||||
|
||||
fmt_args = {
|
||||
'BASE': base.upper(),
|
||||
'QUOTE': quote.upper(),
|
||||
'base': base.lower(),
|
||||
'quote': quote.lower(),
|
||||
**__get_pair_formats(market),
|
||||
'asset': asset,
|
||||
'timeframe': timeframe,
|
||||
}
|
||||
|
|
|
@ -756,12 +756,23 @@ class IStrategy(ABC, HyperStrategyMixin):
|
|||
candle_type = (inf_data.candle_type if inf_data.candle_type
|
||||
else self.config.get('candle_type_def', CandleType.SPOT))
|
||||
if inf_data.asset:
|
||||
pair_tf = (
|
||||
_format_pair_name(self.config, inf_data.asset),
|
||||
inf_data.timeframe,
|
||||
candle_type,
|
||||
)
|
||||
informative_pairs.append(pair_tf)
|
||||
if any(s in inf_data.asset for s in ("{BASE}", "{base}")):
|
||||
for pair in self.dp.current_whitelist():
|
||||
|
||||
pair_tf = (
|
||||
_format_pair_name(self.config, inf_data.asset, self.dp.market(pair)),
|
||||
inf_data.timeframe,
|
||||
candle_type,
|
||||
)
|
||||
informative_pairs.append(pair_tf)
|
||||
|
||||
else:
|
||||
pair_tf = (
|
||||
_format_pair_name(self.config, inf_data.asset),
|
||||
inf_data.timeframe,
|
||||
candle_type,
|
||||
)
|
||||
informative_pairs.append(pair_tf)
|
||||
else:
|
||||
for pair in self.dp.current_whitelist():
|
||||
informative_pairs.append((pair, inf_data.timeframe, candle_type))
|
||||
|
@ -1006,7 +1017,7 @@ class IStrategy(ABC, HyperStrategyMixin):
|
|||
exit_ = latest.get(SignalType.EXIT_LONG.value, 0) == 1
|
||||
exit_tag = latest.get(SignalTagType.EXIT_TAG.value, None)
|
||||
# Tags can be None, which does not resolve to False.
|
||||
exit_tag = exit_tag if isinstance(exit_tag, str) else None
|
||||
exit_tag = exit_tag if isinstance(exit_tag, str) and exit_tag != 'nan' else None
|
||||
|
||||
logger.debug(f"exit-trigger: {latest['date']} (pair={pair}) "
|
||||
f"enter={enter} exit={exit_}")
|
||||
|
@ -1038,17 +1049,17 @@ class IStrategy(ABC, HyperStrategyMixin):
exit_short = latest.get(SignalType.EXIT_SHORT.value, 0) == 1

enter_signal: Optional[SignalDirection] = None
enter_tag_value: Optional[str] = None
enter_tag: Optional[str] = None
if enter_long == 1 and not any([exit_long, enter_short]):
enter_signal = SignalDirection.LONG
enter_tag_value = latest.get(SignalTagType.ENTER_TAG.value, None)
enter_tag = latest.get(SignalTagType.ENTER_TAG.value, None)
if (self.config.get('trading_mode', TradingMode.SPOT) != TradingMode.SPOT
and self.can_short
and enter_short == 1 and not any([exit_short, enter_long])):
enter_signal = SignalDirection.SHORT
enter_tag_value = latest.get(SignalTagType.ENTER_TAG.value, None)
enter_tag = latest.get(SignalTagType.ENTER_TAG.value, None)

enter_tag_value = enter_tag_value if isinstance(enter_tag_value, str) else None
enter_tag = enter_tag if isinstance(enter_tag, str) and enter_tag != 'nan' else None

timeframe_seconds = timeframe_to_seconds(timeframe)

@ -1058,11 +1069,11 @@ class IStrategy(ABC, HyperStrategyMixin):
timeframe_seconds=timeframe_seconds,
enter=bool(enter_signal)
):
return None, enter_tag_value
return None, enter_tag

logger.debug(f"entry trigger: {latest['date']} (pair={pair}) "
f"enter={enter_long} enter_tag_value={enter_tag_value}")
return enter_signal, enter_tag_value
f"enter={enter_long} enter_tag_value={enter_tag}")
return enter_signal, enter_tag

def ignore_expired_candle(
self,
@ -1244,10 +1255,6 @@ class IStrategy(ABC, HyperStrategyMixin):
and trade.liquidation_price <= (high or current_rate)
and trade.is_short)

if (liq_higher_long or liq_lower_short):
logger.debug(f"{trade.pair} - Liquidation price hit. exit_type=ExitType.LIQUIDATION")
return ExitCheckTuple(exit_type=ExitType.LIQUIDATION)

# evaluate if the stoploss was hit if stoploss is not on exchange
# in Dry-Run, this handles stoploss logic as well, as the logic will not be different to
# regular stoploss handling.

@ -1268,6 +1275,10 @@ class IStrategy(ABC, HyperStrategyMixin):
return ExitCheckTuple(exit_type=exit_type)

if (liq_higher_long or liq_lower_short):
logger.debug(f"{trade.pair} - Liquidation price hit. exit_type=ExitType.LIQUIDATION")
return ExitCheckTuple(exit_type=ExitType.LIQUIDATION)

return ExitCheckTuple(exit_type=ExitType.NONE)

def min_roi_reached_entry(self, trade_dur: int) -> Tuple[Optional[int], Optional[float]]:
@ -290,9 +290,6 @@ class FreqaiExampleStrategy(IStrategy):
return df

def get_ticker_indicator(self):
return int(self.config["timeframe"][:-1])

def confirm_trade_entry(
self,
pair: str,
freqtrade/vendor/qtpylib/indicators.py

@ -226,7 +226,7 @@ def crossed(series1, series2, direction=None):
series1.shift(1) >= series2.shift(1)))

if direction is None:
return above or below
return above | below

return above if direction == "above" else below
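The crossed() fix above replaces Python's boolean `or` with the element-wise `|` operator. A brief standalone illustration of why that matters for pandas Series (this is an explanatory sketch, not freqtrade code):

```python
import pandas as pd

above = pd.Series([True, False, False])
below = pd.Series([False, False, True])

# `above or below` would raise "ValueError: The truth value of a Series is
# ambiguous", because `or` tries to collapse each Series into a single bool.
crossed_any = above | below  # element-wise OR keeps one flag per row
print(crossed_any.tolist())  # [True, False, True]
```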
@ -82,6 +82,11 @@ extend-select = [
# "TCH", # flake8-type-checking
"PTH", # flake8-use-pathlib
]
extend-ignore = [
"E241", # Multiple spaces after comma
"E272", # Multiple spaces before keyword
"E221", # Multiple spaces before operator
]

[tool.ruff.mccabe]
max-complexity = 12
@ -7,24 +7,24 @@
-r docs/requirements-docs.txt

coveralls==3.3.1
ruff==0.0.291
mypy==1.5.1
pre-commit==3.4.0
pytest==7.4.2
ruff==0.1.6
mypy==1.7.1
pre-commit==3.5.0
pytest==7.4.3
pytest-asyncio==0.21.1
pytest-cov==4.1.0
pytest-mock==3.11.1
pytest-mock==3.12.0
pytest-random-order==1.1.0
isort==5.12.0
# For datetime mocking
time-machine==2.13.0

# Convert jupyter notebooks to markdown documents
nbconvert==7.8.0
nbconvert==7.11.0

# mypy types
types-cachetools==5.3.0.6
types-cachetools==5.3.0.7
types-filelock==3.2.7
types-requests==2.31.0.7
types-requests==2.31.0.10
types-tabulate==0.9.0.3
types-python-dateutil==2.8.19.14
@ -5,7 +5,7 @@
torch==2.0.1
#until these branches will be released we can use this
gymnasium==0.29.1
stable_baselines3==2.1.0
stable_baselines3==2.2.1
sb3_contrib>=2.0.0a9
# Progress bar for stable-baselines3 and sb3-contrib
tqdm==4.66.1
@ -7,6 +7,6 @@ scikit-learn==1.1.3
joblib==1.3.2
catboost==1.2.2; 'arm' not in platform_machine
lightgbm==4.1.0
xgboost==2.0.0
tensorboard==2.14.1
xgboost==2.0.2
tensorboard==2.15.1
datasieve==0.1.7
@ -2,7 +2,7 @@
-r requirements.txt

# Required for hyperopt
scipy==1.11.3
scipy==1.11.4
scikit-learn==1.1.3
scikit-optimize==0.9.0
filelock==3.12.4
filelock==3.13.1
@ -1,4 +1,4 @@
# Include all requirements to run the bot.
-r requirements.txt

plotly==5.17.0
plotly==5.18.0
@ -1,48 +1,47 @@
numpy==1.26.0
pandas==2.0.3
numpy==1.26.2
pandas==2.1.3
pandas-ta==0.3.14b

ccxt==4.0.112
cryptography==41.0.4
aiohttp==3.8.5
SQLAlchemy==2.0.21
python-telegram-bot==20.5
ccxt==4.1.66
cryptography==41.0.5
aiohttp==3.9.1
SQLAlchemy==2.0.23
python-telegram-bot==20.6
# can't be hard-pinned due to telegram-bot pinning httpx with ~
httpx>=0.24.1
arrow==1.3.0
cachetools==5.3.1
cachetools==5.3.2
requests==2.31.0
urllib3==2.0.6
jsonschema==4.19.1
urllib3==2.1.0
jsonschema==4.20.0
TA-Lib==0.4.28
technical==1.4.0
tabulate==0.9.0
pycoingecko==3.1.0
jinja2==3.1.2
tables==3.8.0
blosc==1.11.1
tables==3.9.1
joblib==1.3.2
rich==13.6.0
pyarrow==13.0.0; platform_machine != 'armv7l'
rich==13.7.0
pyarrow==14.0.1; platform_machine != 'armv7l'

# find first, C search in arrays
py_find_1st==1.1.5
py_find_1st==1.1.6

# Load ticker files 30% faster
python-rapidjson==1.11
python-rapidjson==1.13
# Properly format api responses
orjson==3.9.7
orjson==3.9.10

# Notify systemd
sdnotify==0.3.2

# API Server
fastapi==0.103.2
pydantic==2.4.2
uvicorn==0.23.2
fastapi==0.104.1
pydantic==2.5.2
uvicorn==0.24.0.post1
pyjwt==2.8.0
aiofiles==23.2.1
psutil==5.9.5
psutil==5.9.6

# Support for colorized terminal output
colorama==0.4.6

@ -56,8 +55,8 @@ python-dateutil==2.8.2
schedule==1.2.1

#WS Messages
websockets==11.0.3
websockets==12.0
janus==1.0.0

ast-comments==1.1.0
ast-comments==1.2.0
packaging==23.2
@ -112,6 +112,30 @@ class FtRestClient:
"""
return self._get("count")

def entries(self, pair=None):
"""Returns List of dicts containing all Trades, based on buy tag performance
Can either be average for all pairs or a specific pair provided

:return: json object
"""
return self._get("entries", params={"pair": pair} if pair else None)

def exits(self, pair=None):
"""Returns List of dicts containing all Trades, based on exit reason performance
Can either be average for all pairs or a specific pair provided

:return: json object
"""
return self._get("exits", params={"pair": pair} if pair else None)

def mix_tags(self, pair=None):
"""Returns List of dicts containing all Trades, based on entry_tag + exit_reason performance
Can either be average for all pairs or a specific pair provided

:return: json object
"""
return self._get("mix_tags", params={"pair": pair} if pair else None)

def locks(self):
"""Return current locks
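To show how the new rest_client helpers above would typically be called, here is a small hedged sketch. The server URL and credentials are placeholders, and it assumes the script is run from a checkout where `scripts/rest_client.py` is importable and the API server is enabled in the bot config.

```python
# Hypothetical usage of the new entries/exits/mix_tags helpers
# (placeholder URL and credentials; adjust to your own api_server settings).
from scripts.rest_client import FtRestClient

client = FtRestClient("http://127.0.0.1:8080", "freqtrader", "password")

print(client.entries())               # performance grouped by entry tag
print(client.exits(pair="BTC/USDT"))  # exit-reason performance for one pair
print(client.mix_tags())              # combined entry_tag + exit_reason view
```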
@ -550,7 +550,7 @@ def test_start_install_ui(mocker):
assert download_mock.call_count == 0

def test_clean_ui_subdir(mocker, tmpdir, caplog):
def test_clean_ui_subdir(mocker, tmp_path, caplog):
mocker.patch("freqtrade.commands.deploy_commands.Path.is_dir",
side_effect=[True, True])
mocker.patch("freqtrade.commands.deploy_commands.Path.is_file",

@ -560,14 +560,14 @@ def test_clean_ui_subdir(mocker, tmpdir, caplog):
mocker.patch("freqtrade.commands.deploy_commands.Path.glob",
return_value=[Path('test1'), Path('test2'), Path('.gitkeep')])
folder = Path(tmpdir) / "uitests"
folder = tmp_path / "uitests"
clean_ui_subdir(folder)
assert log_has("Removing UI directory content.", caplog)
assert rd_mock.call_count == 1
assert ul_mock.call_count == 1

def test_download_and_install_ui(mocker, tmpdir):
def test_download_and_install_ui(mocker, tmp_path):
# Create zipfile
requests_mock = MagicMock()
file_like_object = BytesIO()

@ -583,7 +583,7 @@ def test_download_and_install_ui(mocker, tmpdir):
side_effect=[True, False])
wb_mock = mocker.patch("freqtrade.commands.deploy_commands.Path.write_bytes")

folder = Path(tmpdir) / "uitests_dl"
folder = tmp_path / "uitests_dl"
folder.mkdir(exist_ok=True)

assert read_ui_version(folder) is None

@ -1010,8 +1010,8 @@ def test_start_test_pairlist(mocker, caplog, tickers, default_conf, capsys):
pytest.fail(f'Expected well formed JSON, but failed to parse: {captured.out}')

def test_hyperopt_list(mocker, capsys, caplog, saved_hyperopt_results, tmpdir):
csv_file = Path(tmpdir) / "test.csv"
def test_hyperopt_list(mocker, capsys, caplog, saved_hyperopt_results, tmp_path):
csv_file = tmp_path / "test.csv"
mocker.patch(
'freqtrade.optimize.hyperopt_tools.HyperoptTools._test_hyperopt_results_exist',
return_value=True
@ -1512,10 +1512,10 @@ def test_backtesting_show(mocker, testdatadir, capsys):
assert "Pairs for Strategy" in out

def test_start_convert_db(mocker, fee, tmpdir, caplog):
db_src_file = Path(f"{tmpdir}/db.sqlite")
def test_start_convert_db(fee, tmp_path):
db_src_file = tmp_path / "db.sqlite"
db_from = f"sqlite:///{db_src_file}"
db_target_file = Path(f"{tmpdir}/db_target.sqlite")
db_target_file = tmp_path / "db_target.sqlite"
db_to = f"sqlite:///{db_target_file}"
args = [
"convert-db",

@ -1542,13 +1542,13 @@ def test_start_convert_db(mocker, fee, tmpdir, caplog):
assert db_target_file.is_file()

def test_start_strategy_updater(mocker, tmpdir):
def test_start_strategy_updater(mocker, tmp_path):
sc_mock = mocker.patch('freqtrade.commands.strategy_utils_commands.start_conversion')
teststrats = Path(__file__).parent.parent / 'strategy/strats'
args = [
"strategy-updater",
"--userdir",
str(tmpdir),
str(tmp_path),
"--strategy-path",
str(teststrats),
]

@ -1562,7 +1562,7 @@ def test_start_strategy_updater(mocker, tmpdir):
args = [
"strategy-updater",
"--userdir",
str(tmpdir),
str(tmp_path),
"--strategy-path",
str(teststrats),
"--strategy-list",
@ -413,8 +413,8 @@ def patch_gc(mocker) -> None:

@pytest.fixture(autouse=True)
def user_dir(mocker, tmpdir) -> Path:
user_dir = Path(tmpdir) / "user_data"
def user_dir(mocker, tmp_path) -> Path:
user_dir = tmp_path / "user_data"
mocker.patch('freqtrade.configuration.configuration.create_userdata_dir',
return_value=user_dir)
return user_dir
@ -1,6 +1,5 @@
# pragma pylint: disable=missing-docstring, C0103
import logging
from pathlib import Path
from shutil import copyfile

import numpy as np

@ -50,8 +49,8 @@ def test_trades_to_ohlcv(trades_history_df, caplog):
assert 'high' in df.columns
assert 'low' in df.columns
assert 'close' in df.columns
assert df.loc[:, 'high'][0] == 0.019627
assert df.loc[:, 'low'][0] == 0.019626
assert df.iloc[0, :]['high'] == 0.019627
assert df.iloc[0, :]['low'] == 0.019626

def test_ohlcv_fill_up_missing_data(testdatadir, caplog):
@ -323,18 +322,17 @@ def test_trades_dict_to_list(fetch_trades_result):
assert t[6] == fetch_trades_result[i]['cost']

def test_convert_trades_format(default_conf, testdatadir, tmpdir):
tmpdir1 = Path(tmpdir)
files = [{'old': tmpdir1 / "XRP_ETH-trades.json.gz",
'new': tmpdir1 / "XRP_ETH-trades.json"},
{'old': tmpdir1 / "XRP_OLD-trades.json.gz",
'new': tmpdir1 / "XRP_OLD-trades.json"},
def test_convert_trades_format(default_conf, testdatadir, tmp_path):
files = [{'old': tmp_path / "XRP_ETH-trades.json.gz",
'new': tmp_path / "XRP_ETH-trades.json"},
{'old': tmp_path / "XRP_OLD-trades.json.gz",
'new': tmp_path / "XRP_OLD-trades.json"},
]
for file in files:
copyfile(testdatadir / file['old'].name, file['old'])
assert not file['new'].exists()

default_conf['datadir'] = tmpdir1
default_conf['datadir'] = tmp_path

convert_trades_format(default_conf, convert_from='jsongz',
convert_to='json', erase=False)
@ -362,16 +360,15 @@ def test_convert_trades_format(default_conf, testdatadir, tmpdir):
(['UNITTEST_USDT_USDT-1h-mark', 'XRP_USDT_USDT-1h-mark'], CandleType.MARK),
(['XRP_USDT_USDT-1h-futures'], CandleType.FUTURES),
])
def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, candletype):
tmpdir1 = Path(tmpdir)
def test_convert_ohlcv_format(default_conf, testdatadir, tmp_path, file_base, candletype):
prependix = '' if candletype == CandleType.SPOT else 'futures/'
files_orig = []
files_temp = []
files_new = []
for file in file_base:
file_orig = testdatadir / f"{prependix}{file}.feather"
file_temp = tmpdir1 / f"{prependix}{file}.feather"
file_new = tmpdir1 / f"{prependix}{file}.json.gz"
file_temp = tmp_path / f"{prependix}{file}.feather"
file_new = tmp_path / f"{prependix}{file}.json.gz"
IDataHandler.create_dir_if_needed(file_temp)
copyfile(file_orig, file_temp)

@ -379,7 +376,7 @@ def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, cand
files_temp.append(file_temp)
files_new.append(file_new)

default_conf['datadir'] = tmpdir1
default_conf['datadir'] = tmp_path
default_conf['candle_types'] = [candletype]

if candletype == CandleType.SPOT:
@ -445,30 +442,29 @@ def test_reduce_dataframe_footprint():
assert df2['close_copy'].dtype == np.float32

def test_convert_trades_to_ohlcv(testdatadir, tmpdir, caplog):
tmpdir1 = Path(tmpdir)
def test_convert_trades_to_ohlcv(testdatadir, tmp_path, caplog):
pair = 'XRP/ETH'
file1 = tmpdir1 / 'XRP_ETH-1m.feather'
file5 = tmpdir1 / 'XRP_ETH-5m.feather'
filetrades = tmpdir1 / 'XRP_ETH-trades.json.gz'
file1 = tmp_path / 'XRP_ETH-1m.feather'
file5 = tmp_path / 'XRP_ETH-5m.feather'
filetrades = tmp_path / 'XRP_ETH-trades.json.gz'
copyfile(testdatadir / file1.name, file1)
copyfile(testdatadir / file5.name, file5)
copyfile(testdatadir / filetrades.name, filetrades)

# Compare downloaded dataset with converted dataset
dfbak_1m = load_pair_history(datadir=tmpdir1, timeframe="1m", pair=pair)
dfbak_5m = load_pair_history(datadir=tmpdir1, timeframe="5m", pair=pair)
dfbak_1m = load_pair_history(datadir=tmp_path, timeframe="1m", pair=pair)
dfbak_5m = load_pair_history(datadir=tmp_path, timeframe="5m", pair=pair)

tr = TimeRange.parse_timerange('20191011-20191012')

convert_trades_to_ohlcv([pair], timeframes=['1m', '5m'],
data_format_trades='jsongz',
datadir=tmpdir1, timerange=tr, erase=True)
datadir=tmp_path, timerange=tr, erase=True)

assert log_has("Deleting existing data for pair XRP/ETH, interval 1m.", caplog)
# Load new data
df_1m = load_pair_history(datadir=tmpdir1, timeframe="1m", pair=pair)
df_5m = load_pair_history(datadir=tmpdir1, timeframe="5m", pair=pair)
df_1m = load_pair_history(datadir=tmp_path, timeframe="1m", pair=pair)
df_5m = load_pair_history(datadir=tmp_path, timeframe="5m", pair=pair)

assert_frame_equal(dfbak_1m, df_1m, check_exact=True)
assert_frame_equal(dfbak_5m, df_5m, check_exact=True)

@ -477,5 +473,5 @@ def test_convert_trades_to_ohlcv(testdatadir, tmpdir, caplog):

convert_trades_to_ohlcv(['NoDatapair'], timeframes=['1m', '5m'],
data_format_trades='jsongz',
datadir=tmpdir1, timerange=tr, erase=True)
datadir=tmp_path, timerange=tr, erase=True)
assert log_has(msg, caplog)
@ -328,17 +328,16 @@ def test_hdf5datahandler_trades_load(testdatadir):
])
def test_hdf5datahandler_ohlcv_load_and_resave(
testdatadir,
tmpdir,
tmp_path,
pair,
timeframe,
candle_type,
candle_append,
startdt, enddt
):
tmpdir1 = Path(tmpdir)
tmpdir2 = tmpdir1
tmpdir2 = tmp_path
if candle_type not in ('', 'spot'):
tmpdir2 = tmpdir1 / 'futures'
tmpdir2 = tmp_path / 'futures'
tmpdir2.mkdir()
dh = get_datahandler(testdatadir, 'hdf5')
ohlcv = dh._ohlcv_load(pair, timeframe, None, candle_type=candle_type)

@ -348,7 +347,7 @@ def test_hdf5datahandler_ohlcv_load_and_resave(
file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.h5"
assert not file.is_file()

dh1 = get_datahandler(tmpdir1, 'hdf5')
dh1 = get_datahandler(tmp_path, 'hdf5')
dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
assert file.is_file()

@ -379,17 +378,16 @@ def test_hdf5datahandler_ohlcv_load_and_resave(
def test_generic_datahandler_ohlcv_load_and_resave(
datahandler,
testdatadir,
tmpdir,
tmp_path,
pair,
timeframe,
candle_type,
candle_append,
startdt, enddt
):
tmpdir1 = Path(tmpdir)
tmpdir2 = tmpdir1
tmpdir2 = tmp_path
if candle_type not in ('', 'spot'):
tmpdir2 = tmpdir1 / 'futures'
tmpdir2 = tmp_path / 'futures'
tmpdir2.mkdir()
# Load data from one common file
dhbase = get_datahandler(testdatadir, 'feather')

@ -403,7 +401,7 @@ def test_generic_datahandler_ohlcv_load_and_resave(
file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.{dh._get_file_extension()}"
assert not file.is_file()

dh1 = get_datahandler(tmpdir1, datahandler)
dh1 = get_datahandler(tmp_path, datahandler)
dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
assert file.is_file()

@ -459,15 +457,14 @@ def test_datahandler_trades_load(testdatadir, datahandler):

@pytest.mark.parametrize('datahandler', ['jsongz', 'hdf5', 'feather', 'parquet'])
def test_datahandler_trades_store(testdatadir, tmpdir, datahandler):
tmpdir1 = Path(tmpdir)
def test_datahandler_trades_store(testdatadir, tmp_path, datahandler):
dh = get_datahandler(testdatadir, datahandler)
trades = dh.trades_load('XRP/ETH')

dh1 = get_datahandler(tmpdir1, datahandler)
dh1 = get_datahandler(tmp_path, datahandler)
dh1.trades_store('XRP/NEW', trades)

file = tmpdir1 / f'XRP_NEW-trades.{dh1._get_file_extension()}'
file = tmp_path / f'XRP_NEW-trades.{dh1._get_file_extension()}'
assert file.is_file()
# Load trades back
trades_new = dh1.trades_load('XRP/NEW')
@ -106,17 +106,16 @@ def test_load_data_startup_candles(mocker, testdatadir) -> None:

@pytest.mark.parametrize('candle_type', ['mark', ''])
def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
default_conf, tmpdir, candle_type) -> None:
default_conf, tmp_path, candle_type) -> None:
"""
Test load_pair_history() with 1 min timeframe
"""
tmpdir1 = Path(tmpdir)
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history_list)
exchange = get_patched_exchange(mocker, default_conf)
file = tmpdir1 / 'MEME_BTC-1m.feather'
file = tmp_path / 'MEME_BTC-1m.feather'

# do not download a new pair if refresh_pairs isn't set
load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
load_pair_history(datadir=tmp_path, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
assert not file.is_file()
assert log_has(
f"No history for MEME/BTC, {candle_type}, 1m found. "

@ -124,10 +123,10 @@ def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
)

# download a new pair if refresh_pairs is set
refresh_data(datadir=tmpdir1, timeframe='1m', pairs=['MEME/BTC'],
refresh_data(datadir=tmp_path, timeframe='1m', pairs=['MEME/BTC'],
exchange=exchange, candle_type=CandleType.SPOT
)
load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
load_pair_history(datadir=tmp_path, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
assert file.is_file()
assert log_has_re(
r'\(0/1\) - Download history data for "MEME/BTC", 1m, '

@ -273,27 +272,26 @@ def test_download_pair_history(
ohlcv_history_list,
mocker,
default_conf,
tmpdir,
tmp_path,
candle_type,
subdir,
file_tail
) -> None:
mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history_list)
exchange = get_patched_exchange(mocker, default_conf)
tmpdir1 = Path(tmpdir)
file1_1 = tmpdir1 / f'{subdir}MEME_BTC-1m{file_tail}.feather'
file1_5 = tmpdir1 / f'{subdir}MEME_BTC-5m{file_tail}.feather'
file2_1 = tmpdir1 / f'{subdir}CFI_BTC-1m{file_tail}.feather'
file2_5 = tmpdir1 / f'{subdir}CFI_BTC-5m{file_tail}.feather'
file1_1 = tmp_path / f'{subdir}MEME_BTC-1m{file_tail}.feather'
file1_5 = tmp_path / f'{subdir}MEME_BTC-5m{file_tail}.feather'
file2_1 = tmp_path / f'{subdir}CFI_BTC-1m{file_tail}.feather'
file2_5 = tmp_path / f'{subdir}CFI_BTC-5m{file_tail}.feather'

assert not file1_1.is_file()
assert not file2_1.is_file()

assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='MEME/BTC',
timeframe='1m',
candle_type=candle_type)
assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='CFI/BTC',
timeframe='1m',
candle_type=candle_type)

@ -308,11 +306,11 @@ def test_download_pair_history(
assert not file1_5.is_file()
assert not file2_5.is_file()

assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='MEME/BTC',
timeframe='5m',
candle_type=candle_type)
assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='CFI/BTC',
timeframe='5m',
candle_type=candle_type)

@ -340,13 +338,12 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
assert json_dump_mock.call_count == 3

def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmpdir) -> None:
def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmp_path) -> None:
mocker.patch(f'{EXMS}.get_historic_ohlcv',
side_effect=Exception('File Error'))
tmpdir1 = Path(tmpdir)
exchange = get_patched_exchange(mocker, default_conf)

assert not _download_pair_history(datadir=tmpdir1, exchange=exchange,
assert not _download_pair_history(datadir=tmp_path, exchange=exchange,
pair='MEME/BTC',
timeframe='1m', candle_type='spot')
assert log_has('Failed to download history data for pair: "MEME/BTC", timeframe: 1m.', caplog)

@ -570,16 +567,15 @@ def test_refresh_backtest_trades_data(mocker, default_conf, markets, caplog, tes

def test_download_trades_history(trades_history, mocker, default_conf, testdatadir, caplog,
tmpdir, time_machine) -> None:
tmp_path, time_machine) -> None:
start_dt = dt_utc(2023, 1, 1)
time_machine.move_to(start_dt, tick=False)

tmpdir1 = Path(tmpdir)
ght_mock = MagicMock(side_effect=lambda pair, *args, **kwargs: (pair, trades_history))
mocker.patch(f'{EXMS}.get_historic_trades', ght_mock)
exchange = get_patched_exchange(mocker, default_conf)
file1 = tmpdir1 / 'ETH_BTC-trades.json.gz'
data_handler = get_datahandler(tmpdir1, data_format='jsongz')
file1 = tmp_path / 'ETH_BTC-trades.json.gz'
data_handler = get_datahandler(tmp_path, data_format='jsongz')

assert not file1.is_file()

@ -614,7 +610,7 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatad
pair='ETH/BTC')
assert log_has_re('Failed to download historic trades for pair: "ETH/BTC".*', caplog)

file2 = tmpdir1 / 'XRP_ETH-trades.json.gz'
file2 = tmp_path / 'XRP_ETH-trades.json.gz'
copyfile(testdatadir / file2.name, file2)

ght_mock.reset_mock()
@ -1,5 +1,4 @@
from datetime import datetime, timezone
from pathlib import Path
from shutil import copytree
from unittest.mock import PropertyMock

@ -11,7 +10,7 @@ from freqtrade.exceptions import OperationalException
from tests.conftest import EXMS, log_has, log_has_re, patch_exchange

def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf_usdt, mocker):
def test_import_kraken_trades_from_csv(testdatadir, tmp_path, caplog, default_conf_usdt, mocker):
with pytest.raises(OperationalException, match="This function is only for the kraken exchange"):
import_kraken_trades_from_csv(default_conf_usdt, 'feather')

@ -21,10 +20,9 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf
mocker.patch(f'{EXMS}.markets', PropertyMock(return_value={
'BCH/EUR': {'symbol': 'BCH/EUR', 'id': 'BCHEUR', 'altname': 'BCHEUR'},
}))
tmpdir1 = Path(tmpdir)
dstfile = tmpdir1 / 'BCH_EUR-trades.feather'
dstfile = tmp_path / 'BCH_EUR-trades.feather'
assert not dstfile.is_file()
default_conf_usdt['datadir'] = tmpdir1
default_conf_usdt['datadir'] = tmp_path
# There's 2 files in this tree, containing a total of 2 days.
# tests/testdata/kraken/
# └── trades_csv

@ -32,7 +30,7 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf
# └── incremental_q2
# └── BCHEUR.csv <-- 2023-01-02

copytree(testdatadir / 'kraken/trades_csv', tmpdir1 / 'trades_csv')
copytree(testdatadir / 'kraken/trades_csv', tmp_path / 'trades_csv')

import_kraken_trades_from_csv(default_conf_usdt, 'feather')
assert log_has("Found csv files for BCHEUR.", caplog)

@ -40,7 +38,7 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf

assert dstfile.is_file()

dh = get_datahandler(tmpdir1, 'feather')
dh = get_datahandler(tmp_path, 'feather')
trades = dh.trades_load('BCH_EUR')
assert len(trades) == 340
@ -1851,7 +1851,7 @@ def test_fetch_bids_asks(default_conf, mocker):

@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test_get_tickers(default_conf, mocker, exchange_name):
def test_get_tickers(default_conf, mocker, exchange_name, caplog):
api_mock = MagicMock()
tick = {'ETH/BTC': {
'symbol': 'ETH/BTC',

@ -1900,6 +1900,14 @@ def test_get_tickers(default_conf, mocker, exchange_name):
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
exchange.get_tickers()

caplog.clear()
api_mock.fetch_tickers = MagicMock(side_effect=[ccxt.BadSymbol("SomeSymbol"), []])
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
x = exchange.get_tickers()
assert x == []
assert log_has_re(r'Could not load tickers due to BadSymbol\..*SomeSymbol', caplog)
caplog.clear()

api_mock.fetch_tickers = MagicMock(return_value={})
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange_name)
exchange.get_tickers()
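The new test above exercises get_tickers() degrading gracefully when ccxt raises BadSymbol. A rough, hedged sketch of that error-handling pattern follows; it is simplified and not the exact freqtrade implementation (the helper name is made up).

```python
import logging

import ccxt

logger = logging.getLogger(__name__)


def fetch_tickers_safely(api) -> dict:
    """Return tickers, or an empty result if the exchange reports a bad symbol."""
    try:
        return api.fetch_tickers()
    except ccxt.BadSymbol as e:
        # Log and fall back to an empty result instead of failing the whole refresh.
        logger.warning("Could not load tickers due to BadSymbol. Message: %s", e)
        return {}
```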
@ -3737,6 +3745,18 @@ def test_calculate_backoff(retrycount, max_retries, expected):
assert calculate_backoff(retrycount, max_retries) == expected

@pytest.mark.parametrize("exchange_name", EXCHANGES)
def test_get_funding_fees(default_conf_usdt, mocker, exchange_name, caplog):
now = datetime.now(timezone.utc)
default_conf_usdt['trading_mode'] = 'futures'
default_conf_usdt['margin_mode'] = 'isolated'
exchange = get_patched_exchange(mocker, default_conf_usdt, id=exchange_name)
exchange._fetch_and_calculate_funding_fees = MagicMock(side_effect=ExchangeError)
assert exchange.get_funding_fees('BTC/USDT:USDT', 1, False, now) == 0.0
assert exchange._fetch_and_calculate_funding_fees.call_count == 1
assert log_has("Could not update funding fees for BTC/USDT:USDT.", caplog)

@pytest.mark.parametrize("exchange_name", ['binance'])
def test__get_funding_fees_from_exchange(default_conf, mocker, exchange_name):
api_mock = MagicMock()

@ -4075,7 +4095,10 @@ def test_combine_funding_and_mark(
('binance', 1, 2, "2021-09-01 00:00:16", "2021-09-01 08:00:00", 30.0, -0.0002493),
('binance', 0, 1, "2021-09-01 00:00:00", "2021-09-01 07:59:59", 30.0, -0.00066479999),
('binance', 0, 2, "2021-09-01 00:00:00", "2021-09-01 12:00:00", 30.0, -0.00091409999),
('binance', 0, 2, "2021-09-01 00:00:01", "2021-09-01 08:00:00", 30.0, -0.0002493),
# :01 must be rounded down.
('binance', 0, 2, "2021-09-01 00:00:01", "2021-09-01 08:00:00", 30.0, -0.00091409999),
('binance', 0, 2, "2021-08-31 23:58:00", "2021-09-01 08:00:00", 30.0, -0.00091409999),
('binance', 0, 2, "2021-09-01 00:10:01", "2021-09-01 08:00:00", 30.0, -0.0002493),
# TODO: Uncomment once _calculate_funding_fees can pass time_in_ratio to exchange._get_funding_fee
# ('kraken', "2021-09-01 00:00:00", "2021-09-01 08:00:00", 30.0, -0.0014937),
# ('kraken', "2021-09-01 00:00:15", "2021-09-01 08:00:00", 30.0, -0.0008289),

@ -4191,7 +4214,7 @@ def test__fetch_and_calculate_funding_fees_datetime_called(
type(api_mock).has = PropertyMock(return_value={'fetchFundingRateHistory': True})
mocker.patch(f'{EXMS}.timeframes', PropertyMock(return_value=['4h', '8h']))
exchange = get_patched_exchange(mocker, default_conf, api_mock, id=exchange)
d1 = datetime.strptime("2021-09-01 00:00:00 +0000", '%Y-%m-%d %H:%M:%S %z')
d1 = datetime.strptime("2021-08-31 23:00:01 +0000", '%Y-%m-%d %H:%M:%S %z')

time_machine.move_to("2021-09-01 08:00:00 +00:00")
funding_fees = exchange._fetch_and_calculate_funding_fees('ADA/USDT', 30.0, True, d1)
@ -18,7 +18,7 @@ from tests.conftest import log_has_re
def test_check_exchange(default_conf, caplog) -> None:
# Test an officially supported by Freqtrade team exchange
default_conf['runmode'] = RunMode.DRY_RUN
default_conf.get('exchange').update({'name': 'BITTREX'})
default_conf.get('exchange').update({'name': 'BINANCE'})
assert check_exchange(default_conf)
assert log_has_re(r"Exchange .* is officially supported by the Freqtrade development team\.",
caplog)

@ -41,14 +41,14 @@ def test_check_exchange(default_conf, caplog) -> None:
caplog.clear()

# Test an officially supported by Freqtrade team exchange - with remapping
default_conf.get('exchange').update({'name': 'okex'})
default_conf.get('exchange').update({'name': 'okx'})
assert check_exchange(default_conf)
assert log_has_re(
r"Exchange \"okex\" is officially supported by the Freqtrade development team\.",
r"Exchange \"okx\" is officially supported by the Freqtrade development team\.",
caplog)
caplog.clear()
# Test an available exchange, supported by ccxt
default_conf.get('exchange').update({'name': 'huobipro'})
default_conf.get('exchange').update({'name': 'huobijp'})
assert check_exchange(default_conf)
assert log_has_re(r"Exchange .* is known to the the ccxt library, available for the bot, "
r"but not officially supported "
@ -1,5 +1,4 @@
from datetime import datetime, timedelta, timezone
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, PropertyMock

import ccxt

@ -269,9 +268,9 @@ def test_additional_exchange_init_okx(default_conf, mocker):
"additional_exchange_init", "fetch_accounts")

def test_load_leverage_tiers_okx(default_conf, mocker, markets, tmpdir, caplog, time_machine):
def test_load_leverage_tiers_okx(default_conf, mocker, markets, tmp_path, caplog, time_machine):

default_conf['datadir'] = Path(tmpdir)
default_conf['datadir'] = tmp_path
# fd_mock = mocker.patch('freqtrade.exchange.exchange.file_dump_json')
api_mock = MagicMock()
type(api_mock).has = PropertyMock(return_value={
@ -21,13 +21,13 @@ def is_mac() -> bool:

@pytest.fixture(scope="function")
def freqai_conf(default_conf, tmpdir):
def freqai_conf(default_conf, tmp_path):
freqaiconf = deepcopy(default_conf)
freqaiconf.update(
{
"datadir": Path(default_conf["datadir"]),
"strategy": "freqai_test_strat",
"user_data_dir": Path(tmpdir),
"user_data_dir": tmp_path,
"strategy-path": "freqtrade/tests/strategy/strats",
"freqaimodel": "LightGBMRegressor",
"freqaimodel_path": "freqai/prediction_models",
@ -179,10 +179,9 @@ def test_set_initial_return_values(mocker, freqai_conf):
hist_pred_df = freqai.dd.historic_predictions[pair]
model_return_df = freqai.dd.model_return_values[pair]

assert (hist_pred_df['date_pred'].iloc[-1] ==
pd.Timestamp(end_x_plus_5) - pd.Timedelta(days=1))
assert hist_pred_df['date_pred'].iloc[-1] == pd.Timestamp(end_x_plus_5)
assert 'date_pred' in hist_pred_df.columns
assert hist_pred_df.shape[0] == 7  # Total rows: 5 from historic and 2 new zeros
assert hist_pred_df.shape[0] == 8

# compare values in model_return_df with hist_pred_df
assert (model_return_df["value"].values ==

@ -234,9 +233,9 @@ def test_set_initial_return_values_warning(mocker, freqai_conf):
hist_pred_df = freqai.dd.historic_predictions[pair]
model_return_df = freqai.dd.model_return_values[pair]

assert hist_pred_df['date_pred'].iloc[-1] == pd.Timestamp(end_x_plus_5) - pd.Timedelta(days=1)
assert hist_pred_df['date_pred'].iloc[-1] == pd.Timestamp(end_x_plus_5)
assert 'date_pred' in hist_pred_df.columns
assert hist_pred_df.shape[0] == 9  # Total rows: 5 from historic and 4 new zeros
assert hist_pred_df.shape[0] == 10

# compare values in model_return_df with hist_pred_df
assert (model_return_df["value"].values == hist_pred_df.tail(
@ -500,14 +500,14 @@ def test_get_required_data_timerange(mocker, freqai_conf):
assert (time_range.stopts - time_range.startts) == 177300

def test_download_all_data_for_training(mocker, freqai_conf, caplog, tmpdir):
def test_download_all_data_for_training(mocker, freqai_conf, caplog, tmp_path):
caplog.set_level(logging.DEBUG)
strategy = get_patched_freqai_strategy(mocker, freqai_conf)
exchange = get_patched_exchange(mocker, freqai_conf)
pairlist = PairListManager(exchange, freqai_conf)
strategy.dp = DataProvider(freqai_conf, exchange, pairlist)
freqai_conf['pairs'] = freqai_conf['exchange']['pair_whitelist']
freqai_conf['datadir'] = Path(tmpdir)
freqai_conf['datadir'] = tmp_path
download_all_data_for_training(strategy.dp, freqai_conf)

assert log_has_re(
@ -665,7 +665,7 @@ def test_backtest__check_trade_exit(default_conf, fee, mocker) -> None:
]

# No data available.
res = backtesting._check_trade_exit(trade, row_sell)
res = backtesting._check_trade_exit(trade, row_sell, row_sell[0].to_pydatetime())
assert res is not None
assert res.exit_reason == ExitType.ROI.value
assert res.close_date_utc == datetime(2020, 1, 1, 5, 0, tzinfo=timezone.utc)

@ -678,7 +678,7 @@ def test_backtest__check_trade_exit(default_conf, fee, mocker) -> None:
[], columns=['date', 'open', 'high', 'low', 'close', 'enter_long', 'exit_long',
'enter_short', 'exit_short', 'long_tag', 'short_tag', 'exit_tag'])

res = backtesting._check_trade_exit(trade, row)
res = backtesting._check_trade_exit(trade, row, row[0].to_pydatetime())
assert res is None

@ -1006,7 +1006,7 @@ def test_backtest_one_detail_futures_funding_fees(
assert t.nr_of_successful_entries >= 6
# Funding fees will vary depending on the number of adjustment orders
# That number is a lot higher with detail data.
assert -20 < t.funding_fees < -0.1
assert -1.81 < t.funding_fees < -0.1

def test_backtest_timedout_entry_orders(default_conf, fee, mocker, testdatadir) -> None:
@ -133,6 +133,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
]
backtesting.strategy.leverage = MagicMock(return_value=leverage)
trade = backtesting._enter_trade(pair, row=row, direction='long')
current_time = row[0].to_pydatetime()
assert trade
assert pytest.approx(trade.stake_amount) == 100.0
assert pytest.approx(trade.amount) == 47.61904762 * leverage

@ -140,7 +141,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
backtesting.strategy.adjust_trade_position = MagicMock(return_value=None)
assert pytest.approx(trade.liquidation_price) == (0.10278333 if leverage == 1 else 1.2122249)

trade = backtesting._get_adjust_trade_entry_for_candle(trade, row)
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row, current_time)
assert trade
assert pytest.approx(trade.stake_amount) == 100.0
assert pytest.approx(trade.amount) == 47.61904762 * leverage

@ -148,7 +149,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
# Increase position by 100
backtesting.strategy.adjust_trade_position = MagicMock(return_value=100)

trade = backtesting._get_adjust_trade_entry_for_candle(trade, row)
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row, current_time)

assert trade
assert pytest.approx(trade.stake_amount) == 200.0

@ -159,7 +160,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera
# Reduce by more than amount - no change to trade.
backtesting.strategy.adjust_trade_position = MagicMock(return_value=-500)

trade = backtesting._get_adjust_trade_entry_for_candle(trade, row)
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row, current_time)

assert trade
assert pytest.approx(trade.stake_amount) == 200.0

@ -170,7 +171,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera

# Reduce position by 50
backtesting.strategy.adjust_trade_position = MagicMock(return_value=-100)
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row)
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row, current_time)

assert trade
assert pytest.approx(trade.stake_amount) == 100.0

@ -182,7 +183,7 @@ def test_backtest_position_adjustment_detailed(default_conf, fee, mocker, levera

# Adjust below minimum
backtesting.strategy.adjust_trade_position = MagicMock(return_value=-99)
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row)
trade = backtesting._get_adjust_trade_entry_for_candle(trade, row, current_time)

assert trade
assert pytest.approx(trade.stake_amount) == 100.0
@ -193,8 +193,8 @@ def test_start_no_hyperopt_allowed(mocker, hyperopt_conf, caplog) -> None:
start_hyperopt(pargs)

def test_start_no_data(mocker, hyperopt_conf, tmpdir) -> None:
hyperopt_conf['user_data_dir'] = Path(tmpdir)
def test_start_no_data(mocker, hyperopt_conf, tmp_path) -> None:
hyperopt_conf['user_data_dir'] = tmp_path
patched_configuration_load_config_file(mocker, hyperopt_conf)
mocker.patch('freqtrade.data.history.load_pair_history', MagicMock(return_value=pd.DataFrame))
mocker.patch(

@ -310,6 +310,8 @@ def test_start_calls_optimizer(mocker, hyperopt_conf, capsys) -> None:
'freqtrade.optimize.hyperopt.get_timerange',
MagicMock(return_value=(datetime(2017, 12, 10), datetime(2017, 12, 13)))
)
# Dummy-reduce points to ensure scikit-learn is forced to generate new values
mocker.patch('freqtrade.optimize.hyperopt.INITIAL_POINTS', 2)

parallel = mocker.patch(
'freqtrade.optimize.hyperopt.Hyperopt.run_optimizer_parallel',

@ -857,14 +859,16 @@ def test_simplified_interface_failed(mocker, hyperopt_conf, space) -> None:
hyperopt.start()

def test_in_strategy_auto_hyperopt(mocker, hyperopt_conf, tmpdir, fee) -> None:
def test_in_strategy_auto_hyperopt(mocker, hyperopt_conf, tmp_path, fee) -> None:
patch_exchange(mocker)
mocker.patch(f'{EXMS}.get_fee', fee)
(Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
# Dummy-reduce points to ensure scikit-learn is forced to generate new values
mocker.patch('freqtrade.optimize.hyperopt.INITIAL_POINTS', 2)
(tmp_path / 'hyperopt_results').mkdir(parents=True)
# No hyperopt needed
hyperopt_conf.update({
'strategy': 'HyperoptableStrategy',
'user_data_dir': Path(tmpdir),
'user_data_dir': tmp_path,
'hyperopt_random_state': 42,
'spaces': ['all'],
})

@ -897,17 +901,19 @@ def test_in_strategy_auto_hyperopt(mocker, hyperopt_conf, tmpdir, fee) -> None:
hyperopt.get_optimizer([], 2)

def test_in_strategy_auto_hyperopt_with_parallel(mocker, hyperopt_conf, tmpdir, fee) -> None:
def test_in_strategy_auto_hyperopt_with_parallel(mocker, hyperopt_conf, tmp_path, fee) -> None:
mocker.patch(f'{EXMS}.validate_config', MagicMock())
mocker.patch(f'{EXMS}.get_fee', fee)
mocker.patch(f'{EXMS}._load_markets')
mocker.patch(f'{EXMS}.markets',
PropertyMock(return_value=get_markets()))
(Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
(tmp_path / 'hyperopt_results').mkdir(parents=True)
# Dummy-reduce points to ensure scikit-learn is forced to generate new values
mocker.patch('freqtrade.optimize.hyperopt.INITIAL_POINTS', 2)
# No hyperopt needed
hyperopt_conf.update({
'strategy': 'HyperoptableStrategy',
'user_data_dir': Path(tmpdir),
'user_data_dir': tmp_path,
'hyperopt_random_state': 42,
'spaces': ['all'],
# Enforce parallelity

@ -938,14 +944,14 @@ def test_in_strategy_auto_hyperopt_with_parallel(mocker, hyperopt_conf, tmpdir,
hyperopt.start()

def test_in_strategy_auto_hyperopt_per_epoch(mocker, hyperopt_conf, tmpdir, fee) -> None:
def test_in_strategy_auto_hyperopt_per_epoch(mocker, hyperopt_conf, tmp_path, fee) -> None:
patch_exchange(mocker)
mocker.patch(f'{EXMS}.get_fee', fee)
(Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
(tmp_path / 'hyperopt_results').mkdir(parents=True)

hyperopt_conf.update({
'strategy': 'HyperoptableStrategy',
'user_data_dir': Path(tmpdir),
'user_data_dir': tmp_path,
'hyperopt_random_state': 42,
'spaces': ['all'],
'epochs': 3,

@ -995,15 +1001,15 @@ def test_SKDecimal():
assert space.transform([1.5, 1.6]) == [150, 160]

def test_stake_amount_unlimited_max_open_trades(mocker, hyperopt_conf, tmpdir, fee) -> None:
def test_stake_amount_unlimited_max_open_trades(mocker, hyperopt_conf, tmp_path, fee) -> None:
# This test is to ensure that unlimited max_open_trades are ignored for the backtesting
# if we have an unlimited stake amount
patch_exchange(mocker)
mocker.patch(f'{EXMS}.get_fee', fee)
(Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
(tmp_path / 'hyperopt_results').mkdir(parents=True)
hyperopt_conf.update({
'strategy': 'HyperoptableStrategy',
'user_data_dir': Path(tmpdir),
'user_data_dir': tmp_path,
'hyperopt_random_state': 42,
'spaces': ['trades'],
'stake_amount': 'unlimited'

@ -1023,15 +1029,15 @@ def test_stake_amount_unlimited_max_open_trades(mocker, hyperopt_conf, tmpdir, f
assert hyperopt.backtesting.strategy.max_open_trades == 1

def test_max_open_trades_dump(mocker, hyperopt_conf, tmpdir, fee, capsys) -> None:
def test_max_open_trades_dump(mocker, hyperopt_conf, tmp_path, fee, capsys) -> None:
# This test is to ensure that after hyperopting, max_open_trades is never
# saved as inf in the output json params
patch_exchange(mocker)
mocker.patch(f'{EXMS}.get_fee', fee)
(Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
(tmp_path / 'hyperopt_results').mkdir(parents=True)
hyperopt_conf.update({
'strategy': 'HyperoptableStrategy',
'user_data_dir': Path(tmpdir),
'user_data_dir': tmp_path,
'hyperopt_random_state': 42,
'spaces': ['trades'],
})

@ -1069,16 +1075,16 @@ def test_max_open_trades_dump(mocker, hyperopt_conf, tmpdir, fee, capsys) -> Non
assert '"max_open_trades":-1' in out

def test_max_open_trades_consistency(mocker, hyperopt_conf, tmpdir, fee) -> None:
def test_max_open_trades_consistency(mocker, hyperopt_conf, tmp_path, fee) -> None:
# This test is to ensure that max_open_trades is the same across all functions needing it
# after it has been changed from the hyperopt
patch_exchange(mocker)
mocker.patch(f'{EXMS}.get_fee', return_value=0)

(Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
(tmp_path / 'hyperopt_results').mkdir(parents=True)
hyperopt_conf.update({
'strategy': 'HyperoptableStrategy',
'user_data_dir': Path(tmpdir),
'user_data_dir': tmp_path,
'hyperopt_random_state': 42,
'spaces': ['trades'],
'stake_amount': 'unlimited',
@ -19,9 +19,9 @@ def create_results() -> List[Dict]:
return [{'loss': 1, 'result': 'foo', 'params': {}, 'is_best': True}]

def test_save_results_saves_epochs(hyperopt, tmpdir, caplog) -> None:
def test_save_results_saves_epochs(hyperopt, tmp_path, caplog) -> None:

hyperopt.results_file = Path(tmpdir / 'ut_results.fthypt')
hyperopt.results_file = tmp_path / 'ut_results.fthypt'

hyperopt_epochs = HyperoptTools.load_filtered_results(hyperopt.results_file, {})
assert log_has_re("Hyperopt file .* not found.", caplog)

@ -182,9 +182,9 @@ def test_get_strategy_filename(default_conf):
assert x is None

def test_export_params(tmpdir):
def test_export_params(tmp_path):

filename = Path(tmpdir) / f"{CURRENT_TEST_STRATEGY}.json"
filename = tmp_path / f"{CURRENT_TEST_STRATEGY}.json"
assert not filename.is_file()
params = {
"params_details": {

@ -231,11 +231,11 @@ def test_export_params(tmpdir):
assert "max_open_trades" in content["params"]

def test_try_export_params(default_conf, tmpdir, caplog, mocker):
def test_try_export_params(default_conf, tmp_path, caplog, mocker):
default_conf['disableparamexport'] = False
export_mock = mocker.patch("freqtrade.optimize.hyperopt_tools.HyperoptTools.export_params")

filename = Path(tmpdir) / f"{CURRENT_TEST_STRATEGY}.json"
filename = tmp_path / f"{CURRENT_TEST_STRATEGY}.json"
assert not filename.is_file()
params = {
"params_details": {
@ -74,7 +74,7 @@ def test_text_table_bt_results():
assert text_table_bt_results(pair_results, stake_currency='BTC') == result_str

def test_generate_backtest_stats(default_conf, testdatadir, tmpdir):
def test_generate_backtest_stats(default_conf, testdatadir, tmp_path):
default_conf.update({'strategy': CURRENT_TEST_STRATEGY})
StrategyResolver.load_strategy(default_conf)

@ -185,8 +185,8 @@ def test_generate_backtest_stats(default_conf, testdatadir, tmpdir):
assert strat_stats['pairlist'] == ['UNITTEST/BTC']

# Test storing stats
filename = Path(tmpdir / 'btresult.json')
filename_last = Path(tmpdir / LAST_BT_RESULT_FN)
filename = tmp_path / 'btresult.json'
filename_last = tmp_path / LAST_BT_RESULT_FN
_backup_file(filename_last, copy_file=True)
assert not filename.is_file()

@ -196,7 +196,7 @@ def test_generate_backtest_stats(default_conf, testdatadir, tmpdir):
last_fn = get_latest_backtest_filename(filename_last.parent)
assert re.match(r"btresult-.*\.json", last_fn)

filename1 = Path(tmpdir / last_fn)
filename1 = tmp_path / last_fn
assert filename1.is_file()
content = filename1.read_text()
assert 'max_drawdown_account' in content

@ -254,14 +254,14 @@ def test_store_backtest_candles(testdatadir, mocker):
dump_mock.reset_mock()

def test_write_read_backtest_candles(tmpdir):
def test_write_read_backtest_candles(tmp_path):

candle_dict = {'DefStrat': {'UNITTEST/BTC': pd.DataFrame()}}

# test directory exporting
sample_date = '2022_01_01_15_05_13'
store_backtest_analysis_results(Path(tmpdir), candle_dict, {}, sample_date)
stored_file = Path(tmpdir / f'backtest-result-{sample_date}_signals.pkl')
store_backtest_analysis_results(tmp_path, candle_dict, {}, sample_date)
stored_file = tmp_path / f'backtest-result-{sample_date}_signals.pkl'
with stored_file.open("rb") as scp:
pickled_signal_candles = joblib.load(scp)

@ -273,9 +273,9 @@ def test_write_read_backtest_candles(tmpdir):
_clean_test_file(stored_file)

# test file exporting
filename = Path(tmpdir / 'testresult')
filename = tmp_path / 'testresult'
store_backtest_analysis_results(filename, candle_dict, {}, sample_date)
stored_file = Path(tmpdir / f'testresult-{sample_date}_signals.pkl')
stored_file = tmp_path / f'testresult-{sample_date}_signals.pkl'
with stored_file.open("rb") as scp:
pickled_signal_candles = joblib.load(scp)
@ -29,15 +29,15 @@ def test_init_create_session(default_conf):
assert 'scoped_session' in type(Trade.session).__name__

def test_init_custom_db_url(default_conf, tmpdir):
def test_init_custom_db_url(default_conf, tmp_path):
# Update path to a value other than default, but still in-memory
filename = f"{tmpdir}/freqtrade2_test.sqlite"
assert not Path(filename).is_file()
filename = tmp_path / "freqtrade2_test.sqlite"
assert not filename.is_file()

default_conf.update({'db_url': f'sqlite:///{filename}'})

init_db(default_conf['db_url'])
assert Path(filename).is_file()
assert filename.is_file()
r = Trade.session.execute(text("PRAGMA journal_mode"))
assert r.first() == ('wal',)
@ -583,7 +583,7 @@ def test_calc_open_close_trade_price(
oobj.update_from_ccxt_object(entry_order)
trade.update_trade(oobj)

trade.funding_fees = funding_fees
trade.funding_fee_running = funding_fees

oobj = Order.parse_from_ccxt_object(exit_order, 'ADA/USDT', trade.exit_side)
oobj._trade_live = trade

@ -591,7 +591,9 @@ def test_calc_open_close_trade_price(
trade.update_trade(oobj)

assert trade.is_open is False
# Funding fees transfer from funding_fee_running to funding_Fees
assert trade.funding_fees == funding_fees
assert trade.orders[-1].funding_fee == funding_fees

assert pytest.approx(trade._calc_open_trade_value(trade.amount, trade.open_rate)) == open_value
assert pytest.approx(trade.calc_close_trade_value(trade.close_rate)) == close_value

@ -2094,11 +2096,10 @@ def test_Trade_object_idem():
'get_enter_tag_performance',
'get_mix_tag_performance',
'get_trading_volume',
'from_json',
'validate_string_len',
)
EXCLUDES2 = ('trades', 'trades_open', 'bt_trades_open_pp', 'bt_open_open_trade_count',
'total_profit')
'total_profit', 'from_json',)

# Parent (LocalTrade) should have the same attributes
for item in trade:
@ -2301,6 +2302,101 @@ def test_recalc_trade_from_orders(fee):
assert pytest.approx(trade.open_trade_value) == o1_trade_val + o2_trade_val + o3_trade_val


@pytest.mark.usefixtures("init_persistence")
def test_recalc_trade_from_orders_kucoin():
# Taken from https://github.com/freqtrade/freqtrade/issues/9346
o1_amount = 11511963.8634448908
o2_amount = 11750101.7743937783
o3_amount = 23262065.6378386617 # Exit amount - barely doesn't even out

res = o1_amount + o2_amount - o3_amount
assert res > 0.0
assert res < 0.1
o1_rate = 0.000029901
o2_rate = 0.000029295
o3_rate = 0.000029822

o1_cost = o1_amount * o1_rate

trade = Trade(
pair='FLOKI/USDT',
stake_amount=o1_cost,
open_date=dt_now() - timedelta(hours=2),
amount=o1_amount,
fee_open=0.001,
fee_close=0.001,
exchange='binance',
open_rate=o1_rate,
max_rate=o1_rate,
leverage=1,
)
# Check with 1 order
order1 = Order(
ft_order_side='buy',
ft_pair=trade.pair,
ft_is_open=False,
status="closed",
symbol=trade.pair,
order_type="market",
side="buy",
price=o1_rate,
average=o1_rate,
filled=o1_amount,
remaining=0,
cost=o1_cost,
order_date=trade.open_date,
order_filled_date=trade.open_date,
)
trade.orders.append(order1)
order2 = Order(
ft_order_side='buy',
ft_pair=trade.pair,
ft_is_open=False,
status="closed",
symbol=trade.pair,
order_type="market",
side="buy",
price=o2_rate,
average=o2_rate,
filled=o2_amount,
remaining=0,
cost=o2_amount * o2_rate,
order_date=trade.open_date,
order_filled_date=trade.open_date,
)
trade.orders.append(order2)
trade.recalc_trade_from_orders()
assert trade.amount == o1_amount + o2_amount
profit = trade.calculate_profit(o3_rate)
assert profit.profit_abs == pytest.approx(3.90069871)
assert profit.profit_ratio == pytest.approx(0.00566035)
order3 = Order(
ft_order_side='sell',
ft_pair=trade.pair,
ft_is_open=False,
status="closed",
symbol=trade.pair,
order_type="market",
side="sell",
price=o3_rate,
average=o3_rate,
filled=o3_amount,
remaining=0,
cost=o2_amount * o2_rate,
order_date=trade.open_date,
order_filled_date=trade.open_date,
)

trade.orders.append(order3)
trade.update_trade(order3)
assert trade.is_open is False
# Trade closed correctly - but left a minimal amount.
assert trade.amount == 8e-09
assert pytest.approx(trade.close_profit_abs) == 3.90069871
assert pytest.approx(trade.close_profit) == 0.00566035


@pytest.mark.parametrize('is_short', [True, False])
def test_recalc_trade_from_orders_ignores_bad_orders(fee, is_short):
@ -2580,9 +2676,9 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 10), (100.0, 10.0, 1000.0, 0.0, None, None)),
(('buy', 100, 15), (200.0, 12.5, 2500.0, 0.0, None, None)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -25.0, -25.0, -0.04)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 725.0, 750.0, 0.60)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 350.0, -375.0, -0.60)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -25.0, -25.0, -0.01)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 725.0, 750.0, 0.29)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 350.0, -375.0, 0.14)),
],
'end_profit': 350.0,
'end_profit_ratio': 0.14,
@ -2592,9 +2688,9 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 10), (100.0, 10.0, 1000.0, 0.0, None, None)),
(('buy', 100, 15), (200.0, 12.5, 2500.0, 0.0, None, None)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -28.0625, -28.0625, -0.044788)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 713.8125, 741.875, 0.59201995)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 336.625, -377.1875, -0.60199501)),
(('sell', 50, 12), (150.0, 12.5, 1875.0, -28.0625, -28.0625, -0.011197)),
(('sell', 100, 20), (50.0, 12.5, 625.0, 713.8125, 741.875, 0.2848129)),
(('sell', 50, 5), (50.0, 12.5, 625.0, 336.625, -377.1875, 0.1343142)),
],
'end_profit': 336.625,
'end_profit_ratio': 0.1343142,
@ -2604,10 +2700,10 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 3), (100.0, 3.0, 300.0, 0.0, None, None)),
(('buy', 100, 7), (200.0, 5.0, 1000.0, 0.0, None, None)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 596.0, 596.0, 1.189027)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 596.0, 596.0, 1.189027)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1388.5, 792.5, 0.7186579)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3175.75, 1787.25, 1.08048062)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 596.0, 596.0, 0.5945137)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 596.0, 596.0, 0.5945137)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1388.5, 792.5, 0.4261653)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3175.75, 1787.25, 0.9747170)),
],
'end_profit': 3175.75,
'end_profit_ratio': 0.9747170,
@ -2618,10 +2714,10 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 3), (100.0, 3.0, 300.0, 0.0, None, None)),
(('buy', 100, 7), (200.0, 5.0, 1000.0, 0.0, None, None)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 600.0, 600.0, 1.2)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 600.0, 600.0, 1.2)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1400.0, 800.0, 0.72727273)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3200.0, 1800.0, 1.09090909)),
(('sell', 100, 11), (100.0, 5.0, 500.0, 600.0, 600.0, 0.6)),
(('buy', 150, 15), (250.0, 11.0, 2750.0, 600.0, 600.0, 0.6)),
(('sell', 100, 19), (150.0, 11.0, 1650.0, 1400.0, 800.0, 0.43076923)),
(('sell', 150, 23), (150.0, 11.0, 1650.0, 3200.0, 1800.0, 0.98461538)),
],
'end_profit': 3200.0,
'end_profit_ratio': 0.98461538,
@ -2631,10 +2727,10 @@ def test_order_to_ccxt(limit_buy_order_open, limit_sell_order_usdt_open):
'orders': [
(('buy', 100, 8), (100.0, 8.0, 800.0, 0.0, None, None)),
(('buy', 100, 9), (200.0, 8.5, 1700.0, 0.0, None, None)),
(('sell', 100, 10), (100.0, 8.5, 850.0, 150.0, 150.0, 0.17647059)),
(('buy', 150, 11), (250.0, 10, 2500.0, 150.0, 150.0, 0.17647059)),
(('sell', 100, 12), (150.0, 10.0, 1500.0, 350.0, 200.0, 0.2)),
(('sell', 150, 14), (150.0, 10.0, 1500.0, 950.0, 600.0, 0.40)),
(('sell', 100, 10), (100.0, 8.5, 850.0, 150.0, 150.0, 0.08823529)),
(('buy', 150, 11), (250.0, 10, 2500.0, 150.0, 150.0, 0.08823529)),
(('sell', 100, 12), (150.0, 10.0, 1500.0, 350.0, 200.0, 0.1044776)),
(('sell', 150, 14), (150.0, 10.0, 1500.0, 950.0, 600.0, 0.283582)),
],
'end_profit': 950.0,
'end_profit_ratio': 0.283582,
@ -1,8 +1,10 @@
import json
from datetime import datetime, timezone

import pytest

from freqtrade.persistence.trade_model import Trade
from freqtrade.persistence.trade_model import LocalTrade, Trade
from tests.conftest import create_mock_trades_usdt


@pytest.mark.usefixtures("init_persistence")
@ -170,7 +172,8 @@ def test_trade_fromjson():
"order_filled_date": "2022-10-18 09:45:22",
"order_type": "market",
"price": 0.2592,
"remaining": 0.0
"remaining": 0.0,
"funding_fee": -0.055
}
]
}"""
@ -192,3 +195,72 @@ def test_trade_fromjson():
last_o = trade.orders[-1]
assert last_o.order_filled_utc == datetime(2022, 10, 18, 9, 45, 22, tzinfo=timezone.utc)
assert isinstance(last_o.order_date, datetime)
assert last_o.funding_fee == -0.055


@pytest.mark.usefixtures("init_persistence")
def test_trade_serialize_load_back(fee):

create_mock_trades_usdt(fee, None)

t = Trade.get_trades([Trade.id == 1]).first()
assert t.id == 1
t.funding_fees = 0.025
t.orders[0].funding_fee = 0.0125
assert len(t.orders) == 2
Trade.commit()

tjson = t.to_json(False)
assert isinstance(tjson, dict)
trade_string = json.dumps(tjson)
trade = Trade.from_json(trade_string)

assert trade.id == t.id
assert trade.funding_fees == t.funding_fees
assert len(trade.orders) == len(t.orders)
assert trade.orders[0].funding_fee == t.orders[0].funding_fee
excluded = [
'trade_id', 'quote_currency', 'open_timestamp', 'close_timestamp',
'realized_profit_ratio', 'close_profit_pct',
'trade_duration_s', 'trade_duration',
'profit_ratio', 'profit_pct', 'profit_abs', 'stop_loss_abs',
'initial_stop_loss_abs',
'orders',
]
failed = []
# Ensure all attributes written can be read.
for obj, value in tjson.items():
if obj in excluded:
continue
tattr = getattr(trade, obj, None)
if isinstance(tattr, datetime):
tattr = tattr.strftime('%Y-%m-%d %H:%M:%S')
if tattr != value:
failed.append((obj, tattr, value))

assert tjson.get('trade_id') == trade.id
assert tjson.get('quote_currency') == trade.stake_currency
assert tjson.get('stop_loss_abs') == trade.stop_loss
assert tjson.get('initial_stop_loss_abs') == trade.initial_stop_loss

excluded_o = [
'order_filled_timestamp', 'ft_is_entry', 'pair', 'is_open', 'order_timestamp',
]
order_obj = trade.orders[0]
for obj, value in tjson['orders'][0].items():
if obj in excluded_o:
continue
tattr = getattr(order_obj, obj, None)
if isinstance(tattr, datetime):
tattr = tattr.strftime('%Y-%m-%d %H:%M:%S')
if tattr != value:
failed.append((obj, tattr, value))

assert tjson['orders'][0]['pair'] == order_obj.ft_pair
assert not failed

trade2 = LocalTrade.from_json(trade_string)
assert len(trade2.orders) == len(t.orders)

trade3 = LocalTrade.from_json(trade_string)
assert len(trade3.orders) == len(t.orders)
@ -242,7 +242,7 @@ def test_stoploss_guard_perpair(mocker, default_conf, fee, caplog, only_per_pair
# 2nd Trade that counts with correct pair
generate_mock_trade(
pair, fee.return_value, False, exit_reason=ExitType.STOP_LOSS.value,
min_ago_open=180, min_ago_close=30, profit_rate=0.9, is_short=is_short
min_ago_open=180, min_ago_close=31, profit_rate=0.9, is_short=is_short
)

freqtrade.protections.stop_per_pair(pair)
@ -1063,6 +1063,63 @@ def test_api_performance(botclient, fee):
'profit_ratio': -0.05570419, 'profit_abs': -0.1150375}]


def test_api_entries(botclient, fee):
ftbot, client = botclient
patch_get_signal(ftbot)
# Empty
rc = client_get(client, f"{BASE_URI}/entries")
assert_response(rc)
assert len(rc.json()) == 0

create_mock_trades(fee)
rc = client_get(client, f"{BASE_URI}/entries")
assert_response(rc)
response = rc.json()
assert len(response) == 2
resp = response[0]
assert resp['enter_tag'] == 'TEST1'
assert resp['count'] == 1
assert resp['profit_pct'] == 0.5


def test_api_exits(botclient, fee):
ftbot, client = botclient
patch_get_signal(ftbot)
# Empty
rc = client_get(client, f"{BASE_URI}/exits")
assert_response(rc)
assert len(rc.json()) == 0

create_mock_trades(fee)
rc = client_get(client, f"{BASE_URI}/exits")
assert_response(rc)
response = rc.json()
assert len(response) == 2
resp = response[0]
assert resp['exit_reason'] == 'sell_signal'
assert resp['count'] == 1
assert resp['profit_pct'] == 0.5


def test_api_mix_tag(botclient, fee):
ftbot, client = botclient
patch_get_signal(ftbot)
# Empty
rc = client_get(client, f"{BASE_URI}/mix_tags")
assert_response(rc)
assert len(rc.json()) == 0

create_mock_trades(fee)
rc = client_get(client, f"{BASE_URI}/mix_tags")
assert_response(rc)
response = rc.json()
assert len(response) == 2
resp = response[0]
assert resp['mix_tag'] == 'TEST1 sell_signal'
assert resp['count'] == 1
assert resp['profit_pct'] == 0.5


@pytest.mark.parametrize(
'is_short,current_rate,open_trade_value',
[(True, 1.098e-05, 15.0911775),
@ -1616,9 +1673,9 @@ def test_api_plot_config(botclient, mocker):
assert_response(rc)


def test_api_strategies(botclient, tmpdir):
def test_api_strategies(botclient, tmp_path):
ftbot, client = botclient
ftbot.config['user_data_dir'] = Path(tmpdir)
ftbot.config['user_data_dir'] = tmp_path

rc = client_get(client, f"{BASE_URI}/strategies")
@ -1701,9 +1758,9 @@ def test_api_exchanges(botclient):
}


def test_api_freqaimodels(botclient, tmpdir, mocker):
def test_api_freqaimodels(botclient, tmp_path, mocker):
ftbot, client = botclient
ftbot.config['user_data_dir'] = Path(tmpdir)
ftbot.config['user_data_dir'] = tmp_path
mocker.patch(
"freqtrade.resolvers.freqaimodel_resolver.FreqaiModelResolver.search_all_objects",
return_value=[
@ -1739,9 +1796,9 @@ def test_api_freqaimodels(botclient, tmpdir, mocker):
]}


def test_api_pairlists_available(botclient, tmpdir):
def test_api_pairlists_available(botclient, tmp_path):
ftbot, client = botclient
ftbot.config['user_data_dir'] = Path(tmpdir)
ftbot.config['user_data_dir'] = tmp_path

rc = client_get(client, f"{BASE_URI}/pairlists/available")
@ -1768,9 +1825,9 @@ def test_api_pairlists_available(botclient, tmpdir):
assert len(volumepl['params']) > 2


def test_api_pairlists_evaluate(botclient, tmpdir, mocker):
def test_api_pairlists_evaluate(botclient, tmp_path, mocker):
ftbot, client = botclient
ftbot.config['user_data_dir'] = Path(tmpdir)
ftbot.config['user_data_dir'] = tmp_path

rc = client_get(client, f"{BASE_URI}/pairlists/evaluate/randomJob")
@ -1905,7 +1962,7 @@ def test_sysinfo(botclient):
assert 'ram_pct' in result


def test_api_backtesting(botclient, mocker, fee, caplog, tmpdir):
def test_api_backtesting(botclient, mocker, fee, caplog, tmp_path):
try:
ftbot, client = botclient
mocker.patch(f'{EXMS}.get_fee', fee)
@ -1935,8 +1992,8 @@ def test_api_backtesting(botclient, mocker, fee, caplog, tmpdir):
assert result['status_msg'] == 'Backtest reset'
ftbot.config['export'] = 'trades'
ftbot.config['backtest_cache'] = 'day'
ftbot.config['user_data_dir'] = Path(tmpdir)
ftbot.config['exportfilename'] = Path(tmpdir) / "backtest_results"
ftbot.config['user_data_dir'] = tmp_path
ftbot.config['exportfilename'] = tmp_path / "backtest_results"
ftbot.config['exportfilename'].mkdir()

# start backtesting
@ -2194,14 +2251,14 @@ def test_api_ws_subscribe(botclient, mocker):

with client.websocket_connect(ws_url) as ws:
ws.send_json({'type': 'subscribe', 'data': ['whitelist']})
time.sleep(1)
time.sleep(0.2)

# Check call count is now 1 as we sent a valid subscribe request
assert sub_mock.call_count == 1

with client.websocket_connect(ws_url) as ws:
ws.send_json({'type': 'subscribe', 'data': 'whitelist'})
time.sleep(1)
time.sleep(0.2)

# Call count hasn't changed as the subscribe request was invalid
assert sub_mock.call_count == 1
@ -150,8 +150,8 @@ def test_telegram_init(default_conf, mocker, caplog) -> None:
"['reload_conf', 'reload_config'], ['show_conf', 'show_config'], "
"['stopbuy', 'stopentry'], ['whitelist'], ['blacklist'], "
"['bl_delete', 'blacklist_delete'], "
"['logs'], ['edge'], ['health'], ['help'], ['version'], ['marketdir']"
"]")
"['logs'], ['edge'], ['health'], ['help'], ['version'], ['marketdir'], "
"['order']]")

assert log_has(message_str, caplog)

@ -347,8 +347,6 @@ async def test_telegram_status_multi_entry(default_conf, update, mocker, fee) ->
msg = msg_mock.call_args_list[3][0][0]
assert re.search(r'Number of Entries.*2', msg)
assert re.search(r'Number of Exits.*1', msg)
assert re.search(r'from 1st entry rate', msg)
assert re.search(r'Order Filled', msg)
assert re.search(r'Close Date:', msg) is None
assert re.search(r'Close Profit:', msg) is None
@ -375,6 +373,105 @@ async def test_telegram_status_closed_trade(default_conf, update, mocker, fee) -
assert re.search(r'Close Profit:', msg)


async def test_order_handle(default_conf, update, ticker, fee, mocker) -> None:
default_conf['max_open_trades'] = 3
mocker.patch.multiple(
EXMS,
fetch_ticker=ticker,
get_fee=fee,
_dry_is_price_crossed=MagicMock(return_value=True),
)
status_table = MagicMock()
mocker.patch.multiple(
'freqtrade.rpc.telegram.Telegram',
_status_table=status_table,
)

telegram, freqtradebot, msg_mock = get_telegram_testobject(mocker, default_conf)

patch_get_signal(freqtradebot)

freqtradebot.state = State.RUNNING
msg_mock.reset_mock()

# Create some test data
freqtradebot.enter_positions()

mocker.patch('freqtrade.rpc.telegram.MAX_MESSAGE_LENGTH', 500)

msg_mock.reset_mock()
context = MagicMock()
context.args = ["2"]
await telegram._order(update=update, context=context)

assert msg_mock.call_count == 1

msg1 = msg_mock.call_args_list[0][0][0]

assert 'Order List for Trade #*`2`' in msg1

msg_mock.reset_mock()
mocker.patch('freqtrade.rpc.telegram.MAX_MESSAGE_LENGTH', 50)
context = MagicMock()
context.args = ["2"]
await telegram._order(update=update, context=context)

assert msg_mock.call_count == 2

msg1 = msg_mock.call_args_list[0][0][0]
msg2 = msg_mock.call_args_list[1][0][0]

assert 'Order List for Trade #*`2`' in msg1
assert '*Order List for Trade #*`2` - continued' in msg2
@pytest.mark.usefixtures("init_persistence")
async def test_telegram_order_multi_entry(default_conf, update, mocker, fee) -> None:
default_conf['telegram']['enabled'] = False
default_conf['position_adjustment_enable'] = True
mocker.patch.multiple(
EXMS,
fetch_order=MagicMock(return_value=None),
get_rate=MagicMock(return_value=0.22),
)

telegram, _, msg_mock = get_telegram_testobject(mocker, default_conf)

create_mock_trades(fee)
trades = Trade.get_open_trades()
trade = trades[3]
# Average may be empty on some exchanges
trade.orders[0].average = 0
trade.orders.append(Order(
order_id='5412vbb',
ft_order_side='buy',
ft_pair=trade.pair,
ft_is_open=False,
ft_amount=trade.amount,
ft_price=trade.open_rate,
status="closed",
symbol=trade.pair,
order_type="market",
side="buy",
price=trade.open_rate * 0.95,
average=0,
filled=trade.amount,
remaining=0,
cost=trade.amount,
order_date=trade.open_date,
order_filled_date=trade.open_date,
)
)
trade.recalc_trade_from_orders()
Trade.commit()

await telegram._order(update=update, context=MagicMock())
assert msg_mock.call_count == 4
msg = msg_mock.call_args_list[3][0][0]
assert re.search(r'from 1st entry rate', msg)
assert re.search(r'Order Filled', msg)


async def test_status_handle(default_conf, update, ticker, fee, mocker) -> None:
default_conf['max_open_trades'] = 3
mocker.patch.multiple(
@ -443,14 +540,12 @@ async def test_status_handle(default_conf, update, ticker, fee, mocker) -> None:
context.args = ["2"]
await telegram._status(update=update, context=context)

assert msg_mock.call_count == 2
assert msg_mock.call_count == 1

msg1 = msg_mock.call_args_list[0][0][0]
msg2 = msg_mock.call_args_list[1][0][0]

assert 'Close Rate' not in msg1
assert 'Trade ID:* `2`' in msg1
assert 'Trade ID:* `2` - continued' in msg2


async def test_status_table_handle(default_conf, update, ticker, fee, mocker) -> None:
@ -1359,10 +1454,19 @@ async def test_force_enter_no_pair(default_conf, update, mocker) -> None:
assert reduce(lambda acc, x: acc + len(x), keyboard, 0) == 5
update = MagicMock()
update.callback_query = AsyncMock()
update.callback_query.data = 'XRP/USDT_||_long'
update.callback_query.data = 'force_enter__XRP/USDT_||_long'
await telegram._force_enter_inline(update, None)
assert fbuy_mock.call_count == 1

fbuy_mock.reset_mock()
update.callback_query = AsyncMock()
update.callback_query.data = 'force_enter__cancel'
await telegram._force_enter_inline(update, None)
assert fbuy_mock.call_count == 0
query = update.callback_query
assert query.edit_message_text.call_count == 1
assert query.edit_message_text.call_args_list[-1][1]['text'] == "Force enter canceled."


async def test_telegram_performance_handle(default_conf_usdt, update, ticker, fee, mocker) -> None:
@ -47,6 +47,11 @@ class InformativeDecoratorTest(IStrategy):
dataframe['rsi'] = 14
return dataframe

@informative('1h', '{base}/BTC')
def populate_indicators_base_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
dataframe['rsi'] = 14
return dataframe

# Quote currency different from stake currency test.
@informative('1h', 'ETH/BTC', candle_type='spot')
def populate_indicators_eth_btc_1h(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
@ -422,7 +422,7 @@ def test_min_roi_reached3(default_conf, fee) -> None:
# enable custom stoploss, expected after 1st call, expected after 2nd call
(0.2, 0.9, ExitType.NONE, None, False, False, 0.3, 0.9, ExitType.NONE, None),
(0.2, 0.9, ExitType.NONE, None, False, False, -0.2, 0.9, ExitType.STOP_LOSS, None),
(0.2, 0.9, ExitType.NONE, 0.8, False, False, -0.2, 0.9, ExitType.LIQUIDATION, None),
(0.2, 0.9, ExitType.NONE, 0.92, False, False, -0.09, 0.9, ExitType.LIQUIDATION, None),
(0.2, 1.14, ExitType.NONE, None, True, False, 0.05, 1.14, ExitType.TRAILING_STOP_LOSS,
None),
(0.01, 0.96, ExitType.NONE, None, True, False, 0.05, 1, ExitType.NONE, None),
@ -277,9 +277,11 @@ def test_informative_decorator(mocker, default_conf_usdt, trading_mode):
('XRP/USDT', '5m', candle_def): test_data_5m,
('XRP/USDT', '30m', candle_def): test_data_30m,
('XRP/USDT', '1h', candle_def): test_data_1h,
('XRP/BTC', '1h', candle_def): test_data_1h, # from {base}/BTC
('LTC/USDT', '5m', candle_def): test_data_5m,
('LTC/USDT', '30m', candle_def): test_data_30m,
('LTC/USDT', '1h', candle_def): test_data_1h,
('LTC/BTC', '1h', candle_def): test_data_1h, # from {base}/BTC
('NEO/USDT', '30m', candle_def): test_data_30m,
('NEO/USDT', '5m', CandleType.SPOT): test_data_5m, # Explicit request with '' as candletype
('NEO/USDT', '15m', candle_def): test_data_5m, # Explicit request with '' as candletype
@ -296,10 +298,12 @@ def test_informative_decorator(mocker, default_conf_usdt, trading_mode):
'XRP/USDT', 'LTC/USDT', 'NEO/USDT'
])

assert len(strategy._ft_informative) == 6 # Equal to number of decorators used
assert len(strategy._ft_informative) == 7 # Equal to number of decorators used
informative_pairs = [
('XRP/USDT', '1h', candle_def),
('XRP/BTC', '1h', candle_def),
('LTC/USDT', '1h', candle_def),
('LTC/BTC', '1h', candle_def),
('XRP/USDT', '30m', candle_def),
('LTC/USDT', '30m', candle_def),
('NEO/USDT', '1h', candle_def),
@ -1,7 +1,6 @@
import shutil
from pathlib import Path

import pytest

@ -10,7 +9,7 @@ from freqtrade.util.binance_mig import migrate_binance_futures_data, migrate_bin
from tests.conftest import create_mock_trades_usdt, log_has


def test_binance_mig_data_conversion(default_conf_usdt, tmpdir, testdatadir):
def test_binance_mig_data_conversion(default_conf_usdt, tmp_path, testdatadir):

# call doing nothing (spot mode)
migrate_binance_futures_data(default_conf_usdt)
@ -18,7 +17,7 @@ def test_binance_mig_data_conversion(default_conf_usdt, tmpdir, testdatadir):
pair_old = 'XRP_USDT'
pair_unified = 'XRP_USDT_USDT'
futures_src = testdatadir / 'futures'
futures_dst = tmpdir / 'futures'
futures_dst = tmp_path / 'futures'
futures_dst.mkdir()
files = [
'-1h-mark.feather',
@ -32,7 +31,7 @@ def test_binance_mig_data_conversion(default_conf_usdt, tmpdir, testdatadir):
fn_after = futures_dst / f'{pair_old}{file}'
shutil.copy(futures_src / f'{pair_unified}{file}', fn_after)

default_conf_usdt['datadir'] = Path(tmpdir)
default_conf_usdt['datadir'] = tmp_path
# Migrate files to unified namings
migrate_binance_futures_data(default_conf_usdt)
@ -104,8 +104,8 @@ def test_load_config_file_error_range(default_conf, mocker, caplog) -> None:
assert x == ''


def test_load_file_error(tmpdir):
testpath = Path(tmpdir) / 'config.json'
def test_load_file_error(tmp_path):
testpath = tmp_path / 'config.json'
with pytest.raises(OperationalException, match=r"File .* not found!"):
load_file(testpath)

@ -601,9 +601,9 @@ def test_cli_verbose_with_params(default_conf, mocker, caplog) -> None:
assert log_has('Verbosity set to 3', caplog)


def test_set_logfile(default_conf, mocker, tmpdir):
def test_set_logfile(default_conf, mocker, tmp_path):
patched_configuration_load_config_file(mocker, default_conf)
f = Path(tmpdir / "test_file.log")
f = tmp_path / "test_file.log"
assert not f.is_file()
arglist = [
'trade', '--logfile', str(f),
@ -1145,7 +1145,7 @@ def test_pairlist_resolving_with_config_pl_not_exists(mocker, default_conf):
configuration.get_config()


def test_pairlist_resolving_fallback(mocker, tmpdir):
def test_pairlist_resolving_fallback(mocker, tmp_path):
mocker.patch.object(Path, "exists", MagicMock(return_value=True))
mocker.patch.object(Path, "open", MagicMock(return_value=MagicMock()))
mocker.patch("freqtrade.configuration.configuration.load_file",
@ -1164,7 +1164,7 @@ def test_pairlist_resolving_fallback(mocker, tmpdir):

assert config['pairs'] == ['ETH/BTC', 'XRP/BTC']
assert config['exchange']['name'] == 'binance'
assert config['datadir'] == Path(tmpdir) / "user_data/data/binance"
assert config['datadir'] == tmp_path / "user_data/data/binance"

@pytest.mark.parametrize("setting", [
Some files were not shown because too many files have changed in this diff