Merge remote-tracking branch 'origin/develop' into feature/proceed-exit-while-open-order
Commit: 5ba592ff47
@@ -32,7 +32,7 @@ jobs:
run: python build_helpers/binance_update_lev_tiers.py

- uses: peter-evans/create-pull-request@v6
- uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.REPO_SCOPED_TOKEN }}
add-paths: freqtrade/exchange/binance_leverage_tiers.json
.github/workflows/ci.yml
@@ -537,12 +537,12 @@ jobs:
- name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@v1.10.0
uses: pypa/gh-action-pypi-publish@v1.10.2
with:
repository-url: https://test.pypi.org/legacy/
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.10.0
uses: pypa/gh-action-pypi-publish@v1.10.2

deploy-docker:
.github/workflows/pre-commit-update.yml
@@ -26,7 +26,7 @@ jobs:
- name: Run auto-update
run: pre-commit autoupdate

- uses: peter-evans/create-pull-request@v6
- uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.REPO_SCOPED_TOKEN }}
add-paths: .pre-commit-config.yaml
@@ -16,10 +16,10 @@ repos:
additional_dependencies:
- types-cachetools==5.5.0.20240820
- types-filelock==3.2.7
- types-requests==2.32.0.20240712
- types-requests==2.32.0.20240914
- types-tabulate==0.9.0.20240106
- types-python-dateutil==2.9.0.20240821
- SQLAlchemy==2.0.32
- types-python-dateutil==2.9.0.20240906
- SQLAlchemy==2.0.35
# stages: [push]

- repo: https://github.com/pycqa/isort
@@ -31,9 +31,10 @@ repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.6.3'
rev: 'v0.6.7'
hooks:
- id: ruff
- id: ruff-format

- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
@@ -1,4 +1,4 @@
FROM python:3.12.5-slim-bookworm as base
FROM python:3.12.6-slim-bookworm as base

# Setup env
ENV LANG C.UTF-8
@@ -1,4 +1,4 @@
FROM python:3.11.9-slim-bookworm as base
FROM python:3.11.10-slim-bookworm as base

# Setup env
ENV LANG C.UTF-8
@@ -18,15 +18,13 @@ freqtrade backtesting -c <config.json> --timeframe <tf> --strategy <strategy_nam
```

This will tell freqtrade to output a pickled dictionary of strategy, pairs and corresponding
DataFrame of the candles that resulted in buy signals. Depending on how many buys your strategy
makes, this file may get quite large, so periodically check your `user_data/backtest_results`
folder to delete old exports.
DataFrame of the candles that resulted in entry and exit signals.
Depending on how many entries your strategy makes, this file may get quite large, so periodically check your `user_data/backtest_results` folder to delete old exports.

Before running your next backtest, make sure you either delete your old backtest results or run
backtesting with the `--cache none` option to make sure no cached results are used.

If all goes well, you should now see a `backtest-result-{timestamp}_signals.pkl` file in the
`user_data/backtest_results` folder.
If all goes well, you should now see `backtest-result-{timestamp}_signals.pkl` and `backtest-result-{timestamp}_exited.pkl` files in the `user_data/backtest_results` folder.

To analyze the entry/exit tags, we now need to use the `freqtrade backtesting-analysis` command
with the `--analysis-groups` option provided with space-separated arguments:
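If you want to inspect these exports outside of `backtesting-analysis`, the sketch below shows one way to do it. It assumes the `_signals.pkl` file is a joblib-pickled dictionary keyed by strategy name and then by pair (this structure is inferred from the description above; the timestamp in the file name is a placeholder):

```python
from pathlib import Path

import joblib

# Placeholder file name - substitute the timestamp of your own backtest run.
export = Path("user_data/backtest_results/backtest-result-2024-09-20_12-00-00_signals.pkl")

# Assumed structure: {strategy_name: {pair: DataFrame of signal candles}}
signal_candles = joblib.load(export)
for strategy_name, pairs in signal_candles.items():
    for pair, df in pairs.items():
        print(f"{strategy_name} {pair}: {len(df)} signal candles")
```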
@@ -103,6 +101,10 @@ The indicators have to be present in your strategy's main DataFrame (either for
timeframe or for informative timeframes) otherwise they will simply be ignored in the script
output.

!!! Note "Indicator List"
    The indicator values will be displayed for both entry and exit points. If `--indicator-list all` is specified,
    only the indicators at the entry point will be shown to avoid excessively large lists, which could occur depending on the strategy.

A range of candle- and trade-related fields is included in the analysis and is therefore
automatically accessible by adding them to the indicator list; these include:
@@ -118,6 +120,53 @@ automatically accessible by including them on the indicator-list, and these incl
- **profit_ratio :** trade profit ratio
- **profit_abs :** absolute profit return of the trade

#### Sample Output for Indicator Values

```bash
freqtrade backtesting-analysis -c user_data/config.json --analysis-groups 0 --indicator-list chikou_span tenkan_sen
```

In this example,
we aim to display the `chikou_span` and `tenkan_sen` indicator values at both the entry and exit points of trades.

A sample output for indicators might look like this:

| pair | open_date | enter_reason | exit_reason | chikou_span (entry) | tenkan_sen (entry) | chikou_span (exit) | tenkan_sen (exit) |
|-----------|---------------------------|--------------|-------------|---------------------|--------------------|--------------------|-------------------|
| DOGE/USDT | 2024-07-06 00:35:00+00:00 | | exit_signal | 0.105 | 0.106 | 0.105 | 0.107 |
| BTC/USDT | 2024-08-05 14:20:00+00:00 | | roi | 54643.440 | 51696.400 | 54386.000 | 52072.010 |

As shown in the table, `chikou_span (entry)` represents the indicator value at the time of trade entry,
while `chikou_span (exit)` reflects its value at the time of exit.
This detailed view of indicator values enhances the analysis.

The `(entry)` and `(exit)` suffixes are added to indicators
to distinguish the values at the entry and exit points of the trade.

!!! Note "Trade-wide Indicators"
    Certain trade-wide indicators do not have the `(entry)` or `(exit)` suffix. These indicators include: `pair`, `stake_amount`,
    `max_stake_amount`, `amount`, `open_date`, `close_date`, `open_rate`, `close_rate`, `fee_open`, `fee_close`, `trade_duration`,
    `profit_ratio`, `profit_abs`, `exit_reason`, `initial_stop_loss_abs`, `initial_stop_loss_ratio`, `stop_loss_abs`, `stop_loss_ratio`,
    `min_rate`, `max_rate`, `is_open`, `enter_tag`, `leverage`, `is_short`, `open_timestamp`, `close_timestamp` and `orders`

#### Filtering Indicators Based on Entry or Exit Signals

The `--indicator-list` option, by default, displays indicator values for both entry and exit signals. To filter the indicator values exclusively for entry signals, you can use the `--entry-only` argument. Similarly, to display indicator values only at exit signals, use the `--exit-only` argument.

Example: Display indicator values at entry signals:

```bash
freqtrade backtesting-analysis -c user_data/config.json --analysis-groups 0 --indicator-list chikou_span tenkan_sen --entry-only
```

Example: Display indicator values at exit signals:

```bash
freqtrade backtesting-analysis -c user_data/config.json --analysis-groups 0 --indicator-list chikou_span tenkan_sen --exit-only
```

!!! note
    When using these filters, the indicator names will not be suffixed with `(entry)` or `(exit)`.

### Filtering the trade output by date
@@ -293,6 +293,7 @@ A backtesting result will look like that:
|-----------------------------+---------------------|
| Backtesting from            | 2019-01-01 00:00:00 |
| Backtesting to              | 2019-05-01 00:00:00 |
| Trading Mode                | Spot                |
| Max open trades             | 3                   |
|                             |                     |
| Total/Daily Avg Trades      | 429 / 3.575         |
@@ -398,6 +399,7 @@ It contains some useful key metrics about performance of your strategy on backte
|-----------------------------+---------------------|
| Backtesting from            | 2019-01-01 00:00:00 |
| Backtesting to              | 2019-05-01 00:00:00 |
| Trading Mode                | Spot                |
| Max open trades             | 3                   |
|                             |                     |
| Total/Daily Avg Trades      | 429 / 3.575         |
@@ -452,6 +454,7 @@ It contains some useful key metrics about performance of your strategy on backte

- `Backtesting from` / `Backtesting to`: Backtesting range (usually defined with the `--timerange` option).
- `Max open trades`: Setting of `max_open_trades` (or `--max-open-trades`) - or number of pairs in the pairlist (whatever is lower).
- `Trading Mode`: Spot or Futures trading.
- `Total/Daily Avg Trades`: Identical to the total trades of the backtest output table / Total trades divided by the backtesting duration in days (this will give you information about how many trades to expect from the strategy) - see the worked example after this list.
- `Starting balance`: Start balance - as given by dry-run-wallet (config or command line).
- `Final balance`: Final balance - starting balance + absolute profit.
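As a quick worked example using the sample output above: the backtest runs from 2019-01-01 to 2019-05-01, which is 120 days, so 429 trades / 120 days ≈ 3.575 trades per day, matching the `429 / 3.575` shown in the `Total/Daily Avg Trades` row.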
@@ -222,7 +222,6 @@ Mandatory parameters are marked as **Required**, which means that they are requi
| `exchange.ccxt_async_config` | Additional CCXT parameters passed to the async ccxt instance. Parameters may differ from exchange to exchange and are documented in the [ccxt documentation](https://docs.ccxt.com/#/README?id=overriding-exchange-properties-upon-instantiation) <br> **Datatype:** Dict
| `exchange.enable_ws` | Enable the usage of Websockets for the exchange. <br>[More information](#consuming-exchange-websockets).<br>*Defaults to `true`.* <br> **Datatype:** Boolean
| `exchange.markets_refresh_interval` | The interval in minutes in which markets are reloaded. <br>*Defaults to `60` minutes.* <br> **Datatype:** Positive Integer
| `exchange.skip_pair_validation` | Skip pairlist validation on startup.<br>*Defaults to `false`*<br> **Datatype:** Boolean
| `exchange.skip_open_order_update` | Skips open order updates on startup should the exchange cause problems. Only relevant in live conditions.<br>*Defaults to `false`*<br> **Datatype:** Boolean
| `exchange.unknown_fee_rate` | Fallback value to use when calculating trading fees. This can be useful for exchanges which have fees in non-tradable currencies. The value provided here will be multiplied with the "fee cost".<br>*Defaults to `None`*<br> **Datatype:** float
| `exchange.log_responses` | Log relevant exchange responses. For debug mode only - use with care.<br>*Defaults to `false`*<br> **Datatype:** Boolean
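To illustrate where a few of these options live in the configuration file, here is a minimal, hypothetical `exchange` excerpt (values are examples only, not recommendations):

```json
{
    "exchange": {
        "name": "binance",
        "enable_ws": true,
        "markets_refresh_interval": 60,
        "skip_pair_validation": false,
        "unknown_fee_rate": 0.001,
        "log_responses": false
    }
}
```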
@@ -205,7 +205,7 @@ This is called with each iteration of the bot (only if the Pairlist Handler is a

It must return the resulting pairlist (which may then be passed into the chain of Pairlist Handlers).

Validations are optional, the parent class exposes a `_verify_blacklist(pairlist)` and `_whitelist_for_active_markets(pairlist)` to do default filtering. Use this if you limit your result to a certain number of pairs - so the end-result is not shorter than expected.
Validations are optional, the parent class exposes a `verify_blacklist(pairlist)` and `_whitelist_for_active_markets(pairlist)` to do default filtering. Use this if you limit your result to a certain number of pairs - so the end-result is not shorter than expected.

#### filter_pairlist

@@ -219,7 +219,7 @@ The default implementation in the base class simply calls the `_validate_pair()`

If overridden, it must return the resulting pairlist (which may then be passed into the next Pairlist Handler in the chain).

Validations are optional, the parent class exposes a `_verify_blacklist(pairlist)` and `_whitelist_for_active_markets(pairlist)` to do default filters. Use this if you limit your result to a certain number of pairs - so the end result is not shorter than expected.
Validations are optional, the parent class exposes a `verify_blacklist(pairlist)` and `_whitelist_for_active_markets(pairlist)` to do default filters. Use this if you limit your result to a certain number of pairs - so the end result is not shorter than expected.

In `VolumePairList`, this implements different methods of sorting, does early validation so only the expected number of pairs is returned.
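To make the hook points above concrete, below is a minimal, hypothetical Pairlist Handler sketch. It only shows where `gen_pairlist()`, `filter_pairlist()` and the parent-class helpers fit together; the class is simplified and not a drop-in implementation:

```python
import logging
from typing import List

from freqtrade.plugins.pairlist.IPairList import IPairList

logger = logging.getLogger(__name__)


class MyPairList(IPairList):
    """Hypothetical handler used only to illustrate the hook points."""

    def gen_pairlist(self, tickers) -> List[str]:
        # Use the configured whitelist as an illustrative source of candidate pairs.
        pairs = self._config["exchange"]["pair_whitelist"]
        # Optional default filtering exposed by the parent class.
        pairs = self._whitelist_for_active_markets(pairs)
        pairs = self.verify_blacklist(pairs, logger.info)
        return pairs

    def filter_pairlist(self, pairlist: List[str], tickers) -> List[str]:
        # Receives the result of the previous handler in the chain and must return a pairlist.
        return pairlist
```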
@@ -255,18 +255,24 @@ The configuration parameter `exchange.unknown_fee_rate` can be used to specify t
## Bybit

Futures trading on bybit is currently supported for USDT markets, and will use isolated futures mode.
Users with unified accounts (there's no way back) can create a Sub-account which will start as "non-unified", and can therefore use isolated futures.
On startup, freqtrade will set the position mode to "One-way Mode" for the whole (sub)account. This avoids making this call over and over again (slowing down bot operations), but means that changes to this setting may result in exceptions and errors

On startup, freqtrade will set the position mode to "One-way Mode" for the whole (sub)account. This avoids making this call over and over again (slowing down bot operations), but means that changes to this setting may result in exceptions and errors.

As bybit doesn't provide funding rate history, the dry-run calculation is used for live trades as well.

API Keys for live futures trading (Subaccount on non-unified) must have the following permissions:
API Keys for live futures trading must have the following permissions:
* Read-write
* Contract - Orders
* Contract - Positions

We strongly recommend limiting all API keys to the IP address you're going to use them from.

!!! Warning "Unified accounts"
    Freqtrade assumes accounts to be dedicated to the bot.
    We therefore recommend the usage of one subaccount per bot. This is especially important when using unified accounts.
    Other configurations (multiple bots on one account, manual non-bot trades on the bot account) are not supported and may lead to unexpected behavior.

!!! Tip "Stoploss on Exchange"
    Bybit (futures only) supports `stoploss_on_exchange` and uses `stop-loss-limit` orders. It provides great advantages, so we recommend enabling stoploss on exchange to benefit from it.
    On futures, Bybit supports both `stop-limit` as well as `stop-market` orders. You can use either `"limit"` or `"market"` in the `order_types.stoploss` configuration setting to decide which type to use.
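For orientation, a minimal, hypothetical configuration excerpt for Bybit futures reflecting the points above could look like this (keys and values are illustrative, not a complete configuration):

```json
{
    "trading_mode": "futures",
    "margin_mode": "isolated",
    "exchange": {
        "name": "bybit",
        "key": "your-api-key",
        "secret": "your-api-secret"
    },
    "order_types": {
        "stoploss": "limit",
        "stoploss_on_exchange": true
    }
}
```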
@ -55,7 +55,6 @@ It uses configuration from `exchange.pair_whitelist` and `exchange.pair_blacklis
|
|||
By default, only currently enabled pairs are allowed.
|
||||
To skip pair validation against active markets, set `"allow_inactive": true` within the `StaticPairList` configuration.
|
||||
This can be useful for backtesting expired pairs (like quarterly spot-markets).
|
||||
This option must be configured along with `exchange.skip_pair_validation` in the exchange configuration.
|
||||
|
||||
When used in a "follow-up" position (e.g. after VolumePairlist), all pairs in `'pair_whitelist'` will be added to the end of the pairlist.
|
||||
|
||||
|
|
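A minimal `pairlists` excerpt using this option could look as follows (illustrative only):

```json
{
    "pairlists": [
        {
            "method": "StaticPairList",
            "allow_inactive": true
        }
    ]
}
```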
@@ -101,3 +101,4 @@ This could lead to a false-negative (the strategy will then be reported as non-b
- `lookahead-analysis` has access to everything that backtesting has too.
  Please avoid edge-case configurations such as enabling position stacking.
  If you decide to do so, then make doubly sure that you never run out of `max_open_trades` slots, nor of leftover money in your wallet.
- In the results table, the `biased_indicators` column will falsely flag FreqAI target indicators defined in `set_freqai_targets()` as biased. These are not biased and can safely be ignored.
@@ -1,7 +1,7 @@
markdown==3.7
mkdocs==1.6.1
mkdocs-material==9.5.34
mkdocs-material==9.5.36
mdx_truly_sane_lists==1.3
pymdown-extensions==10.9
pymdown-extensions==10.10.1
jinja2==3.1.4
mike==2.1.3
@@ -18,7 +18,7 @@ The following attributes / properties are available for each individual trade -
| `open_rate` | float | Rate this trade was entered at (Avg. entry rate in case of trade-adjustments). |
| `close_rate` | float | Close rate - only set when is_open = False. |
| `stake_amount` | float | Amount in Stake (or Quote) currency. |
| `amount` | float | Amount in Asset / Base currency that is currently owned. |
| `amount` | float | Amount in Asset / Base currency that is currently owned. Will be 0.0 until the initial order fills. |
| `open_date` | datetime | Timestamp when trade was opened **use `open_date_utc` instead** |
| `open_date_utc` | datetime | Timestamp when trade was opened - in UTC. |
| `close_date` | datetime | Timestamp when trade was closed **use `close_date_utc` instead** |
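Because `amount` is now 0.0 until the initial order fills, strategy callbacks that rely on the owned amount should guard against that case. A minimal, hypothetical sketch of such a guard inside a strategy callback (the surrounding strategy class is omitted):

```python
from datetime import datetime

from freqtrade.persistence import Trade


def custom_exit(self, pair: str, trade: Trade, current_time: datetime,
                current_rate: float, current_profit: float, **kwargs):
    # Nothing is owned yet - the initial entry order has not filled.
    if trade.amount == 0.0:
        return None
    # ... normal exit logic based on the filled position would go here ...
    return None
```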
@@ -130,20 +130,20 @@ Most properties here can be None as they are dependent on the exchange response.

| Attribute | DataType | Description |
|------------|-------------|-------------|
`trade` | Trade | Trade object this order is attached to
`ft_pair` | string | Pair this order is for
`ft_is_open` | boolean | is the order filled?
`order_type` | string | Order type as defined on the exchange - usually market, limit or stoploss
`status` | string | Status as defined by ccxt. Usually open, closed, expired or canceled
`side` | string | Buy or Sell
`price` | float | Price the order was placed at
`average` | float | Average price the order filled at
`amount` | float | Amount in base currency
`filled` | float | Filled amount (in base currency)
`remaining` | float | Remaining amount
`cost` | float | Cost of the order - usually average * filled (*Exchange dependent on futures, may contain the cost with or without leverage and may be in contracts.*)
`stake_amount` | float | Stake amount used for this order. *Added in 2023.7.*
`order_date` | datetime | Order creation date **use `order_date_utc` instead**
`order_date_utc` | datetime | Order creation date (in UTC)
`order_fill_date` | datetime | Order fill date **use `order_fill_utc` instead**
`order_fill_date_utc` | datetime | Order fill date
| `trade` | Trade | Trade object this order is attached to |
| `ft_pair` | string | Pair this order is for |
| `ft_is_open` | boolean | is the order filled? |
| `order_type` | string | Order type as defined on the exchange - usually market, limit or stoploss |
| `status` | string | Status as defined by ccxt. Usually open, closed, expired or canceled |
| `side` | string | Buy or Sell |
| `price` | float | Price the order was placed at |
| `average` | float | Average price the order filled at |
| `amount` | float | Amount in base currency |
| `filled` | float | Filled amount (in base currency) |
| `remaining` | float | Remaining amount |
| `cost` | float | Cost of the order - usually average * filled (*Exchange dependent on futures, may contain the cost with or without leverage and may be in contracts.*) |
| `stake_amount` | float | Stake amount used for this order. *Added in 2023.7.* |
| `order_date` | datetime | Order creation date **use `order_date_utc` instead** |
| `order_date_utc` | datetime | Order creation date (in UTC) |
| `order_fill_date` | datetime | Order fill date **use `order_fill_utc` instead** |
| `order_fill_date_utc` | datetime | Order fill date |
@@ -228,6 +228,8 @@ ARGS_ANALYZE_ENTRIES_EXITS = [
    "enter_reason_list",
    "exit_reason_list",
    "indicator_list",
    "entry_only",
    "exit_only",
    "timerange",
    "analysis_rejected",
    "analysis_to_csv",
@@ -274,8 +274,6 @@ def start_new_config(args: Dict[str, Any]) -> None:
def start_show_config(args: Dict[str, Any]) -> None:
    config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE, set_dry=False)

    # TODO: Sanitize from sensitive info before printing

    print("Your combined configuration is:")
    config_sanitized = sanitize_config(
        config["original_config"], show_sensitive=args.get("show_sensitive", False)
@@ -719,6 +719,12 @@ AVAILABLE_CLI_OPTIONS = {
        nargs="+",
        default=[],
    ),
    "entry_only": Arg(
        "--entry-only", help=("Only analyze entry signals."), action="store_true", default=False
    ),
    "exit_only": Arg(
        "--exit-only", help=("Only analyze exit signals."), action="store_true", default=False
    ),
    "analysis_rejected": Arg(
        "--rejected-signals",
        help="Analyse rejected signals",
@@ -15,7 +15,14 @@ from freqtrade.configuration.directory_operations import create_datadir, create_
from freqtrade.configuration.environment_vars import enironment_vars_to_dict
from freqtrade.configuration.load_config import load_file, load_from_files
from freqtrade.constants import Config
from freqtrade.enums import NON_UTIL_MODES, TRADE_MODES, CandleType, RunMode, TradingMode
from freqtrade.enums import (
    NON_UTIL_MODES,
    TRADE_MODES,
    CandleType,
    MarginMode,
    RunMode,
    TradingMode,
)
from freqtrade.exceptions import OperationalException
from freqtrade.loggers import setup_logging
from freqtrade.misc import deep_merge_dicts, parse_db_uri_for_logging
@@ -389,6 +396,7 @@ class Configuration:
            config.get("trading_mode", "spot") or "spot"
        )
        config["trading_mode"] = TradingMode(config.get("trading_mode", "spot") or "spot")
        config["margin_mode"] = MarginMode(config.get("margin_mode", "") or "")
        self._args_to_config(
            config, argname="candle_types", logstring="Detected --candle-types: {}"
        )
@@ -399,6 +407,8 @@ class Configuration:
            ("enter_reason_list", "Analysis enter tag list: {}"),
            ("exit_reason_list", "Analysis exit tag list: {}"),
            ("indicator_list", "Analysis indicator list: {}"),
            ("entry_only", "Only analyze entry signals: {}"),
            ("exit_only", "Only analyze exit signals: {}"),
            ("timerange", "Filter trades by timerange: {}"),
            ("analysis_rejected", "Analyse rejected signals: {}"),
            ("analysis_to_csv", "Store analysis tables to CSV: {}"),
@@ -12,7 +12,7 @@ from typing import Tuple
import numpy as np
import pandas as pd

from freqtrade.constants import DEFAULT_ORDERFLOW_COLUMNS
from freqtrade.constants import DEFAULT_ORDERFLOW_COLUMNS, Config
from freqtrade.enums import RunMode
from freqtrade.exceptions import DependencyException

@@ -63,7 +63,7 @@ def _calculate_ohlcv_candle_start_and_end(df: pd.DataFrame, timeframe: str):

def populate_dataframe_with_trades(
    cached_grouped_trades: OrderedDict[Tuple[datetime, datetime], pd.DataFrame],
    config,
    config: Config,
    dataframe: pd.DataFrame,
    trades: pd.DataFrame,
) -> Tuple[pd.DataFrame, OrderedDict[Tuple[datetime, datetime], pd.DataFrame]]:
@@ -520,7 +520,7 @@ class DataProvider:
            return self._exchange.trades(
                (pair, timeframe or self._config["timeframe"], _candle_type), copy=copy
            )
        elif self.runmode in (RunMode.BACKTEST, RunMode.HYPEROPT):
        else:
            data_handler = get_datahandler(
                self._config["datadir"], data_format=self._config["dataformat_trades"]
            )

@@ -529,9 +529,6 @@ class DataProvider:
            )
            return trades_df

        else:
            return DataFrame()

    def market(self, pair: str) -> Optional[Dict[str, Any]]:
        """
        Return market data for the pair
@@ -1,6 +1,6 @@
import logging
from pathlib import Path
from typing import List
from typing import Dict, List

import joblib
import pandas as pd

@@ -8,6 +8,7 @@ import pandas as pd
from freqtrade.configuration import TimeRange
from freqtrade.constants import Config
from freqtrade.data.btanalysis import (
    BT_DATA_COLUMNS,
    get_latest_backtest_filename,
    load_backtest_data,
    load_backtest_stats,
@@ -47,9 +48,14 @@ def _load_signal_candles(backtest_dir: Path):
    return _load_backtest_analysis_data(backtest_dir, "signals")


def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_candles):
    analysed_trades_dict = {}
    analysed_trades_dict[strategy_name] = {}
def _load_exit_signal_candles(backtest_dir: Path) -> Dict[str, Dict[str, pd.DataFrame]]:
    return _load_backtest_analysis_data(backtest_dir, "exited")


def _process_candles_and_indicators(
    pairlist, strategy_name, trades, signal_candles, date_col: str = "open_date"
):
    analysed_trades_dict: Dict[str, Dict] = {strategy_name: {}}

    try:
        logger.info(f"Processing {strategy_name} : {len(pairlist)} pairs")
@@ -57,7 +63,7 @@ def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_cand
        for pair in pairlist:
            if pair in signal_candles[strategy_name]:
                analysed_trades_dict[strategy_name][pair] = _analyze_candles_and_indicators(
                    pair, trades, signal_candles[strategy_name][pair]
                    pair, trades, signal_candles[strategy_name][pair], date_col
                )
    except Exception as e:
        print(f"Cannot process entry/exit reasons for {strategy_name}: ", e)
@@ -65,7 +71,9 @@ def _process_candles_and_indicators(pairlist, strategy_name, trades, signal_cand
    return analysed_trades_dict


def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles: pd.DataFrame):
def _analyze_candles_and_indicators(
    pair: str, trades: pd.DataFrame, signal_candles: pd.DataFrame, date_col: str = "open_date"
) -> pd.DataFrame:
    buyf = signal_candles

    if len(buyf) > 0:
@@ -75,8 +83,8 @@ def _analyze_candles_and_indicators(pair, trades: pd.DataFrame, signal_candles:
        trades_inds = pd.DataFrame()

        if trades_red.shape[0] > 0 and buyf.shape[0] > 0:
            for t, v in trades_red.open_date.items():
                allinds = buyf.loc[(buyf["date"] < v)]
            for t, v in trades_red.iterrows():
                allinds = buyf.loc[(buyf["date"] < v[date_col])]
                if allinds.shape[0] > 0:
                    tmp_inds = allinds.iloc[[-1]]
@@ -235,7 +243,7 @@ def _select_rows_by_tags(df, enter_reason_list, exit_reason_list):

def prepare_results(
    analysed_trades, stratname, enter_reason_list, exit_reason_list, timerange=None
):
) -> pd.DataFrame:
    res_df = pd.DataFrame()
    for pair, trades in analysed_trades[stratname].items():
        if trades.shape[0] > 0:
@@ -252,8 +260,11 @@

def print_results(
    res_df: pd.DataFrame,
    exit_df: pd.DataFrame,
    analysis_groups: List[str],
    indicator_list: List[str],
    entry_only: bool,
    exit_only: bool,
    csv_path: Path,
    rejected_signals=None,
    to_csv=False,
@@ -278,9 +289,11 @@ def print_results(
            for ind in indicator_list:
                if ind in res_df:
                    available_inds.append(ind)
            ilist = ["pair", "enter_reason", "exit_reason"] + available_inds

            merged_df = _merge_dfs(res_df, exit_df, available_inds, entry_only, exit_only)

            _print_table(
                res_df[ilist],
                merged_df,
                sortcols=["exit_reason"],
                show_index=False,
                name="Indicators:",
@@ -291,6 +304,36 @@ def print_results(
        print("\nNo trades to show")


def _merge_dfs(
    entry_df: pd.DataFrame,
    exit_df: pd.DataFrame,
    available_inds: List[str],
    entry_only: bool,
    exit_only: bool,
):
    merge_on = ["pair", "open_date"]
    signal_wide_indicators = list(set(available_inds) - set(BT_DATA_COLUMNS))
    columns_to_keep = merge_on + ["enter_reason", "exit_reason"]

    if exit_df is None or exit_df.empty or entry_only is True:
        return entry_df[columns_to_keep + available_inds]

    if exit_only is True:
        return pd.merge(
            entry_df[columns_to_keep],
            exit_df[merge_on + signal_wide_indicators],
            on=merge_on,
            suffixes=(" (entry)", " (exit)"),
        )

    return pd.merge(
        entry_df[columns_to_keep + available_inds],
        exit_df[merge_on + signal_wide_indicators],
        on=merge_on,
        suffixes=(" (entry)", " (exit)"),
    )


def _print_table(
    df: pd.DataFrame, sortcols=None, *, show_index=False, name=None, to_csv=False, csv_path: Path
):
@@ -316,9 +359,16 @@ def process_entry_exit_reasons(config: Config):
    enter_reason_list = config.get("enter_reason_list", ["all"])
    exit_reason_list = config.get("exit_reason_list", ["all"])
    indicator_list = config.get("indicator_list", [])
    entry_only = config.get("entry_only", False)
    exit_only = config.get("exit_only", False)
    do_rejected = config.get("analysis_rejected", False)
    to_csv = config.get("analysis_to_csv", False)
    csv_path = Path(config.get("analysis_csv_path", config["exportfilename"]))

    if entry_only is True and exit_only is True:
        raise OperationalException(
            "Cannot use --entry-only and --exit-only at the same time. Please choose one."
        )
    if to_csv and not csv_path.is_dir():
        raise OperationalException(f"Specified directory {csv_path} does not exist.")
@@ -333,6 +383,7 @@ def process_entry_exit_reasons(config: Config):

        if trades is not None and not trades.empty:
            signal_candles = _load_signal_candles(config["exportfilename"])
            exit_signals = _load_exit_signal_candles(config["exportfilename"])

            rej_df = None
            if do_rejected:
@@ -345,22 +396,35 @@ def process_entry_exit_reasons(config: Config):
                    timerange=timerange,
                )

            analysed_trades_dict = _process_candles_and_indicators(
                config["exchange"]["pair_whitelist"], strategy_name, trades, signal_candles
            )

            res_df = prepare_results(
                analysed_trades_dict,
                strategy_name,
            entry_df = _generate_dfs(
                config["exchange"]["pair_whitelist"],
                enter_reason_list,
                exit_reason_list,
                timerange=timerange,
                signal_candles,
                strategy_name,
                timerange,
                trades,
                "open_date",
            )

            exit_df = _generate_dfs(
                config["exchange"]["pair_whitelist"],
                enter_reason_list,
                exit_reason_list,
                exit_signals,
                strategy_name,
                timerange,
                trades,
                "close_date",
            )

            print_results(
                res_df,
                entry_df,
                exit_df,
                analysis_groups,
                indicator_list,
                entry_only,
                exit_only,
                rejected_signals=rej_df,
                to_csv=to_csv,
                csv_path=csv_path,
@@ -368,3 +432,30 @@ def process_entry_exit_reasons(config: Config):

    except ValueError as e:
        raise OperationalException(e) from e


def _generate_dfs(
    pairlist: list,
    enter_reason_list: list,
    exit_reason_list: list,
    signal_candles: Dict,
    strategy_name: str,
    timerange: TimeRange,
    trades: pd.DataFrame,
    date_col: str,
) -> pd.DataFrame:
    analysed_trades_dict = _process_candles_and_indicators(
        pairlist,
        strategy_name,
        trades,
        signal_candles,
        date_col,
    )
    res_df = prepare_results(
        analysed_trades_dict,
        strategy_name,
        enter_reason_list,
        exit_reason_list,
        timerange=timerange,
    )
    return res_df
@@ -17,7 +17,6 @@ from freqtrade.constants import (
from freqtrade.data.converter import (
    clean_ohlcv_dataframe,
    convert_trades_to_ohlcv,
    ohlcv_to_dataframe,
    trades_df_remove_duplicates,
    trades_list_to_df,
)
@@ -273,7 +272,7 @@ def _download_pair_history(
        )

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(
        new_dataframe = exchange.get_historic_ohlcv(
            pair=pair,
            timeframe=timeframe,
            since_ms=(
@@ -285,10 +284,6 @@ def _download_pair_history(
            candle_type=candle_type,
            until_ms=until_ms if until_ms else None,
        )
        # TODO: Maybe move parsing to exchange class (?)
        new_dataframe = ohlcv_to_dataframe(
            new_data, timeframe, pair, fill_missing=False, drop_incomplete=True
        )
        if data.empty:
            data = new_dataframe
        else:
@@ -610,9 +605,6 @@ def download_data_main(config: Config) -> None:
    if "timeframes" not in config:
        config["timeframes"] = DL_DATA_TIMEFRAMES

    # Manual validations of relevant settings
    if not config["exchange"].get("skip_pair_validation", False):
        exchange.validate_pairs(expanded_pairs)
    logger.info(
        f"About to download pairs: {expanded_pairs}, "
        f"intervals: {config['timeframes']} to {config['datadir']}"
File diff suppressed because it is too large.
@@ -90,10 +90,8 @@ class Bybit(Exchange):
        # Returns a tuple of bools, first for margin, second for Account
        if is_unified and len(is_unified) > 1 and is_unified[1]:
            self.unified_account = True
            logger.info("Bybit: Unified account.")
            raise OperationalException(
                "Bybit: Unified account is not supported. "
                "Please use a standard (sub)account."
            logger.info(
                "Bybit: Unified account. Assuming dedicated subaccount for this bot."
            )
        else:
            self.unified_account = False
@@ -164,6 +164,10 @@ F = TypeVar("F", bound=Callable[..., Any])
def retrier(_func: F) -> F: ...


@overload
def retrier(_func: F, *, retries=API_RETRY_COUNT) -> F: ...


@overload
def retrier(*, retries=API_RETRY_COUNT) -> Callable[[F], F]: ...
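For context, the decorator above can be used either bare or with an explicit retry count, and (as elsewhere in this change) it can also be applied functionally, e.g. `retrier(self._load_async_markets, retries=retries)(reload=True)`. A hypothetical usage sketch (the client class is made up for illustration):

```python
from freqtrade.exchange.common import retrier


class MyExchangeClient:
    @retrier  # uses the default retry count (API_RETRY_COUNT)
    def fetch_balance(self):
        ...

    @retrier(retries=3)  # explicit retry count
    def fetch_positions(self):
        ...
```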
@@ -104,7 +104,6 @@ from freqtrade.misc import (
    file_load_json,
    safe_value_fallback2,
)
from freqtrade.plugins.pairlist.pairlist_helpers import expand_pairlist
from freqtrade.util import dt_from_ts, dt_now
from freqtrade.util.datetime_helpers import dt_humanize_delta, dt_ts, format_ms_time
from freqtrade.util.periodic_cache import PeriodicCache
@@ -331,8 +330,6 @@ class Exchange:

        # Check if all pairs are available
        self.validate_stakecurrency(config["stake_currency"])
        if not config["exchange"].get("skip_pair_validation"):
            self.validate_pairs(config["exchange"]["pair_whitelist"])
        self.validate_ordertypes(config.get("order_types", {}))
        self.validate_order_time_in_force(config.get("order_time_in_force", {}))
        self.validate_trading_mode_and_margin_mode(self.trading_mode, self.margin_mode)
@@ -623,11 +620,21 @@ class Exchange:
        if self._exchange_ws:
            self._exchange_ws.reset_connections()

    async def _api_reload_markets(self, reload: bool = False) -> Dict[str, Any]:
        try:
            return await self._api_async.load_markets(reload=reload, params={})
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f"Error in reload_markets due to {e.__class__.__name__}. Message: {e}"
            ) from e
        except ccxt.BaseError as e:
            raise TemporaryError(e) from e

    def _load_async_markets(self, reload: bool = False) -> Dict[str, Any]:
        try:
            markets = self.loop.run_until_complete(
                self._api_async.load_markets(reload=reload, params={})
            )
            markets = self.loop.run_until_complete(self._api_reload_markets(reload=reload))

            if isinstance(markets, Exception):
                raise markets
@@ -651,8 +658,10 @@ class Exchange:
            return None
        logger.debug("Performing scheduled market reload..")
        try:
            # on initial load, we retry 3 times to ensure we get the markets
            retries: int = 3 if force else 0
            # Reload async markets, then assign them to sync api
            self._markets = self._load_async_markets(reload=True)
            self._markets = retrier(self._load_async_markets, retries=retries)(reload=True)
            self._api.set_markets(self._api_async.markets, self._api_async.currencies)
            # Assign options array, as it contains some temporary information from the exchange.
            self._api.options = self._api_async.options
@@ -690,54 +699,6 @@ class Exchange:
                f"Available currencies are: {', '.join(quote_currencies)}"
            )

    def validate_pairs(self, pairs: List[str]) -> None:
        """
        Checks if all given pairs are tradable on the current exchange.
        :param pairs: list of pairs
        :raise: OperationalException if one pair is not available
        :return: None
        """

        if not self.markets:
            logger.warning("Unable to validate pairs (assuming they are correct).")
            return
        extended_pairs = expand_pairlist(pairs, list(self.markets), keep_invalid=True)
        invalid_pairs = []
        for pair in extended_pairs:
            # Note: ccxt has BaseCurrency/QuoteCurrency format for pairs
            if self.markets and pair not in self.markets:
                raise OperationalException(
                    f"Pair {pair} is not available on {self.name} {self.trading_mode}. "
                    f"Please remove {pair} from your whitelist."
                )

            # From ccxt Documentation:
            # markets.info: An associative array of non-common market properties,
            # including fees, rates, limits and other general market information.
            # The internal info array is different for each particular market,
            # its contents depend on the exchange.
            # It can also be a string or similar ... so we need to verify that first.
            elif isinstance(self.markets[pair].get("info"), dict) and self.markets[pair].get(
                "info", {}
            ).get("prohibitedIn", False):
                # Warn users about restricted pairs in whitelist.
                # We cannot determine reliably if Users are affected.
                logger.warning(
                    f"Pair {pair} is restricted for some users on this exchange."
                    f"Please check if you are impacted by this restriction "
                    f"on the exchange and eventually remove {pair} from your whitelist."
                )
            if (
                self._config["stake_currency"]
                and self.get_pair_quote_currency(pair) != self._config["stake_currency"]
            ):
                invalid_pairs.append(pair)
        if invalid_pairs:
            raise OperationalException(
                f"Stake-currency '{self._config['stake_currency']}' not compatible with "
                f"pair-whitelist. Please remove the following pairs: {invalid_pairs}"
            )

    def get_valid_pair_combination(self, curr_1: str, curr_2: str) -> str:
        """
        Get valid pair combination of curr_1 and curr_2 by trying both combinations.
@@ -2262,7 +2223,7 @@ class Exchange:
        candle_type: CandleType,
        is_new_pair: bool = False,
        until_ms: Optional[int] = None,
    ) -> List:
    ) -> DataFrame:
        """
        Get candle history using asyncio and returns the list of candles.
        Handles all async work for this.
@@ -2272,7 +2233,7 @@ class Exchange:
        :param since_ms: Timestamp in milliseconds to get history from
        :param until_ms: Timestamp in milliseconds to get history up to
        :param candle_type: '', mark, index, premiumIndex, or funding_rate
        :return: List with candle (OHLCV) data
        :return: Dataframe with candle (OHLCV) data
        """
        pair, _, _, data, _ = self.loop.run_until_complete(
            self._async_get_historic_ohlcv(
@@ -2285,7 +2246,7 @@ class Exchange:
            )
        )
        logger.info(f"Downloaded data for {pair} with length {len(data)}.")
        return data
        return ohlcv_to_dataframe(data, timeframe, pair, fill_missing=False, drop_incomplete=True)

    async def _async_get_historic_ohlcv(
        self,
@@ -3633,7 +3594,7 @@ class Exchange:
        Wherein, "+" or "-" depends on whether the contract goes long or short:
        "-" for long, and "+" for short.

        okex: https://www.okex.com/support/hc/en-us/articles/
        okex: https://www.okx.com/support/hc/en-us/articles/
        360053909592-VI-Introduction-to-the-isolated-mode-of-Single-Multi-currency-Portfolio-margin

        :param pair: Pair to calculate liquidation price for
@@ -78,6 +78,7 @@ class Kraken(Exchange):
                    # x["side"], x["amount"],
                )
                for x in orders
                if x["remaining"] is not None and (x["side"] == "sell" or x["price"] is not None)
            ]
            for bal in balances:
                if not isinstance(balances[bal], dict):
@@ -22,6 +22,7 @@ from freqtrade.edge import Edge
from freqtrade.enums import (
    ExitCheckTuple,
    ExitType,
    MarginMode,
    RPCMessageType,
    SignalDirection,
    State,
@@ -61,7 +62,7 @@ from freqtrade.rpc.rpc_types import (
)
from freqtrade.strategy.interface import IStrategy
from freqtrade.strategy.strategy_wrapper import strategy_safe_wrapper
from freqtrade.util import MeasureTime
from freqtrade.util import FtPrecise, MeasureTime
from freqtrade.util.migrations.binance_mig import migrate_binance_futures_names
from freqtrade.wallets import Wallets
@@ -108,6 +109,7 @@ class FreqtradeBot(LoggingMixin):
        PairLocks.timeframe = self.config["timeframe"]

        self.trading_mode: TradingMode = self.config.get("trading_mode", TradingMode.SPOT)
        self.margin_mode: MarginMode = self.config.get("margin_mode", MarginMode.NONE)
        self.last_process: Optional[datetime] = None

        # RPC runs in separate threads, can start handling external commands just after
@@ -780,7 +782,14 @@ class FreqtradeBot(LoggingMixin):
        if stake_amount is not None and stake_amount < 0.0:
            # We should decrease our position
            amount = self.exchange.amount_to_contract_precision(
                trade.pair, abs(float(stake_amount * trade.amount / trade.stake_amount))
                trade.pair,
                abs(
                    float(
                        FtPrecise(stake_amount)
                        * FtPrecise(trade.amount)
                        / FtPrecise(trade.stake_amount)
                    )
                ),
            )

            if amount == 0.0:
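The switch to `FtPrecise` keeps the partial-exit ratio free of binary floating-point error until the final conversion. A small, hypothetical illustration of the pattern (numbers are made up):

```python
from freqtrade.util import FtPrecise

stake_amount = -12.345
trade_amount = 0.00042
trade_stake_amount = 24.69

# Plain float arithmetic can accumulate representation error in the intermediate results.
plain = abs(float(stake_amount * trade_amount / trade_stake_amount))

# FtPrecise performs the multiplication/division with string-based precision and
# only converts to float at the very end.
precise = abs(
    float(FtPrecise(stake_amount) * FtPrecise(trade_amount) / FtPrecise(trade_stake_amount))
)

print(plain, precise)
```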
@ -976,7 +985,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
base_currency=base_currency,
|
||||
stake_currency=self.config["stake_currency"],
|
||||
stake_amount=stake_amount,
|
||||
amount=amount,
|
||||
amount=0,
|
||||
is_open=True,
|
||||
amount_requested=amount_requested,
|
||||
fee_open=fee,
|
||||
|
@ -2229,7 +2238,11 @@ class FreqtradeBot(LoggingMixin):
|
|||
# TODO: should shorting/leverage be supported by Edge,
|
||||
# then this will need to be fixed.
|
||||
trade.adjust_stop_loss(trade.open_rate, self.strategy.stoploss, initial=True)
|
||||
if order.ft_order_side == trade.entry_side or (trade.amount > 0 and trade.is_open):
|
||||
if (
|
||||
order.ft_order_side == trade.entry_side
|
||||
or (trade.amount > 0 and trade.is_open)
|
||||
or self.margin_mode == MarginMode.CROSS
|
||||
):
|
||||
# Must also run for partial exits
|
||||
# TODO: Margin will need to use interest_rate as well.
|
||||
# interest_rate = self.exchange.get_interest_rate()
|
||||
|
|
|
@@ -1,7 +1,7 @@
import logging
import time
from pathlib import Path
from typing import Any, Dict, List
from typing import Any, Dict, List, Union

import pandas as pd
from rich.text import Text
@@ -19,7 +19,9 @@ logger = logging.getLogger(__name__)
class LookaheadAnalysisSubFunctions:
    @staticmethod
    def text_table_lookahead_analysis_instances(
        config: Dict[str, Any], lookahead_instances: List[LookaheadAnalysis]
        config: Dict[str, Any],
        lookahead_instances: List[LookaheadAnalysis],
        caption: Union[str, None] = None,
    ):
        headers = [
            "filename",
@@ -65,7 +67,9 @@ class LookaheadAnalysisSubFunctions:
            ]
        )

        print_rich_table(data, headers, summary="Lookahead Analysis")
        print_rich_table(
            data, headers, summary="Lookahead Analysis", table_kwargs={"caption": caption}
        )
        return data

    @staticmethod
@@ -239,8 +243,24 @@ class LookaheadAnalysisSubFunctions:

        # report the results
        if lookaheadAnalysis_instances:
            caption: Union[str, None] = None
            if any(
                [
                    any(
                        [
                            indicator.startswith("&")
                            for indicator in inst.current_analysis.false_indicators
                        ]
                    )
                    for inst in lookaheadAnalysis_instances
                ]
            ):
                caption = (
                    "Any indicators in 'biased_indicators' which are used within "
                    "set_freqai_targets() can be ignored."
                )
            LookaheadAnalysisSubFunctions.text_table_lookahead_analysis_instances(
                config, lookaheadAnalysis_instances
                config, lookaheadAnalysis_instances, caption=caption
            )
            if config.get("lookahead_analysis_exportfilename") is not None:
                LookaheadAnalysisSubFunctions.export_to_csv(config, lookaheadAnalysis_instances)
@@ -122,6 +122,7 @@ class Backtesting:
        self.processed_dfs: Dict[str, Dict] = {}
        self.rejected_dict: Dict[str, List] = {}
        self.rejected_df: Dict[str, Dict] = {}
        self.exited_dfs: Dict[str, Dict] = {}

        self._exchange_name = self.config["exchange"]["name"]
        if not exchange:
@@ -1098,7 +1099,7 @@ class Backtesting:
                open_rate_requested=propose_rate,
                open_date=current_time,
                stake_amount=stake_amount,
                amount=amount,
                amount=0,
                amount_requested=amount,
                fee_open=self.fee,
                fee_close=self.fee,
@@ -1165,12 +1166,10 @@ class Backtesting:
            self._exit_trade(
                trade, exit_row, exit_row[OPEN_IDX], trade.amount, ExitType.FORCE_EXIT.value
            )
            trade.orders[-1].close_bt_order(exit_row[DATE_IDX].to_pydatetime(), trade)

            trade.close_date = exit_row[DATE_IDX].to_pydatetime()
            trade.exit_reason = ExitType.FORCE_EXIT.value
            trade.close(exit_row[OPEN_IDX], show_msg=False)
            LocalTrade.close_bt_trade(trade)
            self._process_exit_order(
                trade.orders[-1], trade, exit_row[DATE_IDX].to_pydatetime(), exit_row, pair
            )

    def trade_slot_available(self, open_trade_count: int) -> bool:
        # Always allow trades when max_open_trades is enabled.
@@ -1566,11 +1565,14 @@ class Backtesting:
            and self.dataprovider.runmode == RunMode.BACKTEST
        ):
            self.processed_dfs[strategy_name] = generate_trade_signal_candles(
                preprocessed_tmp, results
                preprocessed_tmp, results, "open_date"
            )
            self.rejected_df[strategy_name] = generate_rejected_signals(
                preprocessed_tmp, self.rejected_dict
            )
            self.exited_dfs[strategy_name] = generate_trade_signal_candles(
                preprocessed_tmp, results, "close_date"
            )

        return min_date, max_date
@@ -1646,7 +1648,11 @@ class Backtesting:
            and self.dataprovider.runmode == RunMode.BACKTEST
        ):
            store_backtest_analysis_results(
                self.config["exportfilename"], self.processed_dfs, self.rejected_df, dt_appendix
                self.config["exportfilename"],
                self.processed_dfs,
                self.rejected_df,
                self.exited_dfs,
                dt_appendix,
            )

        # Results may be mixed up now. Sort them so they follow --strategy-list order.
@@ -263,12 +263,32 @@ def text_table_add_metrics(strat_results: Dict) -> None:
        else []
    )

    trading_mode = (
        (
            [
                (
                    "Trading Mode",
                    (
                        ""
                        if not strat_results.get("margin_mode")
                        or strat_results.get("trading_mode", "spot") == "spot"
                        else f"{strat_results['margin_mode'].capitalize()} "
                    )
                    + f"{strat_results['trading_mode'].capitalize()}",
                )
            ]
        )
        if "trading_mode" in strat_results
        else []
    )

    # Newly added fields should be ignored if they are missing in strat_results. hyperopt-show
    # command stores these results and newer version of freqtrade must be able to handle old
    # results with missing new fields.
    metrics = [
        ("Backtesting from", strat_results["backtest_start"]),
        ("Backtesting to", strat_results["backtest_end"]),
        *trading_mode,
        ("Max open trades", strat_results["max_open_trades"]),
        ("", ""),  # Empty line to improve readability
        (
@@ -90,7 +90,12 @@ def _store_backtest_analysis_data(


def store_backtest_analysis_results(
    recordfilename: Path, candles: Dict[str, Dict], trades: Dict[str, Dict], dtappendix: str
    recordfilename: Path,
    candles: Dict[str, Dict],
    trades: Dict[str, Dict],
    exited: Dict[str, Dict],
    dtappendix: str,
) -> None:
    _store_backtest_analysis_data(recordfilename, candles, dtappendix, "signals")
    _store_backtest_analysis_data(recordfilename, trades, dtappendix, "rejected")
    _store_backtest_analysis_data(recordfilename, exited, dtappendix, "exited")
@@ -25,8 +25,8 @@ logger = logging.getLogger(__name__)


def generate_trade_signal_candles(
    preprocessed_df: Dict[str, DataFrame], bt_results: Dict[str, Any]
) -> DataFrame:
    preprocessed_df: Dict[str, DataFrame], bt_results: Dict[str, Any], date_col: str
) -> Dict[str, DataFrame]:
    signal_candles_only = {}
    for pair in preprocessed_df.keys():
        signal_candles_only_df = DataFrame()
@@ -36,8 +36,8 @@ def generate_trade_signal_candles(
        pairresults = resdf.loc[(resdf["pair"] == pair)]

        if pairdf.shape[0] > 0:
            for t, v in pairresults.open_date.items():
                allinds = pairdf.loc[(pairdf["date"] < v)]
            for t, v in pairresults.iterrows():
                allinds = pairdf.loc[(pairdf["date"] < v[date_col])]
                signal_inds = allinds.iloc[[-1]]
                signal_candles_only_df = concat(
                    [signal_candles_only_df.infer_objects(), signal_inds.infer_objects()]
@@ -504,6 +504,8 @@ def generate_strategy_stats(
        "exit_profit_only": config["exit_profit_only"],
        "exit_profit_offset": config["exit_profit_offset"],
        "ignore_roi_if_entry_signal": config["ignore_roi_if_entry_signal"],
        "trading_mode": config["trading_mode"],
        "margin_mode": config["margin_mode"],
        **periodic_breakdown,
        **daily_stats,
        **trade_stats,
@@ -342,8 +342,8 @@ class Order(ModelBase):
            order_id=str(order["id"]),
            ft_order_side=side,
            ft_pair=pair,
            ft_amount=amount if amount else order["amount"],
            ft_price=price if price else order["price"],
            ft_amount=amount or order.get("amount", None) or 0.0,
            ft_price=price or order.get("price", None),
        )

        o.update_from_ccxt_object(order)
@@ -1175,10 +1175,7 @@ class LocalTrade:
        else:
            open_trade_value = self._calc_open_trade_value(amount, open_rate)

        short_close_zero = self.is_short and close_trade_value == 0.0
        long_close_zero = not self.is_short and open_trade_value == 0.0

        if short_close_zero or long_close_zero:
        if open_trade_value == 0.0:
            return 0.0
        else:
            if self.is_short:
@@ -61,14 +61,15 @@ class StaticPairList(IPairList):
        :param tickers: Tickers (from exchange.get_tickers). May be cached.
        :return: List of pairs
        """
        wl = self.verify_whitelist(
            self._config["exchange"]["pair_whitelist"], logger.info, keep_invalid=True
        )
        if self._allow_inactive:
            return self.verify_whitelist(
                self._config["exchange"]["pair_whitelist"], logger.info, keep_invalid=True
            )
            return wl
        else:
            return self._whitelist_for_active_markets(
                self.verify_whitelist(self._config["exchange"]["pair_whitelist"], logger.info)
            )
            # Avoid implicit filtering of "verify_whitelist" to keep
            # proper warnings in the log
            return self._whitelist_for_active_markets(wl)

    def filter_pairlist(self, pairlist: List[str], tickers: Tickers) -> List[str]:
        """
@@ -28,6 +28,7 @@ def expand_pairlist(
            except re.error as err:
                raise ValueError(f"Wildcard error in {pair_wc}, {err}")

        # Remove wildcard pairs that didn't have a match.
        result = [element for element in result if re.fullmatch(r"^[A-Za-z0-9:/-]+$", element)]

    else:
@@ -1274,7 +1274,7 @@ class Telegram(RPCHandler):
            InlineKeyboardButton(text=trade[1], callback_data=f"force_exit__{trade[0]}")
            for trade in trades
        ]
        buttons_aligned = self._layout_inline_keyboard_onecol(trade_buttons)
        buttons_aligned = self._layout_inline_keyboard(trade_buttons, cols=1)

        buttons_aligned.append(
            [InlineKeyboardButton(text="Cancel", callback_data="force_exit__cancel")]
@@ -1348,12 +1348,6 @@ class Telegram(RPCHandler):
    ) -> List[List[InlineKeyboardButton]]:
        return [buttons[i : i + cols] for i in range(0, len(buttons), cols)]

    @staticmethod
    def _layout_inline_keyboard_onecol(
        buttons: List[InlineKeyboardButton], cols=1
    ) -> List[List[InlineKeyboardButton]]:
        return [buttons[i : i + cols] for i in range(0, len(buttons), cols)]

    @authorized_only
    async def _force_enter(
        self, update: Update, context: CallbackContext, order_side: SignalDirection
@@ -99,11 +99,21 @@ class Wallets:
        used_stake = 0.0

        if self._config.get("trading_mode", "spot") != TradingMode.FUTURES:
            current_stake = self.start_cap + tot_profit - tot_in_trades
            total_stake = current_stake
            for trade in open_trades:
                curr = self._exchange.get_pair_base_currency(trade.pair)
                _wallets[curr] = Wallet(curr, trade.amount, 0, trade.amount)
                used_stake += sum(
                    o.stake_amount for o in trade.open_orders if o.ft_order_side == trade.entry_side
                )
                pending = sum(
                    o.amount
                    for o in trade.open_orders
                    if o.amount and o.ft_order_side == trade.exit_side
                )

                _wallets[curr] = Wallet(curr, trade.amount - pending, pending, trade.amount)

            current_stake = self.start_cap + tot_profit - tot_in_trades
            total_stake = current_stake + used_stake
        else:
            tot_in_trades = 0
            for position in open_trades:
@@ -7,10 +7,10 @@
-r docs/requirements-docs.txt

coveralls==4.0.1
ruff==0.6.3
ruff==0.6.7
mypy==1.11.2
pre-commit==3.8.0
pytest==8.3.2
pytest==8.3.3
pytest-asyncio==0.24.0
pytest-cov==5.0.0
pytest-mock==3.14.0

@@ -27,6 +27,6 @@ nbconvert==7.16.4
# mypy types
types-cachetools==5.5.0.20240820
types-filelock==3.2.7
types-requests==2.32.0.20240712
types-requests==2.32.0.20240914
types-tabulate==0.9.0.20240106
types-python-dateutil==2.9.0.20240821
types-python-dateutil==2.9.0.20240906
@@ -3,7 +3,7 @@

# Required for freqai-rl
torch==2.2.2; sys_platform == 'darwin' and platform_machine == 'x86_64'
torch==2.4.0; sys_platform != 'darwin' or platform_machine != 'x86_64'
torch==2.4.1; sys_platform != 'darwin' or platform_machine != 'x86_64'
gymnasium==0.29.1
stable_baselines3==2.3.2
sb3_contrib>=2.2.1
@@ -3,9 +3,9 @@
-r requirements-plot.txt

# Required for freqai
scikit-learn==1.5.1
scikit-learn==1.5.2
joblib==1.4.2
catboost==1.2.5; 'arm' not in platform_machine
catboost==1.2.7; 'arm' not in platform_machine
# Pin Matplotlib - it's depended on by catboost
# Temporary downgrade of matplotlib due to https://github.com/matplotlib/matplotlib/issues/28551
matplotlib==3.9.2
@@ -4,6 +4,6 @@
# Required for hyperopt
scipy==1.14.1; python_version >= "3.10"
scipy==1.13.1; python_version < "3.10"
scikit-learn==1.5.1
scikit-learn==1.5.2
ft-scikit-optimize==0.9.2
filelock==3.15.4
filelock==3.16.1
@@ -1,4 +1,4 @@
# Include all requirements to run the bot.
-r requirements.txt

plotly==5.24.0
plotly==5.24.1
@@ -1,21 +1,21 @@
numpy==1.26.4
pandas==2.2.2
pandas==2.2.3
bottleneck==1.4.0
numexpr==2.10.1
pandas-ta==0.3.14b

ccxt==4.3.93
ccxt==4.4.6
cryptography==42.0.8; platform_machine == 'armv7l'
cryptography==43.0.1; platform_machine != 'armv7l'
aiohttp==3.10.5
SQLAlchemy==2.0.32
python-telegram-bot==21.5
SQLAlchemy==2.0.35
python-telegram-bot==21.6
# can't be hard-pinned due to telegram-bot pinning httpx with ~
httpx>=0.24.1
humanize==4.10.0
cachetools==5.5.0
requests==2.32.3
urllib3==2.2.2
urllib3==2.2.3
jsonschema==4.23.0
TA-Lib==0.4.32
technical==1.4.4

@@ -26,7 +26,7 @@ jinja2==3.1.4
tables==3.9.1; python_version < "3.10"
tables==3.10.1; python_version >= "3.10"
joblib==1.4.2
rich==13.8.0
rich==13.8.1
pyarrow==17.0.0; platform_machine != 'armv7l'

# find first, C search in arrays

@@ -41,8 +41,8 @@ orjson==3.10.7
sdnotify==0.3.2

# API Server
fastapi==0.112.2
pydantic==2.8.2
fastapi==0.115.0
pydantic==2.9.2
uvicorn==0.30.6
pyjwt==2.9.0
aiofiles==24.1.0

@@ -53,13 +53,13 @@ questionary==2.0.1
prompt-toolkit==3.0.36
# Extensions to datetime library
python-dateutil==2.9.0.post0
pytz==2024.1
pytz==2024.2

#Futures
schedule==1.2.2

#WS Messages
websockets==13.0.1
websockets==13.1
janus==1.0.0

ast-comments==1.2.2
@@ -90,13 +90,6 @@ def test_historic_trades(mocker, default_conf, trades_history_df):
assert isinstance(data, DataFrame)
assert len(data) == len(trades_history_df)

# Random other runmode
default_conf["runmode"] = RunMode.UTIL_EXCHANGE
dp = DataProvider(default_conf, None)
data = dp.trades("UNITTEST/BTC", "5m")
assert isinstance(data, DataFrame)
assert len(data) == 0


def test_historic_ohlcv_dataformat(mocker, default_conf, ohlcv_history):
hdf5loadmock = MagicMock(return_value=ohlcv_history)
@@ -7,6 +7,7 @@ import pytest
from freqtrade.commands.analyze_commands import start_analysis_entries_exits
from freqtrade.commands.optimize_commands import start_backtesting
from freqtrade.enums import ExitType
from freqtrade.exceptions import OperationalException
from freqtrade.optimize.backtesting import Backtesting
from tests.conftest import get_args, patch_exchange, patched_configuration_load_config_file

@@ -18,7 +19,9 @@ def entryexitanalysis_cleanup() -> None:
Backtesting.cleanup()


def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, user_dir, capsys):
def test_backtest_analysis_on_entry_and_rejected_signals_nomock(
default_conf, mocker, caplog, testdatadir, user_dir, capsys
):
caplog.set_level(logging.INFO)
(user_dir / "backtest_results").mkdir(parents=True, exist_ok=True)

@@ -158,6 +161,15 @@ def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, use
assert "34.049" in captured.out
assert "0.104" in captured.out
assert "52.829" in captured.out
# assert indicator list
assert "close (entry)" in captured.out
assert "0.016" in captured.out
assert "rsi (entry)" in captured.out
assert "54.320" in captured.out
assert "close (exit)" in captured.out
assert "rsi (exit)" in captured.out
assert "52.829" in captured.out
assert "profit_abs" in captured.out

# test group 1
args = get_args(base_args + ["--analysis-groups", "1"])
@ -245,3 +257,306 @@ def test_backtest_analysis_nomock(default_conf, mocker, caplog, testdatadir, use
|
|||
start_analysis_entries_exits(args)
|
||||
captured = capsys.readouterr()
|
||||
assert "no rejected signals" in captured.out
|
||||
|
||||
|
||||
def test_backtest_analysis_with_invalid_config(
|
||||
default_conf, mocker, caplog, testdatadir, user_dir, capsys
|
||||
):
|
||||
caplog.set_level(logging.INFO)
|
||||
(user_dir / "backtest_results").mkdir(parents=True, exist_ok=True)
|
||||
|
||||
default_conf.update(
|
||||
{
|
||||
"use_exit_signal": True,
|
||||
"exit_profit_only": False,
|
||||
"exit_profit_offset": 0.0,
|
||||
"ignore_roi_if_entry_signal": False,
|
||||
}
|
||||
)
|
||||
patch_exchange(mocker)
|
||||
result1 = pd.DataFrame(
|
||||
{
|
||||
"pair": ["ETH/BTC", "LTC/BTC", "ETH/BTC", "LTC/BTC"],
|
||||
"profit_ratio": [0.025, 0.05, -0.1, -0.05],
|
||||
"profit_abs": [0.5, 2.0, -4.0, -2.0],
|
||||
"open_date": pd.to_datetime(
|
||||
[
|
||||
"2018-01-29 18:40:00",
|
||||
"2018-01-30 03:30:00",
|
||||
"2018-01-30 08:10:00",
|
||||
"2018-01-31 13:30:00",
|
||||
],
|
||||
utc=True,
|
||||
),
|
||||
"close_date": pd.to_datetime(
|
||||
[
|
||||
"2018-01-29 20:45:00",
|
||||
"2018-01-30 05:35:00",
|
||||
"2018-01-30 09:10:00",
|
||||
"2018-01-31 15:00:00",
|
||||
],
|
||||
utc=True,
|
||||
),
|
||||
"trade_duration": [235, 40, 60, 90],
|
||||
"is_open": [False, False, False, False],
|
||||
"stake_amount": [0.01, 0.01, 0.01, 0.01],
|
||||
"open_rate": [0.104445, 0.10302485, 0.10302485, 0.10302485],
|
||||
"close_rate": [0.104969, 0.103541, 0.102041, 0.102541],
|
||||
"is_short": [False, False, False, False],
|
||||
"enter_tag": [
|
||||
"enter_tag_long_a",
|
||||
"enter_tag_long_b",
|
||||
"enter_tag_long_a",
|
||||
"enter_tag_long_b",
|
||||
],
|
||||
"exit_reason": [
|
||||
ExitType.ROI.value,
|
||||
ExitType.EXIT_SIGNAL.value,
|
||||
ExitType.STOP_LOSS.value,
|
||||
ExitType.TRAILING_STOP_LOSS.value,
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
backtestmock = MagicMock(
|
||||
side_effect=[
|
||||
{
|
||||
"results": result1,
|
||||
"config": default_conf,
|
||||
"locks": [],
|
||||
"rejected_signals": 20,
|
||||
"timedout_entry_orders": 0,
|
||||
"timedout_exit_orders": 0,
|
||||
"canceled_trade_entries": 0,
|
||||
"canceled_entry_orders": 0,
|
||||
"replaced_entry_orders": 0,
|
||||
"final_balance": 1000,
|
||||
}
|
||||
]
|
||||
)
|
||||
mocker.patch(
|
||||
"freqtrade.plugins.pairlistmanager.PairListManager.whitelist",
|
||||
PropertyMock(return_value=["ETH/BTC", "LTC/BTC", "DASH/BTC"]),
|
||||
)
|
||||
mocker.patch("freqtrade.optimize.backtesting.Backtesting.backtest", backtestmock)
|
||||
|
||||
patched_configuration_load_config_file(mocker, default_conf)
|
||||
|
||||
args = [
|
||||
"backtesting",
|
||||
"--config",
|
||||
"config.json",
|
||||
"--datadir",
|
||||
str(testdatadir),
|
||||
"--user-data-dir",
|
||||
str(user_dir),
|
||||
"--timeframe",
|
||||
"5m",
|
||||
"--timerange",
|
||||
"1515560100-1517287800",
|
||||
"--export",
|
||||
"signals",
|
||||
"--cache",
|
||||
"none",
|
||||
]
|
||||
args = get_args(args)
|
||||
start_backtesting(args)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "BACKTESTING REPORT" in captured.out
|
||||
assert "EXIT REASON STATS" in captured.out
|
||||
assert "LEFT OPEN TRADES REPORT" in captured.out
|
||||
|
||||
base_args = [
|
||||
"backtesting-analysis",
|
||||
"--config",
|
||||
"config.json",
|
||||
"--datadir",
|
||||
str(testdatadir),
|
||||
"--user-data-dir",
|
||||
str(user_dir),
|
||||
]
|
||||
|
||||
# test with both entry and exit only arguments
|
||||
args = get_args(
|
||||
base_args
|
||||
+ [
|
||||
"--analysis-groups",
|
||||
"0",
|
||||
"--indicator-list",
|
||||
"close",
|
||||
"rsi",
|
||||
"profit_abs",
|
||||
"--entry-only",
|
||||
"--exit-only",
|
||||
]
|
||||
)
|
||||
with pytest.raises(
|
||||
OperationalException,
|
||||
match=r"Cannot use --entry-only and --exit-only at the same time. Please choose one.",
|
||||
):
|
||||
start_analysis_entries_exits(args)
|
||||
|
||||
|
||||
def test_backtest_analysis_on_entry_and_rejected_signals_only_entry_signals(
|
||||
default_conf, mocker, caplog, testdatadir, user_dir, capsys
|
||||
):
|
||||
caplog.set_level(logging.INFO)
|
||||
(user_dir / "backtest_results").mkdir(parents=True, exist_ok=True)
|
||||
|
||||
default_conf.update(
|
||||
{
|
||||
"use_exit_signal": True,
|
||||
"exit_profit_only": False,
|
||||
"exit_profit_offset": 0.0,
|
||||
"ignore_roi_if_entry_signal": False,
|
||||
}
|
||||
)
|
||||
patch_exchange(mocker)
|
||||
result1 = pd.DataFrame(
|
||||
{
|
||||
"pair": ["ETH/BTC", "LTC/BTC", "ETH/BTC", "LTC/BTC"],
|
||||
"profit_ratio": [0.025, 0.05, -0.1, -0.05],
|
||||
"profit_abs": [0.5, 2.0, -4.0, -2.0],
|
||||
"open_date": pd.to_datetime(
|
||||
[
|
||||
"2018-01-29 18:40:00",
|
||||
"2018-01-30 03:30:00",
|
||||
"2018-01-30 08:10:00",
|
||||
"2018-01-31 13:30:00",
|
||||
],
|
||||
utc=True,
|
||||
),
|
||||
"close_date": pd.to_datetime(
|
||||
[
|
||||
"2018-01-29 20:45:00",
|
||||
"2018-01-30 05:35:00",
|
||||
"2018-01-30 09:10:00",
|
||||
"2018-01-31 15:00:00",
|
||||
],
|
||||
utc=True,
|
||||
),
|
||||
"trade_duration": [235, 40, 60, 90],
|
||||
"is_open": [False, False, False, False],
|
||||
"stake_amount": [0.01, 0.01, 0.01, 0.01],
|
||||
"open_rate": [0.104445, 0.10302485, 0.10302485, 0.10302485],
|
||||
"close_rate": [0.104969, 0.103541, 0.102041, 0.102541],
|
||||
"is_short": [False, False, False, False],
|
||||
"enter_tag": [
|
||||
"enter_tag_long_a",
|
||||
"enter_tag_long_b",
|
||||
"enter_tag_long_a",
|
||||
"enter_tag_long_b",
|
||||
],
|
||||
"exit_reason": [
|
||||
ExitType.ROI.value,
|
||||
ExitType.EXIT_SIGNAL.value,
|
||||
ExitType.STOP_LOSS.value,
|
||||
ExitType.TRAILING_STOP_LOSS.value,
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
backtestmock = MagicMock(
|
||||
side_effect=[
|
||||
{
|
||||
"results": result1,
|
||||
"config": default_conf,
|
||||
"locks": [],
|
||||
"rejected_signals": 20,
|
||||
"timedout_entry_orders": 0,
|
||||
"timedout_exit_orders": 0,
|
||||
"canceled_trade_entries": 0,
|
||||
"canceled_entry_orders": 0,
|
||||
"replaced_entry_orders": 0,
|
||||
"final_balance": 1000,
|
||||
}
|
||||
]
|
||||
)
|
||||
mocker.patch(
|
||||
"freqtrade.plugins.pairlistmanager.PairListManager.whitelist",
|
||||
PropertyMock(return_value=["ETH/BTC", "LTC/BTC", "DASH/BTC"]),
|
||||
)
|
||||
mocker.patch("freqtrade.optimize.backtesting.Backtesting.backtest", backtestmock)
|
||||
|
||||
patched_configuration_load_config_file(mocker, default_conf)
|
||||
|
||||
args = [
|
||||
"backtesting",
|
||||
"--config",
|
||||
"config.json",
|
||||
"--datadir",
|
||||
str(testdatadir),
|
||||
"--user-data-dir",
|
||||
str(user_dir),
|
||||
"--timeframe",
|
||||
"5m",
|
||||
"--timerange",
|
||||
"1515560100-1517287800",
|
||||
"--export",
|
||||
"signals",
|
||||
"--cache",
|
||||
"none",
|
||||
]
|
||||
args = get_args(args)
|
||||
start_backtesting(args)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "BACKTESTING REPORT" in captured.out
|
||||
assert "EXIT REASON STATS" in captured.out
|
||||
assert "LEFT OPEN TRADES REPORT" in captured.out
|
||||
|
||||
base_args = [
|
||||
"backtesting-analysis",
|
||||
"--config",
|
||||
"config.json",
|
||||
"--datadir",
|
||||
str(testdatadir),
|
||||
"--user-data-dir",
|
||||
str(user_dir),
|
||||
]
|
||||
|
||||
# test group 0 and indicator list
|
||||
args = get_args(
|
||||
base_args
|
||||
+ [
|
||||
"--analysis-groups",
|
||||
"0",
|
||||
"--indicator-list",
|
||||
"close",
|
||||
"rsi",
|
||||
"profit_abs",
|
||||
"--entry-only",
|
||||
]
|
||||
)
|
||||
start_analysis_entries_exits(args)
|
||||
captured = capsys.readouterr()
|
||||
assert "LTC/BTC" in captured.out
|
||||
assert "ETH/BTC" in captured.out
|
||||
assert "enter_tag_long_a" in captured.out
|
||||
assert "enter_tag_long_b" in captured.out
|
||||
assert "exit_signal" in captured.out
|
||||
assert "roi" in captured.out
|
||||
assert "stop_loss" in captured.out
|
||||
assert "trailing_stop_loss" in captured.out
|
||||
assert "0.5" in captured.out
|
||||
assert "-4" in captured.out
|
||||
assert "-2" in captured.out
|
||||
assert "-3.5" in captured.out
|
||||
assert "50" in captured.out
|
||||
assert "0" in captured.out
|
||||
assert "0.016" in captured.out
|
||||
assert "34.049" in captured.out
|
||||
assert "0.104" in captured.out
|
||||
assert "52.829" in captured.out
|
||||
# assert indicator list
|
||||
assert "close" in captured.out
|
||||
assert "close (entry)" not in captured.out
|
||||
assert "0.016" in captured.out
|
||||
assert "rsi (entry)" not in captured.out
|
||||
assert "rsi" in captured.out
|
||||
assert "54.320" in captured.out
|
||||
assert "close (exit)" not in captured.out
|
||||
assert "rsi (exit)" not in captured.out
|
||||
assert "52.829" in captured.out
|
||||
assert "profit_abs" in captured.out
|
||||
|
|
|
@@ -123,12 +123,12 @@ def test_load_data_startup_candles(mocker, testdatadir) -> None:

@pytest.mark.parametrize("candle_type", ["mark", ""])
def test_load_data_with_new_pair_1min(
ohlcv_history_list, mocker, caplog, default_conf, tmp_path, candle_type
ohlcv_history, mocker, caplog, default_conf, tmp_path, candle_type
) -> None:
"""
Test load_pair_history() with 1 min timeframe
"""
mocker.patch(f"{EXMS}.get_historic_ohlcv", return_value=ohlcv_history_list)
mocker.patch(f"{EXMS}.get_historic_ohlcv", return_value=ohlcv_history)
exchange = get_patched_exchange(mocker, default_conf)
file = tmp_path / "MEME_BTC-1m.feather"
@ -303,9 +303,9 @@ def test_load_cached_data_for_updating(mocker, testdatadir) -> None:
|
|||
],
|
||||
)
|
||||
def test_download_pair_history(
|
||||
ohlcv_history_list, mocker, default_conf, tmp_path, candle_type, subdir, file_tail
|
||||
ohlcv_history, mocker, default_conf, tmp_path, candle_type, subdir, file_tail
|
||||
) -> None:
|
||||
mocker.patch(f"{EXMS}.get_historic_ohlcv", return_value=ohlcv_history_list)
|
||||
mocker.patch(f"{EXMS}.get_historic_ohlcv", return_value=ohlcv_history)
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
file1_1 = tmp_path / f"{subdir}MEME_BTC-1m{file_tail}.feather"
|
||||
file1_5 = tmp_path / f"{subdir}MEME_BTC-5m{file_tail}.feather"
|
||||
|
@ -351,16 +351,12 @@ def test_download_pair_history(
|
|||
assert file2_5.is_file()
|
||||
|
||||
|
||||
def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
|
||||
tick = [
|
||||
[1509836520000, 0.00162008, 0.00162008, 0.00162008, 0.00162008, 108.14853839],
|
||||
[1509836580000, 0.00161, 0.00161, 0.00161, 0.00161, 82.390199],
|
||||
]
|
||||
def test_download_pair_history2(mocker, default_conf, testdatadir, ohlcv_history) -> None:
|
||||
json_dump_mock = mocker.patch(
|
||||
"freqtrade.data.history.datahandlers.featherdatahandler.FeatherDataHandler.ohlcv_store",
|
||||
return_value=None,
|
||||
)
|
||||
mocker.patch(f"{EXMS}.get_historic_ohlcv", return_value=tick)
|
||||
mocker.patch(f"{EXMS}.get_historic_ohlcv", return_value=ohlcv_history)
|
||||
exchange = get_patched_exchange(mocker, default_conf)
|
||||
_download_pair_history(
|
||||
datadir=testdatadir,
|
||||
|
|
|
@ -1,11 +1,8 @@
|
|||
from datetime import datetime, timedelta, timezone
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from freqtrade.enums.marginmode import MarginMode
|
||||
from freqtrade.enums.tradingmode import TradingMode
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from tests.conftest import EXMS, get_mock_coro, get_patched_exchange, log_has
|
||||
from tests.exchange.test_exchange import ccxt_exceptionhandlers
|
||||
|
||||
|
@ -27,13 +24,11 @@ def test_additional_exchange_init_bybit(default_conf, mocker, caplog):
|
|||
|
||||
api_mock.set_position_mode.reset_mock()
|
||||
api_mock.is_unified_enabled = MagicMock(return_value=[False, True])
|
||||
with pytest.raises(OperationalException, match=r"Bybit: Unified account is not supported.*"):
|
||||
get_patched_exchange(mocker, default_conf, exchange="bybit", api_mock=api_mock)
|
||||
assert log_has("Bybit: Unified account.", caplog)
|
||||
# exchange = get_patched_exchange(mocker, default_conf, exchange="bybit", api_mock=api_mock)
|
||||
# assert api_mock.set_position_mode.call_count == 1
|
||||
# assert api_mock.is_unified_enabled.call_count == 1
|
||||
# assert exchange.unified_account is True
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange="bybit", api_mock=api_mock)
|
||||
assert log_has("Bybit: Unified account. Assuming dedicated subaccount for this bot.", caplog)
|
||||
assert api_mock.set_position_mode.call_count == 1
|
||||
assert api_mock.is_unified_enabled.call_count == 1
|
||||
assert exchange.unified_account is True
|
||||
|
||||
ccxt_exceptionhandlers(
|
||||
mocker, default_conf, api_mock, "bybit", "additional_exchange_init", "set_position_mode"
|
||||
|
|
|
@ -255,7 +255,6 @@ def test_init_exception(default_conf, mocker):
|
|||
def test_exchange_resolver(default_conf, mocker, caplog):
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=MagicMock()))
|
||||
mocker.patch(f"{EXMS}._load_async_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
|
@ -555,7 +554,6 @@ def test_get_min_pair_stake_amount_real_data(mocker, default_conf) -> None:
|
|||
|
||||
def test__load_async_markets(default_conf, mocker, caplog):
|
||||
mocker.patch(f"{EXMS}._init_ccxt")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
|
@@ -567,7 +565,15 @@ def test__load_async_markets(default_conf, mocker, caplog):
caplog.set_level(logging.DEBUG)

exchange._api_async.load_markets = get_mock_coro(side_effect=ccxt.BaseError("deadbeef"))
with pytest.raises(ccxt.BaseError, match="deadbeef"):
with pytest.raises(TemporaryError, match="deadbeef"):
exchange._load_async_markets()

exchange._api_async.load_markets = get_mock_coro(side_effect=ccxt.DDoSProtection("deadbeef"))
with pytest.raises(DDosProtection, match="deadbeef"):
exchange._load_async_markets()

exchange._api_async.load_markets = get_mock_coro(side_effect=ccxt.OperationFailed("deadbeef"))
with pytest.raises(TemporaryError, match="deadbeef"):
exchange._load_async_markets()
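The updated test expects raw ccxt errors to surface as freqtrade's own exception types (`TemporaryError`, `DDosProtection`) rather than leaking `ccxt.BaseError`. A generic sketch of that error-mapping pattern; the exception classes below are local stand-ins for this example, not the project's real ones:

```python
class TemporaryError(Exception):
    """Recoverable exchange problem (local stand-in for the application's exception)."""


class DDosProtection(TemporaryError):
    """Rate-limit / DDoS protection triggered (local stand-in)."""


class FakeDDoSProtection(Exception):
    """Stands in for ccxt.DDoSProtection in this sketch."""


class FakeBaseError(Exception):
    """Stands in for ccxt.BaseError in this sketch."""


def map_client_errors(func):
    """Translate low-level client errors into the application's exception hierarchy."""

    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except FakeDDoSProtection as e:
            raise DDosProtection(e) from e
        except FakeBaseError as e:
            raise TemporaryError(f"{func.__name__} failed: {e}") from e

    return wrapper


@map_client_errors
def load_markets():
    raise FakeBaseError("deadbeef")


try:
    load_markets()
except TemporaryError as err:
    print(f"caught: {err}")  # caught: load_markets failed: deadbeef
```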
@ -576,7 +582,6 @@ def test__load_markets(default_conf, mocker, caplog):
|
|||
api_mock = MagicMock()
|
||||
api_mock.load_markets = get_mock_coro(side_effect=ccxt.BaseError("SomeError"))
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
|
@@ -630,6 +635,21 @@ def test_reload_markets(default_conf, mocker, caplog, time_machine):
exchange.reload_markets()
assert lam_spy.call_count == 0

# Another reload should happen but it fails.
time_machine.move_to(start_dt + timedelta(minutes=51), tick=False)
api_mock.load_markets = get_mock_coro(side_effect=ccxt.NetworkError("LoadError"))

exchange.reload_markets(force=False)
assert exchange.markets == updated_markets
assert lam_spy.call_count == 1
# Tried once, failed

lam_spy.reset_mock()
# When forcing (bot startup), it should retry 3 times.
exchange.reload_markets(force=True)
assert lam_spy.call_count == 4
assert exchange.markets == updated_markets


def test_reload_markets_exception(default_conf, mocker, caplog):
caplog.set_level(logging.DEBUG)
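The new assertions encode a retry policy: a routine refresh gives up after one failed attempt and keeps the cached markets, while a forced reload (bot startup) tries again, one initial call plus three retries, hence the spy count of 4. A rough sketch of that retry-on-force idea with made-up names, not the actual Exchange internals:

```python
import logging
from typing import Callable, Dict, Optional

logger = logging.getLogger(__name__)


def reload_markets(load: Callable[[], Dict], *, force: bool = False, retries: int = 3) -> Optional[Dict]:
    """Try to reload markets; only keep retrying when force is set (e.g. at bot startup)."""
    attempts = 1 + (retries if force else 0)
    for attempt in range(attempts):
        try:
            return load()
        except ConnectionError as err:  # stand-in for the client library's network error
            logger.warning("Market reload failed (%s), attempt %d/%d", err, attempt + 1, attempts)
    return None  # caller keeps the previously cached markets


calls = {"n": 0}


def flaky_load() -> Dict:
    calls["n"] += 1
    raise ConnectionError("LoadError")


reload_markets(flaky_load, force=False)
assert calls["n"] == 1  # one attempt, then give up
calls["n"] = 0
reload_markets(flaky_load, force=True)
assert calls["n"] == 4  # one initial call plus three retries, matching the spy count above
```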
@ -661,7 +681,6 @@ def test_validate_stakecurrency(default_conf, stake_currency, mocker, caplog):
|
|||
}
|
||||
)
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
Exchange(default_conf)
|
||||
|
@ -679,7 +698,6 @@ def test_validate_stakecurrency_error(default_conf, mocker, caplog):
|
|||
}
|
||||
)
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
with pytest.raises(
|
||||
ConfigurationError,
|
||||
|
@ -732,147 +750,6 @@ def test_get_pair_base_currency(default_conf, mocker, pair, expected):
|
|||
assert ex.get_pair_base_currency(pair) == expected
|
||||
|
||||
|
||||
def test_validate_pairs(default_conf, mocker):
|
||||
api_mock = MagicMock()
|
||||
id_mock = PropertyMock(return_value="test_exchange")
|
||||
type(api_mock).id = id_mock
|
||||
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(
|
||||
f"{EXMS}._load_async_markets",
|
||||
return_value={
|
||||
"ETH/BTC": {"quote": "BTC"},
|
||||
"LTC/BTC": {"quote": "BTC"},
|
||||
"XRP/BTC": {"quote": "BTC"},
|
||||
"NEO/BTC": {"quote": "BTC"},
|
||||
},
|
||||
)
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
# test exchange.validate_pairs directly
|
||||
# No assert - but this should not fail (!)
|
||||
Exchange(default_conf)
|
||||
|
||||
|
||||
def test_validate_pairs_not_available(default_conf, mocker):
|
||||
api_mock = MagicMock()
|
||||
type(api_mock).markets = PropertyMock(
|
||||
return_value={"XRP/BTC": {"inactive": True, "base": "XRP", "quote": "BTC"}}
|
||||
)
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}._load_async_markets")
|
||||
|
||||
with pytest.raises(OperationalException, match=r"not available"):
|
||||
Exchange(default_conf)
|
||||
|
||||
|
||||
def test_validate_pairs_exception(default_conf, mocker, caplog):
|
||||
caplog.set_level(logging.INFO)
|
||||
api_mock = MagicMock()
|
||||
mocker.patch(f"{EXMS}.name", PropertyMock(return_value="Binance"))
|
||||
|
||||
type(api_mock).markets = PropertyMock(return_value={})
|
||||
mocker.patch(f"{EXMS}._init_ccxt", api_mock)
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
mocker.patch(f"{EXMS}._load_async_markets")
|
||||
|
||||
with pytest.raises(OperationalException, match=r"Pair ETH/BTC is not available on Binance"):
|
||||
Exchange(default_conf)
|
||||
|
||||
mocker.patch(f"{EXMS}.markets", PropertyMock(return_value={}))
|
||||
Exchange(default_conf)
|
||||
assert log_has("Unable to validate pairs (assuming they are correct).", caplog)
|
||||
|
||||
|
||||
def test_validate_pairs_restricted(default_conf, mocker, caplog):
|
||||
api_mock = MagicMock()
|
||||
type(api_mock).load_markets = get_mock_coro(
|
||||
return_value={
|
||||
"ETH/BTC": {"quote": "BTC"},
|
||||
"LTC/BTC": {"quote": "BTC"},
|
||||
"XRP/BTC": {"quote": "BTC", "info": {"prohibitedIn": ["US"]}},
|
||||
"NEO/BTC": {"quote": "BTC", "info": "TestString"}, # info can also be a string ...
|
||||
}
|
||||
)
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
|
||||
Exchange(default_conf)
|
||||
assert log_has(
|
||||
"Pair XRP/BTC is restricted for some users on this exchange."
|
||||
"Please check if you are impacted by this restriction "
|
||||
"on the exchange and eventually remove XRP/BTC from your whitelist.",
|
||||
caplog,
|
||||
)
|
||||
|
||||
|
||||
def test_validate_pairs_stakecompatibility(default_conf, mocker):
|
||||
api_mock = MagicMock()
|
||||
type(api_mock).load_markets = get_mock_coro(
|
||||
return_value={
|
||||
"ETH/BTC": {"quote": "BTC"},
|
||||
"LTC/BTC": {"quote": "BTC"},
|
||||
"XRP/BTC": {"quote": "BTC"},
|
||||
"NEO/BTC": {"quote": "BTC"},
|
||||
"HELLO-WORLD": {"quote": "BTC"},
|
||||
}
|
||||
)
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
|
||||
Exchange(default_conf)
|
||||
|
||||
|
||||
def test_validate_pairs_stakecompatibility_downloaddata(default_conf, mocker):
|
||||
api_mock = MagicMock()
|
||||
default_conf["stake_currency"] = ""
|
||||
type(api_mock).load_markets = get_mock_coro(
|
||||
return_value={
|
||||
"ETH/BTC": {"quote": "BTC"},
|
||||
"LTC/BTC": {"quote": "BTC"},
|
||||
"XRP/BTC": {"quote": "BTC"},
|
||||
"NEO/BTC": {"quote": "BTC"},
|
||||
"HELLO-WORLD": {"quote": "BTC"},
|
||||
}
|
||||
)
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
|
||||
Exchange(default_conf)
|
||||
assert type(api_mock).load_markets.call_count == 1
|
||||
|
||||
|
||||
def test_validate_pairs_stakecompatibility_fail(default_conf, mocker):
|
||||
default_conf["exchange"]["pair_whitelist"].append("HELLO-WORLD")
|
||||
api_mock = MagicMock()
|
||||
type(api_mock).load_markets = get_mock_coro(
|
||||
return_value={
|
||||
"ETH/BTC": {"quote": "BTC"},
|
||||
"LTC/BTC": {"quote": "BTC"},
|
||||
"XRP/BTC": {"quote": "BTC"},
|
||||
"NEO/BTC": {"quote": "BTC"},
|
||||
"HELLO-WORLD": {"quote": "USDT"},
|
||||
}
|
||||
)
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
|
||||
with pytest.raises(OperationalException, match=r"Stake-currency 'BTC' not compatible with.*"):
|
||||
Exchange(default_conf)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("timeframe", [("5m"), ("1m"), ("15m"), ("1h")])
|
||||
def test_validate_timeframes(default_conf, mocker, timeframe):
|
||||
default_conf["timeframe"] = timeframe
|
||||
|
@ -884,7 +761,6 @@ def test_validate_timeframes(default_conf, mocker, timeframe):
|
|||
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
Exchange(default_conf)
|
||||
|
@ -902,7 +778,6 @@ def test_validate_timeframes_failed(default_conf, mocker):
|
|||
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
with pytest.raises(
|
||||
|
@ -932,7 +807,6 @@ def test_validate_timeframes_emulated_ohlcv_1(default_conf, mocker):
|
|||
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
with pytest.raises(
|
||||
OperationalException,
|
||||
|
@ -954,7 +828,6 @@ def test_validate_timeframes_emulated_ohlcvi_2(default_conf, mocker):
|
|||
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs", MagicMock())
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
with pytest.raises(
|
||||
OperationalException,
|
||||
|
@ -976,7 +849,6 @@ def test_validate_timeframes_not_in_config(default_conf, mocker):
|
|||
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
mocker.patch(f"{EXMS}.validate_required_startup_candles")
|
||||
|
@ -993,7 +865,6 @@ def test_validate_pricing(default_conf, mocker):
|
|||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_trading_mode_and_margin_mode")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.name", "Binance")
|
||||
|
@ -1028,7 +899,6 @@ def test_validate_ordertypes(default_conf, mocker):
|
|||
type(api_mock).has = PropertyMock(return_value={"createMarketOrder": True})
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
|
@ -1087,7 +957,6 @@ def test_validate_ordertypes_stop_advanced(default_conf, mocker, exchange_name,
|
|||
type(api_mock).has = PropertyMock(return_value={"createMarketOrder": True})
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
|
@ -1112,7 +981,6 @@ def test_validate_order_types_not_in_config(default_conf, mocker):
|
|||
api_mock = MagicMock()
|
||||
mocker.patch(f"{EXMS}._init_ccxt", MagicMock(return_value=api_mock))
|
||||
mocker.patch(f"{EXMS}.reload_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
|
@ -1128,7 +996,6 @@ def test_validate_required_startup_candles(default_conf, mocker, caplog):
|
|||
mocker.patch(f"{EXMS}._init_ccxt", api_mock)
|
||||
mocker.patch(f"{EXMS}.validate_timeframes")
|
||||
mocker.patch(f"{EXMS}._load_async_markets")
|
||||
mocker.patch(f"{EXMS}.validate_pairs")
|
||||
mocker.patch(f"{EXMS}.validate_pricing")
|
||||
mocker.patch(f"{EXMS}.validate_stakecurrency")
|
||||
|
||||
|
@ -2217,24 +2084,29 @@ def test___now_is_time_to_refresh(default_conf, mocker, exchange_name, time_mach
|
|||
assert exchange._now_is_time_to_refresh(pair, "5m", candle_type) is True
|
||||
|
||||
|
||||
@pytest.mark.parametrize("exchange_name", EXCHANGES)
|
||||
@pytest.mark.parametrize("candle_type", ["mark", ""])
|
||||
@pytest.mark.parametrize("exchange_name", EXCHANGES)
|
||||
def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_type):
|
||||
exchange = get_patched_exchange(mocker, default_conf, exchange=exchange_name)
|
||||
ohlcv = [
|
||||
[
|
||||
dt_ts(), # unix timestamp ms
|
||||
1, # open
|
||||
2, # high
|
||||
3, # low
|
||||
4, # close
|
||||
5, # volume (in quote currency)
|
||||
]
|
||||
]
|
||||
pair = "ETH/BTC"
|
||||
calls = 0
|
||||
now = dt_now()
|
||||
|
||||
async def mock_candle_hist(pair, timeframe, candle_type, since_ms):
|
||||
return pair, timeframe, candle_type, ohlcv, True
|
||||
nonlocal calls
|
||||
calls += 1
|
||||
ohlcv = [
|
||||
[
|
||||
dt_ts(now + timedelta(minutes=5 * (calls + i))), # unix timestamp ms
|
||||
1, # open
|
||||
2, # high
|
||||
3, # low
|
||||
4, # close
|
||||
5, # volume (in quote currency)
|
||||
]
|
||||
for i in range(2)
|
||||
]
|
||||
return (pair, timeframe, candle_type, ohlcv, True)
|
||||
|
||||
exchange._async_get_candle_history = Mock(wraps=mock_candle_hist)
|
||||
# one_call calculation * 1.8 should do 2 calls
|
||||
|
@ -2245,7 +2117,7 @@ def test_get_historic_ohlcv(default_conf, mocker, caplog, exchange_name, candle_
|
|||
)
|
||||
|
||||
assert exchange._async_get_candle_history.call_count == 2
|
||||
# Returns twice the above OHLCV data
|
||||
# Returns twice the above OHLCV data after truncating the open candle.
|
||||
assert len(ret) == 2
|
||||
assert log_has_re(r"Downloaded data for .* with length .*\.", caplog)
|
||||
|
||||
|
@ -4162,7 +4034,6 @@ def test_merge_ft_has_dict(default_conf, mocker):
|
|||
EXMS,
|
||||
_init_ccxt=MagicMock(return_value=MagicMock()),
|
||||
_load_async_markets=MagicMock(),
|
||||
validate_pairs=MagicMock(),
|
||||
validate_timeframes=MagicMock(),
|
||||
validate_stakecurrency=MagicMock(),
|
||||
validate_pricing=MagicMock(),
|
||||
|
@ -4197,7 +4068,6 @@ def test_get_valid_pair_combination(default_conf, mocker, markets):
|
|||
EXMS,
|
||||
_init_ccxt=MagicMock(return_value=MagicMock()),
|
||||
_load_async_markets=MagicMock(),
|
||||
validate_pairs=MagicMock(),
|
||||
validate_timeframes=MagicMock(),
|
||||
validate_pricing=MagicMock(),
|
||||
markets=PropertyMock(return_value=markets),
|
||||
|
@ -4477,7 +4347,6 @@ def test_get_markets(
|
|||
EXMS,
|
||||
_init_ccxt=MagicMock(return_value=MagicMock()),
|
||||
_load_async_markets=MagicMock(),
|
||||
validate_pairs=MagicMock(),
|
||||
validate_timeframes=MagicMock(),
|
||||
validate_pricing=MagicMock(),
|
||||
markets=PropertyMock(return_value=markets_static),
|
||||
|
|
|
@ -689,13 +689,29 @@ def test_process_trade_creation(
|
|||
assert trade.open_date is not None
|
||||
assert trade.exchange == "binance"
|
||||
assert trade.open_rate == ticker_usdt.return_value[ticker_side]
|
||||
assert pytest.approx(trade.amount) == 60 / ticker_usdt.return_value[ticker_side]
|
||||
# Trade opens with 0 amount. Only trade filling will set the amount
|
||||
assert pytest.approx(trade.amount) == 0
|
||||
assert pytest.approx(trade.amount_requested) == 60 / ticker_usdt.return_value[ticker_side]
|
||||
|
||||
assert log_has(
|
||||
f'{"Short" if is_short else "Long"} signal found: about create a new trade for ETH/USDT '
|
||||
"with stake_amount: 60.0 ...",
|
||||
caplog,
|
||||
)
|
||||
mocker.patch("freqtrade.freqtradebot.FreqtradeBot._check_and_execute_exit")
|
||||
|
||||
# Fill trade.
|
||||
freqtrade.process()
|
||||
trades = Trade.get_open_trades()
|
||||
assert len(trades) == 1
|
||||
trade = trades[0]
|
||||
assert trade is not None
|
||||
assert trade.is_open
|
||||
assert trade.open_date is not None
|
||||
assert trade.exchange == "binance"
|
||||
assert trade.open_rate == limit_order[entry_side(is_short)]["price"]
|
||||
# Filled trade has amount set to filled order amount
|
||||
assert pytest.approx(trade.amount) == limit_order[entry_side(is_short)]["filled"]
|
||||
|
||||
|
||||
def test_process_exchange_failures(default_conf_usdt, ticker_usdt, mocker) -> None:
|
||||
|
@ -1685,7 +1701,7 @@ def test_handle_trade_roi(
|
|||
create_order=MagicMock(
|
||||
side_effect=[
|
||||
open_order,
|
||||
{"id": 1234553382},
|
||||
{"id": 1234553382, "amount": open_order["amount"]},
|
||||
]
|
||||
),
|
||||
get_fee=fee,
|
||||
|
@ -2205,7 +2221,6 @@ def test_manage_open_orders_buy_exception(
|
|||
patch_exchange(mocker)
|
||||
mocker.patch.multiple(
|
||||
EXMS,
|
||||
validate_pairs=MagicMock(),
|
||||
fetch_ticker=ticker_usdt,
|
||||
fetch_order=MagicMock(side_effect=ExchangeError),
|
||||
cancel_order=cancel_order_mock,
|
||||
|
@ -2884,7 +2899,7 @@ def test_execute_trade_exit_up(
|
|||
EXMS,
|
||||
fetch_ticker=ticker_usdt,
|
||||
get_fee=fee,
|
||||
_dry_is_price_crossed=MagicMock(return_value=False),
|
||||
_dry_is_price_crossed=MagicMock(side_effect=[True, False]),
|
||||
)
|
||||
patch_whitelist(mocker, default_conf_usdt)
|
||||
freqtrade = FreqtradeBot(default_conf_usdt)
|
||||
|
@ -2976,7 +2991,7 @@ def test_execute_trade_exit_down(
|
|||
EXMS,
|
||||
fetch_ticker=ticker_usdt,
|
||||
get_fee=fee,
|
||||
_dry_is_price_crossed=MagicMock(return_value=False),
|
||||
_dry_is_price_crossed=MagicMock(side_effect=[True, False]),
|
||||
)
|
||||
patch_whitelist(mocker, default_conf_usdt)
|
||||
freqtrade = FreqtradeBot(default_conf_usdt)
|
||||
|
@ -2999,7 +3014,7 @@ def test_execute_trade_exit_down(
|
|||
exit_check=ExitCheckTuple(exit_type=ExitType.STOP_LOSS),
|
||||
)
|
||||
|
||||
assert rpc_mock.call_count == 2
|
||||
assert rpc_mock.call_count == 3
|
||||
last_msg = rpc_mock.call_args_list[-1][0][0]
|
||||
assert {
|
||||
"type": RPCMessageType.EXIT,
|
||||
|
@ -3063,7 +3078,7 @@ def test_execute_trade_exit_custom_exit_price(
|
|||
EXMS,
|
||||
fetch_ticker=ticker_usdt,
|
||||
get_fee=fee,
|
||||
_dry_is_price_crossed=MagicMock(return_value=False),
|
||||
_dry_is_price_crossed=MagicMock(side_effect=[True, False]),
|
||||
)
|
||||
config = deepcopy(default_conf_usdt)
|
||||
config["custom_price_max_distance_ratio"] = 0.1
|
||||
|
|
|
@ -1109,7 +1109,7 @@ def test_execute_trade_exit_down_stoploss_on_exchange_dry_run(
|
|||
EXMS,
|
||||
fetch_ticker=ticker_usdt,
|
||||
get_fee=fee,
|
||||
_dry_is_price_crossed=MagicMock(return_value=False),
|
||||
_dry_is_price_crossed=MagicMock(side_effect=[True, False]),
|
||||
)
|
||||
patch_whitelist(mocker, default_conf_usdt)
|
||||
freqtrade = FreqtradeBot(default_conf_usdt)
|
||||
|
@ -1136,7 +1136,7 @@ def test_execute_trade_exit_down_stoploss_on_exchange_dry_run(
|
|||
trade=trade, limit=trade.stop_loss, exit_check=ExitCheckTuple(exit_type=ExitType.STOP_LOSS)
|
||||
)
|
||||
|
||||
assert rpc_mock.call_count == 2
|
||||
# assert rpc_mock.call_count == 2
|
||||
last_msg = rpc_mock.call_args_list[-1][0][0]
|
||||
|
||||
assert {
|
||||
|
@ -1169,7 +1169,7 @@ def test_execute_trade_exit_down_stoploss_on_exchange_dry_run(
|
|||
"cumulative_profit": 0.0,
|
||||
"stake_amount": pytest.approx(60),
|
||||
"is_final_exit": False,
|
||||
"final_profit_ratio": None,
|
||||
"final_profit_ratio": ANY,
|
||||
} == last_msg
|
||||
|
||||
|
||||
|
|
|
@ -941,7 +941,7 @@ def test_backtest_one_detail(default_conf_usdt, mocker, testdatadir, use_detail)
|
|||
"use_detail,exp_funding_fee, exp_ff_updates",
|
||||
[
|
||||
(True, -0.018054162, 11),
|
||||
(False, -0.01780296, 5),
|
||||
(False, -0.01780296, 6),
|
||||
],
|
||||
)
|
||||
def test_backtest_one_detail_futures(
|
||||
|
@ -1051,8 +1051,8 @@ def test_backtest_one_detail_futures(
|
|||
@pytest.mark.parametrize(
|
||||
"use_detail,entries,max_stake,ff_updates,expected_ff",
|
||||
[
|
||||
(True, 50, 3000, 54, -1.18038144),
|
||||
(False, 6, 360, 10, -0.14679994),
|
||||
(True, 50, 3000, 55, -1.18038144),
|
||||
(False, 6, 360, 11, -0.14679994),
|
||||
],
|
||||
)
|
||||
def test_backtest_one_detail_futures_funding_fees(
|
||||
|
|
|
@ -13,6 +13,12 @@ from freqtrade.optimize.analysis.lookahead_helpers import LookaheadAnalysisSubFu
|
|||
from tests.conftest import EXMS, get_args, log_has_re, patch_exchange
|
||||
|
||||
|
||||
IGNORE_BIASED_INDICATORS_CAPTION = (
|
||||
"Any indicators in 'biased_indicators' which are used within "
|
||||
"set_freqai_targets() can be ignored."
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def lookahead_conf(default_conf_usdt, tmp_path):
|
||||
default_conf_usdt["user_data_dir"] = tmp_path
|
||||
|
@ -133,6 +139,58 @@ def test_lookahead_helper_start(lookahead_conf, mocker) -> None:
|
|||
text_table_mock.reset_mock()
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"indicators, expected_caption_text",
|
||||
[
|
||||
(
|
||||
["&indicator1", "indicator2"],
|
||||
IGNORE_BIASED_INDICATORS_CAPTION,
|
||||
),
|
||||
(
|
||||
["indicator1", "&indicator2"],
|
||||
IGNORE_BIASED_INDICATORS_CAPTION,
|
||||
),
|
||||
(
|
||||
["&indicator1", "&indicator2"],
|
||||
IGNORE_BIASED_INDICATORS_CAPTION,
|
||||
),
|
||||
(["indicator1", "indicator2"], None),
|
||||
([], None),
|
||||
],
|
||||
ids=(
|
||||
"First of two biased indicators starts with '&'",
|
||||
"Second of two biased indicators starts with '&'",
|
||||
"Both biased indicators start with '&'",
|
||||
"No biased indicators start with '&'",
|
||||
"Empty biased indicators list",
|
||||
),
|
||||
)
|
||||
def test_lookahead_helper_start__caption_based_on_indicators(
|
||||
indicators, expected_caption_text, lookahead_conf, mocker
|
||||
):
|
||||
"""Test that the table caption is only populated if a biased_indicator starts with '&'."""
|
||||
|
||||
single_mock = MagicMock()
|
||||
lookahead_analysis = LookaheadAnalysis(
|
||||
lookahead_conf,
|
||||
{"name": "strategy_test_v3_with_lookahead_bias"},
|
||||
)
|
||||
lookahead_analysis.current_analysis.false_indicators = indicators
|
||||
single_mock.return_value = lookahead_analysis
|
||||
text_table_mock = MagicMock()
|
||||
mocker.patch.multiple(
|
||||
"freqtrade.optimize.analysis.lookahead_helpers.LookaheadAnalysisSubFunctions",
|
||||
initialize_single_lookahead_analysis=single_mock,
|
||||
text_table_lookahead_analysis_instances=text_table_mock,
|
||||
)
|
||||
|
||||
LookaheadAnalysisSubFunctions.start(lookahead_conf)
|
||||
|
||||
text_table_mock.assert_called_once_with(
|
||||
lookahead_conf, [lookahead_analysis], caption=expected_caption_text
|
||||
)
|
||||
|
||||
|
||||
def test_lookahead_helper_text_table_lookahead_analysis_instances(lookahead_conf):
|
||||
analysis = Analysis()
|
||||
analysis.has_bias = True
|
||||
|
@ -199,6 +257,53 @@ def test_lookahead_helper_text_table_lookahead_analysis_instances(lookahead_conf
|
|||
assert len(data) == 3
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"caption",
|
||||
[
|
||||
"",
|
||||
"A test caption",
|
||||
None,
|
||||
False,
|
||||
],
|
||||
ids=(
|
||||
"Pass empty string",
|
||||
"Pass non-empty string",
|
||||
"Pass None",
|
||||
"Don't pass caption",
|
||||
),
|
||||
)
|
||||
def test_lookahead_helper_text_table_lookahead_analysis_instances__caption(
|
||||
caption,
|
||||
lookahead_conf,
|
||||
mocker,
|
||||
):
|
||||
"""Test that the caption is passed in the table kwargs when calling print_rich_table()."""
|
||||
|
||||
print_rich_table_mock = MagicMock()
|
||||
mocker.patch(
|
||||
"freqtrade.optimize.analysis.lookahead_helpers.print_rich_table",
|
||||
print_rich_table_mock,
|
||||
)
|
||||
lookahead_analysis = LookaheadAnalysis(
|
||||
lookahead_conf,
|
||||
{
|
||||
"name": "strategy_test_v3_with_lookahead_bias",
|
||||
"location": Path(lookahead_conf["strategy_path"], f"{lookahead_conf['strategy']}.py"),
|
||||
},
|
||||
)
|
||||
kwargs = {}
|
||||
if caption is not False:
|
||||
kwargs["caption"] = caption
|
||||
|
||||
LookaheadAnalysisSubFunctions.text_table_lookahead_analysis_instances(
|
||||
lookahead_conf, [lookahead_analysis], **kwargs
|
||||
)
|
||||
|
||||
assert print_rich_table_mock.call_args[-1]["table_kwargs"]["caption"] == (
|
||||
caption if caption is not False else None
|
||||
)
|
||||
|
||||
|
||||
def test_lookahead_helper_export_to_csv(lookahead_conf):
|
||||
import pandas as pd
|
||||
|
||||
|
|
|
@@ -293,20 +293,25 @@ def test_store_backtest_candles(testdatadir, mocker):
candle_dict = {"DefStrat": {"UNITTEST/BTC": pd.DataFrame()}}

# mock directory exporting
store_backtest_analysis_results(testdatadir, candle_dict, {}, "2022_01_01_15_05_13")
store_backtest_analysis_results(testdatadir, candle_dict, {}, {}, "2022_01_01_15_05_13")

assert dump_mock.call_count == 2
assert dump_mock.call_count == 3
assert isinstance(dump_mock.call_args_list[0][0][0], Path)
assert str(dump_mock.call_args_list[0][0][0]).endswith("_signals.pkl")
assert str(dump_mock.call_args_list[1][0][0]).endswith("_rejected.pkl")
assert str(dump_mock.call_args_list[2][0][0]).endswith("_exited.pkl")

dump_mock.reset_mock()
# mock file exporting
filename = Path(testdatadir / "testresult")
store_backtest_analysis_results(filename, candle_dict, {}, "2022_01_01_15_05_13")
assert dump_mock.call_count == 2
store_backtest_analysis_results(filename, candle_dict, {}, {}, "2022_01_01_15_05_13")
assert dump_mock.call_count == 3
assert isinstance(dump_mock.call_args_list[0][0][0], Path)
# result will be testdatadir / testresult-<timestamp>_signals.pkl
assert str(dump_mock.call_args_list[0][0][0]).endswith("_signals.pkl")
assert str(dump_mock.call_args_list[1][0][0]).endswith("_rejected.pkl")
assert str(dump_mock.call_args_list[2][0][0]).endswith("_exited.pkl")

dump_mock.reset_mock()
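With the extra rejected/exited dictionaries, one signals export now produces three joblib pickles per backtest run (`*_signals.pkl`, `*_rejected.pkl`, `*_exited.pkl`), each keyed by strategy and pair. A hedged sketch of how such an export could be inspected afterwards; the file path below is an assumed example, adjust it to your own results folder:

```python
from pathlib import Path

import joblib

# Assumed example path - point this at your own user_data/backtest_results folder.
export_file = Path("user_data/backtest_results/backtest-result-2022_01_01_15_05_13_signals.pkl")

if export_file.is_file():
    # The export is a dict of {strategy_name: {pair: DataFrame of signal candles}}.
    signal_candles = joblib.load(export_file)
    for strategy, pairs in signal_candles.items():
        for pair, df in pairs.items():
            print(f"{strategy} / {pair}: {len(df)} signal candles")
else:
    print(f"{export_file} not found - run a backtest with '--export signals' first.")
```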
@@ -315,7 +320,7 @@ def test_write_read_backtest_candles(tmp_path):

# test directory exporting
sample_date = "2022_01_01_15_05_13"
store_backtest_analysis_results(tmp_path, candle_dict, {}, sample_date)
store_backtest_analysis_results(tmp_path, candle_dict, {}, {}, sample_date)
stored_file = tmp_path / f"backtest-result-{sample_date}_signals.pkl"
with stored_file.open("rb") as scp:
pickled_signal_candles = joblib.load(scp)

@@ -330,7 +335,7 @@ def test_write_read_backtest_candles(tmp_path):

# test file exporting
filename = tmp_path / "testresult"
store_backtest_analysis_results(filename, candle_dict, {}, sample_date)
store_backtest_analysis_results(filename, candle_dict, {}, {}, sample_date)
stored_file = tmp_path / f"testresult-{sample_date}_signals.pkl"
with stored_file.open("rb") as scp:
pickled_signal_candles = joblib.load(scp)
@ -149,7 +149,10 @@ def test_rpc_trade_status(default_conf, ticker, fee, mocker) -> None:
|
|||
# Different from "filled" response:
|
||||
response_unfilled.update(
|
||||
{
|
||||
"amount": 91.07468124,
|
||||
"amount": 0.0,
|
||||
"open_trade_value": 0.0,
|
||||
"stoploss_entry_dist": 0.0,
|
||||
"stoploss_entry_dist_ratio": 0.0,
|
||||
"profit_ratio": 0.0,
|
||||
"profit_pct": 0.0,
|
||||
"profit_abs": 0.0,
|
||||
|
@ -762,7 +765,7 @@ def test_rpc_force_exit(default_conf, ticker, fee, mocker) -> None:
|
|||
freqtradebot.enter_positions()
|
||||
# make an limit-buy open trade
|
||||
trade = Trade.session.scalars(select(Trade).filter(Trade.id == "3")).first()
|
||||
filled_amount = trade.amount / 2
|
||||
filled_amount = trade.amount_requested / 2
|
||||
# Fetch order - it's open first, and closed after cancel_order is called.
|
||||
mocker.patch(
|
||||
f"{EXMS}.fetch_order",
|
||||
|
@ -799,7 +802,7 @@ def test_rpc_force_exit(default_conf, ticker, fee, mocker) -> None:
|
|||
|
||||
cancel_order_mock.reset_mock()
|
||||
trade = Trade.session.scalars(select(Trade).filter(Trade.id == "3")).first()
|
||||
amount = trade.amount
|
||||
amount = trade.amount_requested
|
||||
# make an limit-sell open order trade
|
||||
mocker.patch(
|
||||
f"{EXMS}.fetch_order",
|
||||
|
@ -832,7 +835,7 @@ def test_rpc_force_exit(default_conf, ticker, fee, mocker) -> None:
|
|||
assert cancel_order_mock.call_count == 0
|
||||
|
||||
trade = Trade.session.scalars(select(Trade).filter(Trade.id == "4")).first()
|
||||
amount = trade.amount
|
||||
amount = trade.amount_requested
|
||||
# make an limit-buy open trade, if there is no 'filled', don't sell it
|
||||
mocker.patch(
|
||||
f"{EXMS}.fetch_order",
|
||||
|
|
|
@@ -365,13 +365,18 @@ def test_sync_wallet_dry(mocker, default_conf_usdt, fee):
assert bal["NEO"].total == 10
assert bal["XRP"].total == 10
assert bal["LTC"].total == 2
assert bal["USDT"].total == 922.74
usdt_bal = bal["USDT"]
assert usdt_bal.free == 922.74
assert usdt_bal.total == 942.74
assert usdt_bal.used == 20.0
# sum of used and free should be total.
assert usdt_bal.total == usdt_bal.free + usdt_bal.used

assert freqtrade.wallets.get_starting_balance() == default_conf_usdt["dry_run_wallet"]
total = freqtrade.wallets.get_total("LTC")
free = freqtrade.wallets.get_free("LTC")
used = freqtrade.wallets.get_used("LTC")
assert free != 0
assert used != 0
assert free + used == total
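The extended wallet test pins down the accounting invariant that free plus used must always equal total, both for the stake currency and for coins held in open trades. A tiny sketch of that invariant with invented values (the numbers mirror the USDT figures asserted above):

```python
from dataclasses import dataclass


@dataclass
class WalletBalance:
    currency: str
    free: float
    used: float

    @property
    def total(self) -> float:
        # The invariant asserted in the test: total == free + used.
        return self.free + self.used


usdt = WalletBalance("USDT", free=922.74, used=20.0)
assert usdt.total == usdt.free + usdt.used
print(usdt.total)  # 942.74
```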