2017-11-18 07:52:28 +00:00
|
|
|
# pragma pylint: disable=W0603
|
2019-04-09 09:27:35 +00:00
|
|
|
"""
|
|
|
|
Cryptocurrency Exchanges support
|
|
|
|
"""
|
2024-05-12 15:02:37 +00:00
|
|
|
|
2019-06-09 12:05:36 +00:00
|
|
|
import asyncio
|
2018-08-14 17:51:49 +00:00
|
|
|
import inspect
|
2019-06-09 12:05:36 +00:00
|
|
|
import logging
|
2023-08-20 09:57:59 +00:00
|
|
|
import signal
|
2024-10-04 04:46:45 +00:00
|
|
|
from collections.abc import Coroutine
|
2019-06-09 12:05:36 +00:00
|
|
|
from copy import deepcopy
|
2021-11-18 07:56:25 +00:00
|
|
|
from datetime import datetime, timedelta, timezone
|
2024-02-29 06:22:40 +00:00
|
|
|
from math import floor, isnan
|
2022-04-28 04:29:14 +00:00
|
|
|
from threading import Lock
|
2024-11-04 18:40:11 +00:00
|
|
|
from typing import Any, Literal, Optional, TypeGuard, Union
|
2017-09-01 19:11:46 +00:00
|
|
|
|
2018-04-06 07:57:08 +00:00
|
|
|
import ccxt
|
2022-10-12 04:17:42 +00:00
|
|
|
import ccxt.pro as ccxt_pro
|
2021-04-13 18:09:22 +00:00
|
|
|
from cachetools import TTLCache
|
2022-10-29 07:06:21 +00:00
|
|
|
from ccxt import TICK_SIZE
|
2022-08-20 11:00:25 +00:00
|
|
|
from dateutil import parser
|
2022-10-03 18:49:54 +00:00
|
|
|
from pandas import DataFrame, concat
|
2018-08-03 16:10:03 +00:00
|
|
|
|
2024-05-12 13:18:32 +00:00
|
|
|
from freqtrade.constants import (
|
|
|
|
DEFAULT_AMOUNT_RESERVE_PERCENT,
|
2024-05-15 12:33:41 +00:00
|
|
|
DEFAULT_TRADES_COLUMNS,
|
2024-05-12 13:18:32 +00:00
|
|
|
NON_OPEN_EXCHANGE_STATES,
|
|
|
|
BidAsk,
|
|
|
|
BuySell,
|
|
|
|
Config,
|
|
|
|
EntryExit,
|
|
|
|
ExchangeConfig,
|
|
|
|
ListPairsWithTimeframes,
|
|
|
|
MakerTaker,
|
|
|
|
OBLiteral,
|
|
|
|
PairWithTimeframe,
|
|
|
|
)
|
2024-05-18 14:45:49 +00:00
|
|
|
from freqtrade.data.converter import (
|
|
|
|
clean_ohlcv_dataframe,
|
|
|
|
ohlcv_to_dataframe,
|
2024-05-15 12:33:41 +00:00
|
|
|
trades_df_remove_duplicates,
|
|
|
|
trades_dict_to_list,
|
|
|
|
trades_list_to_df,
|
|
|
|
)
|
2024-04-06 14:47:43 +00:00
|
|
|
from freqtrade.enums import (
|
|
|
|
OPTIMIZE_MODES,
|
|
|
|
TRADE_MODES,
|
|
|
|
CandleType,
|
|
|
|
MarginMode,
|
|
|
|
PriceType,
|
|
|
|
RunMode,
|
|
|
|
TradingMode,
|
|
|
|
)
|
2024-05-12 13:18:32 +00:00
|
|
|
from freqtrade.exceptions import (
|
|
|
|
ConfigurationError,
|
|
|
|
DDosProtection,
|
|
|
|
ExchangeError,
|
|
|
|
InsufficientFundsError,
|
|
|
|
InvalidOrderException,
|
|
|
|
OperationalException,
|
|
|
|
PricingError,
|
|
|
|
RetryableOrderError,
|
|
|
|
TemporaryError,
|
|
|
|
)
|
|
|
|
from freqtrade.exchange.common import (
|
|
|
|
API_FETCH_ORDER_RETRY_COUNT,
|
|
|
|
remove_exchange_credentials,
|
|
|
|
retrier,
|
|
|
|
retrier_async,
|
|
|
|
)
|
2024-09-04 04:42:51 +00:00
|
|
|
from freqtrade.exchange.exchange_types import (
|
|
|
|
CcxtBalances,
|
2024-11-04 06:23:25 +00:00
|
|
|
CcxtOrder,
|
2024-09-04 04:42:51 +00:00
|
|
|
CcxtPosition,
|
2024-09-04 05:15:17 +00:00
|
|
|
FtHas,
|
2024-09-04 04:42:51 +00:00
|
|
|
OHLCVResponse,
|
|
|
|
OrderBook,
|
|
|
|
Ticker,
|
|
|
|
Tickers,
|
|
|
|
)
|
2024-05-12 13:18:32 +00:00
|
|
|
from freqtrade.exchange.exchange_utils import (
|
|
|
|
ROUND,
|
|
|
|
ROUND_DOWN,
|
|
|
|
ROUND_UP,
|
|
|
|
amount_to_contract_precision,
|
|
|
|
amount_to_contracts,
|
|
|
|
amount_to_precision,
|
|
|
|
contracts_to_amount,
|
|
|
|
date_minus_candles,
|
|
|
|
is_exchange_known_ccxt,
|
|
|
|
market_is_active,
|
|
|
|
price_to_precision,
|
|
|
|
)
|
|
|
|
from freqtrade.exchange.exchange_utils_timeframe import (
|
|
|
|
timeframe_to_minutes,
|
|
|
|
timeframe_to_msecs,
|
|
|
|
timeframe_to_next_date,
|
|
|
|
timeframe_to_prev_date,
|
|
|
|
timeframe_to_seconds,
|
|
|
|
)
|
2022-10-18 18:48:40 +00:00
|
|
|
from freqtrade.exchange.exchange_ws import ExchangeWS
|
2024-05-12 13:18:32 +00:00
|
|
|
from freqtrade.misc import (
|
|
|
|
chunks,
|
|
|
|
deep_merge_dicts,
|
|
|
|
file_dump_json,
|
|
|
|
file_load_json,
|
|
|
|
safe_value_fallback2,
|
|
|
|
)
|
2023-05-14 08:48:04 +00:00
|
|
|
from freqtrade.util import dt_from_ts, dt_now
|
2024-04-25 08:16:31 +00:00
|
|
|
from freqtrade.util.datetime_helpers import dt_humanize_delta, dt_ts, format_ms_time
|
2024-02-17 15:17:32 +00:00
|
|
|
from freqtrade.util.periodic_cache import PeriodicCache
|
2020-02-02 04:00:40 +00:00
|
|
|
|
|
|
|
|
2017-05-14 12:14:16 +00:00
|
|
|
logger = logging.getLogger(__name__)
|
2017-05-12 17:11:56 +00:00
|
|
|
|
2019-04-04 17:56:40 +00:00
|
|
|
|
2019-09-12 01:39:52 +00:00
|
|
|
class Exchange:
    """
    Base exchange class wrapping ccxt exchange objects (sync, async and
    optionally websocket/pro variants).
    Subclasses describe exchange-specific capabilities via the `_ft_has*`
    dictionaries and `_supported_trading_mode_margin_pairs`.
    """

    # Parameters to add directly to buy/sell calls (like agreeing to trading agreement)
    _params: dict = {}

    # Additional parameters - added to the ccxt object
    _ccxt_params: dict = {}

    # Dict to specify which options each exchange implements
    # This defines defaults, which can be selectively overridden by subclasses using _ft_has
    # or by specifying them in the configuration.
    _ft_has_default: FtHas = {
        "stoploss_on_exchange": False,
        "stop_price_param": "stopLossPrice",  # Used for stoploss_on_exchange request
        "stop_price_prop": "stopLossPrice",  # Used for stoploss_on_exchange response parsing
        "stoploss_order_types": {},
        "order_time_in_force": ["GTC"],
        "ohlcv_params": {},
        "ohlcv_candle_limit": 500,
        "ohlcv_has_history": True,  # Some exchanges (Kraken) don't provide history via ohlcv
        "ohlcv_partial_candle": True,
        "ohlcv_require_since": False,
        # Check https://github.com/ccxt/ccxt/issues/10767 for removal of ohlcv_volume_currency
        "ohlcv_volume_currency": "base",  # "base" or "quote"
        "tickers_have_quoteVolume": True,
        "tickers_have_percentage": True,
        "tickers_have_bid_ask": True,  # bid / ask empty for fetch_tickers
        "tickers_have_price": True,
        "trades_limit": 1000,  # Limit for 1 call to fetch_trades
        "trades_pagination": "time",  # Possible are "time" or "id"
        "trades_pagination_arg": "since",
        "trades_has_history": False,
        "l2_limit_range": None,
        "l2_limit_range_required": True,  # Allow Empty L2 limit (kucoin)
        "mark_ohlcv_price": "mark",
        "mark_ohlcv_timeframe": "8h",
        "funding_fee_timeframe": "8h",
        "ccxt_futures_name": "swap",
        "needs_trading_fees": False,  # use fetch_trading_fees to cache fees
        "order_props_in_contracts": ["amount", "filled", "remaining"],
        # Override createMarketBuyOrderRequiresPrice where ccxt has it wrong
        "marketOrderRequiresPrice": False,
        "exchange_has_overrides": {},  # Dictionary overriding ccxt's "has".
        # Expected to be in the format {"fetchOHLCV": True} or {"fetchOHLCV": False}
        "ws_enabled": False,  # Set to true for exchanges with tested websocket support
    }
    # Per-exchange overrides, merged over _ft_has_default in __init__
    _ft_has: FtHas = {}
    # Additional overrides, merged in only when trading futures
    _ft_has_futures: FtHas = {}

    # Trading/margin mode combinations supported by this exchange
    _supported_trading_mode_margin_pairs: list[tuple[TradingMode, MarginMode]] = [
        # TradingMode.SPOT always supported and not required in this list
    ]
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
    def __init__(
        self,
        config: Config,
        *,
        exchange_config: Optional[ExchangeConfig] = None,
        validate: bool = True,
        load_leverage_tiers: bool = False,
    ) -> None:
        """
        Initializes this module with the given config,
        it does basic validation whether the specified exchange and pairs are valid.
        :param config: Bot configuration. Copied into self._config.
        :param exchange_config: Optional override for config["exchange"]
        :param validate: Load markets and validate the configuration on init
        :param load_leverage_tiers: Fetch leverage tiers (non-spot modes only)
        :return: None
        """
        # Declared here for typing; assigned below via _init_ccxt
        self._api: ccxt.Exchange
        self._api_async: ccxt_pro.Exchange
        self._ws_async: ccxt_pro.Exchange = None
        self._exchange_ws: Optional[ExchangeWS] = None
        self._markets: dict = {}
        self._trading_fees: dict[str, Any] = {}
        self._leverage_tiers: dict[str, list[dict]] = {}
        # Lock event loop. This is necessary to avoid race-conditions when using force* commands
        # Due to funding fee fetching.
        self._loop_lock = Lock()
        self.loop = self._init_async_loop()
        self._config: Config = {}
        self._config.update(config)

        # Holds last candle refreshed time of each pair
        self._pairs_last_refresh_time: dict[PairWithTimeframe, int] = {}
        # Timestamp of last markets refresh
        self._last_markets_refresh: int = 0

        # Cache for 10 minutes ...
        self._cache_lock = Lock()
        self._fetch_tickers_cache: TTLCache = TTLCache(maxsize=2, ttl=60 * 10)
        # Cache values for 300 seconds to avoid frequent polling of the exchange for prices
        # Caching only applies to RPC methods, so prices for open trades are still
        # refreshed once every iteration.
        # Shouldn't be too high either, as it'll freeze UI updates in case of open orders.
        self._exit_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=300)
        self._entry_rate_cache: TTLCache = TTLCache(maxsize=100, ttl=300)

        # Holds candles
        self._klines: dict[PairWithTimeframe, DataFrame] = {}
        self._expiring_candle_cache: dict[tuple[str, int], PeriodicCache] = {}

        # Holds public_trades
        self._trades: dict[PairWithTimeframe, DataFrame] = {}

        # Holds all open sell orders for dry_run
        self._dry_run_open_orders: dict[str, Any] = {}

        if config["dry_run"]:
            logger.info("Instance is running with dry_run enabled")
        logger.info(f"Using CCXT {ccxt.__version__}")
        exchange_conf: dict[str, Any] = exchange_config if exchange_config else config["exchange"]
        # Strip credentials from the config in dry-run mode
        remove_exchange_credentials(exchange_conf, config.get("dry_run", False))
        self.log_responses = exchange_conf.get("log_responses", False)

        # Leverage properties
        self.trading_mode: TradingMode = config.get("trading_mode", TradingMode.SPOT)
        self.margin_mode: MarginMode = (
            MarginMode(config.get("margin_mode")) if config.get("margin_mode") else MarginMode.NONE
        )
        self.liquidation_buffer = config.get("liquidation_buffer", 0.05)

        # Deep merge ft_has with default ft_has options
        self._ft_has = deep_merge_dicts(self._ft_has, deepcopy(self._ft_has_default))
        if self.trading_mode == TradingMode.FUTURES:
            self._ft_has = deep_merge_dicts(self._ft_has_futures, self._ft_has)
        if exchange_conf.get("_ft_has_params"):
            # Undocumented escape hatch: config may override _ft_has entries directly
            self._ft_has = deep_merge_dicts(exchange_conf.get("_ft_has_params"), self._ft_has)
            logger.info("Overriding exchange._ft_has with config params, result: %s", self._ft_has)

        # Assign this directly for easy access
        self._ohlcv_partial_candle = self._ft_has["ohlcv_partial_candle"]

        self._max_trades_limit = self._ft_has["trades_limit"]

        self._trades_pagination = self._ft_has["trades_pagination"]
        self._trades_pagination_arg = self._ft_has["trades_pagination_arg"]

        # Initialize ccxt objects
        ccxt_config = self._ccxt_config
        ccxt_config = deep_merge_dicts(exchange_conf.get("ccxt_config", {}), ccxt_config)
        ccxt_config = deep_merge_dicts(exchange_conf.get("ccxt_sync_config", {}), ccxt_config)

        self._api = self._init_ccxt(exchange_conf, True, ccxt_config)

        # Async instance gets "ccxt_config" plus "ccxt_async_config" (not ccxt_sync_config)
        ccxt_async_config = self._ccxt_config
        ccxt_async_config = deep_merge_dicts(
            exchange_conf.get("ccxt_config", {}), ccxt_async_config
        )
        ccxt_async_config = deep_merge_dicts(
            exchange_conf.get("ccxt_async_config", {}), ccxt_async_config
        )
        self._api_async = self._init_ccxt(exchange_conf, False, ccxt_async_config)
        self._has_watch_ohlcv = self.exchange_has("watchOHLCV") and self._ft_has["ws_enabled"]
        # Websocket support only in live/dry trade modes, when enabled and tested
        if (
            self._config["runmode"] in TRADE_MODES
            and exchange_conf.get("enable_ws", True)
            and self._has_watch_ohlcv
        ):
            self._ws_async = self._init_ccxt(exchange_conf, False, ccxt_async_config)
            self._exchange_ws = ExchangeWS(self._config, self._ws_async)

        logger.info(f'Using Exchange "{self.name}"')
        self.required_candle_call_count = 1
        if validate:
            # Initial markets load
            self.reload_markets(True, load_leverage_tiers=False)
            self.validate_config(config)
            self._startup_candle_count: int = config.get("startup_candle_count", 0)
            self.required_candle_call_count = self.validate_required_startup_candles(
                self._startup_candle_count, config.get("timeframe", "")
            )

        # Converts the interval provided in minutes in config to seconds
        self.markets_refresh_interval: int = (
            exchange_conf.get("markets_refresh_interval", 60) * 60 * 1000
        )

        if self.trading_mode != TradingMode.SPOT and load_leverage_tiers:
            self.fill_leverage_tiers()
        self.additional_exchange_init()
|
2021-10-22 12:37:56 +00:00
|
|
|
|
2018-08-14 17:51:49 +00:00
|
|
|
def __del__(self):
|
|
|
|
"""
|
|
|
|
Destructor - clean up async stuff
|
|
|
|
"""
|
2021-02-06 09:22:59 +00:00
|
|
|
self.close()
|
|
|
|
|
|
|
|
def close(self):
|
2022-10-23 07:50:13 +00:00
|
|
|
if self._exchange_ws:
|
|
|
|
self._exchange_ws.cleanup()
|
2018-08-14 17:51:49 +00:00
|
|
|
logger.debug("Exchange object destroyed, closing async loop")
|
2024-05-12 15:02:37 +00:00
|
|
|
if (
|
2024-06-16 07:56:03 +00:00
|
|
|
getattr(self, "_api_async", None)
|
2024-05-12 15:02:37 +00:00
|
|
|
and inspect.iscoroutinefunction(self._api_async.close)
|
|
|
|
and self._api_async.session
|
|
|
|
):
|
2022-09-07 16:28:14 +00:00
|
|
|
logger.debug("Closing async ccxt session.")
|
2021-12-31 15:34:15 +00:00
|
|
|
self.loop.run_until_complete(self._api_async.close())
|
2023-08-11 05:30:07 +00:00
|
|
|
if (
|
|
|
|
self._ws_async
|
|
|
|
and inspect.iscoroutinefunction(self._ws_async.close)
|
|
|
|
and self._ws_async.session
|
|
|
|
):
|
2023-08-10 19:23:57 +00:00
|
|
|
logger.debug("Closing ws ccxt session.")
|
|
|
|
self.loop.run_until_complete(self._ws_async.close())
|
|
|
|
|
2023-03-26 13:46:20 +00:00
|
|
|
if self.loop and not self.loop.is_closed():
|
|
|
|
self.loop.close()
|
2018-08-14 17:51:49 +00:00
|
|
|
|
2023-04-22 09:11:03 +00:00
|
|
|
def _init_async_loop(self) -> asyncio.AbstractEventLoop:
|
2023-04-16 15:45:56 +00:00
|
|
|
loop = asyncio.new_event_loop()
|
|
|
|
asyncio.set_event_loop(loop)
|
|
|
|
return loop
|
|
|
|
|
2024-08-23 16:18:05 +00:00
|
|
|
    def validate_config(self, config: Config) -> None:
        """
        Validate the bot configuration against this exchange's capabilities by
        delegating to the individual validate_* helpers (each is expected to
        raise on invalid settings — see their definitions).
        """
        # Check if timeframe is available
        self.validate_timeframes(config.get("timeframe"))

        # Check if all pairs are available
        self.validate_stakecurrency(config["stake_currency"])
        self.validate_ordertypes(config.get("order_types", {}))
        self.validate_order_time_in_force(config.get("order_time_in_force", {}))
        self.validate_trading_mode_and_margin_mode(self.trading_mode, self.margin_mode)
        self.validate_pricing(config["exit_pricing"])
        self.validate_pricing(config["entry_pricing"])
        self.validate_orderflow(config["exchange"])
        self.validate_freqai(config)
|
2022-07-16 11:27:13 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
    def _init_ccxt(
        self, exchange_config: dict[str, Any], sync: bool, ccxt_kwargs: dict[str, Any]
    ) -> ccxt.Exchange:
        """
        Initialize ccxt with given config and return valid ccxt instance.
        :param exchange_config: exchange section of the configuration
            (name, credentials, additional ccxt options)
        :param sync: True -> synchronous ccxt module, False -> ccxt.pro
            (falling back to ccxt.async_support if pro lacks the exchange)
        :param ccxt_kwargs: additional keyword arguments merged into the
            ccxt constructor config
        :raises OperationalException: if the exchange is unknown to ccxt or
            ccxt raises during initialization
        """
        # Find matching class for the given exchange name
        name = exchange_config["name"]
        if sync:
            ccxt_module = ccxt
        else:
            ccxt_module = ccxt_pro
            if not is_exchange_known_ccxt(name, ccxt_module):
                # Fall back to async if pro doesn't support this exchange
                import ccxt.async_support as ccxt_async

                ccxt_module = ccxt_async

        if not is_exchange_known_ccxt(name, ccxt_module):
            raise OperationalException(f"Exchange {name} is not supported by ccxt")

        # Credentials: accept both freqtrade-style (api_key, account_id, ...)
        # and ccxt-style (apiKey, accountId, ...) keys.
        ex_config = {
            "apiKey": exchange_config.get(
                "api_key", exchange_config.get("apiKey", exchange_config.get("key"))
            ),
            "secret": exchange_config.get("secret"),
            "password": exchange_config.get("password"),
            "uid": exchange_config.get("uid", ""),
            "accountId": exchange_config.get("account_id", exchange_config.get("accountId", "")),
            # DEX attributes:
            "walletAddress": exchange_config.get(
                "wallet_address", exchange_config.get("walletAddress")
            ),
            "privateKey": exchange_config.get("private_key", exchange_config.get("privateKey")),
        }
        if ccxt_kwargs:
            logger.info("Applying additional ccxt config: %s", ccxt_kwargs)
        if self._ccxt_params:
            # Inject static options after the above output to not confuse users.
            ccxt_kwargs = deep_merge_dicts(self._ccxt_params, ccxt_kwargs)
        # Re-check: the merge above may have made ccxt_kwargs non-empty
        if ccxt_kwargs:
            ex_config.update(ccxt_kwargs)
        try:
            api = getattr(ccxt_module, name.lower())(ex_config)
        except (KeyError, AttributeError) as e:
            raise OperationalException(f"Exchange {name} is not supported") from e
        except ccxt.BaseError as e:
            raise OperationalException(f"Initialization of ccxt failed. Reason: {e}") from e

        return api
|
|
|
|
|
2021-09-19 23:44:12 +00:00
|
|
|
@property
|
2024-10-04 04:46:45 +00:00
|
|
|
def _ccxt_config(self) -> dict:
|
2021-09-19 23:44:12 +00:00
|
|
|
# Parameters to add directly to ccxt sync/async initialization.
|
2021-11-13 11:00:47 +00:00
|
|
|
if self.trading_mode == TradingMode.MARGIN:
|
2024-05-12 15:02:37 +00:00
|
|
|
return {"options": {"defaultType": "margin"}}
|
2021-11-13 11:00:47 +00:00
|
|
|
elif self.trading_mode == TradingMode.FUTURES:
|
2024-05-12 15:02:37 +00:00
|
|
|
return {"options": {"defaultType": self._ft_has["ccxt_futures_name"]}}
|
2021-11-13 11:00:47 +00:00
|
|
|
else:
|
|
|
|
return {}
|
2021-09-19 23:44:12 +00:00
|
|
|
|
2018-06-18 20:20:50 +00:00
|
|
|
@property
|
|
|
|
def name(self) -> str:
|
|
|
|
"""exchange Name (from ccxt)"""
|
2018-06-18 20:07:15 +00:00
|
|
|
return self._api.name
|
2017-09-08 13:51:00 +00:00
|
|
|
|
2018-06-18 20:20:50 +00:00
|
|
|
@property
|
|
|
|
def id(self) -> str:
|
|
|
|
"""exchange ccxt id"""
|
2018-06-18 20:07:15 +00:00
|
|
|
return self._api.id
|
2017-10-06 10:22:04 +00:00
|
|
|
|
2019-09-29 20:08:11 +00:00
|
|
|
@property
|
2024-10-04 04:46:45 +00:00
|
|
|
def timeframes(self) -> list[str]:
|
2019-09-29 20:08:11 +00:00
|
|
|
return list((self._api.timeframes or {}).keys())
|
|
|
|
|
2019-03-04 22:59:08 +00:00
|
|
|
@property
|
2024-10-04 04:46:45 +00:00
|
|
|
def markets(self) -> dict[str, Any]:
|
2019-03-04 22:59:08 +00:00
|
|
|
"""exchange ccxt markets"""
|
2021-01-28 18:40:10 +00:00
|
|
|
if not self._markets:
|
2020-06-10 03:30:29 +00:00
|
|
|
logger.info("Markets were not loaded. Loading them now..")
|
2024-06-04 05:00:23 +00:00
|
|
|
self.reload_markets(True)
|
2021-01-28 18:40:10 +00:00
|
|
|
return self._markets
|
2019-03-04 22:59:08 +00:00
|
|
|
|
2020-01-12 13:37:45 +00:00
|
|
|
@property
|
2022-05-07 09:41:57 +00:00
|
|
|
def precisionMode(self) -> int:
|
2024-08-13 07:29:36 +00:00
|
|
|
"""Exchange ccxt precisionMode"""
|
2020-01-12 13:37:45 +00:00
|
|
|
return self._api.precisionMode
|
|
|
|
|
2024-08-13 07:11:44 +00:00
|
|
|
@property
|
|
|
|
def precision_mode_price(self) -> int:
|
2024-08-13 07:29:36 +00:00
|
|
|
"""
|
|
|
|
Exchange ccxt precisionMode used for price
|
|
|
|
Workaround for ccxt limitation to not have precisionMode for price
|
|
|
|
if it differs for an exchange
|
|
|
|
Might need to be updated if https://github.com/ccxt/ccxt/issues/20408 is fixed.
|
|
|
|
"""
|
2024-08-13 07:11:44 +00:00
|
|
|
return self._api.precisionMode
|
|
|
|
|
2022-05-07 08:56:13 +00:00
|
|
|
def additional_exchange_init(self) -> None:
|
|
|
|
"""
|
|
|
|
Additional exchange initialization logic.
|
|
|
|
.api will be available at this point.
|
|
|
|
Must be overridden in child methods if required.
|
|
|
|
"""
|
|
|
|
pass
|
|
|
|
|
2024-01-01 18:27:18 +00:00
|
|
|
def _log_exchange_response(self, endpoint: str, response, *, add_info=None) -> None:
|
2024-05-12 15:02:37 +00:00
|
|
|
"""Log exchange responses"""
|
2021-06-10 18:09:25 +00:00
|
|
|
if self.log_responses:
|
2024-01-02 07:53:13 +00:00
|
|
|
add_info_str = "" if add_info is None else f" {add_info}: "
|
2024-01-01 18:27:18 +00:00
|
|
|
logger.info(f"API {endpoint}: {add_info_str}{response}")
|
2021-06-10 18:09:25 +00:00
|
|
|
|
2022-05-14 07:51:44 +00:00
|
|
|
def ohlcv_candle_limit(
|
2024-05-12 15:02:37 +00:00
|
|
|
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
|
|
|
|
) -> int:
|
2021-02-14 09:29:45 +00:00
|
|
|
"""
|
|
|
|
Exchange ohlcv candle limit
|
2021-06-25 13:45:49 +00:00
|
|
|
Uses ohlcv_candle_limit_per_timeframe if the exchange has different limits
|
2021-02-14 09:29:45 +00:00
|
|
|
per timeframe (e.g. bittrex), otherwise falls back to ohlcv_candle_limit
|
2023-12-18 05:47:24 +00:00
|
|
|
TODO: this is most likely no longer needed since only bittrex needed this.
|
2021-02-14 09:29:45 +00:00
|
|
|
:param timeframe: Timeframe to check
|
2022-05-14 07:51:44 +00:00
|
|
|
:param candle_type: Candle-type
|
2022-05-15 15:06:40 +00:00
|
|
|
:param since_ms: Starting timestamp
|
2021-02-14 09:29:45 +00:00
|
|
|
:return: Candle limit as integer
|
|
|
|
"""
|
2024-05-12 15:02:37 +00:00
|
|
|
return int(
|
|
|
|
self._ft_has.get("ohlcv_candle_limit_per_timeframe", {}).get(
|
2024-09-04 05:15:17 +00:00
|
|
|
timeframe, str(self._ft_has.get("ohlcv_candle_limit"))
|
2024-05-12 15:02:37 +00:00
|
|
|
)
|
|
|
|
)
|
2021-02-14 09:29:45 +00:00
|
|
|
|
2024-04-20 07:17:25 +00:00
|
|
|
def get_markets(
|
2024-05-12 15:02:37 +00:00
|
|
|
self,
|
2024-10-04 04:46:45 +00:00
|
|
|
base_currencies: Optional[list[str]] = None,
|
|
|
|
quote_currencies: Optional[list[str]] = None,
|
2024-05-12 15:02:37 +00:00
|
|
|
spot_only: bool = False,
|
|
|
|
margin_only: bool = False,
|
|
|
|
futures_only: bool = False,
|
|
|
|
tradable_only: bool = True,
|
|
|
|
active_only: bool = False,
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> dict[str, Any]:
|
2019-10-13 10:12:20 +00:00
|
|
|
"""
|
|
|
|
Return exchange ccxt markets, filtered out by base currency and quote currency
|
|
|
|
if this was requested in parameters.
|
|
|
|
"""
|
|
|
|
markets = self.markets
|
2019-10-14 10:32:39 +00:00
|
|
|
if not markets:
|
|
|
|
raise OperationalException("Markets were not loaded.")
|
|
|
|
|
2019-10-16 23:09:19 +00:00
|
|
|
if base_currencies:
|
2024-05-12 15:02:37 +00:00
|
|
|
markets = {k: v for k, v in markets.items() if v["base"] in base_currencies}
|
2019-10-16 23:09:19 +00:00
|
|
|
if quote_currencies:
|
2024-05-12 15:02:37 +00:00
|
|
|
markets = {k: v for k, v in markets.items() if v["quote"] in quote_currencies}
|
2021-11-01 07:40:55 +00:00
|
|
|
if tradable_only:
|
2020-06-02 18:41:29 +00:00
|
|
|
markets = {k: v for k, v in markets.items() if self.market_is_tradable(v)}
|
2021-11-01 07:40:55 +00:00
|
|
|
if spot_only:
|
|
|
|
markets = {k: v for k, v in markets.items() if self.market_is_spot(v)}
|
|
|
|
if margin_only:
|
|
|
|
markets = {k: v for k, v in markets.items() if self.market_is_margin(v)}
|
|
|
|
if futures_only:
|
|
|
|
markets = {k: v for k, v in markets.items() if self.market_is_future(v)}
|
2019-10-13 10:12:20 +00:00
|
|
|
if active_only:
|
2019-10-14 10:32:39 +00:00
|
|
|
markets = {k: v for k, v in markets.items() if market_is_active(v)}
|
2019-10-13 10:12:20 +00:00
|
|
|
return markets
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def get_quote_currencies(self) -> list[str]:
|
2020-01-11 10:53:44 +00:00
|
|
|
"""
|
|
|
|
Return a list of supported quote currencies
|
|
|
|
"""
|
|
|
|
markets = self.markets
|
2024-05-12 15:02:37 +00:00
|
|
|
return sorted(set([x["quote"] for _, x in markets.items()]))
|
2020-01-11 10:53:44 +00:00
|
|
|
|
2020-02-24 20:50:27 +00:00
|
|
|
def get_pair_quote_currency(self, pair: str) -> str:
|
2024-05-12 15:02:37 +00:00
|
|
|
"""Return a pair's quote currency (base/quote:settlement)"""
|
|
|
|
return self.markets.get(pair, {}).get("quote", "")
|
2020-02-24 20:50:27 +00:00
|
|
|
|
|
|
|
def get_pair_base_currency(self, pair: str) -> str:
|
2024-05-12 15:02:37 +00:00
|
|
|
"""Return a pair's base currency (base/quote:settlement)"""
|
|
|
|
return self.markets.get(pair, {}).get("base", "")
|
2020-02-24 20:50:27 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def market_is_future(self, market: dict[str, Any]) -> bool:
|
2022-02-14 22:53:29 +00:00
|
|
|
return (
|
2024-05-12 15:02:37 +00:00
|
|
|
market.get(self._ft_has["ccxt_futures_name"], False) is True
|
|
|
|
and market.get("linear", False) is True
|
2022-02-14 22:53:29 +00:00
|
|
|
)
|
2021-11-01 07:40:55 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def market_is_spot(self, market: dict[str, Any]) -> bool:
|
2024-05-12 15:02:37 +00:00
|
|
|
return market.get("spot", False) is True
|
2021-11-01 07:40:55 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def market_is_margin(self, market: dict[str, Any]) -> bool:
|
2024-05-12 15:02:37 +00:00
|
|
|
return market.get("margin", False) is True
|
2021-11-01 07:40:55 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def market_is_tradable(self, market: dict[str, Any]) -> bool:
|
2020-06-02 18:29:48 +00:00
|
|
|
"""
|
|
|
|
Check if the market symbol is tradable by Freqtrade.
|
2021-11-01 07:40:55 +00:00
|
|
|
Ensures that Configured mode aligns to
|
2020-06-02 18:29:48 +00:00
|
|
|
"""
|
2021-11-01 07:40:55 +00:00
|
|
|
return (
|
2024-05-12 15:02:37 +00:00
|
|
|
market.get("quote", None) is not None
|
|
|
|
and market.get("base", None) is not None
|
|
|
|
and (
|
|
|
|
self.precisionMode != TICK_SIZE
|
|
|
|
# Too low precision will falsify calculations
|
|
|
|
or market.get("precision", {}).get("price") > 1e-11
|
|
|
|
)
|
|
|
|
and (
|
|
|
|
(self.trading_mode == TradingMode.SPOT and self.market_is_spot(market))
|
|
|
|
or (self.trading_mode == TradingMode.MARGIN and self.market_is_margin(market))
|
|
|
|
or (self.trading_mode == TradingMode.FUTURES and self.market_is_future(market))
|
|
|
|
)
|
2021-11-01 07:40:55 +00:00
|
|
|
)
|
2020-06-02 18:29:48 +00:00
|
|
|
|
2021-12-03 13:11:24 +00:00
|
|
|
def klines(self, pair_interval: PairWithTimeframe, copy: bool = True) -> DataFrame:
|
2018-12-30 06:15:21 +00:00
|
|
|
if pair_interval in self._klines:
|
|
|
|
return self._klines[pair_interval].copy() if copy else self._klines[pair_interval]
|
2018-12-11 18:47:48 +00:00
|
|
|
else:
|
2018-12-29 12:00:50 +00:00
|
|
|
return DataFrame()
|
2018-12-11 18:47:48 +00:00
|
|
|
|
2023-04-26 13:14:45 +00:00
|
|
|
def trades(self, pair_interval: PairWithTimeframe, copy: bool = True) -> DataFrame:
|
|
|
|
if pair_interval in self._trades:
|
|
|
|
if copy:
|
2024-02-07 11:45:39 +00:00
|
|
|
return self._trades[pair_interval].copy()
|
2023-04-26 13:14:45 +00:00
|
|
|
else:
|
|
|
|
return self._trades[pair_interval]
|
|
|
|
else:
|
2024-08-10 15:51:46 +00:00
|
|
|
return DataFrame(columns=DEFAULT_TRADES_COLUMNS)
|
2023-04-26 13:14:45 +00:00
|
|
|
|
2022-10-17 10:02:55 +00:00
|
|
|
def get_contract_size(self, pair: str) -> Optional[float]:
    """
    Get the contract size for a pair.
    :param pair: Market symbol
    :return: Contract size as float (1.0 when not set by the exchange),
        or None when trading futures and the pair is unknown.
    """
    if self.trading_mode != TradingMode.FUTURES:
        # Non-futures markets always trade in units of the base currency.
        # Return 1.0 (not int 1) for consistency with the annotated return type.
        return 1.0
    market = self.markets.get(pair, {})
    if not market:
        # Unknown pair - callers must handle None.
        return None
    contract_size: float = 1.0
    if market.get("contractSize") is not None:
        # ccxt has contractSize in markets as string
        contract_size = float(market["contractSize"])
    return contract_size
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def _trades_contracts_to_amount(self, trades: list) -> list:
|
2024-05-12 15:02:37 +00:00
|
|
|
if len(trades) > 0 and "symbol" in trades[0]:
|
|
|
|
contract_size = self.get_contract_size(trades[0]["symbol"])
|
2021-12-19 06:56:34 +00:00
|
|
|
if contract_size != 1:
|
|
|
|
for trade in trades:
|
2024-05-12 15:02:37 +00:00
|
|
|
trade["amount"] = trade["amount"] * contract_size
|
2021-12-31 10:29:19 +00:00
|
|
|
return trades
|
2021-12-19 06:56:34 +00:00
|
|
|
|
2024-11-04 18:23:17 +00:00
|
|
|
def _order_contracts_to_amount(self, order: CcxtOrder) -> CcxtOrder:
    """
    Convert contract-denominated order properties to base-currency units, in place.
    The properties to convert are listed in _ft_has["order_props_in_contracts"].
    """
    if order.get("symbol") is not None:
        c_size = self.get_contract_size(order["symbol"])
        if c_size != 1:
            for prop in self._ft_has.get("order_props_in_contracts", []):
                if order.get(prop) is not None:
                    order[prop] = order[prop] * c_size
    return order
|
|
|
|
|
2022-03-30 18:02:56 +00:00
|
|
|
def _amount_to_contracts(self, pair: str, amount: float) -> float:
    """Convert an amount in base currency into the equivalent number of contracts."""
    return amount_to_contracts(amount, self.get_contract_size(pair))
|
2022-01-01 20:08:10 +00:00
|
|
|
|
2022-03-30 18:02:56 +00:00
|
|
|
def _contracts_to_amount(self, pair: str, num_contracts: float) -> float:
    """Convert a number of contracts into the equivalent base-currency amount."""
    return contracts_to_amount(num_contracts, self.get_contract_size(pair))
|
2022-01-01 20:08:10 +00:00
|
|
|
|
2022-09-07 04:34:03 +00:00
|
|
|
def amount_to_contract_precision(self, pair: str, amount: float) -> float:
    """
    Helper wrapper around the module-level amount_to_contract_precision,
    supplying this pair's precision, precision mode and contract size.
    """
    return amount_to_contract_precision(
        amount,
        self.get_precision_amount(pair),
        self.precisionMode,
        self.get_contract_size(pair),
    )
|
2022-09-07 04:34:03 +00:00
|
|
|
|
2024-05-31 04:52:11 +00:00
|
|
|
def ws_connection_reset(self):
    """
    Called at regular intervals to reset the websocket connection.
    No-op when no websocket handler is configured.
    """
    if not self._exchange_ws:
        return
    self._exchange_ws.reset_connections()
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
async def _api_reload_markets(self, reload: bool = False) -> dict[str, Any]:
    """
    Load (or reload) markets on the async ccxt instance.
    :param reload: Force a fresh load from the exchange instead of ccxt's cache.
    :return: ccxt markets dict
    :raises DDosProtection: for ccxt DDoS-protection errors (retriable upstream)
    :raises TemporaryError: for all other ccxt errors
    """
    try:
        return await self._api_async.load_markets(reload=reload, params={})
    # NOTE: order of except clauses matters - more specific ccxt errors first.
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Error in reload_markets due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise TemporaryError(e) from e
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def _load_async_markets(self, reload: bool = False) -> dict[str, Any]:
|
2018-08-10 11:04:43 +00:00
|
|
|
try:
|
2024-09-13 05:16:05 +00:00
|
|
|
markets = self.loop.run_until_complete(self._api_reload_markets(reload=reload))
|
2018-08-10 11:04:43 +00:00
|
|
|
|
2024-06-04 17:42:04 +00:00
|
|
|
if isinstance(markets, Exception):
|
|
|
|
raise markets
|
|
|
|
return markets
|
2024-06-04 17:01:00 +00:00
|
|
|
except asyncio.TimeoutError as e:
|
|
|
|
logger.warning("Could not load markets. Reason: %s", e)
|
|
|
|
raise TemporaryError from e
|
2022-03-26 13:57:42 +00:00
|
|
|
|
2024-06-04 05:00:23 +00:00
|
|
|
def reload_markets(self, force: bool = False, *, load_leverage_tiers: bool = True) -> None:
    """
    Reload / Initialize markets both sync and async if refresh interval has passed

    :param force: Skip the refresh-interval check and reload unconditionally.
    :param load_leverage_tiers: Also refresh leverage tiers (futures mode only).
    """
    # Check whether markets have to be reloaded
    is_initial = self._last_markets_refresh == 0
    if (
        not force
        and self._last_markets_refresh > 0
        and (self._last_markets_refresh + self.markets_refresh_interval > dt_ts())
    ):
        # Refresh interval has not passed yet - keep the cached markets.
        return None
    logger.debug("Performing scheduled market reload..")
    try:
        # When forced (which includes the initial load), retry up to 3 times
        # to ensure we actually get the markets.
        retries: int = 3 if force else 0
        # Reload async markets, then assign them to sync api
        self._markets = retrier(self._load_async_markets, retries=retries)(reload=True)
        self._api.set_markets(self._api_async.markets, self._api_async.currencies)
        # Assign options array, as it contains some temporary information from the exchange.
        self._api.options = self._api_async.options
        if self._exchange_ws:
            # Set markets to avoid reloading on websocket api
            self._ws_async.set_markets(self._api.markets, self._api.currencies)
            self._ws_async.options = self._api.options
        self._last_markets_refresh = dt_ts()

        if is_initial and self._ft_has["needs_trading_fees"]:
            # Only fetched once, on the very first load.
            self._trading_fees = self.fetch_trading_fees()

        if load_leverage_tiers and self.trading_mode == TradingMode.FUTURES:
            self.fill_leverage_tiers()
    except (ccxt.BaseError, TemporaryError):
        # Best-effort: log and continue with previously loaded markets (if any).
        logger.exception("Could not load markets.")
|
2018-09-10 18:19:12 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def validate_stakecurrency(self, stake_currency: str) -> None:
    """
    Validate the configured stake currency against the exchange's quote currencies.
    Only runs on startup; empty markets indicate a broken exchange connection.
    :param stake_currency: Stake-currency to validate
    :raise: OperationalException if markets could not be loaded.
    :raise: ConfigurationError if stake-currency is not available.
    """
    if not self._markets:
        raise OperationalException(
            "Could not load markets, therefore cannot start. "
            "Please investigate the above error for more details."
        )
    quotes = self.get_quote_currencies()
    if stake_currency in quotes:
        return
    raise ConfigurationError(
        f"{stake_currency} is not available as stake on {self.name}. "
        f"Available currencies are: {', '.join(quotes)}"
    )
|
2020-01-11 10:53:44 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def get_valid_pair_combination(self, curr_1: str, curr_2: str) -> str:
    """
    Get valid pair combination of curr_1 and curr_2 by trying both combinations.
    :raises ValueError: if neither combination is an active market.
    """
    for candidate in (f"{curr_1}/{curr_2}", f"{curr_2}/{curr_1}"):
        market = self.markets.get(candidate)
        if market is not None and market.get("active"):
            return candidate
    raise ValueError(f"Could not combine {curr_1} and {curr_2} to get a valid pair.")
|
2019-07-03 18:06:50 +00:00
|
|
|
|
2019-09-29 20:08:11 +00:00
|
|
|
def validate_timeframes(self, timeframe: Optional[str]) -> None:
    """
    Check if timeframe from config is a supported timeframe on the exchange
    :raises OperationalException: when ccxt exposes no timeframes for this exchange.
    :raises ConfigurationError: when the timeframe is unsupported or below 1 minute.
    """
    if getattr(self._api, "timeframes", None) is None:
        # A missing (or None) timeframes mapping means ccxt probably has no
        # fetchOHLCV for this exchange - surface that in the message.
        raise OperationalException(
            f"The ccxt library does not provide the list of timeframes "
            f"for the exchange {self.name} and this exchange "
            f"is therefore not supported. ccxt fetchOHLCV: {self.exchange_has('fetchOHLCV')}"
        )

    if timeframe:
        if timeframe not in self.timeframes:
            raise ConfigurationError(
                f"Invalid timeframe '{timeframe}'. This exchange supports: {self.timeframes}"
            )
        if (
            self._config["runmode"] != RunMode.UTIL_EXCHANGE
            and timeframe_to_minutes(timeframe) < 1
        ):
            raise ConfigurationError(
                "Timeframes < 1m are currently not supported by Freqtrade."
            )
|
2020-01-11 10:36:28 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def validate_ordertypes(self, order_types: dict) -> None:
    """
    Checks if order-types configured in strategy/config are supported
    :raises ConfigurationError: when market orders are configured but unsupported.
    """
    uses_market_orders = "market" in order_types.values()
    if uses_market_orders and not self.exchange_has("createMarketOrder"):
        raise ConfigurationError(f"Exchange {self.name} does not support market orders.")
    self.validate_stop_ordertypes(order_types)
|
2018-11-17 18:54:55 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def validate_stop_ordertypes(self, order_types: dict) -> None:
    """
    Validate stoploss order types
    :raises ConfigurationError: when on-exchange stoploss (or its price type)
        is configured but not supported by this exchange.
    """
    stoploss_requested = order_types.get("stoploss_on_exchange")
    if stoploss_requested and not self._ft_has.get("stoploss_on_exchange", False):
        raise ConfigurationError(f"On exchange stoploss is not supported for {self.name}.")
    if self.trading_mode != TradingMode.FUTURES:
        return
    supported_price_types = self._ft_has.get("stop_price_type_value_mapping", {}).keys()
    if (
        order_types.get("stoploss_on_exchange", False) is True
        and "stoploss_price_type" in order_types
        and order_types["stoploss_price_type"] not in supported_price_types
    ):
        raise ConfigurationError(
            f"On exchange stoploss price type is not supported for {self.name}."
        )
|
2018-11-25 16:22:56 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def validate_pricing(self, pricing: dict) -> None:
    """
    Ensure the configured pricing source (orderbook or ticker) works on this exchange.
    :raises ConfigurationError: when the required data source is unavailable.
    """
    if pricing.get("use_order_book", False):
        if not self.exchange_has("fetchL2OrderBook"):
            raise ConfigurationError(f"Orderbook not available for {self.name}.")
    elif not self.exchange_has("fetchTicker") or not self._ft_has["tickers_have_price"]:
        raise ConfigurationError(f"Ticker pricing not available for {self.name}.")
|
2022-03-18 16:07:12 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def validate_order_time_in_force(self, order_time_in_force: dict) -> None:
    """
    Checks if order time in force configured in strategy/config are supported
    :raises ConfigurationError: when any configured policy is unsupported.
    """
    supported = self._ft_has["order_time_in_force"]
    if not all(tif.upper() in supported for tif in order_time_in_force.values()):
        raise ConfigurationError(
            f"Time in force policies are not supported for {self.name} yet."
        )
|
2018-11-25 20:09:35 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def validate_orderflow(self, exchange: dict) -> None:
    """
    Ensure public-trades based orderflow can be used on this exchange.
    :raises ConfigurationError: when historic trade data is unavailable.
    """
    if not exchange.get("use_public_trades", False):
        return
    if not self.exchange_has("fetchTrades") or not self._ft_has["trades_has_history"]:
        raise ConfigurationError(
            f"Trade data not available for {self.name}. Can't use orderflow feature."
        )
|
|
|
|
|
2024-08-23 16:16:01 +00:00
|
|
|
def validate_freqai(self, config: Config) -> None:
    """
    Ensure freqAI can run on this exchange (requires historic OHLCV data).
    :raises ConfigurationError: when freqAI is enabled but history is unavailable.
    """
    if not config.get("freqai", {}).get("enabled", False):
        return
    if not self._ft_has["ohlcv_has_history"]:
        raise ConfigurationError(
            f"Historic OHLCV data not available for {self.name}. Can't use freqAI."
        )
|
|
|
|
|
2021-11-07 12:14:29 +00:00
|
|
|
def validate_required_startup_candles(self, startup_candles: int, timeframe: str) -> int:
    """
    Checks if required startup_candles is more than ohlcv_candle_limit().
    Requires a grace-period of 5 candles - so a startup-period up to 494 is allowed by default.
    :return: Number of exchange calls needed per pair to cover the startup period.
    """
    candle_limit = self.ohlcv_candle_limit(
        timeframe,
        self._config["candle_type_def"],
        dt_ts(date_minus_candles(timeframe, startup_candles)) if timeframe else None,
    )
    # Require one more candle - to account for the still open candle.
    needed = startup_candles + 1
    # Round up - a partial batch still needs its own call.
    required_calls = int((needed / candle_limit) + (0 if needed % candle_limit == 0 else 1))
    if self._ft_has["ohlcv_has_history"]:
        if required_calls > 5:
            # Only allow 5 calls per pair to somewhat limit the impact
            raise ConfigurationError(
                f"This strategy requires {startup_candles} candles to start, "
                "which is more than 5x "
                f"the amount of candles {self.name} provides for {timeframe}."
            )
    elif required_calls > 1:
        # Without history support, everything must fit in a single call.
        raise ConfigurationError(
            f"This strategy requires {startup_candles} candles to start, which is more than "
            f"the amount of candles {self.name} provides for {timeframe}."
        )
    if required_calls > 1:
        logger.warning(
            f"Using {required_calls} calls to get OHLCV. "
            f"This can result in slower operations for the bot. Please check "
            f"if you really need {startup_candles} candles for your strategy"
        )
    return required_calls
|
2019-10-27 09:38:21 +00:00
|
|
|
|
2022-02-01 18:53:38 +00:00
|
|
|
def validate_trading_mode_and_margin_mode(
    self,
    trading_mode: TradingMode,
    margin_mode: Optional[MarginMode],  # Only None when trading_mode = TradingMode.SPOT
):
    """
    Checks if freqtrade can perform trades using the configured
    trading mode (Margin, Futures) and MarginMode (Cross, Isolated).
    :raises OperationalException: if the trading_mode/margin_mode combination
        is not supported by freqtrade on this exchange.
    """
    if trading_mode == TradingMode.SPOT:
        # Spot needs no margin mode and is always supported.
        return
    if (trading_mode, margin_mode) in self._supported_trading_mode_margin_pairs:
        return
    mm_value = margin_mode and margin_mode.value
    raise OperationalException(
        f"Freqtrade does not support {mm_value} {trading_mode} on {self.name}"
    )
|
|
|
|
|
2023-01-21 14:01:56 +00:00
|
|
|
def get_option(self, param: str, default: Optional[Any] = None) -> Any:
    """
    Look up an exchange option from the _ft_has feature dict.
    :param param: Option name
    :param default: Value returned when the option is not set
    """
    try:
        return self._ft_has[param]
    except KeyError:
        return default
|
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
def exchange_has(self, endpoint: str) -> bool:
    """
    Checks if exchange implements a specific API endpoint.
    Wrapper around ccxt 'has' attribute
    :param endpoint: Name of endpoint (e.g. 'fetchOHLCV', 'fetchTickers')
    :return: bool
    """
    # freqtrade-level overrides take precedence over ccxt's capabilities.
    overrides = self._ft_has.get("exchange_has_overrides", {})
    if endpoint in overrides:
        return overrides[endpoint]
    ccxt_has = self._api_async.has
    return endpoint in ccxt_has and ccxt_has[endpoint]
|
2018-06-17 10:41:33 +00:00
|
|
|
|
2022-08-15 17:56:25 +00:00
|
|
|
def get_precision_amount(self, pair: str) -> Optional[float]:
    """
    Returns the amount precision of the exchange.
    :param pair: Pair to get precision for
    :return: precision for amount or None. Must be used in combination with precisionMode
    """
    market = self.markets.get(pair, {})
    return market.get("precision", {}).get("amount", None)
|
2022-08-15 17:56:25 +00:00
|
|
|
|
|
|
|
def get_precision_price(self, pair: str) -> Optional[float]:
    """
    Returns the price precision of the exchange.
    :param pair: Pair to get precision for
    :return: precision for price or None. Must be used in combination with precisionMode
    """
    market = self.markets.get(pair, {})
    return market.get("precision", {}).get("price", None)
|
2022-08-15 17:56:25 +00:00
|
|
|
|
2020-02-02 04:00:40 +00:00
|
|
|
def amount_to_precision(self, pair: str, amount: float) -> float:
    """
    Returns the amount to buy or sell rounded to the precision the Exchange accepts.
    """
    pair_precision = self.get_precision_amount(pair)
    return amount_to_precision(amount, pair_precision, self.precisionMode)
|
2018-07-30 16:49:58 +00:00
|
|
|
|
2023-03-26 08:49:50 +00:00
|
|
|
def price_to_precision(self, pair: str, price: float, *, rounding_mode: int = ROUND) -> float:
    """
    Returns the price rounded to the precision the Exchange accepts.
    The default price_rounding_mode in conf is ROUND.
    For stoploss calculations, must use ROUND_UP for longs, and ROUND_DOWN for shorts.
    """
    pair_precision = self.get_precision_price(pair)
    return price_to_precision(
        price,
        pair_precision,
        self.precision_mode_price,
        rounding_mode=rounding_mode,
    )
|
2018-07-30 16:49:58 +00:00
|
|
|
|
2020-04-15 05:19:27 +00:00
|
|
|
def price_get_one_pip(self, pair: str, price: float) -> float:
    """
    Gets the "1 pip" value for this pair.
    Used in PriceFilter to calculate the 1pip movements.
    """
    precision = self.markets[pair]["precision"]["price"]
    # With TICK_SIZE, precision already is the smallest price increment;
    # otherwise it is a number of decimal places.
    return precision if self.precisionMode == TICK_SIZE else 1 / pow(10, precision)
|
|
|
|
|
2022-02-02 02:24:06 +00:00
|
|
|
def get_min_pair_stake_amount(
    self, pair: str, price: float, stoploss: float, leverage: Optional[float] = 1.0
) -> Optional[float]:
    """Minimum stake amount for this pair, honoring exchange limits and stoploss reserve."""
    return self._get_stake_amount_limit(pair, price, stoploss, "min", leverage)
|
2022-02-02 02:24:06 +00:00
|
|
|
|
2022-03-30 18:02:56 +00:00
|
|
|
def get_max_pair_stake_amount(self, pair: str, price: float, leverage: float = 1.0) -> float:
    """Maximum stake amount allowed for this pair by the exchange limits."""
    result = self._get_stake_amount_limit(pair, price, 0.0, "max", leverage)
    if result is None:
        # * Should never be executed
        raise OperationalException(
            f"{self.name}.get_max_pair_stake_amount should never set max_stake_amount to None"
        )
    return result
|
2022-02-02 02:24:06 +00:00
|
|
|
|
|
|
|
def _get_stake_amount_limit(
    self,
    pair: str,
    price: float,
    stoploss: float,
    limit: Literal["min", "max"],
    leverage: Optional[float] = 1.0,
) -> Optional[float]:
    """
    Compute the min or max stake amount for a pair from the exchange's
    cost- and amount-limits, including safety reserves for the min case.
    :param pair: Market symbol
    :param price: Current price used to convert amount-limits to stake currency
    :param stoploss: Configured stoploss (relative, e.g. -0.05); only used for "min"
    :param limit: Which boundary to compute ("min" or "max")
    :param leverage: Leverage in use; the result is scaled down by it
    :return: Stake amount, None when no "min" limit exists, inf when no "max" exists
    :raises ValueError: if the pair is not in self.markets
    """
    isMin = limit == "min"

    try:
        market = self.markets[pair]
    except KeyError:
        raise ValueError(f"Can't get market information for symbol {pair}")

    if isMin:
        # reserve some percent defined in config (5% default) + stoploss
        margin_reserve: float = 1.0 + self._config.get(
            "amount_reserve_percent", DEFAULT_AMOUNT_RESERVE_PERCENT
        )
        # Scale the reserve up so the min-stake survives the stoploss distance.
        stoploss_reserve = margin_reserve / (1 - abs(stoploss)) if abs(stoploss) != 1 else 1.5
        # it should not be more than 50%
        stoploss_reserve = max(min(stoploss_reserve, 1.5), 1)
    else:
        # No reserves when computing the maximum.
        margin_reserve = 1.0
        stoploss_reserve = 1.0

    stake_limits = []
    limits = market["limits"]
    if limits["cost"][limit] is not None:
        # Cost limit is already in stake currency; only convert contract sizing.
        stake_limits.append(
            self._contracts_to_amount(pair, limits["cost"][limit]) * stoploss_reserve
        )

    if limits["amount"][limit] is not None:
        # Amount limit is in base currency - multiply by price to get stake currency.
        stake_limits.append(
            self._contracts_to_amount(pair, limits["amount"][limit]) * price * margin_reserve
        )

    if not stake_limits:
        # No limits published by the exchange.
        return None if isMin else float("inf")

    # The value returned should satisfy both limits: for amount (base currency) and
    # for cost (quote, stake currency), so max() is used here.
    # See also #2575 at github.
    return self._get_stake_amount_considering_leverage(
        max(stake_limits) if isMin else min(stake_limits), leverage or 1.0
    )
|
2021-07-24 07:32:42 +00:00
|
|
|
|
2022-03-30 18:02:56 +00:00
|
|
|
def _get_stake_amount_considering_leverage(self, stake_amount: float, leverage: float) -> float:
|
2021-11-17 13:00:53 +00:00
|
|
|
"""
|
|
|
|
Takes the minimum stake amount for a pair with no leverage and returns the minimum
|
|
|
|
stake amount when leverage is considered
|
|
|
|
:param stake_amount: The stake amount for a pair before leverage is considered
|
|
|
|
:param leverage: The amount of leverage being used on the current trade
|
|
|
|
"""
|
|
|
|
return stake_amount / leverage
|
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
# Dry-run methods
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def create_dry_run_order(
    self,
    pair: str,
    ordertype: str,
    side: BuySell,
    amount: float,
    rate: float,
    leverage: float,
    params: Optional[dict] = None,
    stop_loss: bool = False,
) -> CcxtOrder:
    """
    Simulate an order without talking to the exchange.
    Limit orders crossing the spread by >1% are converted to market orders;
    market orders are filled immediately using orderbook interpolation.
    The resulting order is stored in self._dry_run_open_orders.
    :param pair: Market symbol
    :param ordertype: "limit" or "market"
    :param side: "buy" or "sell"
    :param amount: Order amount (base currency)
    :param rate: Requested price
    :param leverage: Leverage (unused in the simulated fill itself)
    :param params: Unused here; kept for signature parity with live orders
    :param stop_loss: Mark the order as a simulated stoploss order
    :return: ccxt-like order dict
    """
    now = dt_now()
    order_id = f"dry_run_{side}_{pair}_{now.timestamp()}"
    # Rounding here must respect to contract sizes
    _amount = self._contracts_to_amount(
        pair, self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
    )
    dry_order: CcxtOrder = {
        "id": order_id,
        "symbol": pair,
        "price": rate,
        "average": rate,
        "amount": _amount,
        "cost": _amount * rate,
        "type": ordertype,
        "side": side,
        "filled": 0,
        "remaining": _amount,
        "datetime": now.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
        "timestamp": dt_ts(now),
        "status": "open",
        "fee": None,
        "info": {},
    }
    if stop_loss:
        dry_order["info"] = {"stopPrice": dry_order["price"]}
        dry_order[self._ft_has["stop_price_prop"]] = dry_order["price"]
        # Workaround to avoid filling stoploss orders immediately
        dry_order["ft_order_type"] = "stoploss"
    orderbook: Optional[OrderBook] = None
    if self.exchange_has("fetchL2OrderBook"):
        orderbook = self.fetch_l2_order_book(pair, 20)
    if ordertype == "limit" and orderbook:
        # Allow a 1% price difference
        allowed_diff = 0.01
        if self._dry_is_price_crossed(pair, side, rate, orderbook, allowed_diff):
            logger.info(
                f"Converted order {pair} to market order due to price {rate} crossing spread "
                f"by more than {allowed_diff:.2%}."
            )
            dry_order["type"] = "market"

    if dry_order["type"] == "market" and not dry_order.get("ft_order_type"):
        # Update market order pricing
        average = self.get_dry_market_fill_price(pair, side, amount, rate, orderbook)
        dry_order.update(
            {
                "average": average,
                "filled": _amount,
                "remaining": 0.0,
                "status": "closed",
                "cost": (_amount * average),
            }
        )
        # market orders will always incurr taker fees
        dry_order = self.add_dry_order_fee(pair, dry_order, "taker")

    # Limit orders may also fill immediately if the book already crosses them.
    dry_order = self.check_dry_limit_order_filled(
        dry_order, immediate=True, orderbook=orderbook
    )

    self._dry_run_open_orders[dry_order["id"]] = dry_order
    # Copy order and close it - so the returned order is open unless it's a market order
    return dry_order
|
2018-04-06 07:57:08 +00:00
|
|
|
|
2022-05-04 04:56:55 +00:00
|
|
|
def add_dry_order_fee(
    self,
    pair: str,
    dry_order: CcxtOrder,
    taker_or_maker: MakerTaker,
) -> CcxtOrder:
    """
    Attach a simulated fee entry to a dry-run order, based on the exchange's
    fee rate for the given taker/maker role. Fees are charged in quote currency.
    """
    fee_rate = self.get_fee(pair, taker_or_maker=taker_or_maker)
    dry_order["fee"] = {
        "currency": self.get_pair_quote_currency(pair),
        "cost": dry_order["cost"] * fee_rate,
        "rate": fee_rate,
    }
    return dry_order
|
2021-06-03 18:55:18 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def get_dry_market_fill_price(
    self, pair: str, side: str, amount: float, rate: float, orderbook: Optional[OrderBook]
) -> float:
    """
    Get the market order fill price based on orderbook interpolation
    :param pair: Pair the simulated market order is for
    :param side: "buy" or "sell"
    :param amount: Order amount in base currency
    :param rate: Reference rate - used as fallback result and as basis for the slippage cap
    :param orderbook: Pre-fetched orderbook; fetched on demand when None
    :return: Estimated average fill price, rounded to exchange precision
    """
    if self.exchange_has("fetchL2OrderBook"):
        if not orderbook:
            orderbook = self.fetch_l2_order_book(pair, 20)
        # Buys consume the ask side, sells consume the bid side.
        ob_type: OBLiteral = "asks" if side == "buy" else "bids"
        slippage = 0.05
        # Worst acceptable average price: 5% above (buy) / below (sell) the reference rate.
        max_slippage_val = rate * ((1 + slippage) if side == "buy" else (1 - slippage))

        remaining_amount = amount
        filled_value = 0.0
        book_entry_price = 0.0
        # Walk the book level by level, consuming liquidity until the amount is filled.
        for book_entry in orderbook[ob_type]:
            book_entry_price = book_entry[0]
            book_entry_coin_volume = book_entry[1]
            if remaining_amount > 0:
                if remaining_amount < book_entry_coin_volume:
                    # Orderbook at this slot bigger than remaining amount
                    filled_value += remaining_amount * book_entry_price
                    break
                else:
                    filled_value += book_entry_coin_volume * book_entry_price
                    remaining_amount -= book_entry_coin_volume
            else:
                break
        else:
            # If remaining_amount wasn't consumed completely (break was not called)
            # the leftover is assumed to fill at the deepest price level seen.
            filled_value += remaining_amount * book_entry_price
        forecast_avg_filled_price = max(filled_value, 0) / amount
        # Limit max. slippage to specified value
        if side == "buy":
            forecast_avg_filled_price = min(forecast_avg_filled_price, max_slippage_val)

        else:
            forecast_avg_filled_price = max(forecast_avg_filled_price, max_slippage_val)

        return self.price_to_precision(pair, forecast_avg_filled_price)

    # No orderbook support - fall back to the caller-provided rate unchanged.
    return rate
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def _dry_is_price_crossed(
    self,
    pair: str,
    side: str,
    limit: float,
    orderbook: Optional[OrderBook] = None,
    offset: float = 0.0,
) -> bool:
    """
    Determine whether a dry-run limit order at ``limit`` would fill against
    the current top-of-book price.
    :param pair: Pair to check
    :param side: "buy" or "sell"
    :param limit: Limit price of the simulated order
    :param orderbook: Pre-fetched orderbook; fetched on demand when None
    :param offset: Relative offset applied to the limit before comparing
    :return: True if the order would fill (or no orderbook support exists)
    """
    # Without L2 orderbook support we optimistically assume an immediate fill.
    if not self.exchange_has("fetchL2OrderBook"):
        return True
    book = orderbook or self.fetch_l2_order_book(pair, 1)
    try:
        if side == "buy":
            best_ask = book["asks"][0][0]
            return limit * (1 - offset) >= best_ask
        best_bid = book["bids"][0][0]
        return limit * (1 + offset) <= best_bid
    except IndexError:
        # Empty orderbook side - not crossed now; may fill on a later check.
        return False
|
2019-02-23 15:03:15 +00:00
|
|
|
|
2022-07-16 15:03:45 +00:00
|
|
|
def check_dry_limit_order_filled(
    self, order: CcxtOrder, immediate: bool = False, orderbook: Optional[OrderBook] = None
) -> CcxtOrder:
    """
    Check dry-run limit order fill and update fee (if it filled).
    :param order: Dry-run order dict to (potentially) close
    :param immediate: True when checked right after placement - taker fee applies
    :param orderbook: Pre-fetched orderbook passed through to the price check
    :return: The (possibly updated) order dict
    """
    still_open_limit = (
        order["status"] != "closed"
        and order["type"] in ["limit"]
        and not order.get("ft_order_type")
    )
    if still_open_limit:
        pair = order["symbol"]
        if self._dry_is_price_crossed(pair, order["side"], order["price"], orderbook):
            # Price crossed - mark the order as fully filled.
            order["status"] = "closed"
            order["filled"] = order["amount"]
            order["remaining"] = 0

            # Immediate fills behave like market orders (taker fee).
            self.add_dry_order_fee(
                pair,
                order,
                "taker" if immediate else "maker",
            )

    return order
|
2019-02-23 15:03:15 +00:00
|
|
|
|
2024-11-04 06:23:25 +00:00
|
|
|
def fetch_dry_run_order(self, order_id) -> CcxtOrder:
    """
    Return dry-run order
    Only call if running in dry-run mode.
    :param order_id: Id of the cached dry-run order
    :raises InvalidOrderException: if the order is neither cached nor found in the database
    """
    try:
        order = self._dry_run_open_orders[order_id]
        # Simulate a (potential) fill before handing the order back.
        order = self.check_dry_limit_order_filled(order)
        return order
    except KeyError as e:
        # Imported locally - presumably to avoid an import cycle with persistence
        # (TODO confirm).
        from freqtrade.persistence import Order

        # Not in the in-memory cache (e.g. after a restart) - rebuild from the database.
        order = Order.order_by_id(order_id)
        if order:
            ccxt_order = order.to_ccxt_object(self._ft_has["stop_price_prop"])
            self._dry_run_open_orders[order_id] = ccxt_order
            return ccxt_order
        # Gracefully handle errors with dry-run orders.
        raise InvalidOrderException(
            f"Tried to get an invalid dry-run-order (id: {order_id}). Message: {e}"
        ) from e
|
2021-06-02 09:06:32 +00:00
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
# Order handling
|
|
|
|
|
2023-03-21 18:29:27 +00:00
|
|
|
def _lev_prep(self, pair: str, leverage: float, side: BuySell, accept_fail: bool = False):
    """
    Prepare margin mode and leverage before placing an order.
    No-op for spot trading.
    :param pair: Pair to configure
    :param leverage: Leverage to set
    :param side: Order side (passed to _set_leverage via signature compatibility)
    :param accept_fail: Tolerate exchange rejections in the underlying calls
    """
    if self.trading_mode == TradingMode.SPOT:
        return
    self.set_margin_mode(pair, self.margin_mode, accept_fail)
    self._set_leverage(leverage, pair, accept_fail)
|
2021-07-25 01:30:34 +00:00
|
|
|
|
2022-02-02 04:23:05 +00:00
|
|
|
def _get_params(
    self,
    side: BuySell,
    ordertype: str,
    leverage: float,
    reduceOnly: bool,
    time_in_force: str = "GTC",
) -> dict:
    """
    Assemble the ccxt ``params`` dict for create_order.
    :param side: Order side (unused in the base implementation)
    :param ordertype: ccxt order type (e.g. "limit", "market")
    :param leverage: Leverage (unused in the base implementation)
    :param reduceOnly: Mark the order as position-reducing (futures)
    :param time_in_force: Time-in-force; "GTC" is the exchange default
    """
    params = dict(self._params)
    # Market orders ignore time-in-force; GTC is the default and needs no param.
    if ordertype != "market" and time_in_force != "GTC":
        params["timeInForce"] = time_in_force.upper()
    if reduceOnly:
        params["reduceOnly"] = True
    return params
|
|
|
|
|
2024-10-27 12:56:38 +00:00
|
|
|
def _order_needs_price(self, side: BuySell, ordertype: str) -> bool:
    """
    Decide whether a price must be sent with the order.
    Non-market orders always need one; market orders only when the exchange
    (or freqtrade's exchange config) requires it.
    """
    if ordertype != "market":
        return True
    # Some exchanges require a price for market buys to compute the cost.
    if side == "buy" and self._api.options.get("createMarketBuyOrderRequiresPrice", False):
        return True
    return self._ft_has.get("marketOrderRequiresPrice", False)
|
|
|
|
|
2022-02-02 04:23:05 +00:00
|
|
|
def create_order(
    self,
    *,
    pair: str,
    ordertype: str,
    side: BuySell,
    amount: float,
    rate: float,
    leverage: float,
    reduceOnly: bool = False,
    time_in_force: str = "GTC",
) -> CcxtOrder:
    """
    Place an order on the exchange (or simulate it in dry-run mode).
    :param pair: Pair to trade
    :param ordertype: ccxt order type (e.g. "limit", "market")
    :param side: "buy" or "sell"
    :param amount: Amount in base currency (converted to contracts for futures)
    :param rate: Price for the order (omitted where the order type doesn't need one)
    :param leverage: Leverage to apply via _lev_prep / _get_params
    :param reduceOnly: Mark the order as position-reducing (futures)
    :param time_in_force: Time-in-force, default "GTC"
    :return: ccxt order dict (contracts converted back to amount)
    :raises InsufficientFundsError: on insufficient balance
    :raises InvalidOrderException: when the exchange rejects the order
    :raises TemporaryError: on transient exchange/network failures
    :raises OperationalException: on any other ccxt error
    """
    if self._config["dry_run"]:
        dry_order = self.create_dry_run_order(
            pair, ordertype, side, amount, self.price_to_precision(pair, rate), leverage
        )
        return dry_order

    params = self._get_params(side, ordertype, leverage, reduceOnly, time_in_force)

    try:
        # Set the precision for amount and price(rate) as accepted by the exchange
        amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))
        needs_price = self._order_needs_price(side, ordertype)
        rate_for_order = self.price_to_precision(pair, rate) if needs_price else None

        # Reduce-only orders don't open exposure - no leverage/margin setup needed.
        if not reduceOnly:
            self._lev_prep(pair, leverage, side)

        order = self._api.create_order(
            pair,
            ordertype,
            side,
            amount,
            rate_for_order,
            params,
        )
        if order.get("status") is None:
            # Map empty status to open.
            order["status"] = "open"

        # Some responses omit the order type - backfill from what we sent.
        if order.get("type") is None:
            order["type"] = ordertype

        self._log_exchange_response("create_order", order)
        order = self._order_contracts_to_amount(order)
        return order

    except ccxt.InsufficientFunds as e:
        raise InsufficientFundsError(
            f"Insufficient funds to create {ordertype} {side} order on market {pair}. "
            f"Tried to {side} amount {amount} at rate {rate}."
            f"Message: {e}"
        ) from e
    except ccxt.InvalidOrder as e:
        raise InvalidOrderException(
            f"Could not create {ordertype} {side} order on market {pair}. "
            f"Tried to {side} amount {amount} at rate {rate}. "
            f"Message: {e}"
        ) from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not place {side} order due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2017-10-06 10:22:04 +00:00
|
|
|
|
2024-11-04 18:23:17 +00:00
|
|
|
def stoploss_adjust(self, stop_loss: float, order: CcxtOrder, side: str) -> bool:
    """
    Verify stop_loss against stoploss-order value (limit or price)
    Returns True if adjustment is necessary.
    :raises OperationalException: if stoploss-on-exchange is not configured
        for this exchange (`_ft_has["stoploss_on_exchange"]`).
    """
    if not self._ft_has.get("stoploss_on_exchange"):
        raise OperationalException(f"stoploss is not implemented for {self.name}.")
    price_param = self._ft_has["stop_price_prop"]
    current_stop = order.get(price_param, None)
    # Missing stop price on the existing order always requires an update.
    if current_stop is None:
        return True
    if side == "sell":
        return stop_loss > float(current_stop)
    return side == "buy" and stop_loss < float(current_stop)
|
2020-01-19 18:54:30 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def _get_stop_order_type(self, user_order_type) -> tuple[str, str]:
|
|
|
|
available_order_Types: dict[str, str] = self._ft_has["stoploss_order_types"]
|
2022-03-05 12:57:54 +00:00
|
|
|
|
2022-03-04 06:07:34 +00:00
|
|
|
if user_order_type in available_order_Types.keys():
|
|
|
|
ordertype = available_order_Types[user_order_type]
|
|
|
|
else:
|
|
|
|
# Otherwise pick only one available
|
|
|
|
ordertype = list(available_order_Types.values())[0]
|
|
|
|
user_order_type = list(available_order_Types.keys())[0]
|
|
|
|
return ordertype, user_order_type
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def _get_stop_limit_rate(self, stop_price: float, order_types: dict, side: str) -> float:
|
2022-03-04 06:10:14 +00:00
|
|
|
# Limit price threshold: As limit price should always be below stop-price
|
2024-05-12 15:02:37 +00:00
|
|
|
limit_price_pct = order_types.get("stoploss_on_exchange_limit_ratio", 0.99)
|
2022-03-04 06:10:14 +00:00
|
|
|
if side == "sell":
|
2022-03-30 18:02:56 +00:00
|
|
|
limit_rate = stop_price * limit_price_pct
|
2022-03-04 06:10:14 +00:00
|
|
|
else:
|
2022-03-30 18:02:56 +00:00
|
|
|
limit_rate = stop_price * (2 - limit_price_pct)
|
2022-03-04 06:10:14 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
bad_stop_price = (stop_price < limit_rate) if side == "sell" else (stop_price > limit_rate)
|
2022-03-04 06:10:14 +00:00
|
|
|
# Ensure rate is less than stop price
|
|
|
|
if bad_stop_price:
|
2023-03-22 18:46:07 +00:00
|
|
|
# This can for example happen if the stop / liquidation price is set to 0
|
|
|
|
# Which is possible if a market-order closes right away.
|
|
|
|
# The InvalidOrderException will bubble up to exit_positions, where it will be
|
|
|
|
# handled gracefully.
|
|
|
|
raise InvalidOrderException(
|
2023-03-13 18:40:48 +00:00
|
|
|
"In stoploss limit order, stop price should be more than limit price. "
|
|
|
|
f"Stop price: {stop_price}, Limit price: {limit_rate}, "
|
|
|
|
f"Limit Price pct: {limit_price_pct}"
|
2024-05-12 15:02:37 +00:00
|
|
|
)
|
2022-03-30 18:02:56 +00:00
|
|
|
return limit_rate
|
2022-03-04 06:10:14 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def _get_stop_params(self, side: BuySell, ordertype: str, stop_price: float) -> dict:
    """
    Assemble the ccxt ``params`` dict for a stop order, attaching the trigger
    price under the exchange-specific key.
    :param side: Order side (unused in the base implementation)
    :param ordertype: Exchange order type (unused in the base implementation)
    :param stop_price: Trigger price for the stop order
    """
    stop_params = dict(self._params)
    # Verify if stopPrice works for your exchange, else configure stop_price_param
    stop_params[self._ft_has["stop_price_param"]] = stop_price
    return stop_params
|
|
|
|
|
2022-02-02 18:45:49 +00:00
|
|
|
@retrier(retries=0)
def create_stoploss(
    self,
    pair: str,
    amount: float,
    stop_price: float,
    order_types: dict,
    side: BuySell,
    leverage: float,
) -> CcxtOrder:
    """
    creates a stoploss order.
    requires `_ft_has['stoploss_order_types']` to be set as a dict mapping limit and market
    to the corresponding exchange type.

    The precise ordertype is determined by the order_types dict or exchange default.

    The exception below should never raise, since we disallow
    starting the bot in validate_ordertypes()

    This may work with a limited number of other exchanges, but correct working
    needs to be tested individually.
    WARNING: setting `stoploss_on_exchange` to True will NOT auto-enable stoploss on exchange.
    `stoploss_adjust` must still be implemented for this to work.

    :param pair: Pair for the stoploss order
    :param amount: Amount in base currency (converted to contracts for futures)
    :param stop_price: Trigger price
    :param order_types: Order-type configuration (stoploss, limit ratio, price type)
    :param side: "buy" (short exit) or "sell" (long exit)
    :param leverage: Leverage, passed to _lev_prep
    """
    if not self._ft_has["stoploss_on_exchange"]:
        raise OperationalException(f"stoploss is not implemented for {self.name}.")

    user_order_type = order_types.get("stoploss", "market")
    ordertype, user_order_type = self._get_stop_order_type(user_order_type)
    # Round the trigger away from the position: down for short exits, up for long exits.
    round_mode = ROUND_DOWN if side == "buy" else ROUND_UP
    stop_price_norm = self.price_to_precision(pair, stop_price, rounding_mode=round_mode)
    limit_rate = None
    if user_order_type == "limit":
        limit_rate = self._get_stop_limit_rate(stop_price, order_types, side)
        limit_rate = self.price_to_precision(pair, limit_rate, rounding_mode=round_mode)

    if self._config["dry_run"]:
        dry_order = self.create_dry_run_order(
            pair,
            ordertype,
            side,
            amount,
            stop_price_norm,
            stop_loss=True,
            leverage=leverage,
        )
        return dry_order

    try:
        params = self._get_stop_params(
            side=side, ordertype=ordertype, stop_price=stop_price_norm
        )
        if self.trading_mode == TradingMode.FUTURES:
            # Stoplosses only ever close a position.
            params["reduceOnly"] = True
            # Attach the trigger-price reference (last / mark / index) if configured
            # and supported by this exchange.
            if "stoploss_price_type" in order_types and "stop_price_type_field" in self._ft_has:
                price_type = self._ft_has["stop_price_type_value_mapping"][
                    order_types.get("stoploss_price_type", PriceType.LAST)
                ]
                params[self._ft_has["stop_price_type_field"]] = price_type

        amount = self.amount_to_precision(pair, self._amount_to_contracts(pair, amount))

        self._lev_prep(pair, leverage, side, accept_fail=True)
        order = self._api.create_order(
            symbol=pair,
            type=ordertype,
            side=side,
            amount=amount,
            price=limit_rate,
            params=params,
        )
        self._log_exchange_response("create_stoploss_order", order)
        order = self._order_contracts_to_amount(order)
        logger.info(
            f"stoploss {user_order_type} order added for {pair}. "
            f"stop price: {stop_price}. limit: {limit_rate}"
        )
        return order
    except ccxt.InsufficientFunds as e:
        raise InsufficientFundsError(
            f"Insufficient funds to create {ordertype} {side} order on market {pair}. "
            f"Tried to {side} amount {amount} at rate {limit_rate} with "
            f"stop-price {stop_price_norm}. Message: {e}"
        ) from e
    except (ccxt.InvalidOrder, ccxt.BadRequest, ccxt.OperationRejected) as e:
        # Errors:
        # `Order would trigger immediately.`
        raise InvalidOrderException(
            f"Could not create {ordertype} {side} order on market {pair}. "
            f"Tried to {side} amount {amount} at rate {limit_rate} with "
            f"stop-price {stop_price_norm}. Message: {e}"
        ) from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not place stoploss order due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2018-11-26 17:46:59 +00:00
|
|
|
|
2024-11-04 06:23:25 +00:00
|
|
|
def fetch_order_emulated(self, order_id: str, pair: str, params: dict) -> CcxtOrder:
    """
    Emulated fetch_order if the exchange doesn't support fetch_order, but requires separate
    calls for open and closed orders.
    :param order_id: Order id to look up
    :param pair: Pair the order belongs to
    :param params: Additional exchange-specific parameters
    :raises RetryableOrderError: if the order is in neither the open nor closed list
    :raises InvalidOrderException: if the exchange rejects the order id
    :raises TemporaryError: on transient exchange/network failures
    """
    try:
        # Try the open-order endpoint first.
        order = self._api.fetch_open_order(order_id, pair, params=params)
        self._log_exchange_response("fetch_open_order", order)
        order = self._order_contracts_to_amount(order)
        return order
    except ccxt.OrderNotFound:
        try:
            # Not open (anymore) - look it up among closed orders.
            order = self._api.fetch_closed_order(order_id, pair, params=params)
            self._log_exchange_response("fetch_closed_order", order)
            order = self._order_contracts_to_amount(order)
            return order
        except ccxt.OrderNotFound as e:
            raise RetryableOrderError(
                f"Order not found (pair: {pair} id: {order_id}). Message: {e}"
            ) from e
    except ccxt.InvalidOrder as e:
        raise InvalidOrderException(
            f"Tried to get an invalid order (pair: {pair} id: {order_id}). Message: {e}"
        ) from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not get order due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
@retrier(retries=API_FETCH_ORDER_RETRY_COUNT)
def fetch_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> CcxtOrder:
    """
    Fetch an order from the exchange (or the dry-run cache in dry-run mode).
    Falls back to fetch_order_emulated when the exchange lacks fetchOrder support.
    :param order_id: Order id to fetch
    :param pair: Pair the order belongs to
    :param params: Additional exchange-specific parameters (optional)
    :raises RetryableOrderError: if the order was not found (retried by @retrier)
    :raises InvalidOrderException: if the exchange rejects the order id
    :raises TemporaryError: on transient exchange/network failures
    """
    if self._config["dry_run"]:
        return self.fetch_dry_run_order(order_id)
    if params is None:
        params = {}
    try:
        if not self.exchange_has("fetchOrder"):
            return self.fetch_order_emulated(order_id, pair, params)
        order = self._api.fetch_order(order_id, pair, params=params)
        self._log_exchange_response("fetch_order", order)
        order = self._order_contracts_to_amount(order)
        return order
    except ccxt.OrderNotFound as e:
        raise RetryableOrderError(
            f"Order not found (pair: {pair} id: {order_id}). Message: {e}"
        ) from e
    except ccxt.InvalidOrder as e:
        raise InvalidOrderException(
            f"Tried to get an invalid order (pair: {pair} id: {order_id}). Message: {e}"
        ) from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not get order due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2024-11-04 06:23:25 +00:00
|
|
|
def fetch_stoploss_order(
    self, order_id: str, pair: str, params: Optional[dict] = None
) -> CcxtOrder:
    # Base implementation treats stoploss orders like regular orders;
    # exchanges with a dedicated stop-order endpoint presumably override this.
    return self.fetch_order(order_id, pair, params)
|
2021-06-02 09:17:50 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def fetch_order_or_stoploss_order(
    self, order_id: str, pair: str, stoploss_order: bool = False
) -> CcxtOrder:
    """
    Simple wrapper calling either fetch_order or fetch_stoploss_order depending on
    the stoploss_order parameter
    :param order_id: OrderId to fetch order
    :param pair: Pair corresponding to order_id
    :param stoploss_order: If true, uses fetch_stoploss_order, otherwise fetch_order.
    """
    fetch_fn = self.fetch_stoploss_order if stoploss_order else self.fetch_order
    return fetch_fn(order_id, pair)
|
|
|
|
|
2024-11-04 18:23:17 +00:00
|
|
|
def check_order_canceled_empty(self, order: CcxtOrder) -> bool:
    """
    Verify if an order has been cancelled without being partially filled
    :param order: Order dict as returned from fetch_order()
    :return: True if order has been cancelled without being filled, False otherwise.
    """
    in_final_state = order.get("status") in NON_OPEN_EXCHANGE_STATES
    return in_final_state and order.get("filled") == 0.0
|
2021-06-02 09:17:50 +00:00
|
|
|
|
|
|
|
@retrier
def cancel_order(
    self, order_id: str, pair: str, params: Optional[dict] = None
) -> dict[str, Any]:
    """
    Cancel an order on the exchange.
    In dry-run mode, the cached dry-run order is marked as canceled instead.
    :param order_id: Id of the order to cancel
    :param pair: Pair the order belongs to
    :param params: Additional exchange-specific parameters (optional)
    :return: ccxt order dict of the canceled order; {} for an unknown dry-run order
    :raises InvalidOrderException: if the exchange rejects the cancel request
    :raises TemporaryError: on transient exchange/network failures
    """
    if self._config["dry_run"]:
        try:
            order = self.fetch_dry_run_order(order_id)

            order.update({"status": "canceled", "filled": 0.0, "remaining": order["amount"]})
            return order
        except InvalidOrderException:
            # Unknown dry-run order - nothing to cancel.
            return {}

    if params is None:
        params = {}
    try:
        order = self._api.cancel_order(order_id, pair, params=params)
        self._log_exchange_response("cancel_order", order)
        order = self._order_contracts_to_amount(order)
        return order
    except ccxt.InvalidOrder as e:
        raise InvalidOrderException(f"Could not cancel order. Message: {e}") from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not cancel order due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2024-04-20 07:24:51 +00:00
|
|
|
def cancel_stoploss_order(
    self, order_id: str, pair: str, params: Optional[dict] = None
) -> dict:
    # Base implementation treats stoploss orders like regular orders;
    # exchanges with a dedicated stop-order endpoint presumably override this.
    return self.cancel_order(order_id, pair, params)
|
2021-06-02 09:17:50 +00:00
|
|
|
|
2024-11-04 18:40:11 +00:00
|
|
|
def is_cancel_order_result_suitable(self, corder) -> TypeGuard[CcxtOrder]:
    """
    Check whether a cancel_order response is usable as an order dict:
    it must be a dict with non-None "fee", "status" and "amount" entries.
    :param corder: Response from cancel_order
    :return: True if the response can be consumed downstream as an order.
    """
    if not isinstance(corder, dict):
        return False

    for required_key in ("fee", "status", "amount"):
        if corder.get(required_key, None) is None:
            return False
    return True
|
2021-06-02 09:17:50 +00:00
|
|
|
|
2024-11-04 06:23:25 +00:00
|
|
|
def cancel_order_with_result(self, order_id: str, pair: str, amount: float) -> CcxtOrder:
    """
    Cancel order returning a result.
    Creates a fake result if cancel order returns a non-usable result
    and fetch_order does not work (certain exchanges don't return cancelled orders)
    :param order_id: Orderid to cancel
    :param pair: Pair corresponding to order_id
    :param amount: Amount to use for fake response
    :return: Result from either cancel_order if usable, or fetch_order
    """
    try:
        corder = self.cancel_order(order_id, pair)
        # Use the cancel response directly only when it has the fields
        # downstream code relies on (fee / status / amount).
        if self.is_cancel_order_result_suitable(corder):
            return corder
    except InvalidOrderException:
        logger.warning(f"Could not cancel order {order_id} for {pair}.")
    try:
        # Fallback: fetch the (now cancelled) order for a complete response.
        order = self.fetch_order(order_id, pair)
    except InvalidOrderException:
        logger.warning(f"Could not fetch cancelled order {order_id}.")
        # Last resort: synthesize a minimal canceled-order dict.
        order = {
            "id": order_id,
            "status": "canceled",
            "amount": amount,
            "filled": 0.0,
            "fee": {},
            "info": {},
        }

    return order
|
|
|
|
|
2024-11-04 18:23:17 +00:00
|
|
|
def cancel_stoploss_order_with_result(
    self, order_id: str, pair: str, amount: float
) -> CcxtOrder:
    """
    Cancel stoploss order returning a result.
    Creates a fake result if cancel order returns a non-usable result
    and fetch_order does not work (certain exchanges don't return cancelled orders)
    :param order_id: stoploss-order-id to cancel
    :param pair: Pair corresponding to order_id
    :param amount: Amount to use for fake response
    :return: Result from either cancel_order if usable, or fetch_order
    """
    corder = self.cancel_stoploss_order(order_id, pair)
    # Use the cancel response directly only when it has the fields
    # downstream code relies on (fee / status / amount).
    if self.is_cancel_order_result_suitable(corder):
        return corder
    try:
        # Fallback: fetch the (now cancelled) stop order for a complete response.
        order = self.fetch_stoploss_order(order_id, pair)
    except InvalidOrderException:
        logger.warning(f"Could not fetch cancelled stoploss order {order_id}.")
        # Last resort: synthesize a minimal canceled-order dict.
        order = {"id": order_id, "fee": {}, "status": "canceled", "amount": amount, "info": {}}

    return order
|
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
@retrier
def get_balances(self) -> CcxtBalances:
    """
    Fetch account balances via ccxt.
    Aggregate keys ("info", "free", "total", "used") are stripped so only
    per-currency entries remain.
    :raises TemporaryError: on transient exchange/network failures
    :raises OperationalException: on any other ccxt error
    """
    try:
        balances = self._api.fetch_balance()
        # Remove additional info from ccxt results
        for aggregate_key in ("info", "free", "total", "used"):
            balances.pop(aggregate_key, None)

        return balances
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not get balance due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2017-09-08 13:51:00 +00:00
|
|
|
|
2022-02-18 06:00:39 +00:00
|
|
|
@retrier
def fetch_positions(self, pair: Optional[str] = None) -> list[CcxtPosition]:
    """
    Fetch positions from the exchange.
    If no pair is given, all positions are returned.
    :param pair: Pair for the query
    """
    # Positions only exist for live futures trading.
    if self._config["dry_run"] or self.trading_mode != TradingMode.FUTURES:
        return []
    try:
        symbols = [pair] if pair else []
        positions: list[CcxtPosition] = self._api.fetch_positions(symbols)
        self._log_exchange_response("fetch_positions", positions)
        return positions
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not get positions due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2024-11-04 06:23:25 +00:00
|
|
|
def _fetch_orders_emulate(self, pair: str, since_ms: int) -> list[CcxtOrder]:
    """
    Emulate fetchOrders by combining the closed- and open-orders endpoints.
    Used when the exchange does not support a unified fetchOrders call.
    :param pair: Pair to query
    :param since_ms: Starting timestamp (milliseconds) for the query
    :return: Combined list of closed and open orders
    """
    combined: list[CcxtOrder] = []
    if self.exchange_has("fetchClosedOrders"):
        combined = self._api.fetch_closed_orders(pair, since=since_ms)
    if self.exchange_has("fetchOpenOrders"):
        combined.extend(self._api.fetch_open_orders(pair, since=since_ms))
    return combined
|
|
|
|
|
2023-04-25 12:33:34 +00:00
|
|
|
@retrier(retries=0)
def fetch_orders(
    self, pair: str, since: datetime, params: Optional[dict] = None
) -> list[CcxtOrder]:
    """
    Fetch all orders for a pair "since"
    :param pair: Pair for the query
    :param since: Starting time for the query
    :param params: Additional exchange-specific parameters (optional)
    :return: List of orders, with contract sizes converted to amounts
    """
    if self._config["dry_run"]:
        return []

    try:
        # Subtract 10s to also catch orders created right at the "since" boundary.
        since_ms = int((since.timestamp() - 10) * 1000)

        if self.exchange_has("fetchOrders"):
            if not params:
                params = {}
            try:
                orders: list[CcxtOrder] = self._api.fetch_orders(
                    pair, since=since_ms, params=params
                )
            except ccxt.NotSupported:
                # Some exchanges don't support fetchOrders
                # attempt to fetch open and closed orders separately
                orders = self._fetch_orders_emulate(pair, since_ms)
        else:
            orders = self._fetch_orders_emulate(pair, since_ms)
        self._log_exchange_response("fetch_orders", orders)
        orders = [self._order_contracts_to_amount(o) for o in orders]
        return orders
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        # Fixed copy-paste error: the message previously said "positions".
        raise TemporaryError(
            f"Could not fetch orders due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2022-03-26 12:53:36 +00:00
|
|
|
@retrier
def fetch_trading_fees(self) -> dict[str, Any]:
    """
    Retrieve the account's trading-fee structure from the exchange.
    Only relevant for live futures trading; can be cached, should not update often.
    :return: ccxt fetch_trading_fees result, or {} when not applicable.
    """
    # Short-circuit order matters: don't query capabilities in dry-run/spot mode.
    if (
        self._config["dry_run"]
        or self.trading_mode != TradingMode.FUTURES
        or not self.exchange_has("fetchTradingFees")
    ):
        return {}
    try:
        fees: dict[str, Any] = self._api.fetch_trading_fees()
        self._log_exchange_response("fetch_trading_fees", fees)
        return fees
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not fetch trading fees due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2022-03-18 06:08:16 +00:00
|
|
|
@retrier
def fetch_bids_asks(self, symbols: Optional[list[str]] = None, cached: bool = False) -> dict:
    """
    Fetch current best bid/ask prices for several symbols in one batched call.
    :param symbols: List of symbols to fetch
    :param cached: Allow cached result
    :return: fetch_bids_asks result
    """
    if not self.exchange_has("fetchBidsAsks"):
        return {}
    if cached:
        # Serve from the shared TTL cache when allowed - reduces API load.
        with self._cache_lock:
            tickers = self._fetch_tickers_cache.get("fetch_bids_asks")
        if tickers:
            return tickers
    try:
        tickers = self._api.fetch_bids_asks(symbols)
        with self._cache_lock:
            self._fetch_tickers_cache["fetch_bids_asks"] = tickers
        return tickers
    except ccxt.NotSupported as e:
        raise OperationalException(
            f"Exchange {self._api.name} does not support fetching bids/asks in batch. "
            f"Message: {e}"
        ) from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not load bids/asks due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
@retrier
def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
    """
    Fetch tickers in one batched fetch_tickers call.
    :param symbols: Symbols to fetch - None fetches all tickers.
    :param cached: Allow cached result
    :return: fetch_tickers result
    """
    tickers: Tickers
    if not self.exchange_has("fetchTickers"):
        return {}
    if cached:
        # Serve from the shared TTL cache when allowed - reduces API load.
        with self._cache_lock:
            tickers = self._fetch_tickers_cache.get("fetch_tickers")  # type: ignore
        if tickers:
            return tickers
    try:
        tickers = self._api.fetch_tickers(symbols)
        with self._cache_lock:
            self._fetch_tickers_cache["fetch_tickers"] = tickers
        return tickers
    except ccxt.NotSupported as e:
        raise OperationalException(
            f"Exchange {self._api.name} does not support fetching tickers in batch. "
            f"Message: {e}"
        ) from e
    except ccxt.BadSymbol as e:
        # A locally-known symbol is gone on the exchange - markets are stale.
        logger.warning(
            f"Could not load tickers due to {e.__class__.__name__}. Message: {e} ."
            "Reloading markets."
        )
        self.reload_markets(True)
        # Re-raise exception to repeat the call.
        raise TemporaryError from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not load tickers due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2017-09-08 13:51:00 +00:00
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
# Pricing info
|
|
|
|
|
2018-06-17 10:41:33 +00:00
|
|
|
@retrier
def fetch_ticker(self, pair: str) -> Ticker:
    """
    Fetch the current ticker for a single pair.
    :param pair: Pair to query - must be known and active on the exchange.
    :raises ExchangeError: if the pair is unknown or marked inactive.
    """
    try:
        market = self.markets.get(pair)
        if market is None or market.get("active", False) is False:
            raise ExchangeError(f"Pair {pair} not available")
        ticker: Ticker = self._api.fetch_ticker(pair)
        return ticker
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not load ticker due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
2018-06-17 10:41:33 +00:00
|
|
|
|
2021-06-02 09:17:50 +00:00
|
|
|
@staticmethod
|
2024-05-12 15:02:37 +00:00
|
|
|
def get_next_limit_in_list(
|
2024-10-04 04:46:45 +00:00
|
|
|
limit: int, limit_range: Optional[list[int]], range_required: bool = True
|
2024-05-12 15:02:37 +00:00
|
|
|
):
|
2021-06-02 09:17:50 +00:00
|
|
|
"""
|
|
|
|
Get next greater value in the list.
|
|
|
|
Used by fetch_l2_order_book if the api only supports a limited range
|
|
|
|
"""
|
|
|
|
if not limit_range:
|
|
|
|
return limit
|
|
|
|
|
|
|
|
result = min([x for x in limit_range if limit <= x] + [max(limit_range)])
|
|
|
|
if not range_required and limit > result:
|
|
|
|
# Range is not required - we can use None as parameter.
|
|
|
|
return None
|
|
|
|
return result
|
|
|
|
|
|
|
|
@retrier
def fetch_l2_order_book(self, pair: str, limit: int = 100) -> OrderBook:
    """
    Get L2 order book from exchange.
    Can be limited to a certain amount (if supported).
    Returns a dict in the format
    {'asks': [price, volume], 'bids': [price, volume]}
    """
    # Clamp the requested depth to what the exchange actually supports.
    api_limit = self.get_next_limit_in_list(
        limit, self._ft_has["l2_limit_range"], self._ft_has["l2_limit_range_required"]
    )
    try:
        return self._api.fetch_l2_order_book(pair, api_limit)
    except ccxt.NotSupported as e:
        raise OperationalException(
            f"Exchange {self._api.name} does not support fetching order book. Message: {e}"
        ) from e
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not get order book due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def _get_price_side(self, side: str, is_short: bool, conf_strategy: dict) -> BidAsk:
|
2024-05-12 15:02:37 +00:00
|
|
|
price_side = conf_strategy["price_side"]
|
2022-05-16 17:18:13 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
if price_side in ("same", "other"):
|
2022-05-16 17:18:13 +00:00
|
|
|
price_map = {
|
2024-05-12 15:02:37 +00:00
|
|
|
("entry", "long", "same"): "bid",
|
|
|
|
("entry", "long", "other"): "ask",
|
|
|
|
("entry", "short", "same"): "ask",
|
|
|
|
("entry", "short", "other"): "bid",
|
|
|
|
("exit", "long", "same"): "ask",
|
|
|
|
("exit", "long", "other"): "bid",
|
|
|
|
("exit", "short", "same"): "bid",
|
|
|
|
("exit", "short", "other"): "ask",
|
2022-05-16 17:18:13 +00:00
|
|
|
}
|
2024-05-12 15:02:37 +00:00
|
|
|
price_side = price_map[(side, "short" if is_short else "long", price_side)]
|
2022-05-16 17:18:13 +00:00
|
|
|
return price_side
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def get_rate(
    self,
    pair: str,
    refresh: bool,
    side: EntryExit,
    is_short: bool,
    order_book: Optional[OrderBook] = None,
    ticker: Optional[Ticker] = None,
) -> float:
    """
    Calculates bid/ask target
    bid rate - between current ask price and last price
    ask rate - either using ticker bid or first bid based on orderbook
    or remain static in any other case since it's not updating.
    :param pair: Pair to get rate for
    :param refresh: allow cached data
    :param side: "entry" or "exit"
    :param is_short: True for short trades
    :param order_book: Pre-fetched orderbook to reuse (avoids a second fetch)
    :param ticker: Pre-fetched ticker to reuse (avoids a second fetch)
    :return: float: Price
    :raises PricingError if orderbook price could not be determined.
    """
    name = side.capitalize()
    strat_name = "entry_pricing" if side == "entry" else "exit_pricing"

    # Entry and exit rates are cached independently (TTL-bound).
    cache_rate: TTLCache = self._entry_rate_cache if side == "entry" else self._exit_rate_cache
    if not refresh:
        with self._cache_lock:
            rate = cache_rate.get(pair)
        # Check if cache has been invalidated
        if rate:
            logger.debug(f"Using cached {side} rate for {pair}.")
            return rate

    conf_strategy = self._config.get(strat_name, {})

    # Translate "same"/"other" config into a concrete bid/ask side.
    price_side = self._get_price_side(side, is_short, conf_strategy)

    if conf_strategy.get("use_order_book", False):
        order_book_top = conf_strategy.get("order_book_top", 1)
        if order_book is None:
            order_book = self.fetch_l2_order_book(pair, order_book_top)
        rate = self._get_rate_from_ob(pair, side, order_book, name, price_side, order_book_top)
    else:
        logger.debug(f"Using Last {price_side.capitalize()} / Last Price")
        if ticker is None:
            ticker = self.fetch_ticker(pair)
        rate = self._get_rate_from_ticker(side, ticker, conf_strategy, price_side)

    if rate is None:
        raise PricingError(f"{name}-Rate for {pair} was empty.")
    with self._cache_lock:
        cache_rate[pair] = rate

    return rate
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def _get_rate_from_ticker(
|
2024-10-04 04:46:45 +00:00
|
|
|
self, side: EntryExit, ticker: Ticker, conf_strategy: dict[str, Any], price_side: BidAsk
|
2024-05-12 15:02:37 +00:00
|
|
|
) -> Optional[float]:
|
2023-05-29 15:27:11 +00:00
|
|
|
"""
|
|
|
|
Get rate from ticker.
|
|
|
|
"""
|
|
|
|
ticker_rate = ticker[price_side]
|
2024-05-12 15:02:37 +00:00
|
|
|
if ticker["last"] and ticker_rate:
|
|
|
|
if side == "entry" and ticker_rate > ticker["last"]:
|
|
|
|
balance = conf_strategy.get("price_last_balance", 0.0)
|
|
|
|
ticker_rate = ticker_rate + balance * (ticker["last"] - ticker_rate)
|
|
|
|
elif side == "exit" and ticker_rate < ticker["last"]:
|
|
|
|
balance = conf_strategy.get("price_last_balance", 0.0)
|
|
|
|
ticker_rate = ticker_rate - balance * (ticker_rate - ticker["last"])
|
2023-05-29 15:27:11 +00:00
|
|
|
rate = ticker_rate
|
|
|
|
return rate
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def _get_rate_from_ob(
    self,
    pair: str,
    side: EntryExit,
    order_book: OrderBook,
    name: str,
    price_side: BidAsk,
    order_book_top: int,
) -> float:
    """
    Pull the requested price level out of an already-fetched orderbook.
    :param order_book_top: 1-based depth of the level to use (top 1 = index 0).
    :raises: PricingError if rate could not be determined.
    """
    logger.debug("order_book %s", order_book)
    book_side: OBLiteral = "bids" if price_side == "bid" else "asks"
    try:
        rate = order_book[book_side][order_book_top - 1][0]
    except (IndexError, KeyError) as e:
        # Book is too shallow or malformed - surface this as a pricing problem.
        logger.warning(
            f"{pair} - {name} Price at location {order_book_top} from orderbook "
            f"could not be determined. Orderbook: {order_book}"
        )
        raise PricingError from e
    logger.debug(
        f"{pair} - {name} price from orderbook {price_side.capitalize()}"
        f"side - top {order_book_top} order book {side} rate {rate:.8f}"
    )
    return rate
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def get_rates(self, pair: str, refresh: bool, is_short: bool) -> tuple[float, float]:
    """
    Determine entry and exit rates for a pair in one go, sharing the
    orderbook / ticker fetch between both sides where possible.
    :param pair: Pair to get rates for
    :param refresh: When False, cached rates may be returned.
    :param is_short: True for short trades
    :return: Tuple of (entry_rate, exit_rate)
    """
    entry_rate = None
    exit_rate = None
    if not refresh:
        with self._cache_lock:
            entry_rate = self._entry_rate_cache.get(pair)
            exit_rate = self._exit_rate_cache.get(pair)
        if entry_rate:
            logger.debug(f"Using cached buy rate for {pair}.")
        if exit_rate:
            logger.debug(f"Using cached sell rate for {pair}.")

    entry_pricing = self._config.get("entry_pricing", {})
    exit_pricing = self._config.get("exit_pricing", {})
    order_book = ticker = None
    if not entry_rate and entry_pricing.get("use_order_book", False):
        # Fetch the deeper of the two configured depths so the same book
        # can serve both the entry and the exit calculation below.
        order_book_top = max(
            entry_pricing.get("order_book_top", 1), exit_pricing.get("order_book_top", 1)
        )
        order_book = self.fetch_l2_order_book(pair, order_book_top)
        entry_rate = self.get_rate(pair, refresh, "entry", is_short, order_book=order_book)
    elif not entry_rate:
        ticker = self.fetch_ticker(pair)
        entry_rate = self.get_rate(pair, refresh, "entry", is_short, ticker=ticker)
    if not exit_rate:
        # Reuse whichever source (book or ticker) was fetched for the entry side.
        exit_rate = self.get_rate(
            pair, refresh, "exit", is_short, order_book=order_book, ticker=ticker
        )
    return entry_rate, exit_rate
|
|
|
|
|
2021-06-02 09:20:26 +00:00
|
|
|
# Fee handling
|
|
|
|
|
|
|
|
@retrier
def get_trades_for_order(
    self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
) -> list:
    """
    Fetch Orders using the "fetch_my_trades" endpoint and filter them by order-id.
    The "since" argument passed in is coming from the database and is in UTC,
    as timezone-native datetime object.
    From the python documentation:
        > Naive datetime instances are assumed to represent local time
    Therefore, calling "since.timestamp()" will get the UTC timestamp, after applying the
    transformation from local timezone to UTC.
    This works for timezones UTC+ since then the result will contain trades from a few hours
    instead of from the last 5 seconds, however fails for UTC- timezones,
    since we're then asking for trades with a "since" argument in the future.

    :param order_id order_id: Order-id as given when creating the order
    :param pair: Pair the order is for
    :param since: datetime object of the order creation time. Assumes object is in UTC.
    :param params: Additional exchange-specific parameters for fetch_my_trades
    """
    if self._config["dry_run"]:
        return []
    if not self.exchange_has("fetchMyTrades"):
        return []
    try:
        # Allow 5s offset to catch slight time offsets (discovered in #1185)
        # since needs to be int in milliseconds
        _params = params if params else {}
        my_trades = self._api.fetch_my_trades(
            pair,
            int((since.replace(tzinfo=timezone.utc).timestamp() - 5) * 1000),
            params=_params,
        )
        # The endpoint returns all account trades for the pair - keep this order's only.
        matched_trades = [trade for trade in my_trades if trade["order"] == order_id]

        self._log_exchange_response("get_trades_for_order", matched_trades)

        # Convert contract-denominated amounts to asset amounts (futures).
        matched_trades = self._trades_contracts_to_amount(matched_trades)

        return matched_trades
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not get trades due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
2024-11-04 18:23:17 +00:00
|
|
|
def get_order_id_conditional(self, order: "CcxtOrder") -> str:
    """
    Return the exchange order-id for the given order.
    Exists as a hook: subclasses can override it for conditional/stop
    orders whose id lives in a different field.
    :param order: Order dict as returned by ccxt
    :return: Order id
    """
    return order["id"]
|
2021-06-02 09:20:26 +00:00
|
|
|
|
|
|
|
@retrier
def get_fee(
    self,
    symbol: str,
    order_type: str = "",
    side: str = "",
    amount: float = 1,
    price: float = 1,
    taker_or_maker: MakerTaker = "maker",
) -> float:
    """
    Retrieve fee from exchange
    :param symbol: Pair
    :param order_type: Type of order (market, limit, ...)
    :param side: Side of order (buy, sell)
    :param amount: Amount of order
    :param price: Price of order
    :param taker_or_maker: 'maker' or 'taker' (ignored if "type" is provided)
    :return: Fee rate as a fraction (e.g. 0.001 for 0.1%)
    """
    # Market orders always cross the book, so they always pay taker fees.
    # (simplified from `if order_type and order_type == "market"` - the
    # truthiness guard was redundant.)
    if order_type == "market":
        taker_or_maker = "taker"
    try:
        if self._config["dry_run"] and self._config.get("fee", None) is not None:
            return self._config["fee"]
        # validate that markets are loaded before trying to get fee
        if self._api.markets is None or len(self._api.markets) == 0:
            self._api.load_markets(params={})

        return self._api.calculate_fee(
            symbol=symbol,
            type=order_type,
            side=side,
            amount=amount,
            price=price,
            takerOrMaker=taker_or_maker,
        )["rate"]
    except ccxt.DDoSProtection as e:
        raise DDosProtection(e) from e
    except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
        raise TemporaryError(
            f"Could not get fee info due to {e.__class__.__name__}. Message: {e}"
        ) from e
    except ccxt.BaseError as e:
        raise OperationalException(e) from e
|
|
|
|
|
|
|
|
@staticmethod
|
2024-11-04 18:23:17 +00:00
|
|
|
def order_has_fee(order: CcxtOrder) -> bool:
|
2021-06-02 09:20:26 +00:00
|
|
|
"""
|
|
|
|
Verifies if the passed in order dict has the needed keys to extract fees,
|
|
|
|
and that these keys (currency, cost) are not empty.
|
|
|
|
:param order: Order or trade (one trade) dict
|
|
|
|
:return: True if the fee substructure contains currency and cost, false otherwise
|
|
|
|
"""
|
|
|
|
if not isinstance(order, dict):
|
|
|
|
return False
|
2024-05-12 15:02:37 +00:00
|
|
|
return (
|
|
|
|
"fee" in order
|
|
|
|
and order["fee"] is not None
|
|
|
|
and (order["fee"].keys() >= {"currency", "cost"})
|
|
|
|
and order["fee"]["currency"] is not None
|
|
|
|
and order["fee"]["cost"] is not None
|
|
|
|
)
|
2021-06-02 09:20:26 +00:00
|
|
|
|
2022-07-06 17:15:55 +00:00
|
|
|
def calculate_fee_rate(
    self, fee: dict, symbol: str, cost: float, amount: float
) -> Optional[float]:
    """
    Calculate fee rate if it's not given by the exchange.
    :param fee: ccxt Fee dict - must contain cost / currency / rate
    :param symbol: Symbol of the order
    :param cost: Total cost of the order
    :param amount: Amount of the order
    :return: Fee rate as a fraction of the order cost, or None if undeterminable.
    """
    # Prefer the rate the exchange reported directly.
    if fee.get("rate") is not None:
        return fee.get("rate")
    fee_curr = fee.get("currency")
    if fee_curr is None:
        # Without a fee currency the cost cannot be related to the order.
        return None
    fee_cost = float(fee["cost"])

    # Calculate fee based on order details
    if fee_curr == self.get_pair_base_currency(symbol):
        # Base currency - divide by amount
        return round(fee_cost / amount, 8)
    elif fee_curr == self.get_pair_quote_currency(symbol):
        # Quote currency - divide by cost
        return round(fee_cost / cost, 8) if cost else None
    else:
        # If Fee currency is a different currency
        if not cost:
            # If cost is None or 0.0 -> falsy, return None
            return None
        try:
            # Convert the fee currency into stake currency via its own ticker.
            comb = self.get_valid_pair_combination(fee_curr, self._config["stake_currency"])
            tick = self.fetch_ticker(comb)

            # Use the "last" price, falling back to "ask" when last is unset.
            fee_to_quote_rate = safe_value_fallback2(tick, tick, "last", "ask")
        except (ValueError, ExchangeError):
            # No usable conversion pair - fall back to a configured rate, if any.
            fee_to_quote_rate = self._config["exchange"].get("unknown_fee_rate", None)
        if not fee_to_quote_rate:
            return None
        return round((fee_cost * fee_to_quote_rate) / cost, 8)
|
2021-06-02 09:20:26 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def extract_cost_curr_rate(
    self, fee: dict[str, Any], symbol: str, cost: float, amount: float
) -> tuple[float, str, Optional[float]]:
    """
    Extract tuple of cost, currency, rate.
    Requires order_has_fee to run first!
    :param fee: ccxt Fee dict - must contain cost / currency / rate
    :param symbol: Symbol of the order
    :param cost: Total cost of the order
    :param amount: Amount of the order
    :return: Tuple with cost, currency, rate of the given fee dict
    """
    fee_cost = float(fee["cost"])
    fee_currency = fee["currency"]
    # Rate may be None when it cannot be derived from the order details.
    fee_rate = self.calculate_fee_rate(fee, symbol, cost, amount)
    return fee_cost, fee_currency, fee_rate
|
2021-06-02 09:20:26 +00:00
|
|
|
|
|
|
|
# Historic data
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def get_historic_ohlcv(
    self,
    pair: str,
    timeframe: str,
    since_ms: int,
    candle_type: CandleType,
    is_new_pair: bool = False,
    until_ms: Optional[int] = None,
) -> DataFrame:
    """
    Get candle history using asyncio and returns the list of candles.
    Handles all async work for this.
    Async over one pair, assuming we get `self.ohlcv_candle_limit()` candles per call.
    :param pair: Pair to download
    :param timeframe: Timeframe to get data for
    :param since_ms: Timestamp in milliseconds to get history from
    :param until_ms: Timestamp in milliseconds to get history up to
    :param candle_type: '', mark, index, premiumIndex, or funding_rate
    :param is_new_pair: used by binance subclass to allow "fast" new pair downloading
    :return: Dataframe with candle (OHLCV) data
    """
    # Drive the async downloader to completion on the exchange's event loop.
    pair, _, _, data, _ = self.loop.run_until_complete(
        self._async_get_historic_ohlcv(
            pair=pair,
            timeframe=timeframe,
            since_ms=since_ms,
            until_ms=until_ms,
            is_new_pair=is_new_pair,
            candle_type=candle_type,
        )
    )
    logger.info(f"Downloaded data for {pair} with length {len(data)}.")
    # Don't fill gaps here; drop the still-open candle so only final candles remain.
    return ohlcv_to_dataframe(data, timeframe, pair, fill_missing=False, drop_incomplete=True)
|
2018-08-10 09:08:28 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
async def _async_get_historic_ohlcv(
    self,
    pair: str,
    timeframe: str,
    since_ms: int,
    candle_type: CandleType,
    is_new_pair: bool = False,
    raise_: bool = False,
    until_ms: Optional[int] = None,
) -> OHLCVResponse:
    """
    Download historic ohlcv
    :param is_new_pair: used by binance subclass to allow "fast" new pair downloading
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    :param raise_: When True, propagate the first failed sub-request instead of
        only logging it.
    :param until_ms: Timestamp in milliseconds to get history up to (default: now)
    """

    one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(
        timeframe, candle_type, since_ms
    )
    logger.debug(
        "one_call: %s msecs (%s)",
        one_call,
        dt_humanize_delta(dt_now() - timedelta(milliseconds=one_call)),
    )
    # One coroutine per candle-limit-sized window of the requested range.
    input_coroutines = [
        self._async_get_candle_history(pair, timeframe, candle_type, since)
        for since in range(since_ms, until_ms or dt_ts(), one_call)
    ]

    data: list = []
    # Chunk requests into batches of 100 to avoid overwhelming ccxt Throttling
    for input_coro in chunks(input_coroutines, 100):
        results = await asyncio.gather(*input_coro, return_exceptions=True)
        for res in results:
            if isinstance(res, BaseException):
                logger.warning(f"Async code raised an exception: {repr(res)}")
                if raise_:
                    # Raise the gathered exception explicitly - a bare `raise`
                    # fails here ("no active exception to re-raise") because
                    # gather() returned the exception rather than raising it.
                    raise res
                continue
            else:
                # Deconstruct tuple if it's not an exception
                p, _, c, new_data, _ = res
                if p == pair and c == candle_type:
                    data.extend(new_data)
    # Sort data again after extending the result - above calls return in "async order"
    data = sorted(data, key=lambda x: x[0])
    return pair, timeframe, candle_type, data, self._ohlcv_partial_candle
|
2018-08-10 09:08:28 +00:00
|
|
|
|
2023-01-05 10:33:47 +00:00
|
|
|
    def _build_coroutine(
        self,
        pair: str,
        timeframe: str,
        candle_type: CandleType,
        since_ms: Optional[int],
        cache: bool,
    ) -> Coroutine[Any, Any, OHLCVResponse]:
        """
        Build one download coroutine for a (pair, timeframe, candle_type) combination.
        Depending on cache/websocket state this returns either a websocket-backed result,
        a multi-call historic download, or a single "regular" refresh call.
        :param pair: Pair to download
        :param timeframe: Timeframe to download
        :param candle_type: CandleType of the download
        :param since_ms: Download start as ms timestamp - None for a "regular" refresh
        :param cache: If True, the in-memory candle cache (_klines) may be used and updated
        :return: Coroutine resolving to an OHLCVResponse tuple
        """
        # More than one call is needed when the strategy requires more candles
        # than a single exchange request can return.
        not_all_data = cache and self.required_candle_call_count > 1
        if cache and candle_type in (CandleType.SPOT, CandleType.FUTURES):
            if self._has_watch_ohlcv and self._exchange_ws:
                # Subscribe to websocket
                self._exchange_ws.schedule_ohlcv(pair, timeframe, candle_type)

        if cache and (pair, timeframe, candle_type) in self._klines:
            candle_limit = self.ohlcv_candle_limit(timeframe, candle_type)
            # Oldest acceptable last-refresh time for the cache to still be usable
            # (candle_limit minus 5 candles of slack).
            min_date = int(date_minus_candles(timeframe, candle_limit - 5).timestamp())

            if self._exchange_ws:
                # Open time (ms) of the current and previous candle.
                candle_date = int(timeframe_to_prev_date(timeframe).timestamp() * 1000)
                prev_candle_date = int(date_minus_candles(timeframe, 1).timestamp() * 1000)
                candles = self._exchange_ws.ccxt_object.ohlcvs.get(pair, {}).get(timeframe)
                # Midpoint between previous and current candle open times.
                half_candle = int(candle_date - (candle_date - prev_candle_date) * 0.5)
                last_refresh_time = int(
                    self._exchange_ws.klines_last_refresh.get((pair, timeframe, candle_type), 0)
                )

                if (
                    candles
                    and candles[-1][0] >= prev_candle_date
                    and last_refresh_time >= half_candle
                ):
                    # Usable result, candle contains the previous candle.
                    # Also, we check if the last refresh time is no more than half the candle ago.
                    logger.debug(f"reuse watch result for {pair}, {timeframe}, {last_refresh_time}")

                    return self._exchange_ws.get_ohlcv(pair, timeframe, candle_type, candle_date)
                logger.info(
                    f"Failed to reuse watch {pair}, {timeframe}, {candle_date < last_refresh_time},"
                    f" {candle_date}, {last_refresh_time}, "
                    f"{format_ms_time(candle_date)}, {format_ms_time(last_refresh_time)} "
                )

            # Check if 1 call can get us updated candles without hole in the data.
            if min_date < self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0):
                # Cache can be used - do one-off call.
                not_all_data = False
            else:
                # Time jump detected, evict cache
                logger.info(
                    f"Time jump detected. Evicting cache for {pair}, {timeframe}, {candle_type}"
                )
                del self._klines[(pair, timeframe, candle_type)]

        if not since_ms and (self._ft_has["ohlcv_require_since"] or not_all_data):
            # Multiple calls for one pair - to get more history
            one_call = timeframe_to_msecs(timeframe) * self.ohlcv_candle_limit(
                timeframe, candle_type, since_ms
            )
            move_to = one_call * self.required_candle_call_count
            now = timeframe_to_next_date(timeframe)
            since_ms = dt_ts(now - timedelta(seconds=move_to // 1000))

        if since_ms:
            # Historic (multi-call) download.
            return self._async_get_historic_ohlcv(
                pair, timeframe, since_ms=since_ms, raise_=True, candle_type=candle_type
            )
        else:
            # One call ... "regular" refresh
            return self._async_get_candle_history(
                pair, timeframe, since_ms=since_ms, candle_type=candle_type
            )
|
2022-03-11 16:59:57 +00:00
|
|
|
|
2022-10-03 18:00:56 +00:00
|
|
|
def _build_ohlcv_dl_jobs(
|
2024-05-12 15:02:37 +00:00
|
|
|
self, pair_list: ListPairsWithTimeframes, since_ms: Optional[int], cache: bool
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> tuple[list[Coroutine], list[PairWithTimeframe]]:
|
2018-08-16 10:15:09 +00:00
|
|
|
"""
|
2022-10-03 18:00:56 +00:00
|
|
|
Build Coroutines to execute as part of refresh_latest_ohlcv
|
2018-08-16 10:15:09 +00:00
|
|
|
"""
|
2024-10-04 04:46:45 +00:00
|
|
|
input_coroutines: list[Coroutine[Any, Any, OHLCVResponse]] = []
|
2021-08-17 04:44:20 +00:00
|
|
|
cached_pairs = []
|
2021-11-21 07:43:05 +00:00
|
|
|
for pair, timeframe, candle_type in set(pair_list):
|
2024-05-12 15:02:37 +00:00
|
|
|
if timeframe not in self.timeframes and candle_type in (
|
|
|
|
CandleType.SPOT,
|
|
|
|
CandleType.FUTURES,
|
|
|
|
):
|
2022-03-11 16:59:57 +00:00
|
|
|
logger.warning(
|
|
|
|
f"Cannot download ({pair}, {timeframe}) combination as this timeframe is "
|
|
|
|
f"not available on {self.name}. Available timeframes are "
|
2024-05-12 15:02:37 +00:00
|
|
|
f"{', '.join(self.timeframes)}."
|
|
|
|
)
|
2022-03-11 16:59:57 +00:00
|
|
|
continue
|
2022-10-03 18:00:56 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
if (
|
|
|
|
(pair, timeframe, candle_type) not in self._klines
|
|
|
|
or not cache
|
|
|
|
or self._now_is_time_to_refresh(pair, timeframe, candle_type)
|
|
|
|
):
|
2022-10-03 18:49:54 +00:00
|
|
|
input_coroutines.append(
|
2024-05-12 15:02:37 +00:00
|
|
|
self._build_coroutine(pair, timeframe, candle_type, since_ms, cache)
|
|
|
|
)
|
2022-03-13 14:38:12 +00:00
|
|
|
|
2018-12-11 06:11:43 +00:00
|
|
|
else:
|
2019-05-25 19:42:17 +00:00
|
|
|
logger.debug(
|
2022-01-28 14:52:12 +00:00
|
|
|
f"Using cached candle (OHLCV) data for {pair}, {timeframe}, {candle_type} ..."
|
2019-05-25 19:42:17 +00:00
|
|
|
)
|
2021-11-21 07:43:05 +00:00
|
|
|
cached_pairs.append((pair, timeframe, candle_type))
|
2018-12-11 06:11:43 +00:00
|
|
|
|
2022-10-03 18:00:56 +00:00
|
|
|
return input_coroutines, cached_pairs
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def _process_ohlcv_df(
|
|
|
|
self,
|
|
|
|
pair: str,
|
|
|
|
timeframe: str,
|
|
|
|
c_type: CandleType,
|
2024-10-04 04:46:45 +00:00
|
|
|
ticks: list[list],
|
2024-05-12 15:02:37 +00:00
|
|
|
cache: bool,
|
|
|
|
drop_incomplete: bool,
|
|
|
|
) -> DataFrame:
|
2022-10-03 18:49:54 +00:00
|
|
|
# keeping last candle time as last refreshed time of the pair
|
2022-10-06 14:56:38 +00:00
|
|
|
if ticks and cache:
|
2023-02-24 17:15:27 +00:00
|
|
|
idx = -2 if drop_incomplete and len(ticks) > 1 else -1
|
|
|
|
self._pairs_last_refresh_time[(pair, timeframe, c_type)] = ticks[idx][0] // 1000
|
2022-10-03 18:49:54 +00:00
|
|
|
# keeping parsed dataframe in cache
|
2024-05-12 15:02:37 +00:00
|
|
|
ohlcv_df = ohlcv_to_dataframe(
|
|
|
|
ticks, timeframe, pair=pair, fill_missing=True, drop_incomplete=drop_incomplete
|
|
|
|
)
|
2022-10-03 18:49:54 +00:00
|
|
|
if cache:
|
|
|
|
if (pair, timeframe, c_type) in self._klines:
|
|
|
|
old = self._klines[(pair, timeframe, c_type)]
|
|
|
|
# Reassign so we return the updated, combined df
|
2024-05-12 15:02:37 +00:00
|
|
|
ohlcv_df = clean_ohlcv_dataframe(
|
|
|
|
concat([old, ohlcv_df], axis=0),
|
|
|
|
timeframe,
|
|
|
|
pair,
|
|
|
|
fill_missing=True,
|
|
|
|
drop_incomplete=False,
|
|
|
|
)
|
|
|
|
candle_limit = self.ohlcv_candle_limit(timeframe, self._config["candle_type_def"])
|
2022-10-04 18:48:04 +00:00
|
|
|
# Age out old candles
|
|
|
|
ohlcv_df = ohlcv_df.tail(candle_limit + self._startup_candle_count)
|
2022-10-19 09:57:18 +00:00
|
|
|
ohlcv_df = ohlcv_df.reset_index(drop=True)
|
2022-10-03 18:49:54 +00:00
|
|
|
self._klines[(pair, timeframe, c_type)] = ohlcv_df
|
|
|
|
else:
|
|
|
|
self._klines[(pair, timeframe, c_type)] = ohlcv_df
|
|
|
|
return ohlcv_df
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def refresh_latest_ohlcv(
|
|
|
|
self,
|
|
|
|
pair_list: ListPairsWithTimeframes,
|
|
|
|
*,
|
|
|
|
since_ms: Optional[int] = None,
|
|
|
|
cache: bool = True,
|
|
|
|
drop_incomplete: Optional[bool] = None,
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> dict[PairWithTimeframe, DataFrame]:
|
2022-10-03 18:00:56 +00:00
|
|
|
"""
|
|
|
|
Refresh in-memory OHLCV asynchronously and set `_klines` with the result
|
|
|
|
Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
|
|
|
|
Only used in the dataprovider.refresh() method.
|
|
|
|
:param pair_list: List of 2 element tuples containing pair, interval to refresh
|
|
|
|
:param since_ms: time since when to download, in milliseconds
|
2024-04-18 20:51:25 +00:00
|
|
|
:param cache: Assign result to _klines. Useful for one-off downloads like for pairlists
|
2022-10-03 18:00:56 +00:00
|
|
|
:param drop_incomplete: Control candle dropping.
|
|
|
|
Specifying None defaults to _ohlcv_partial_candle
|
|
|
|
:return: Dict of [{(pair, timeframe): Dataframe}]
|
|
|
|
"""
|
|
|
|
logger.debug("Refreshing candle (OHLCV) data for %d pairs", len(pair_list))
|
|
|
|
|
|
|
|
# Gather coroutines to run
|
2024-08-12 17:44:35 +00:00
|
|
|
ohlcv_dl_jobs, cached_pairs = self._build_ohlcv_dl_jobs(pair_list, since_ms, cache)
|
2022-10-03 18:00:56 +00:00
|
|
|
|
2020-12-15 07:22:45 +00:00
|
|
|
results_df = {}
|
2024-04-18 20:51:25 +00:00
|
|
|
# Chunk requests into batches of 100 to avoid overwhelming ccxt Throttling
|
2024-08-12 17:44:35 +00:00
|
|
|
for dl_jobs_batch in chunks(ohlcv_dl_jobs, 100):
|
2024-05-12 15:02:37 +00:00
|
|
|
|
2024-08-12 17:44:35 +00:00
|
|
|
async def gather_coroutines(coro):
|
2024-05-05 17:51:52 +00:00
|
|
|
return await asyncio.gather(*coro, return_exceptions=True)
|
2021-12-31 15:34:15 +00:00
|
|
|
|
2022-04-28 04:29:14 +00:00
|
|
|
with self._loop_lock:
|
2024-08-12 17:44:35 +00:00
|
|
|
results = self.loop.run_until_complete(gather_coroutines(dl_jobs_batch))
|
2021-11-30 06:02:49 +00:00
|
|
|
|
|
|
|
for res in results:
|
|
|
|
if isinstance(res, Exception):
|
|
|
|
logger.warning(f"Async code raised an exception: {repr(res)}")
|
|
|
|
continue
|
2023-01-05 21:31:32 +00:00
|
|
|
# Deconstruct tuple (has 5 elements)
|
|
|
|
pair, timeframe, c_type, ticks, drop_hint = res
|
2023-02-24 13:34:41 +00:00
|
|
|
drop_incomplete_ = drop_hint if drop_incomplete is None else drop_incomplete
|
2022-10-03 18:49:54 +00:00
|
|
|
ohlcv_df = self._process_ohlcv_df(
|
2024-05-12 15:02:37 +00:00
|
|
|
pair, timeframe, c_type, ticks, cache, drop_incomplete_
|
|
|
|
)
|
2022-10-03 18:49:54 +00:00
|
|
|
|
2021-12-02 06:09:37 +00:00
|
|
|
results_df[(pair, timeframe, c_type)] = ohlcv_df
|
2022-10-03 18:49:54 +00:00
|
|
|
|
2021-08-17 04:44:20 +00:00
|
|
|
# Return cached klines
|
2021-11-21 07:43:05 +00:00
|
|
|
for pair, timeframe, c_type in cached_pairs:
|
|
|
|
results_df[(pair, timeframe, c_type)] = self.klines(
|
2024-05-12 15:02:37 +00:00
|
|
|
(pair, timeframe, c_type), copy=False
|
2021-11-21 07:43:05 +00:00
|
|
|
)
|
2021-08-17 04:44:20 +00:00
|
|
|
|
2020-12-15 07:22:45 +00:00
|
|
|
return results_df
|
2018-07-31 10:47:32 +00:00
|
|
|
|
2024-02-17 15:17:32 +00:00
|
|
|
def refresh_ohlcv_with_cache(
|
2024-10-04 04:46:45 +00:00
|
|
|
self, pairs: list[PairWithTimeframe], since_ms: int
|
|
|
|
) -> dict[PairWithTimeframe, DataFrame]:
|
2024-02-17 15:17:32 +00:00
|
|
|
"""
|
|
|
|
Refresh ohlcv data for all pairs in needed_pairs if necessary.
|
|
|
|
Caches data with expiring per timeframe.
|
|
|
|
Should only be used for pairlists which need "on time" expirarion, and no longer cache.
|
|
|
|
"""
|
|
|
|
|
2024-02-22 04:55:05 +00:00
|
|
|
timeframes = {p[1] for p in pairs}
|
2024-02-17 15:17:32 +00:00
|
|
|
for timeframe in timeframes:
|
2024-02-22 05:38:28 +00:00
|
|
|
if (timeframe, since_ms) not in self._expiring_candle_cache:
|
2024-02-17 15:17:32 +00:00
|
|
|
timeframe_in_sec = timeframe_to_seconds(timeframe)
|
|
|
|
# Initialise cache
|
2024-02-22 05:38:28 +00:00
|
|
|
self._expiring_candle_cache[(timeframe, since_ms)] = PeriodicCache(
|
2024-05-12 15:02:37 +00:00
|
|
|
ttl=timeframe_in_sec, maxsize=1000
|
|
|
|
)
|
2024-02-17 15:17:32 +00:00
|
|
|
|
|
|
|
# Get candles from cache
|
|
|
|
candles = {
|
2024-05-12 15:02:37 +00:00
|
|
|
c: self._expiring_candle_cache[(c[1], since_ms)].get(c, None)
|
|
|
|
for c in pairs
|
2024-02-22 05:38:28 +00:00
|
|
|
if c in self._expiring_candle_cache[(c[1], since_ms)]
|
2024-02-17 15:17:32 +00:00
|
|
|
}
|
|
|
|
pairs_to_download = [p for p in pairs if p not in candles]
|
|
|
|
if pairs_to_download:
|
2024-05-12 15:02:37 +00:00
|
|
|
candles = self.refresh_latest_ohlcv(pairs_to_download, since_ms=since_ms, cache=False)
|
2024-02-17 15:17:32 +00:00
|
|
|
for c, val in candles.items():
|
2024-02-22 05:38:28 +00:00
|
|
|
self._expiring_candle_cache[(c[1], since_ms)][c] = val
|
2024-02-17 15:17:32 +00:00
|
|
|
return candles
|
|
|
|
|
2021-12-03 13:11:24 +00:00
|
|
|
def _now_is_time_to_refresh(self, pair: str, timeframe: str, candle_type: CandleType) -> bool:
|
2020-03-08 10:35:31 +00:00
|
|
|
# Timeframe in seconds
|
2019-11-02 19:25:18 +00:00
|
|
|
interval_in_sec = timeframe_to_seconds(timeframe)
|
2022-10-29 17:45:46 +00:00
|
|
|
plr = self._pairs_last_refresh_time.get((pair, timeframe, candle_type), 0) + interval_in_sec
|
2023-02-24 17:15:27 +00:00
|
|
|
# current,active candle open date
|
|
|
|
now = int(timeframe_to_prev_date(timeframe).timestamp())
|
|
|
|
return plr < now
|
2019-02-20 22:20:24 +00:00
|
|
|
|
2018-08-18 19:05:38 +00:00
|
|
|
    @retrier_async
    async def _async_get_candle_history(
        self,
        pair: str,
        timeframe: str,
        candle_type: CandleType,
        since_ms: Optional[int] = None,
    ) -> OHLCVResponse:
        """
        Asynchronously get candle history data using fetch_ohlcv
        :param pair: Pair to download
        :param timeframe: Timeframe to download
        :param candle_type: '', mark, index, premiumIndex, or funding_rate
        :param since_ms: Download start as ms timestamp - None lets the exchange decide
        :raises OperationalException: unsupported exchange feature or unrecoverable ccxt error
        :raises DDosProtection, TemporaryError: retryable errors (handled by @retrier_async)
        returns tuple: (pair, timeframe, ohlcv_list)
        """
        try:
            # Fetch OHLCV asynchronously
            s = "(" + dt_from_ts(since_ms).isoformat() + ") " if since_ms is not None else ""
            logger.debug(
                "Fetching pair %s, %s, interval %s, since %s %s...",
                pair,
                candle_type,
                timeframe,
                since_ms,
                s,
            )
            params = deepcopy(self._ft_has.get("ohlcv_params", {}))
            candle_limit = self.ohlcv_candle_limit(
                timeframe, candle_type=candle_type, since_ms=since_ms
            )

            if candle_type and candle_type != CandleType.SPOT:
                # Non-spot candle types are requested via ccxt's "price" param.
                params.update({"price": candle_type.value})
            if candle_type != CandleType.FUNDING_RATE:
                data = await self._api_async.fetch_ohlcv(
                    pair, timeframe=timeframe, since=since_ms, limit=candle_limit, params=params
                )
            else:
                # Funding rate
                data = await self._fetch_funding_rate_history(
                    pair=pair,
                    timeframe=timeframe,
                    limit=candle_limit,
                    since_ms=since_ms,
                )
            # Some exchanges sort OHLCV in ASC order and others in DESC.
            # Ex: Bittrex returns the list of OHLCV in ASC order (oldest first, newest last)
            # while GDAX returns the list of OHLCV in DESC order (newest first, oldest last)
            # Only sort if necessary to save computing time
            try:
                if data and data[0][0] > data[-1][0]:
                    data = sorted(data, key=lambda x: x[0])
            except IndexError:
                # Malformed response row - return empty data rather than failing the batch.
                logger.exception("Error loading %s. Result was %s.", pair, data)
                return pair, timeframe, candle_type, [], self._ohlcv_partial_candle
            logger.debug("Done fetching pair %s, %s interval %s...", pair, candle_type, timeframe)
            return pair, timeframe, candle_type, data, self._ohlcv_partial_candle

        except ccxt.NotSupported as e:
            raise OperationalException(
                f"Exchange {self._api.name} does not support fetching historical "
                f"candle (OHLCV) data. Message: {e}"
            ) from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f"Could not fetch historical candle (OHLCV) data "
                f"for {pair}, {timeframe}, {candle_type} due to {e.__class__.__name__}. "
                f"Message: {e}"
            ) from e
        except ccxt.BaseError as e:
            raise OperationalException(
                f"Could not fetch historical candle (OHLCV) data for "
                f"{pair}, {timeframe}, {candle_type}. Message: {e}"
            ) from e
|
2017-10-31 23:12:18 +00:00
|
|
|
|
2022-12-30 06:32:59 +00:00
|
|
|
async def _fetch_funding_rate_history(
|
|
|
|
self,
|
|
|
|
pair: str,
|
|
|
|
timeframe: str,
|
|
|
|
limit: int,
|
|
|
|
since_ms: Optional[int] = None,
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> list[list]:
|
2022-12-30 06:32:59 +00:00
|
|
|
"""
|
|
|
|
Fetch funding rate history - used to selectively override this by subclasses.
|
|
|
|
"""
|
|
|
|
# Funding rate
|
2024-05-12 15:02:37 +00:00
|
|
|
data = await self._api_async.fetch_funding_rate_history(pair, since=since_ms, limit=limit)
|
2022-12-30 06:32:59 +00:00
|
|
|
# Convert funding rate to candle pattern
|
2024-05-12 15:02:37 +00:00
|
|
|
data = [[x["timestamp"], x["fundingRate"], 0, 0, 0, 0] for x in data]
|
2022-12-30 06:32:59 +00:00
|
|
|
return data
|
|
|
|
|
2024-03-16 16:04:48 +00:00
|
|
|
# fetch Trade data stuff
|
|
|
|
|
2024-07-10 17:40:19 +00:00
|
|
|
    def needed_candle_for_trades_ms(self, timeframe: str, candle_type: CandleType) -> int:
        """
        Calculate the earliest timestamp (ms) for which trades data is needed,
        based on the orderflow "max_candles" configuration.
        :param timeframe: Timeframe the trades will be aggregated into
        :param candle_type: CandleType used for the candle-limit lookup
        :return: Timestamp in milliseconds since epoch
        """
        candle_limit = self.ohlcv_candle_limit(timeframe, candle_type)
        tf_s = timeframe_to_seconds(timeframe)
        # Maximum candles obtainable through regular OHLCV calls.
        candles_fetched = candle_limit * self.required_candle_call_count

        max_candles = self._config["orderflow"]["max_candles"]

        required_candles = min(max_candles, candles_fetched)
        # NOTE(review): the first branch multiplies by BOTH candle_limit and
        # required_candles, yielding a window far larger than required_candles
        # candles - presumably `tf_s * required_candles` was intended; verify.
        move_to = (
            tf_s * candle_limit * required_candles
            if required_candles > candle_limit
            else (max_candles + 1) * tf_s
        )

        # Start of the next candle - "now" aligned to the timeframe grid.
        now = timeframe_to_next_date(timeframe)
        return int((now - timedelta(seconds=move_to)).timestamp() * 1000)
|
2024-07-04 16:32:08 +00:00
|
|
|
|
2024-05-15 15:09:32 +00:00
|
|
|
def _process_trades_df(
|
|
|
|
self,
|
|
|
|
pair: str,
|
|
|
|
timeframe: str,
|
|
|
|
c_type: CandleType,
|
2024-10-04 04:46:45 +00:00
|
|
|
ticks: list[list],
|
2024-05-15 15:09:32 +00:00
|
|
|
cache: bool,
|
2024-05-18 14:45:49 +00:00
|
|
|
first_required_candle_date: int,
|
2024-05-15 15:09:32 +00:00
|
|
|
) -> DataFrame:
|
2024-03-16 16:04:48 +00:00
|
|
|
# keeping parsed dataframe in cache
|
|
|
|
trades_df = trades_list_to_df(ticks, True)
|
2024-04-05 04:41:46 +00:00
|
|
|
|
2024-03-16 16:04:48 +00:00
|
|
|
if cache:
|
|
|
|
if (pair, timeframe, c_type) in self._trades:
|
|
|
|
old = self._trades[(pair, timeframe, c_type)]
|
|
|
|
# Reassign so we return the updated, combined df
|
|
|
|
combined_df = concat([old, trades_df], axis=0)
|
|
|
|
logger.debug(f"Clean duplicated ticks from Trades data {pair}")
|
2024-05-15 15:09:32 +00:00
|
|
|
trades_df = DataFrame(
|
|
|
|
trades_df_remove_duplicates(combined_df), columns=combined_df.columns
|
|
|
|
)
|
2024-03-16 16:04:48 +00:00
|
|
|
# Age out old candles
|
2024-05-18 14:45:49 +00:00
|
|
|
trades_df = trades_df[first_required_candle_date < trades_df["timestamp"]]
|
|
|
|
trades_df = trades_df.reset_index(drop=True)
|
2024-03-16 16:04:48 +00:00
|
|
|
self._trades[(pair, timeframe, c_type)] = trades_df
|
|
|
|
return trades_df
|
|
|
|
|
2024-08-01 17:58:17 +00:00
|
|
|
    async def _build_trades_dl_jobs(
        self, pairwt: PairWithTimeframe, data_handler, cache: bool
    ) -> tuple[PairWithTimeframe, Optional[DataFrame]]:
        """
        Build coroutines to refresh trades for (they're then called through async.gather)
        :param pairwt: (pair, timeframe, candle_type) tuple to refresh
        :param data_handler: data handler used to load/store the "<pair>-cached" trades file
        :param cache: If True, results are kept in the in-memory _trades cache
        :return: (pairwt, dataframe) - dataframe is None when nothing was refreshed or on error
        """
        pair, timeframe, candle_type = pairwt
        since_ms = None
        new_ticks: list = []
        all_stored_ticks_df = DataFrame(columns=DEFAULT_TRADES_COLUMNS + ["date"])
        # Earliest timestamp (ms) we need trades for.
        first_candle_ms = self.needed_candle_for_trades_ms(timeframe, candle_type)
        # refresh, if
        # a. not in _trades
        # b. no cache used
        # c. need new data
        is_in_cache = (pair, timeframe, candle_type) in self._trades
        if (
            not is_in_cache
            or not cache
            or self._now_is_time_to_refresh_trades(pair, timeframe, candle_type)
        ):
            logger.debug(f"Refreshing TRADES data for {pair}")
            # fetch trades since latest _trades and
            # store together with existing trades
            try:
                until = None
                from_id = None
                if is_in_cache:
                    # Continue pagination from the newest in-memory trade.
                    from_id = self._trades[(pair, timeframe, candle_type)].iloc[-1]["id"]
                    until = dt_ts()  # now

                else:
                    # Cold start: try the on-disk "<pair>-cached" trades file first.
                    until = int(timeframe_to_prev_date(timeframe).timestamp()) * 1000
                    all_stored_ticks_df = data_handler.trades_load(
                        f"{pair}-cached", self.trading_mode
                    )

                    if not all_stored_ticks_df.empty:
                        if (
                            all_stored_ticks_df.iloc[-1]["timestamp"] > first_candle_ms
                            and all_stored_ticks_df.iloc[0]["timestamp"] <= first_candle_ms
                        ):
                            # Use cache and populate further
                            last_cached_ms = all_stored_ticks_df.iloc[-1]["timestamp"]
                            from_id = all_stored_ticks_df.iloc[-1]["id"]
                            # only use cached if it's closer than first_candle_ms
                            since_ms = (
                                last_cached_ms
                                if last_cached_ms > first_candle_ms
                                else first_candle_ms
                            )
                        else:
                            # Skip cache, it's too old
                            all_stored_ticks_df = DataFrame(
                                columns=DEFAULT_TRADES_COLUMNS + ["date"]
                            )

                # from_id overrules with exchange set to id paginate
                [_, new_ticks] = await self._async_get_trade_history(
                    pair,
                    since=since_ms if since_ms else first_candle_ms,
                    until=until,
                    from_id=from_id,
                )

            except Exception:
                # Best-effort per pair: log and continue with the other pairs.
                logger.exception(f"Refreshing TRADES data for {pair} failed")
                return pairwt, None

            if new_ticks:
                # Merge stored and freshly downloaded ticks before processing.
                all_stored_ticks_list = all_stored_ticks_df[DEFAULT_TRADES_COLUMNS].values.tolist()
                all_stored_ticks_list.extend(new_ticks)
                trades_df = self._process_trades_df(
                    pair,
                    timeframe,
                    candle_type,
                    all_stored_ticks_list,
                    cache,
                    first_required_candle_date=first_candle_ms,
                )
                # Persist the merged result for the next cold start.
                data_handler.trades_store(
                    f"{pair}-cached", trades_df[DEFAULT_TRADES_COLUMNS], self.trading_mode
                )
                return pairwt, trades_df
            else:
                logger.error(f"No new ticks for {pair}")
        return pairwt, None
|
|
|
|
|
2024-05-15 15:09:32 +00:00
|
|
|
def refresh_latest_trades(
|
|
|
|
self,
|
|
|
|
pair_list: ListPairsWithTimeframes,
|
|
|
|
*,
|
|
|
|
cache: bool = True,
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> dict[PairWithTimeframe, DataFrame]:
|
2024-03-16 16:04:48 +00:00
|
|
|
"""
|
|
|
|
Refresh in-memory TRADES asynchronously and set `_trades` with the result
|
|
|
|
Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
|
|
|
|
Only used in the dataprovider.refresh() method.
|
|
|
|
:param pair_list: List of 3 element tuples containing (pair, timeframe, candle_type)
|
2024-05-15 12:33:41 +00:00
|
|
|
:param cache: Assign result to _trades. Useful for one-off downloads like for pairlists
|
2024-03-16 16:04:48 +00:00
|
|
|
:return: Dict of [{(pair, timeframe): Dataframe}]
|
|
|
|
"""
|
2024-03-16 16:19:01 +00:00
|
|
|
from freqtrade.data.history import get_datahandler
|
2024-05-15 15:09:32 +00:00
|
|
|
|
2024-03-16 16:19:01 +00:00
|
|
|
data_handler = get_datahandler(
|
2024-05-15 15:09:32 +00:00
|
|
|
self._config["datadir"], data_format=self._config["dataformat_trades"]
|
2024-03-16 16:19:01 +00:00
|
|
|
)
|
2024-03-16 16:04:48 +00:00
|
|
|
logger.debug("Refreshing TRADES data for %d pairs", len(pair_list))
|
|
|
|
results_df = {}
|
2024-08-12 17:43:48 +00:00
|
|
|
trades_dl_jobs = []
|
|
|
|
for pair_wt in set(pair_list):
|
|
|
|
trades_dl_jobs.append(self._build_trades_dl_jobs(pair_wt, data_handler, cache))
|
2024-03-16 16:04:48 +00:00
|
|
|
|
2024-08-12 17:43:48 +00:00
|
|
|
async def gather_coroutines(coro):
|
2024-08-01 17:58:17 +00:00
|
|
|
return await asyncio.gather(*coro, return_exceptions=True)
|
2024-03-16 16:04:48 +00:00
|
|
|
|
2024-08-12 17:43:48 +00:00
|
|
|
for dl_job_chunk in chunks(trades_dl_jobs, 100):
|
2024-08-01 17:58:17 +00:00
|
|
|
with self._loop_lock:
|
2024-08-12 17:43:48 +00:00
|
|
|
results = self.loop.run_until_complete(gather_coroutines(dl_job_chunk))
|
2024-03-16 16:04:48 +00:00
|
|
|
|
2024-08-01 17:58:17 +00:00
|
|
|
for res in results:
|
|
|
|
if isinstance(res, Exception):
|
|
|
|
logger.warning(f"Async code raised an exception: {repr(res)}")
|
|
|
|
continue
|
|
|
|
pairwt, trades_df = res
|
|
|
|
if trades_df is not None:
|
|
|
|
results_df[pairwt] = trades_df
|
2024-03-16 16:04:48 +00:00
|
|
|
|
|
|
|
return results_df
|
|
|
|
|
2024-05-08 13:08:25 +00:00
|
|
|
def _now_is_time_to_refresh_trades(
|
|
|
|
self, pair: str, timeframe: str, candle_type: CandleType
|
|
|
|
) -> bool: # Timeframe in seconds
|
|
|
|
trades = self.trades((pair, timeframe, candle_type), False)
|
|
|
|
pair_last_refreshed = int(trades.iloc[-1]["timestamp"])
|
2024-05-15 15:09:32 +00:00
|
|
|
full_candle = (
|
|
|
|
int(timeframe_to_next_date(timeframe, dt_from_ts(pair_last_refreshed)).timestamp())
|
|
|
|
* 1000
|
|
|
|
)
|
2024-05-08 13:08:25 +00:00
|
|
|
now = dt_ts()
|
|
|
|
return full_candle <= now
|
2024-03-16 16:04:48 +00:00
|
|
|
|
2021-06-02 09:20:26 +00:00
|
|
|
# Fetch historic trades
|
|
|
|
|
2019-08-14 18:30:29 +00:00
|
|
|
    @retrier_async
    async def _async_fetch_trades(
        self, pair: str, since: Optional[int] = None, params: Optional[dict] = None
    ) -> tuple[list[list], Any]:
        """
        Asynchronously gets trade history using fetch_trades.
        Handles exchange errors, does one call to the exchange.
        :param pair: Pair to fetch trade data for
        :param since: Since as integer timestamp in milliseconds
        :param params: Extra ccxt params (used for id-based pagination); overrides `since`
        :raises OperationalException: unsupported feature or unrecoverable ccxt error
        :raises DDosProtection, TemporaryError: retryable errors (handled by @retrier_async)
        returns: List of dicts containing trades, the next iteration value (new "since" or trade_id)
        """
        try:
            trades_limit = self._max_trades_limit
            # fetch trades asynchronously
            if params:
                # Pagination via explicit params (e.g. from_id) - `since` is not passed.
                logger.debug("Fetching trades for pair %s, params: %s ", pair, params)
                trades = await self._api_async.fetch_trades(pair, params=params, limit=trades_limit)
            else:
                logger.debug(
                    "Fetching trades for pair %s, since %s %s...",
                    pair,
                    since,
                    "(" + dt_from_ts(since).isoformat() + ") " if since is not None else "",
                )
                trades = await self._api_async.fetch_trades(pair, since=since, limit=trades_limit)
            # Convert contract sizes to amounts where applicable (futures).
            trades = self._trades_contracts_to_amount(trades)
            pagination_value = self._get_trade_pagination_next_value(trades)
            return trades_dict_to_list(trades), pagination_value
        except ccxt.NotSupported as e:
            raise OperationalException(
                f"Exchange {self._api.name} does not support fetching historical trade data."
                f"Message: {e}"
            ) from e
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f"Could not load trade history due to {e.__class__.__name__}. Message: {e}"
            ) from e
        except ccxt.BaseError as e:
            raise OperationalException(f"Could not fetch trade data. Msg: {e}") from e
|
2019-08-14 18:30:29 +00:00
|
|
|
|
2024-01-21 13:08:35 +00:00
|
|
|
def _valid_trade_pagination_id(self, pair: str, from_id: str) -> bool:
|
|
|
|
"""
|
|
|
|
Verify trade-pagination id is valid.
|
|
|
|
Workaround for odd Kraken issue where ID is sometimes wrong.
|
|
|
|
"""
|
|
|
|
return True
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def _get_trade_pagination_next_value(self, trades: list[dict]):
|
2024-01-21 14:22:03 +00:00
|
|
|
"""
|
|
|
|
Extract pagination id for the next "from_id" value
|
|
|
|
Applies only to fetch_trade_history by id.
|
|
|
|
"""
|
|
|
|
if not trades:
|
|
|
|
return None
|
2024-05-12 15:02:37 +00:00
|
|
|
if self._trades_pagination == "id":
|
|
|
|
return trades[-1].get("id")
|
2024-01-21 14:22:03 +00:00
|
|
|
else:
|
2024-05-12 15:02:37 +00:00
|
|
|
return trades[-1].get("timestamp")
|
2024-01-21 14:22:03 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
    async def _async_get_trade_history_id(
        self, pair: str, until: int, since: Optional[int] = None, from_id: Optional[str] = None
    ) -> tuple[str, list[list]]:
        """
        Asynchronously gets trade history using fetch_trades
        use this when exchange uses id-based iteration (check `self._trades_pagination`)
        :param pair: Pair to fetch trade data for
        :param since: Since as integer timestamp in milliseconds
        :param until: Until as integer timestamp in milliseconds
        :param from_id: Download data starting with ID (if id is known). Ignores "since" if set.
        returns tuple: (pair, trades-list)
        """

        trades: list[list] = []
        # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
        # DEFAULT_TRADES_COLUMNS: 1 -> id
        # Exchanges with "overlap" return the paginating trade again as the first
        # element of the next page, so each page's last trade is dropped and
        # re-added only once the loop terminates.
        has_overlap = self._ft_has.get("trades_pagination_overlap", True)
        # Skip last trade by default since its the key for the next call
        x = slice(None, -1) if has_overlap else slice(None)

        if not from_id or not self._valid_trade_pagination_id(pair, from_id):
            # Fetch first elements using timebased method to get an ID to paginate on
            # Depending on the Exchange, this can introduce a drift at the start of the interval
            # of up to an hour.
            # e.g. Binance returns the "last 1000" candles within a 1h time interval
            # - so we will miss the first trades.
            t, from_id = await self._async_fetch_trades(pair, since=since)
            trades.extend(t[x])
        while True:
            try:
                # Continue pagination by passing the last seen id via the
                # exchange-specific pagination argument.
                t, from_id_next = await self._async_fetch_trades(
                    pair, params={self._trades_pagination_arg: from_id}
                )
                if t:
                    trades.extend(t[x])
                    # Stop when pagination stalls (same id returned twice) or the
                    # newest trade timestamp (column 0) passed the requested "until".
                    if from_id == from_id_next or t[-1][0] > until:
                        logger.debug(
                            f"Stopping because from_id did not change. "
                            f"Reached {t[-1][0]} > {until}"
                        )
                        # Reached the end of the defined-download period - add last trade as well.
                        if has_overlap:
                            trades.extend(t[-1:])
                        break

                    from_id = from_id_next
                else:
                    logger.debug("Stopping as no more trades were returned.")
                    break
            except asyncio.CancelledError:
                # Return what has been downloaded so far instead of losing it.
                logger.debug("Async operation Interrupted, breaking trades DL loop.")
                break

        return (pair, trades)
|
2019-08-14 18:30:29 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
    async def _async_get_trade_history_time(
        self, pair: str, until: int, since: Optional[int] = None
    ) -> tuple[str, list[list]]:
        """
        Asynchronously gets trade history using fetch_trades,
        when the exchange uses time-based iteration (check `self._trades_pagination`)
        :param pair: Pair to fetch trade data for
        :param since: Since as integer timestamp in milliseconds
        :param until: Until as integer timestamp in milliseconds
        returns tuple: (pair, trades-list)
        """

        trades: list[list] = []
        # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
        # DEFAULT_TRADES_COLUMNS: 1 -> id
        while True:
            try:
                # since_next is the timestamp of the last trade of this page and
                # becomes the left boundary of the next request.
                t, since_next = await self._async_fetch_trades(pair, since=since)
                if t:
                    # No more trades to download available at the exchange,
                    # So we repeatedly get the same trade over and over again.
                    if since == since_next and len(t) == 1:
                        logger.debug("Stopping because no more trades are available.")
                        break
                    since = since_next
                    trades.extend(t)
                    # Reached the end of the defined-download period
                    if until and since_next > until:
                        logger.debug(f"Stopping because until was reached. {since_next} > {until}")
                        break
                else:
                    logger.debug("Stopping as no more trades were returned.")
                    break
            except asyncio.CancelledError:
                # Return what has been downloaded so far instead of losing it.
                logger.debug("Async operation Interrupted, breaking trades DL loop.")
                break

        return (pair, trades)
|
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
async def _async_get_trade_history(
|
|
|
|
self,
|
|
|
|
pair: str,
|
|
|
|
since: Optional[int] = None,
|
|
|
|
until: Optional[int] = None,
|
|
|
|
from_id: Optional[str] = None,
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> tuple[str, list[list]]:
|
2019-08-29 10:56:10 +00:00
|
|
|
"""
|
|
|
|
Async wrapper handling downloading trades using either time or id based methods.
|
|
|
|
"""
|
2019-08-14 18:30:29 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
logger.debug(
|
|
|
|
f"_async_get_trade_history(), pair: {pair}, "
|
|
|
|
f"since: {since}, until: {until}, from_id: {from_id}"
|
|
|
|
)
|
2020-05-31 10:05:58 +00:00
|
|
|
|
2020-06-02 15:43:37 +00:00
|
|
|
if until is None:
|
2020-06-02 18:09:23 +00:00
|
|
|
until = ccxt.Exchange.milliseconds()
|
|
|
|
logger.debug(f"Exchange milliseconds: {until}")
|
2020-05-31 10:05:58 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
if self._trades_pagination == "time":
|
|
|
|
return await self._async_get_trade_history_time(pair=pair, since=since, until=until)
|
|
|
|
elif self._trades_pagination == "id":
|
2019-09-28 08:52:53 +00:00
|
|
|
return await self._async_get_trade_history_id(
|
2020-05-31 10:05:58 +00:00
|
|
|
pair=pair, since=since, until=until, from_id=from_id
|
2019-09-28 08:52:53 +00:00
|
|
|
)
|
|
|
|
else:
|
2024-05-12 15:02:37 +00:00
|
|
|
raise OperationalException(
|
2024-05-12 15:51:21 +00:00
|
|
|
f"Exchange {self.name} does use neither time, nor id based pagination"
|
2024-05-12 15:02:37 +00:00
|
|
|
)
|
2019-08-14 18:30:29 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
def get_historic_trades(
|
|
|
|
self,
|
|
|
|
pair: str,
|
|
|
|
since: Optional[int] = None,
|
|
|
|
until: Optional[int] = None,
|
|
|
|
from_id: Optional[str] = None,
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> tuple[str, list]:
|
2019-08-14 18:30:29 +00:00
|
|
|
"""
|
2020-03-08 10:35:31 +00:00
|
|
|
Get trade history data using asyncio.
|
|
|
|
Handles all async work and returns the list of candles.
|
2021-02-14 09:29:45 +00:00
|
|
|
Async over one pair, assuming we get `self.ohlcv_candle_limit()` candles per call.
|
2019-08-14 18:30:29 +00:00
|
|
|
:param pair: Pair to download
|
2019-08-16 08:51:04 +00:00
|
|
|
:param since: Timestamp in milliseconds to get history from
|
|
|
|
:param until: Timestamp in milliseconds. Defaults to current timestamp if not defined.
|
2019-08-16 08:34:52 +00:00
|
|
|
:param from_id: Download data starting with ID (if id is known)
|
2020-03-08 10:35:31 +00:00
|
|
|
:returns List of trade data
|
2019-08-14 18:30:29 +00:00
|
|
|
"""
|
2019-10-19 08:05:30 +00:00
|
|
|
if not self.exchange_has("fetchTrades"):
|
2021-08-16 12:16:24 +00:00
|
|
|
raise OperationalException("This exchange does not support downloading Trades.")
|
2019-08-29 10:56:10 +00:00
|
|
|
|
2022-04-28 04:29:14 +00:00
|
|
|
with self._loop_lock:
|
2024-05-12 15:02:37 +00:00
|
|
|
task = asyncio.ensure_future(
|
|
|
|
self._async_get_trade_history(pair=pair, since=since, until=until, from_id=from_id)
|
|
|
|
)
|
2023-08-20 09:57:59 +00:00
|
|
|
|
|
|
|
for sig in [signal.SIGINT, signal.SIGTERM]:
|
2023-08-20 14:09:12 +00:00
|
|
|
try:
|
|
|
|
self.loop.add_signal_handler(sig, task.cancel)
|
|
|
|
except NotImplementedError:
|
|
|
|
# Not all platforms implement signals (e.g. windows)
|
|
|
|
pass
|
2023-08-20 09:57:59 +00:00
|
|
|
return self.loop.run_until_complete(task)
|
2019-08-14 18:30:29 +00:00
|
|
|
|
2021-12-19 07:03:02 +00:00
|
|
|
@retrier
|
2021-11-08 07:50:50 +00:00
|
|
|
def _get_funding_fees_from_exchange(self, pair: str, since: Union[datetime, int]) -> float:
|
2021-09-08 19:46:52 +00:00
|
|
|
"""
|
2021-11-09 07:00:57 +00:00
|
|
|
Returns the sum of all funding fees that were exchanged for a pair within a timeframe
|
2021-11-12 06:26:59 +00:00
|
|
|
Dry-run handling happens as part of _calculate_funding_fees.
|
2021-11-09 07:00:57 +00:00
|
|
|
:param pair: (e.g. ADA/USDT)
|
|
|
|
:param since: The earliest time of consideration for calculating funding fees,
|
|
|
|
in unix time or as a datetime
|
2021-09-08 19:46:52 +00:00
|
|
|
"""
|
|
|
|
if not self.exchange_has("fetchFundingHistory"):
|
|
|
|
raise OperationalException(
|
2021-11-12 00:32:39 +00:00
|
|
|
f"fetch_funding_history() is not available using {self.name}"
|
|
|
|
)
|
2021-09-08 19:46:52 +00:00
|
|
|
|
|
|
|
if type(since) is datetime:
|
2024-03-10 18:31:43 +00:00
|
|
|
since = dt_ts(since)
|
2021-09-08 19:46:52 +00:00
|
|
|
|
|
|
|
try:
|
2024-05-12 15:02:37 +00:00
|
|
|
funding_history = self._api.fetch_funding_history(symbol=pair, since=since)
|
|
|
|
self._log_exchange_response(
|
|
|
|
"funding_history", funding_history, add_info=f"pair: {pair}, since: {since}"
|
2021-09-08 19:46:52 +00:00
|
|
|
)
|
2024-05-12 15:02:37 +00:00
|
|
|
return sum(fee["amount"] for fee in funding_history)
|
2021-09-08 19:46:52 +00:00
|
|
|
except ccxt.DDoSProtection as e:
|
|
|
|
raise DDosProtection(e) from e
|
2024-04-13 09:12:10 +00:00
|
|
|
except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
|
2021-09-08 19:46:52 +00:00
|
|
|
raise TemporaryError(
|
2024-05-12 15:02:37 +00:00
|
|
|
f"Could not get funding fees due to {e.__class__.__name__}. Message: {e}"
|
|
|
|
) from e
|
2021-09-08 19:46:52 +00:00
|
|
|
except ccxt.BaseError as e:
|
|
|
|
raise OperationalException(e) from e
|
|
|
|
|
2022-02-15 06:04:50 +00:00
|
|
|
    @retrier
    def get_leverage_tiers(self) -> dict[str, list[dict]]:
        """
        Fetch leverage tiers for all markets in a single exchange call.
        :return: Mapping of symbol -> list of tier dicts as returned by ccxt
        :raises DDosProtection / TemporaryError / OperationalException:
            translated from the corresponding ccxt errors
        """
        try:
            return self._api.fetch_leverage_tiers()
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
            # Transient errors - the @retrier decorator may retry these.
            raise TemporaryError(
                f"Could not load leverage tiers due to {e.__class__.__name__}. Message: {e}"
            ) from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e
|
|
|
|
|
2022-05-31 08:48:34 +00:00
|
|
|
    @retrier_async
    async def get_market_leverage_tiers(self, symbol: str) -> tuple[str, list[dict]]:
        """
        Leverage tiers per symbol
        :param symbol: Market symbol to fetch tiers for
        :return: (symbol, tiers) tuple - the symbol is included so callers can
            gather many of these coroutines and still match results to markets
        """
        try:
            tier = await self._api_async.fetch_market_leverage_tiers(symbol)
            return symbol, tier
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
            # Transient errors - the @retrier_async decorator may retry these.
            raise TemporaryError(
                f"Could not load leverage tiers for {symbol}"
                f" due to {e.__class__.__name__}. Message: {e}"
            ) from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e
|
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
    def load_leverage_tiers(self) -> dict[str, list[dict]]:
        """
        Load leverage tiers for all relevant futures markets.
        Prefers the bulk fetchLeverageTiers endpoint; otherwise fetches per-market
        tiers asynchronously, backed by an on-disk cache to limit API calls.
        :return: Mapping of symbol -> tier list; empty dict outside FUTURES mode.
        """
        if self.trading_mode == TradingMode.FUTURES:
            if self.exchange_has("fetchLeverageTiers"):
                # Fetch all leverage tiers at once
                return self.get_leverage_tiers()
            elif self.exchange_has("fetchMarketLeverageTiers"):
                # Must fetch the leverage tiers for each market separately
                # * This is slow(~45s) on Okx, makes ~90 api calls to load all linear swap markets
                markets = self.markets

                # Only futures markets quoted in the configured stake currency matter.
                symbols = [
                    symbol
                    for symbol, market in markets.items()
                    if (
                        self.market_is_future(market)
                        and market["quote"] == self._config["stake_currency"]
                    )
                ]

                tiers: dict[str, list[dict]] = {}

                # Seed from the disk cache - only missing symbols are fetched below.
                tiers_cached = self.load_cached_leverage_tiers(self._config["stake_currency"])
                if tiers_cached:
                    tiers = tiers_cached

                coros = [
                    self.get_market_leverage_tiers(symbol)
                    for symbol in sorted(symbols)
                    if symbol not in tiers
                ]

                # Be verbose here, as this delays startup by ~1 minute.
                if coros:
                    logger.info(
                        f"Initializing leverage_tiers for {len(symbols)} markets. "
                        "This will take about a minute."
                    )
                else:
                    logger.info("Using cached leverage_tiers.")

                async def gather_results(input_coro):
                    # return_exceptions=True: one failing market must not abort the batch.
                    return await asyncio.gather(*input_coro, return_exceptions=True)

                # Run in chunks of 100 to bound concurrent requests.
                for input_coro in chunks(coros, 100):
                    with self._loop_lock:
                        results = self.loop.run_until_complete(gather_results(input_coro))

                    for res in results:
                        if isinstance(res, Exception):
                            logger.warning(f"Leverage tier exception: {repr(res)}")
                            continue
                        symbol, tier = res
                        tiers[symbol] = tier
                # Only rewrite the cache when something new was actually fetched.
                if len(coros) > 0:
                    self.cache_leverage_tiers(tiers, self._config["stake_currency"])
                logger.info(f"Done initializing {len(symbols)} markets.")

                return tiers
        return {}
|
2022-02-07 07:33:42 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def cache_leverage_tiers(self, tiers: dict[str, list[dict]], stake_currency: str) -> None:
|
2024-05-12 15:02:37 +00:00
|
|
|
filename = self._config["datadir"] / "futures" / f"leverage_tiers_{stake_currency}.json"
|
2022-08-20 11:47:34 +00:00
|
|
|
if not filename.parent.is_dir():
|
|
|
|
filename.parent.mkdir(parents=True)
|
2022-08-20 11:00:25 +00:00
|
|
|
data = {
|
|
|
|
"updated": datetime.now(timezone.utc),
|
|
|
|
"data": tiers,
|
|
|
|
}
|
|
|
|
file_dump_json(filename, data)
|
|
|
|
|
2024-05-16 17:11:51 +00:00
|
|
|
def load_cached_leverage_tiers(
|
|
|
|
self, stake_currency: str, cache_time: Optional[timedelta] = None
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> Optional[dict[str, list[dict]]]:
|
2024-05-16 17:11:51 +00:00
|
|
|
"""
|
|
|
|
Load cached leverage tiers from disk
|
|
|
|
:param cache_time: The maximum age of the cache before it is considered outdated
|
|
|
|
"""
|
|
|
|
if not cache_time:
|
|
|
|
# Default to 4 weeks
|
|
|
|
cache_time = timedelta(weeks=4)
|
2024-05-12 15:02:37 +00:00
|
|
|
filename = self._config["datadir"] / "futures" / f"leverage_tiers_{stake_currency}.json"
|
2022-08-20 11:00:25 +00:00
|
|
|
if filename.is_file():
|
2023-04-18 16:01:12 +00:00
|
|
|
try:
|
|
|
|
tiers = file_load_json(filename)
|
2024-05-12 15:02:37 +00:00
|
|
|
updated = tiers.get("updated")
|
2023-04-18 16:01:12 +00:00
|
|
|
if updated:
|
|
|
|
updated_dt = parser.parse(updated)
|
2024-05-16 17:11:51 +00:00
|
|
|
if updated_dt < datetime.now(timezone.utc) - cache_time:
|
2023-04-18 16:01:12 +00:00
|
|
|
logger.info("Cached leverage tiers are outdated. Will update.")
|
|
|
|
return None
|
2024-10-20 13:22:50 +00:00
|
|
|
return tiers.get("data")
|
2023-04-18 16:01:12 +00:00
|
|
|
except Exception:
|
|
|
|
logger.exception("Error loading cached leverage tiers. Refreshing.")
|
2022-08-20 11:00:25 +00:00
|
|
|
return None
|
|
|
|
|
2022-02-07 08:01:00 +00:00
|
|
|
def fill_leverage_tiers(self) -> None:
|
2021-08-20 08:40:22 +00:00
|
|
|
"""
|
2022-02-07 08:01:00 +00:00
|
|
|
Assigns property _leverage_tiers to a dictionary of information about the leverage
|
2021-11-09 07:00:57 +00:00
|
|
|
allowed on each pair
|
2021-08-20 08:40:22 +00:00
|
|
|
"""
|
2022-02-07 10:19:18 +00:00
|
|
|
leverage_tiers = self.load_leverage_tiers()
|
|
|
|
for pair, tiers in leverage_tiers.items():
|
2022-02-07 10:43:19 +00:00
|
|
|
pair_tiers = []
|
2022-02-07 10:19:18 +00:00
|
|
|
for tier in tiers:
|
2022-02-07 10:43:19 +00:00
|
|
|
pair_tiers.append(self.parse_leverage_tier(tier))
|
|
|
|
self._leverage_tiers[pair] = pair_tiers
|
2022-02-07 07:33:42 +00:00
|
|
|
|
2024-10-04 04:46:45 +00:00
|
|
|
def parse_leverage_tier(self, tier) -> dict:
|
2024-05-12 15:02:37 +00:00
|
|
|
info = tier.get("info", {})
|
2022-02-07 07:33:42 +00:00
|
|
|
return {
|
2024-05-12 15:02:37 +00:00
|
|
|
"minNotional": tier["minNotional"],
|
|
|
|
"maxNotional": tier["maxNotional"],
|
|
|
|
"maintenanceMarginRate": tier["maintenanceMarginRate"],
|
|
|
|
"maxLeverage": tier["maxLeverage"],
|
|
|
|
"maintAmt": float(info["cum"]) if "cum" in info else None,
|
2022-02-07 07:33:42 +00:00
|
|
|
}
|
2021-08-20 08:40:22 +00:00
|
|
|
|
2022-01-31 10:47:52 +00:00
|
|
|
    def get_max_leverage(self, pair: str, stake_amount: Optional[float]) -> float:
        """
        Returns the maximum leverage that a pair can be traded at
        :param pair: The base/quote currency pair being traded
        :param stake_amount: The total value of the traders margin_mode in quote currency
        :return: Maximum usable leverage (1.0 for SPOT / unknown pairs)
        :raises OperationalException: in FUTURES mode when stake_amount is None
        :raises InvalidOrderException: when stake_amount exceeds the largest tier
        """

        if self.trading_mode == TradingMode.SPOT:
            return 1.0

        if self.trading_mode == TradingMode.FUTURES:
            # Checks and edge cases
            if stake_amount is None:
                raise OperationalException(
                    f"{self.name}.get_max_leverage requires argument stake_amount"
                )

            if pair not in self._leverage_tiers:
                # Maybe raise exception because it can't be traded on futures?
                return 1.0

            pair_tiers = self._leverage_tiers[pair]

            if stake_amount == 0:
                return self._leverage_tiers[pair][0]["maxLeverage"]  # Max lev for lowest amount

            # Walk the tiers (sorted by notional) and return the highest leverage
            # whose resulting position still fits into the tier's notional bounds.
            for tier_index in range(len(pair_tiers)):
                tier = pair_tiers[tier_index]
                lev = tier["maxLeverage"]

                if tier_index < len(pair_tiers) - 1:
                    next_tier = pair_tiers[tier_index + 1]
                    # Smallest stake that can reach the next tier's minNotional.
                    next_floor = next_tier["minNotional"] / next_tier["maxLeverage"]
                    if next_floor > stake_amount:  # Next tier min too high for stake amount
                        return min((tier["maxNotional"] / stake_amount), lev)
                    #
                    # With the two leverage tiers below,
                    # - a stake amount of 150 would mean a max leverage of (10000 / 150) = 66.66
                    # - stakes below 133.33 = max_lev of 75
                    # - stakes between 133.33-200 = max_lev of 10000/stake = 50.01-74.99
                    # - stakes from 200 + 1000 = max_lev of 50
                    #
                    # {
                    #     "min": 0,  # stake = 0.0
                    #     "max": 10000,  # max_stake@75 = 10000/75 = 133.33333333333334
                    #     "lev": 75,
                    # },
                    # {
                    #     "min": 10000,  # stake = 200.0
                    #     "max": 50000,  # max_stake@50 = 50000/50 = 1000.0
                    #     "lev": 50,
                    # }
                    #

                else:  # if on the last tier
                    if stake_amount > tier["maxNotional"]:
                        # If stake is > than max tradeable amount
                        raise InvalidOrderException(f"Amount {stake_amount} too high for {pair}")
                    else:
                        return tier["maxLeverage"]

            raise OperationalException(
                "Looped through all tiers without finding a max leverage. Should never be reached"
            )

        elif self.trading_mode == TradingMode.MARGIN:  # Search markets.limits for max lev
            market = self.markets[pair]
            if market["limits"]["leverage"]["max"] is not None:
                return market["limits"]["leverage"]["max"]
            else:
                return 1.0  # Default if max leverage cannot be found
        else:
            return 1.0
|
2022-02-07 07:33:42 +00:00
|
|
|
|
2021-09-05 01:16:17 +00:00
|
|
|
    @retrier
    def _set_leverage(
        self,
        leverage: float,
        pair: Optional[str] = None,
        accept_fail: bool = False,
    ):
        """
        Set's the leverage before making a trade, in order to not
        have the same leverage on every trade
        :param leverage: Leverage to set
        :param pair: Pair to set the leverage for
        :param accept_fail: When True, swallow BadRequest/OperationRejected/
            InsufficientFunds errors instead of raising TemporaryError
        """
        if self._config["dry_run"] or not self.exchange_has("setLeverage"):
            # Some exchanges only support one margin_mode type
            return
        if self._ft_has.get("floor_leverage", False) is True:
            # Rounding for binance ...
            leverage = floor(leverage)
        try:
            res = self._api.set_leverage(symbol=pair, leverage=leverage)
            self._log_exchange_response("set_leverage", res)
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.BadRequest, ccxt.OperationRejected, ccxt.InsufficientFunds) as e:
            # "Expected" rejections (e.g. leverage already set) - optional via accept_fail.
            if not accept_fail:
                raise TemporaryError(
                    f"Could not set leverage due to {e.__class__.__name__}. Message: {e}"
                ) from e
        except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f"Could not set leverage due to {e.__class__.__name__}. Message: {e}"
            ) from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e
|
2021-08-20 08:40:22 +00:00
|
|
|
|
2022-02-28 18:45:15 +00:00
|
|
|
def get_interest_rate(self) -> float:
|
|
|
|
"""
|
2022-03-01 18:23:14 +00:00
|
|
|
Retrieve interest rate - necessary for Margin trading.
|
|
|
|
Should not call the exchange directly when used from backtesting.
|
2022-02-28 18:45:15 +00:00
|
|
|
"""
|
|
|
|
return 0.0
|
|
|
|
|
2023-10-09 04:37:08 +00:00
|
|
|
def funding_fee_cutoff(self, open_date: datetime) -> bool:
|
2021-11-09 18:40:42 +00:00
|
|
|
"""
|
2023-10-08 18:39:25 +00:00
|
|
|
Funding fees are only charged at full hours (usually every 4-8h).
|
|
|
|
Therefore a trade opening at 10:00:01 will not be charged a funding fee until the next hour.
|
2021-11-09 07:17:29 +00:00
|
|
|
:param open_date: The open date for a trade
|
2023-10-09 04:37:08 +00:00
|
|
|
:return: True if the date falls on a full hour, False otherwise
|
2021-11-09 18:40:42 +00:00
|
|
|
"""
|
2023-10-09 04:37:08 +00:00
|
|
|
return open_date.minute == 0 and open_date.second == 0
|
2021-11-01 07:09:11 +00:00
|
|
|
|
2021-12-19 07:03:02 +00:00
|
|
|
    @retrier
    def set_margin_mode(
        self,
        pair: str,
        margin_mode: MarginMode,
        accept_fail: bool = False,
        params: Optional[dict] = None,
    ):
        """
        Set's the margin mode on the exchange to cross or isolated for a specific pair
        :param pair: base/quote currency pair (e.g. "ADA/USDT")
        :param margin_mode: Margin mode (cross / isolated) to apply
        :param accept_fail: When True, swallow BadRequest/OperationRejected errors
            instead of raising TemporaryError
        :param params: Additional exchange-specific parameters passed through to ccxt
        """
        if self._config["dry_run"] or not self.exchange_has("setMarginMode"):
            # Some exchanges only support one margin_mode type
            return

        if params is None:
            params = {}
        try:
            res = self._api.set_margin_mode(margin_mode.value, pair, params)
            self._log_exchange_response("set_margin_mode", res)
        except ccxt.DDoSProtection as e:
            raise DDosProtection(e) from e
        except (ccxt.BadRequest, ccxt.OperationRejected) as e:
            # "Expected" rejections (e.g. mode already set) - optional via accept_fail.
            if not accept_fail:
                raise TemporaryError(
                    f"Could not set margin mode due to {e.__class__.__name__}. Message: {e}"
                ) from e
        except (ccxt.OperationFailed, ccxt.ExchangeError) as e:
            raise TemporaryError(
                f"Could not set margin mode due to {e.__class__.__name__}. Message: {e}"
            ) from e
        except ccxt.BaseError as e:
            raise OperationalException(e) from e
|
2021-09-02 05:40:32 +00:00
|
|
|
|
2022-01-08 10:01:14 +00:00
|
|
|
    def _fetch_and_calculate_funding_fees(
        self,
        pair: str,
        amount: float,
        is_short: bool,
        open_date: datetime,
        close_date: Optional[datetime] = None,
    ) -> float:
        """
        Fetches and calculates the sum of all funding fees that occurred for a pair
        during a futures trade.
        Only used during dry-run or if the exchange does not provide a funding_rates endpoint.
        :param pair: The quote/base pair of the trade
        :param amount: The quantity of the trade
        :param is_short: trade direction
        :param open_date: The date and time that the trade started
        :param close_date: The date and time that the trade ended (defaults to "now")
        :raises ExchangeError: if funding-rate / mark candles could not be fetched
        """

        if self.funding_fee_cutoff(open_date):
            # Shift back to 1h candle to avoid missing funding fees
            # Only really relevant for trades very close to the full hour
            open_date = timeframe_to_prev_date("1h", open_date)
        timeframe = self._ft_has["mark_ohlcv_timeframe"]
        timeframe_ff = self._ft_has["funding_fee_timeframe"]
        mark_price_type = CandleType.from_string(self._ft_has["mark_ohlcv_price"])

        if not close_date:
            close_date = datetime.now(timezone.utc)
        # Align the download start with the mark candle containing open_date.
        since_ms = dt_ts(timeframe_to_prev_date(timeframe, open_date))

        mark_comb: PairWithTimeframe = (pair, timeframe, mark_price_type)
        funding_comb: PairWithTimeframe = (pair, timeframe_ff, CandleType.FUNDING_RATE)

        # cache=False: fee calculation needs the exact requested window,
        # drop_incomplete=False: the current (incomplete) candle is relevant too.
        candle_histories = self.refresh_latest_ohlcv(
            [mark_comb, funding_comb],
            since_ms=since_ms,
            cache=False,
            drop_incomplete=False,
        )
        try:
            # we can't assume we always get histories - for example during exchange downtimes
            funding_rates = candle_histories[funding_comb]
            mark_rates = candle_histories[mark_comb]
        except KeyError:
            raise ExchangeError("Could not find funding rates.") from None

        funding_mark_rates = self.combine_funding_and_mark(funding_rates, mark_rates)

        return self.calculate_funding_fees(
            funding_mark_rates,
            amount=amount,
            is_short=is_short,
            open_date=open_date,
            close_date=close_date,
        )
|
|
|
|
|
2022-01-17 18:39:58 +00:00
|
|
|
@staticmethod
|
2024-05-12 15:02:37 +00:00
|
|
|
def combine_funding_and_mark(
|
|
|
|
funding_rates: DataFrame, mark_rates: DataFrame, futures_funding_rate: Optional[int] = None
|
|
|
|
) -> DataFrame:
|
2022-01-17 18:39:58 +00:00
|
|
|
"""
|
|
|
|
Combine funding-rates and mark-rates dataframes
|
|
|
|
:param funding_rates: Dataframe containing Funding rates (Type FUNDING_RATE)
|
|
|
|
:param mark_rates: Dataframe containing Mark rates (Type mark_ohlcv_price)
|
2022-05-21 06:50:39 +00:00
|
|
|
:param futures_funding_rate: Fake funding rate to use if funding_rates are not available
|
2022-01-17 18:39:58 +00:00
|
|
|
"""
|
2022-05-21 06:50:39 +00:00
|
|
|
if futures_funding_rate is None:
|
|
|
|
return mark_rates.merge(
|
2024-05-12 15:02:37 +00:00
|
|
|
funding_rates, on="date", how="inner", suffixes=["_mark", "_fund"]
|
|
|
|
)
|
2022-05-21 06:50:39 +00:00
|
|
|
else:
|
|
|
|
if len(funding_rates) == 0:
|
|
|
|
# No funding rate candles - full fillup with fallback variable
|
2024-05-12 15:02:37 +00:00
|
|
|
mark_rates["open_fund"] = futures_funding_rate
|
2022-05-21 06:50:39 +00:00
|
|
|
return mark_rates.rename(
|
2024-05-12 15:02:37 +00:00
|
|
|
columns={
|
|
|
|
"open": "open_mark",
|
|
|
|
"close": "close_mark",
|
|
|
|
"high": "high_mark",
|
|
|
|
"low": "low_mark",
|
|
|
|
"volume": "volume_mark",
|
|
|
|
}
|
|
|
|
)
|
2022-01-17 18:39:58 +00:00
|
|
|
|
2022-05-21 06:50:39 +00:00
|
|
|
else:
|
|
|
|
# Fill up missing funding_rate candles with fallback value
|
2022-05-21 07:03:30 +00:00
|
|
|
combined = mark_rates.merge(
|
2024-05-12 15:02:37 +00:00
|
|
|
funding_rates, on="date", how="left", suffixes=["_mark", "_fund"]
|
|
|
|
)
|
|
|
|
combined["open_fund"] = combined["open_fund"].fillna(futures_funding_rate)
|
2022-05-21 07:03:30 +00:00
|
|
|
return combined
|
2022-01-17 18:39:58 +00:00
|
|
|
|
2022-01-17 18:26:03 +00:00
|
|
|
def calculate_funding_fees(
|
2022-01-08 10:01:14 +00:00
|
|
|
self,
|
2022-01-17 18:39:58 +00:00
|
|
|
df: DataFrame,
|
2022-01-08 10:01:14 +00:00
|
|
|
amount: float,
|
2022-01-17 18:59:33 +00:00
|
|
|
is_short: bool,
|
2022-01-08 10:01:14 +00:00
|
|
|
open_date: datetime,
|
2023-10-07 13:09:44 +00:00
|
|
|
close_date: datetime,
|
2024-05-12 15:02:37 +00:00
|
|
|
time_in_ratio: Optional[float] = None,
|
2022-01-08 10:01:14 +00:00
|
|
|
) -> float:
|
|
|
|
"""
|
|
|
|
calculates the sum of all funding fees that occurred for a pair during a futures trade
|
2022-01-17 18:39:58 +00:00
|
|
|
:param df: Dataframe containing combined funding and mark rates
|
|
|
|
as `open_fund` and `open_mark`.
|
2022-01-08 10:01:14 +00:00
|
|
|
:param amount: The quantity of the trade
|
2022-01-17 18:59:33 +00:00
|
|
|
:param is_short: trade direction
|
2022-01-08 10:01:14 +00:00
|
|
|
:param open_date: The date and time that the trade started
|
|
|
|
:param close_date: The date and time that the trade ended
|
2022-01-08 10:16:56 +00:00
|
|
|
:param time_in_ratio: Not used by most exchange classes
|
2022-01-08 10:01:14 +00:00
|
|
|
"""
|
|
|
|
fees: float = 0
|
2021-12-10 18:50:58 +00:00
|
|
|
|
2021-12-27 23:51:47 +00:00
|
|
|
if not df.empty:
|
2024-05-12 15:02:37 +00:00
|
|
|
df1 = df[(df["date"] >= open_date) & (df["date"] <= close_date)]
|
|
|
|
fees = sum(df1["open_fund"] * df1["open_mark"] * amount)
|
2024-02-29 06:22:40 +00:00
|
|
|
if isnan(fees):
|
|
|
|
fees = 0.0
|
2022-01-17 18:59:33 +00:00
|
|
|
# Negate fees for longs as funding_fees expects it this way based on live endpoints.
|
|
|
|
return fees if is_short else -fees
|
2021-09-26 10:11:35 +00:00
|
|
|
|
2022-01-17 18:59:33 +00:00
|
|
|
def get_funding_fees(
|
2024-05-12 15:02:37 +00:00
|
|
|
self, pair: str, amount: float, is_short: bool, open_date: datetime
|
|
|
|
) -> float:
|
2021-11-09 18:40:42 +00:00
|
|
|
"""
|
|
|
|
Fetch funding fees, either from the exchange (live) or calculates them
|
|
|
|
based on funding rate/mark price history
|
|
|
|
:param pair: The quote/base pair of the trade
|
2022-01-17 18:59:33 +00:00
|
|
|
:param is_short: trade direction
|
2021-11-09 18:40:42 +00:00
|
|
|
:param amount: Trade amount
|
|
|
|
:param open_date: Open date of the trade
|
2022-09-08 05:18:38 +00:00
|
|
|
:return: funding fee since open_date
|
2021-11-09 18:40:42 +00:00
|
|
|
"""
|
2021-11-12 01:02:07 +00:00
|
|
|
if self.trading_mode == TradingMode.FUTURES:
|
2023-10-12 04:27:29 +00:00
|
|
|
try:
|
2024-05-12 15:02:37 +00:00
|
|
|
if self._config["dry_run"]:
|
2023-10-12 04:27:29 +00:00
|
|
|
funding_fees = self._fetch_and_calculate_funding_fees(
|
2024-05-12 15:02:37 +00:00
|
|
|
pair, amount, is_short, open_date
|
|
|
|
)
|
2023-10-12 04:27:29 +00:00
|
|
|
else:
|
|
|
|
funding_fees = self._get_funding_fees_from_exchange(pair, open_date)
|
|
|
|
return funding_fees
|
|
|
|
except ExchangeError:
|
|
|
|
logger.warning(f"Could not update funding fees for {pair}.")
|
|
|
|
|
|
|
|
return 0.0
|
2021-11-08 07:50:50 +00:00
|
|
|
|
2022-08-29 04:45:00 +00:00
|
|
|
    def get_liquidation_price(
        self,
        pair: str,
        # Dry-run
        open_rate: float,  # Entry price of position
        is_short: bool,
        amount: float,  # Absolute value of position size
        stake_amount: float,
        leverage: float,
        wallet_balance: float,
        open_trades: Optional[list] = None,
    ) -> Optional[float]:
        """
        Return the (buffered) liquidation price for a futures position, or None.

        In dry-run mode - or when the exchange does not support fetchPositions -
        the price is estimated via dry_run_liquidation_price(); otherwise it is
        read from the first position returned by fetch_positions().
        A safety buffer (self.liquidation_buffer) is applied towards the entry
        side of the price, and the result is clamped to be non-negative.

        :param pair: Pair to get the liquidation price for
        :param open_rate: Entry price of the position
        :param is_short: True for short positions, False for long
        :param amount: Absolute value of the position size
        :param stake_amount: Collateral in settle currency
        :param leverage: Leverage used for this position
        :param wallet_balance: Wallet balance used for the trade
        :param open_trades: Other open trades in the same wallet (dry-run only)
        :return: Buffered liquidation price, or None (spot, or no position found)
        :raises OperationalException: for trading modes other than SPOT/FUTURES
        """
        if self.trading_mode == TradingMode.SPOT:
            # Spot positions cannot be liquidated.
            return None
        elif self.trading_mode != TradingMode.FUTURES:
            raise OperationalException(
                f"{self.name} does not support {self.margin_mode} {self.trading_mode}"
            )

        liquidation_price = None
        if self._config["dry_run"] or not self.exchange_has("fetchPositions"):
            # Estimate locally - used by dry-run and backtesting.
            liquidation_price = self.dry_run_liquidation_price(
                pair=pair,
                open_rate=open_rate,
                is_short=is_short,
                amount=amount,
                leverage=leverage,
                stake_amount=stake_amount,
                wallet_balance=wallet_balance,
                open_trades=open_trades or [],
            )
        else:
            # Live: take the liquidation price reported by the exchange.
            positions = self.fetch_positions(pair)
            if len(positions) > 0:
                pos = positions[0]
                liquidation_price = pos["liquidationPrice"]

        if liquidation_price is not None:
            # Move the price towards the entry by the configured buffer so the
            # bot exits before the actual exchange liquidation triggers.
            buffer_amount = abs(open_rate - liquidation_price) * self.liquidation_buffer
            liquidation_price_buffer = (
                liquidation_price - buffer_amount if is_short else liquidation_price + buffer_amount
            )
            return max(liquidation_price_buffer, 0.0)
        else:
            return None
|
|
|
|
|
2022-01-30 00:47:17 +00:00
|
|
|
def dry_run_liquidation_price(
|
2022-01-23 02:03:38 +00:00
|
|
|
self,
|
2022-01-30 00:47:17 +00:00
|
|
|
pair: str,
|
2024-05-12 15:02:37 +00:00
|
|
|
open_rate: float, # Entry price of position
|
2022-01-23 02:03:38 +00:00
|
|
|
is_short: bool,
|
2022-08-26 18:04:36 +00:00
|
|
|
amount: float,
|
|
|
|
stake_amount: float,
|
2022-12-31 09:41:03 +00:00
|
|
|
leverage: float,
|
2022-01-29 08:06:56 +00:00
|
|
|
wallet_balance: float, # Or margin balance
|
2024-08-31 06:20:14 +00:00
|
|
|
open_trades: list,
|
2022-01-23 02:03:38 +00:00
|
|
|
) -> Optional[float]:
|
|
|
|
"""
|
2022-08-26 18:04:36 +00:00
|
|
|
Important: Must be fetching data from cached values as this is used by backtesting!
|
2022-01-29 08:06:56 +00:00
|
|
|
PERPETUAL:
|
2023-02-10 19:58:02 +00:00
|
|
|
gate: https://www.gate.io/help/futures/futures/27724/liquidation-price-bankruptcy-price
|
2022-12-31 09:08:51 +00:00
|
|
|
> Liquidation Price = (Entry Price ± Margin / Contract Multiplier / Size) /
|
|
|
|
[ 1 ± (Maintenance Margin Ratio + Taker Rate)]
|
|
|
|
Wherein, "+" or "-" depends on whether the contract goes long or short:
|
|
|
|
"-" for long, and "+" for short.
|
|
|
|
|
2024-09-16 17:05:00 +00:00
|
|
|
okex: https://www.okx.com/support/hc/en-us/articles/
|
2022-01-29 08:06:56 +00:00
|
|
|
360053909592-VI-Introduction-to-the-isolated-mode-of-Single-Multi-currency-Portfolio-margin
|
|
|
|
|
2022-12-31 09:08:51 +00:00
|
|
|
:param pair: Pair to calculate liquidation price for
|
2022-01-29 08:06:56 +00:00
|
|
|
:param open_rate: Entry price of position
|
2022-01-23 02:03:38 +00:00
|
|
|
:param is_short: True if the trade is a short, false otherwise
|
2022-08-26 18:04:36 +00:00
|
|
|
:param amount: Absolute value of position size incl. leverage (in base currency)
|
|
|
|
:param stake_amount: Stake amount - Collateral in settle currency.
|
2022-12-31 09:41:03 +00:00
|
|
|
:param leverage: Leverage used for this position.
|
2022-01-28 11:47:19 +00:00
|
|
|
:param trading_mode: SPOT, MARGIN, FUTURES, etc.
|
2022-02-01 18:53:38 +00:00
|
|
|
:param margin_mode: Either ISOLATED or CROSS
|
|
|
|
:param wallet_balance: Amount of margin_mode in the wallet being used to trade
|
2022-01-23 02:03:38 +00:00
|
|
|
Cross-Margin Mode: crossWalletBalance
|
|
|
|
Isolated-Margin Mode: isolatedWalletBalance
|
2024-08-31 06:20:14 +00:00
|
|
|
:param open_trades: List of other open trades in the same wallet
|
2022-01-23 02:03:38 +00:00
|
|
|
"""
|
|
|
|
|
2022-01-30 00:47:17 +00:00
|
|
|
market = self.markets[pair]
|
2024-05-12 15:02:37 +00:00
|
|
|
taker_fee_rate = market["taker"]
|
2022-08-26 18:04:36 +00:00
|
|
|
mm_ratio, _ = self.get_maintenance_ratio_and_amt(pair, stake_amount)
|
2022-01-23 02:03:38 +00:00
|
|
|
|
2022-02-01 18:53:38 +00:00
|
|
|
if self.trading_mode == TradingMode.FUTURES and self.margin_mode == MarginMode.ISOLATED:
|
2024-05-12 15:02:37 +00:00
|
|
|
if market["inverse"]:
|
|
|
|
raise OperationalException("Freqtrade does not yet support inverse contracts")
|
2022-01-29 03:55:17 +00:00
|
|
|
|
2022-08-26 18:04:36 +00:00
|
|
|
value = wallet_balance / amount
|
2022-01-29 03:55:17 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
mm_ratio_taker = mm_ratio + taker_fee_rate
|
2022-01-29 08:06:56 +00:00
|
|
|
if is_short:
|
|
|
|
return (open_rate + value) / (1 + mm_ratio_taker)
|
2022-01-29 03:55:17 +00:00
|
|
|
else:
|
2022-01-29 08:06:56 +00:00
|
|
|
return (open_rate - value) / (1 - mm_ratio_taker)
|
|
|
|
else:
|
|
|
|
raise OperationalException(
|
2024-05-12 15:02:37 +00:00
|
|
|
"Freqtrade only supports isolated futures for leverage trading"
|
|
|
|
)
|
2022-01-23 02:03:38 +00:00
|
|
|
|
2022-02-09 09:38:25 +00:00
|
|
|
def get_maintenance_ratio_and_amt(
|
|
|
|
self,
|
|
|
|
pair: str,
|
2024-07-16 16:20:57 +00:00
|
|
|
notional_value: float,
|
2024-10-04 04:46:45 +00:00
|
|
|
) -> tuple[float, Optional[float]]:
|
2022-02-06 01:32:46 +00:00
|
|
|
"""
|
2022-02-28 19:05:14 +00:00
|
|
|
Important: Must be fetching data from cached values as this is used by backtesting!
|
2022-02-06 01:32:46 +00:00
|
|
|
:param pair: Market symbol
|
2024-07-16 16:20:57 +00:00
|
|
|
:param notional_value: The total trade amount in quote currency
|
2022-02-06 01:32:46 +00:00
|
|
|
:return: (maintenance margin ratio, maintenance amount)
|
|
|
|
"""
|
2022-02-09 10:57:43 +00:00
|
|
|
|
2024-05-12 15:02:37 +00:00
|
|
|
if (
|
|
|
|
self._config.get("runmode") in OPTIMIZE_MODES
|
|
|
|
or self.exchange_has("fetchLeverageTiers")
|
|
|
|
or self.exchange_has("fetchMarketLeverageTiers")
|
|
|
|
):
|
2022-02-11 12:50:23 +00:00
|
|
|
if pair not in self._leverage_tiers:
|
2022-02-10 12:10:15 +00:00
|
|
|
raise InvalidOrderException(
|
|
|
|
f"Maintenance margin rate for {pair} is unavailable for {self.name}"
|
|
|
|
)
|
|
|
|
|
2022-02-11 12:50:23 +00:00
|
|
|
pair_tiers = self._leverage_tiers[pair]
|
|
|
|
|
2022-02-07 08:01:00 +00:00
|
|
|
for tier in reversed(pair_tiers):
|
2024-07-16 16:20:57 +00:00
|
|
|
if notional_value >= tier["minNotional"]:
|
2024-05-12 15:02:37 +00:00
|
|
|
return (tier["maintenanceMarginRate"], tier["maintAmt"])
|
2022-02-10 12:10:15 +00:00
|
|
|
|
2023-05-02 19:36:36 +00:00
|
|
|
raise ExchangeError("nominal value can not be lower than 0")
|
2022-02-06 04:36:28 +00:00
|
|
|
# The lowest notional_floor for any pair in fetch_leverage_tiers is always 0 because it
|
2022-02-07 08:01:00 +00:00
|
|
|
# describes the min amt for a tier, and the lowest tier will always go down to 0
|
2022-02-06 01:32:46 +00:00
|
|
|
else:
|
2023-05-02 19:36:36 +00:00
|
|
|
raise ExchangeError(f"Cannot get maintenance ratio using {self.name}")
|