Merge branch 'develop' into feature/fetch-public-trades

Matthias 2024-07-20 08:15:34 +02:00
commit a8f707eee2
29 changed files with 1133 additions and 614 deletions

View File

@@ -16,7 +16,7 @@ repos:
additional_dependencies:
- types-cachetools==5.3.0.7
- types-filelock==3.2.7
- types-requests==2.32.0.20240622
- types-requests==2.32.0.20240712
- types-tabulate==0.9.0.20240106
- types-python-dateutil==2.9.0.20240316
- SQLAlchemy==2.0.31
@@ -31,7 +31,7 @@ repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.5.1'
rev: 'v0.5.2'
hooks:
- id: ruff

View File

@@ -1,5 +1,4 @@
---
version: '3'
services:
freqtrade:
image: freqtradeorg/freqtrade:stable

View File

@@ -1,5 +1,4 @@
---
version: '3'
services:
freqtrade:
image: freqtradeorg/freqtrade:stable_freqaitorch

View File

@@ -1,5 +1,4 @@
---
version: '3'
services:
ft_jupyterlab:
build:

View File

@@ -1,6 +1,6 @@
markdown==3.6
mkdocs==1.6.0
mkdocs-material==9.5.28
mkdocs-material==9.5.29
mdx_truly_sane_lists==1.3
pymdown-extensions==10.8.1
jinja2==3.1.4

View File

@@ -14,9 +14,13 @@ def sanitize_config(config: Config, *, show_sensitive: bool = False) -> Config:
return config
keys_to_remove = [
"exchange.key",
"exchange.apiKey",
"exchange.secret",
"exchange.password",
"exchange.uid",
"exchange.accountId",
"exchange.walletAddress",
"exchange.privateKey",
"telegram.token",
"telegram.chat_id",
"discord.webhook_url",

View File

@@ -26,8 +26,7 @@ from freqtrade.enums import CandleType, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
from freqtrade.util import dt_ts, format_ms_time
from freqtrade.util.datetime_helpers import dt_now
from freqtrade.util import dt_now, dt_ts, format_ms_time, get_progress_tracker
from freqtrade.util.migrations import migrate_data
@@ -155,11 +154,9 @@ def refresh_data(
:param candle_type: Any of the enum CandleType (must match trading mode!)
"""
data_handler = get_datahandler(datadir, data_format)
for idx, pair in enumerate(pairs):
process = f"{idx}/{len(pairs)}"
for pair in pairs:
_download_pair_history(
pair=pair,
process=process,
timeframe=timeframe,
datadir=datadir,
timerange=timerange,
@@ -223,7 +220,6 @@ def _download_pair_history(
datadir: Path,
exchange: Exchange,
timeframe: str = "5m",
process: str = "",
new_pairs_days: int = 30,
data_handler: Optional[IDataHandler] = None,
timerange: Optional[TimeRange] = None,
@@ -261,7 +257,7 @@
)
logger.info(
f'({process}) - Download history data for "{pair}", {timeframe}, '
f'Download history data for "{pair}", {timeframe}, '
f"{candle_type} and store in {datadir}. "
f'From {format_ms_time(since_ms) if since_ms else "start"} to '
f'{format_ms_time(until_ms) if until_ms else "now"}'
@@ -345,53 +341,65 @@ refresh_backtest_ohlcv_data(
pairs_not_available = []
data_handler = get_datahandler(datadir, data_format)
candle_type = CandleType.get_default(trading_mode)
process = ""
for idx, pair in enumerate(pairs, start=1):
if pair not in exchange.markets:
pairs_not_available.append(pair)
logger.info(f"Skipping pair {pair}...")
continue
for timeframe in timeframes:
logger.debug(f"Downloading pair {pair}, {candle_type}, interval {timeframe}.")
process = f"{idx}/{len(pairs)}"
_download_pair_history(
pair=pair,
process=process,
datadir=datadir,
exchange=exchange,
timerange=timerange,
data_handler=data_handler,
timeframe=str(timeframe),
new_pairs_days=new_pairs_days,
candle_type=candle_type,
erase=erase,
prepend=prepend,
)
if trading_mode == "futures":
# Predefined candletype (and timeframe) depending on exchange
# Downloads what is necessary to backtest based on futures data.
tf_mark = exchange.get_option("mark_ohlcv_timeframe")
tf_funding_rate = exchange.get_option("funding_fee_timeframe")
with get_progress_tracker() as progress:
tf_length = len(timeframes) if trading_mode != "futures" else len(timeframes) + 2
timeframe_task = progress.add_task("Timeframe", total=tf_length)
pair_task = progress.add_task("Downloading data...", total=len(pairs))
fr_candle_type = CandleType.from_string(exchange.get_option("mark_ohlcv_price"))
# All exchanges need FundingRate for futures trading.
# The timeframe is aligned to the mark-price timeframe.
combs = ((CandleType.FUNDING_RATE, tf_funding_rate), (fr_candle_type, tf_mark))
for candle_type_f, tf in combs:
logger.debug(f"Downloading pair {pair}, {candle_type_f}, interval {tf}.")
for pair in pairs:
progress.update(pair_task, description=f"Downloading {pair}")
progress.update(timeframe_task, completed=0)
if pair not in exchange.markets:
pairs_not_available.append(pair)
logger.info(f"Skipping pair {pair}...")
continue
for timeframe in timeframes:
progress.update(timeframe_task, description=f"Timeframe {timeframe}")
logger.debug(f"Downloading pair {pair}, {candle_type}, interval {timeframe}.")
_download_pair_history(
pair=pair,
process=process,
datadir=datadir,
exchange=exchange,
timerange=timerange,
data_handler=data_handler,
timeframe=str(tf),
timeframe=str(timeframe),
new_pairs_days=new_pairs_days,
candle_type=candle_type_f,
candle_type=candle_type,
erase=erase,
prepend=prepend,
)
progress.update(timeframe_task, advance=1)
if trading_mode == "futures":
# Predefined candletype (and timeframe) depending on exchange
# Downloads what is necessary to backtest based on futures data.
tf_mark = exchange.get_option("mark_ohlcv_timeframe")
tf_funding_rate = exchange.get_option("funding_fee_timeframe")
fr_candle_type = CandleType.from_string(exchange.get_option("mark_ohlcv_price"))
# All exchanges need FundingRate for futures trading.
# The timeframe is aligned to the mark-price timeframe.
combs = ((CandleType.FUNDING_RATE, tf_funding_rate), (fr_candle_type, tf_mark))
for candle_type_f, tf in combs:
logger.debug(f"Downloading pair {pair}, {candle_type_f}, interval {tf}.")
_download_pair_history(
pair=pair,
datadir=datadir,
exchange=exchange,
timerange=timerange,
data_handler=data_handler,
timeframe=str(tf),
new_pairs_days=new_pairs_days,
candle_type=candle_type_f,
erase=erase,
prepend=prepend,
)
progress.update(
timeframe_task, advance=1, description=f"Timeframe {candle_type_f}, {tf}"
)
progress.update(pair_task, advance=1)
progress.update(timeframe_task, description="Timeframe")
return pairs_not_available
@@ -501,25 +509,30 @@ def refresh_backtest_trades_data(
"""
pairs_not_available = []
data_handler = get_datahandler(datadir, data_format=data_format)
for pair in pairs:
if pair not in exchange.markets:
pairs_not_available.append(pair)
logger.info(f"Skipping pair {pair}...")
continue
with get_progress_tracker() as progress:
pair_task = progress.add_task("Downloading data...", total=len(pairs))
for pair in pairs:
progress.update(pair_task, description=f"Downloading trades [{pair}]")
if pair not in exchange.markets:
pairs_not_available.append(pair)
logger.info(f"Skipping pair {pair}...")
continue
if erase:
if data_handler.trades_purge(pair, trading_mode):
logger.info(f"Deleting existing data for pair {pair}.")
if erase:
if data_handler.trades_purge(pair, trading_mode):
logger.info(f"Deleting existing data for pair {pair}.")
logger.info(f"Downloading trades for pair {pair}.")
_download_trades_history(
exchange=exchange,
pair=pair,
new_pairs_days=new_pairs_days,
timerange=timerange,
data_handler=data_handler,
trading_mode=trading_mode,
)
progress.update(pair_task, advance=1)
logger.info(f"Downloading trades for pair {pair}.")
_download_trades_history(
exchange=exchange,
pair=pair,
new_pairs_days=new_pairs_days,
timerange=timerange,
data_handler=data_handler,
trading_mode=trading_mode,
)
return pairs_not_available
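
The two hunks above replace the hand-rolled "idx/len(pairs)" progress strings with a rich-based tracker: one task advances per pair, a nested task advances per timeframe, and the futures-only mark/funding-rate downloads simply advance the timeframe task further. A minimal sketch of that pattern, assuming the get_progress_tracker helper added later in this commit; the pair and timeframe values are placeholders and the sleep stands in for _download_pair_history:

import time

from freqtrade.util import get_progress_tracker

pairs = ["BTC/USDT", "ETH/USDT"]      # placeholder values
timeframes = ["5m", "1h"]             # placeholder values

with get_progress_tracker() as progress:
    timeframe_task = progress.add_task("Timeframe", total=len(timeframes))
    pair_task = progress.add_task("Downloading data...", total=len(pairs))
    for pair in pairs:
        progress.update(pair_task, description=f"Downloading {pair}")
        progress.update(timeframe_task, completed=0)
        for timeframe in timeframes:
            progress.update(timeframe_task, description=f"Timeframe {timeframe}")
            time.sleep(0.2)           # stand-in for _download_pair_history(...)
            progress.update(timeframe_task, advance=1)
        progress.update(pair_task, advance=1)
    progress.update(timeframe_task, description="Timeframe")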

View File

@@ -11,6 +11,7 @@ from freqtrade.exchange.bitpanda import Bitpanda
from freqtrade.exchange.bitvavo import Bitvavo
from freqtrade.exchange.bybit import Bybit
from freqtrade.exchange.coinbasepro import Coinbasepro
from freqtrade.exchange.cryptocom import Cryptocom
from freqtrade.exchange.exchange_utils import (
ROUND_DOWN,
ROUND_UP,

File diff suppressed because it is too large

View File

@@ -0,0 +1,19 @@
"""Crypto.com exchange subclass"""

import logging
from typing import Dict

from freqtrade.exchange import Exchange


logger = logging.getLogger(__name__)


class Cryptocom(Exchange):
    """Crypto.com exchange class.
    Contains adjustments needed for Freqtrade to work with this exchange.
    """

    _ft_has: Dict = {
        "ohlcv_candle_limit": 300,
    }

View File

@@ -351,10 +351,14 @@ class Exchange:
raise OperationalException(f"Exchange {name} is not supported by ccxt")
ex_config = {
"apiKey": exchange_config.get("key"),
"apiKey": exchange_config.get("apiKey", exchange_config.get("key")),
"secret": exchange_config.get("secret"),
"password": exchange_config.get("password"),
"uid": exchange_config.get("uid", ""),
"accountId": exchange_config.get("accountId", ""),
# DEX attributes:
"walletAddress": exchange_config.get("walletAddress"),
"privateKey": exchange_config.get("privateKey"),
}
if ccxt_kwargs:
logger.info("Applying additional ccxt config: %s", ccxt_kwargs)
@@ -3604,13 +3608,12 @@ class Exchange:
def get_maintenance_ratio_and_amt(
self,
pair: str,
nominal_value: float,
notional_value: float,
) -> Tuple[float, Optional[float]]:
"""
Important: Must be fetching data from cached values as this is used by backtesting!
:param pair: Market symbol
:param nominal_value: The total trade amount in quote currency including leverage
maintenance amount only on Binance
:param notional_value: The total trade amount in quote currency
:return: (maintenance margin ratio, maintenance amount)
"""
@@ -3627,7 +3630,7 @@
pair_tiers = self._leverage_tiers[pair]
for tier in reversed(pair_tiers):
if nominal_value >= tier["minNotional"]:
if notional_value >= tier["minNotional"]:
return (tier["maintenanceMarginRate"], tier["maintAmt"])
raise ExchangeError("nominal value can not be lower than 0")
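
The rename from nominal_value to notional_value above is cosmetic but clarifies the lookup: the cached leverage tiers are walked from the highest minNotional downwards and the first tier the position's notional reaches supplies the maintenance margin rate and amount. A self-contained sketch of that selection logic with made-up tier values (real tiers come from the exchange's cached leverage tiers, not from this snippet):

from typing import List, Optional, Tuple

# Made-up tiers for illustration only.
pair_tiers: List[dict] = [
    {"minNotional": 0, "maintenanceMarginRate": 0.010, "maintAmt": 0.0},
    {"minNotional": 10_000, "maintenanceMarginRate": 0.025, "maintAmt": 150.0},
    {"minNotional": 100_000, "maintenanceMarginRate": 0.050, "maintAmt": 2_650.0},
]


def maintenance_ratio_and_amt(notional_value: float) -> Tuple[float, Optional[float]]:
    # Same selection idea as the hunk above: walk tiers from the highest
    # minNotional downwards and take the first one the notional value reaches.
    for tier in reversed(pair_tiers):
        if notional_value >= tier["minNotional"]:
            return tier["maintenanceMarginRate"], tier["maintAmt"]
    raise ValueError("notional value cannot be negative")


print(maintenance_ratio_and_amt(25_000))   # (0.025, 150.0)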

View File

@@ -0,0 +1,24 @@
"""Hyperliquid exchange subclass"""

import logging
from typing import Dict

from freqtrade.exchange import Exchange


logger = logging.getLogger(__name__)


class Hyperliquid(Exchange):
    """Hyperliquid exchange class.
    Contains adjustments needed for Freqtrade to work with this exchange.
    """

    _ft_has: Dict = {
        # Only the most recent 5000 candles are available according to the
        # exchange's API documentation.
        "ohlcv_has_history": False,
        "ohlcv_candle_limit": 5000,
        "trades_has_history": False,  # Trades endpoint doesn't seem available.
        "exchange_has_overrides": {"fetchTrades": False},
    }
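
The _ft_has entries above are class-level capability overrides; freqtrade merges them over the base Exchange defaults and reads them through get_option(), as the history hunks earlier in this diff do for mark_ohlcv_timeframe. A simplified sketch of that merge-and-lookup idea — the default values and the plain dict merge below are assumptions standing in for the real mechanism:

from typing import Any, Dict

# Simplified stand-in for the base defaults; the real defaults live on the
# Exchange base class and are merged with the subclass _ft_has at init time.
_ft_has_default: Dict[str, Any] = {
    "ohlcv_candle_limit": 500,        # made-up default
    "ohlcv_has_history": True,
    "trades_has_history": True,
}

_ft_has_hyperliquid: Dict[str, Any] = {
    "ohlcv_has_history": False,
    "ohlcv_candle_limit": 5000,
    "trades_has_history": False,
    "exchange_has_overrides": {"fetchTrades": False},
}

merged = {**_ft_has_default, **_ft_has_hyperliquid}


def get_option(param: str, default: Any = None) -> Any:
    # Mirrors the lookup used elsewhere in the diff (exchange.get_option).
    return merged.get(param, default)


print(get_option("ohlcv_candle_limit"))   # 5000
print(get_option("trades_has_history"))   # False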

View File

@@ -19,14 +19,6 @@ from joblib.externals import cloudpickle
from pandas import DataFrame
from rich.align import Align
from rich.console import Console
from rich.progress import (
BarColumn,
MofNCompleteColumn,
TaskProgressColumn,
TextColumn,
TimeElapsedColumn,
TimeRemainingColumn,
)
from freqtrade.constants import DATETIME_PRINT_FORMAT, FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
from freqtrade.data.converter import trim_dataframes
@@ -48,7 +40,7 @@ from freqtrade.optimize.hyperopt_tools import (
)
from freqtrade.optimize.optimize_reports import generate_strategy_stats
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
from freqtrade.util import CustomProgress
from freqtrade.util import get_progress_tracker
# Suppress scikit-learn FutureWarnings from skopt
@@ -634,16 +626,7 @@ class Hyperopt:
)
# Define progressbar
with CustomProgress(
TextColumn("[progress.description]{task.description}"),
BarColumn(bar_width=None),
MofNCompleteColumn(),
TaskProgressColumn(),
"",
TimeElapsedColumn(),
"",
TimeRemainingColumn(),
expand=True,
with get_progress_tracker(
console=console,
cust_objs=[Align.center(self._hyper_out.table)],
) as pbar:

View File

@@ -1,4 +1,3 @@
import contextlib
import threading
import time
@@ -53,7 +52,7 @@ class UvicornServer(uvicorn.Server):
loop = asyncio.new_event_loop()
loop.run_until_complete(self.serve(sockets=sockets))
@contextlib.contextmanager
def run_in_thread(self):
self.thread = threading.Thread(target=self.run, name="FTUvicorn")
self.thread.start()

View File

@@ -15,6 +15,7 @@ from freqtrade.util.formatters import decimals_per_coin, fmt_coin, round_value
from freqtrade.util.ft_precise import FtPrecise
from freqtrade.util.measure_time import MeasureTime
from freqtrade.util.periodic_cache import PeriodicCache
from freqtrade.util.progress_tracker import get_progress_tracker # noqa F401
from freqtrade.util.rich_progress import CustomProgress
from freqtrade.util.rich_tables import print_df_rich_table, print_rich_table
from freqtrade.util.template_renderer import render_template, render_template_with_fallback # noqa

View File

@@ -0,0 +1,28 @@
from rich.progress import (
    BarColumn,
    MofNCompleteColumn,
    TaskProgressColumn,
    TextColumn,
    TimeElapsedColumn,
    TimeRemainingColumn,
)

from freqtrade.util.rich_progress import CustomProgress


def get_progress_tracker(**kwargs):
    """
    Get progress Bar with custom columns.
    """
    return CustomProgress(
        TextColumn("[progress.description]{task.description}"),
        BarColumn(bar_width=None),
        MofNCompleteColumn(),
        TaskProgressColumn(),
        "",
        TimeElapsedColumn(),
        "",
        TimeRemainingColumn(),
        expand=True,
        **kwargs,
    )
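
Because the helper forwards **kwargs to CustomProgress, callers can attach their own console and extra renderables, which is how the hyperopt hunk above passes console= and cust_objs=. A hedged usage sketch, with a made-up Rich table standing in for the hyperopt results table:

from rich.console import Console
from rich.table import Table

from freqtrade.util import get_progress_tracker

console = Console()
results = Table(title="Example results")   # stand-in for the hyperopt output table
results.add_column("epoch")
results.add_column("loss")

# Extra keyword arguments are passed straight through to CustomProgress.
with get_progress_tracker(console=console, cust_objs=[results]) as pbar:
    task = pbar.add_task("Epochs", total=10)
    for epoch in range(10):
        results.add_row(str(epoch), f"{1.0 / (epoch + 1):.3f}")
        pbar.update(task, advance=1)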

View File

@@ -5,7 +5,7 @@ from rich.progress import Progress
class CustomProgress(Progress):
def __init__(self, *args, cust_objs, **kwargs) -> None:
def __init__(self, *args, cust_objs=[], **kwargs) -> None:
self._cust_objs = cust_objs
super().__init__(*args, **kwargs)
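
The hunk above only changes the constructor default, so cust_objs can now be omitted (as the new data-download call sites do). How the stored objects are surfaced is not part of this hunk; the sketch below shows one way a Progress subclass can prepend such renderables and is an illustration, not freqtrade's actual override. Using cust_objs=None with a fallback also avoids a shared mutable default, though that is harmless here since the list is only read.

from rich.console import Group
from rich.progress import Progress


class ProgressWithExtras(Progress):
    """Hypothetical variant for illustration only."""

    def __init__(self, *args, cust_objs=None, **kwargs) -> None:
        self._cust_objs = cust_objs or []
        super().__init__(*args, **kwargs)

    def get_renderable(self):
        # Render the custom objects above Rich's own progress output.
        return Group(*self._cust_objs, *self.get_renderables())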

View File

@@ -7,7 +7,7 @@
-r docs/requirements-docs.txt
coveralls==4.0.1
ruff==0.5.1
ruff==0.5.2
mypy==1.10.1
pre-commit==3.7.1
pytest==8.2.2
@@ -27,6 +27,6 @@ nbconvert==7.16.4
# mypy types
types-cachetools==5.3.0.7
types-filelock==3.2.7
types-requests==2.32.0.20240622
types-requests==2.32.0.20240712
types-tabulate==0.9.0.20240106
types-python-dateutil==2.9.0.20240316

View File

@@ -4,18 +4,18 @@ bottleneck==1.4.0
numexpr==2.10.1
pandas-ta==0.3.14b
ccxt==4.3.58
ccxt==4.3.61
cryptography==42.0.8
aiohttp==3.9.5
SQLAlchemy==2.0.31
python-telegram-bot==21.3
python-telegram-bot==21.4
# can't be hard-pinned due to telegram-bot pinning httpx with ~
httpx>=0.24.1
humanize==4.9.0
humanize==4.10.0
cachetools==5.3.3
requests==2.32.3
urllib3==2.2.2
jsonschema==4.22.0
jsonschema==4.23.0
TA-Lib==0.4.32
technical==1.4.3
tabulate==0.9.0
@@ -38,7 +38,7 @@ orjson==3.10.6
sdnotify==0.3.2
# API Server
fastapi==0.111.0
fastapi==0.111.1
pydantic==2.8.2
uvicorn==0.30.1
pyjwt==2.8.0

View File

@@ -151,9 +151,7 @@ def test_load_data_with_new_pair_1min(
)
load_pair_history(datadir=tmp_path, timeframe="1m", pair="MEME/BTC", candle_type=candle_type)
assert file.is_file()
assert log_has_re(
r'\(0/1\) - Download history data for "MEME/BTC", 1m, ' r"spot and store in .*", caplog
)
assert log_has_re(r'Download history data for "MEME/BTC", 1m, ' r"spot and store in .*", caplog)
def test_testdata_path(testdatadir) -> None:

View File

@@ -600,7 +600,7 @@ async def test__async_get_historic_ohlcv_binance(default_conf, mocker, caplog, c
@pytest.mark.parametrize(
"pair,nominal_value,mm_ratio,amt",
"pair,notional_value,mm_ratio,amt",
[
("XRP/USDT:USDT", 0.0, 0.025, 0),
("BNB/USDT:USDT", 100.0, 0.0065, 0),
@@ -615,12 +615,12 @@ def test_get_maintenance_ratio_and_amt_binance(
mocker,
leverage_tiers,
pair,
nominal_value,
notional_value,
mm_ratio,
amt,
):
mocker.patch(f"{EXMS}.exchange_has", return_value=True)
exchange = get_patched_exchange(mocker, default_conf, exchange="binance")
exchange._leverage_tiers = leverage_tiers
(result_ratio, result_amt) = exchange.get_maintenance_ratio_and_amt(pair, nominal_value)
(result_ratio, result_amt) = exchange.get_maintenance_ratio_and_amt(pair, notional_value)
assert (round(result_ratio, 8), round(result_amt, 8)) == (mm_ratio, amt)