Mirror of https://github.com/freqtrade/freqtrade.git (synced 2024-11-14 20:23:57 +00:00)

Merge pull request #10898 from xzmeng/ruff310
chore: bump ruff target-version to 3.10

Commit b5592b88fa
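The change itself is mechanical: raising ruff's target-version to Python 3.10 lets its upgrade rules rewrite typing.Optional and typing.Union annotations into PEP 604 "|" unions, which is what every hunk below does. A minimal illustrative sketch of the pattern (the function names here are hypothetical, not taken from this commit):

from typing import Optional, Union


# Pre-3.10 spelling: Optional[...] and Union[...] imported from typing.
def first_price(prices: Union[list[float], None], default: Optional[float] = None) -> Optional[float]:
    return prices[0] if prices else default


# Python 3.10+ spelling (PEP 604): built-in X | Y unions, no typing import needed.
def first_price_modern(prices: list[float] | None, default: float | None = None) -> float | None:
    return prices[0] if prices else default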
@@ -5,7 +5,7 @@ This module contains the argument manager class
-from typing import Any, Optional, Union
+from typing import Any
@@ -275,9 +275,9 @@ class Arguments:
-def __init__(self, args: Optional[list[str]]) -> None:
+def __init__(self, args: list[str] | None) -> None:
-self._parsed_arg: Optional[Namespace] = None
+self._parsed_arg: Namespace | None = None
@@ -319,9 +319,7 @@ class Arguments:
-def _build_args(
-self, optionlist: list[str], parser: Union[ArgumentParser, _ArgumentGroup]
-) -> None:
+def _build_args(self, optionlist: list[str], parser: ArgumentParser | _ArgumentGroup) -> None:
@@ -1,6 +1,5 @@
-from typing import Optional
@@ -24,7 +23,7 @@ def clean_ui_subdir(directory: Path):
-def read_ui_version(dest_folder: Path) -> Optional[str]:
+def read_ui_version(dest_folder: Path) -> str | None:
@@ -52,7 +51,7 @@ def download_and_install_ui(dest_folder: Path, dl_url: str, version: str):
-def get_ui_download_url(version: Optional[str] = None) -> tuple[str, str]:
+def get_ui_download_url(version: str | None = None) -> tuple[str, str]:
@@ -1,7 +1,7 @@
-from typing import Any, Union
+from typing import Any
@@ -87,7 +87,7 @@ def _print_objs_tabular(objs: list, print_colorized: bool) -> None:
-objs_to_print: list[dict[str, Union[Text, str]]] = [
+objs_to_print: list[dict[str, Text | str]] = [
@@ -5,9 +5,10 @@ This module contains the configuration class
+from collections.abc import Callable
-from typing import Any, Callable, Optional
+from typing import Any
@@ -37,9 +38,9 @@ class Configuration:
-def __init__(self, args: dict[str, Any], runmode: Optional[RunMode] = None) -> None:
+def __init__(self, args: dict[str, Any], runmode: RunMode | None = None) -> None:
-self.config: Optional[Config] = None
+self.config: Config | None = None
@@ -451,8 +452,8 @@ class Configuration:
-logfun: Optional[Callable] = None,
-deprecated_msg: Optional[str] = None,
+logfun: Callable | None = None,
+deprecated_msg: str | None = None,
@@ -3,7 +3,6 @@ Functions to handle deprecated settings
-from typing import Optional
@@ -14,9 +13,9 @@ logger = logging.getLogger(__name__)
-section_old: Optional[str],
+section_old: str | None,
-section_new: Optional[str],
+section_new: str | None,
@@ -34,7 +33,7 @@ def check_conflicting_settings(
-config: Config, section1: str, name1: str, section2: Optional[str], name2: str
+config: Config, section1: str, name1: str, section2: str | None, name2: str
@@ -54,9 +53,9 @@ def process_removed_setting(
-section_old: Optional[str],
+section_old: str | None,
-section_new: Optional[str],
+section_new: str | None,
@@ -1,7 +1,6 @@
-from typing import Optional
@@ -18,7 +17,7 @@ from freqtrade.exceptions import OperationalException
-def create_datadir(config: Config, datadir: Optional[str] = None) -> Path:
+def create_datadir(config: Config, datadir: str | None = None) -> Path:
@@ -7,7 +7,7 @@ import re
-from typing import Any, Optional
+from typing import Any
@@ -78,7 +78,7 @@ def load_config_file(path: str) -> dict[str, Any]:
-files: list[str], base_path: Optional[Path] = None, level: int = 0
+files: list[str], base_path: Path | None = None, level: int = 0
@@ -5,7 +5,6 @@ This module contains the argument manager class
-from typing import Optional
@@ -25,24 +24,24 @@ class TimeRange:
-starttype: Optional[str] = None,
-stoptype: Optional[str] = None,
+starttype: str | None = None,
+stoptype: str | None = None,
-self.starttype: Optional[str] = starttype
-self.stoptype: Optional[str] = stoptype
+self.starttype: str | None = starttype
+self.stoptype: str | None = stoptype
-def startdt(self) -> Optional[datetime]:
+def startdt(self) -> datetime | None:
-def stopdt(self) -> Optional[datetime]:
+def stopdt(self) -> datetime | None:
@@ -120,7 +119,7 @@ class TimeRange:
-def parse_timerange(cls, text: Optional[str]) -> Self:
+def parse_timerange(cls, text: str | None) -> Self:
@@ -4,7 +4,7 @@ bot constants
-from typing import Any, Literal, Optional
+from typing import Any, Literal
@@ -193,7 +193,7 @@ ListPairsWithTimeframes = list[PairWithTimeframe]
-TickWithTimeframe = tuple[str, str, CandleType, Optional[int], Optional[int]]
+TickWithTimeframe = tuple[str, str, CandleType, int | None, int | None]
@@ -6,7 +6,7 @@ import logging
-from typing import Any, Literal, Optional, Union
+from typing import Any, Literal
@@ -53,7 +53,7 @@ BT_DATA_COLUMNS = [
-def get_latest_optimize_filename(directory: Union[Path, str], variant: str) -> str:
+def get_latest_optimize_filename(directory: Path | str, variant: str) -> str:
@@ -84,7 +84,7 @@ def get_latest_optimize_filename(directory: Union[Path, str], variant: str) -> str:
-def get_latest_backtest_filename(directory: Union[Path, str]) -> str:
+def get_latest_backtest_filename(directory: Path | str) -> str:
@@ -97,7 +97,7 @@ def get_latest_backtest_filename(directory: Union[Path, str]) -> str:
-def get_latest_hyperopt_filename(directory: Union[Path, str]) -> str:
+def get_latest_hyperopt_filename(directory: Path | str) -> str:
@@ -114,9 +114,7 @@ def get_latest_hyperopt_filename(directory: Union[Path, str]) -> str:
-def get_latest_hyperopt_file(
-directory: Union[Path, str], predef_filename: Optional[str] = None
-) -> Path:
+def get_latest_hyperopt_file(directory: Path | str, predef_filename: str | None = None) -> Path:
@@ -137,7 +135,7 @@ def get_latest_hyperopt_file(
-def load_backtest_metadata(filename: Union[Path, str]) -> dict[str, Any]:
+def load_backtest_metadata(filename: Path | str) -> dict[str, Any]:
@@ -154,7 +152,7 @@ def load_backtest_metadata(filename: Union[Path, str]) -> dict[str, Any]:
-def load_backtest_stats(filename: Union[Path, str]) -> BacktestResultType:
+def load_backtest_stats(filename: Path | str) -> BacktestResultType:
@@ -276,7 +274,7 @@ def get_backtest_market_change(filename: Path, include_ts: bool = True) -> pd.DataFrame:
-dirname: Union[Path, str], run_ids: dict[str, str], min_backtest_date: Optional[datetime] = None
+dirname: Path | str, run_ids: dict[str, str], min_backtest_date: datetime | None = None
@@ -345,7 +343,7 @@ def _load_backtest_data_df_compatibility(df: pd.DataFrame) -> pd.DataFrame:
-def load_backtest_data(filename: Union[Path, str], strategy: Optional[str] = None) -> pd.DataFrame:
+def load_backtest_data(filename: Path | str, strategy: str | None = None) -> pd.DataFrame:
@@ -439,7 +437,7 @@ def evaluate_result_multi(
-def trade_list_to_dataframe(trades: Union[list[Trade], list[LocalTrade]]) -> pd.DataFrame:
+def trade_list_to_dataframe(trades: list[Trade] | list[LocalTrade]) -> pd.DataFrame:
@@ -453,7 +451,7 @@ def trade_list_to_dataframe(trades: Union[list[Trade], list[LocalTrade]]) -> pd.DataFrame:
-def load_trades_from_db(db_url: str, strategy: Optional[str] = None) -> pd.DataFrame:
+def load_trades_from_db(db_url: str, strategy: str | None = None) -> pd.DataFrame:
@@ -476,7 +474,7 @@ def load_trades(
-strategy: Optional[str] = None,
+strategy: str | None = None,
@@ -8,7 +8,7 @@ Common Interface for bot and strategy to access data.
-from typing import Any, Optional
+from typing import Any
@@ -40,17 +40,17 @@ class DataProvider:
-exchange: Optional[Exchange],
+exchange: Exchange | None,
-rpc: Optional[RPCManager] = None,
+rpc: RPCManager | None = None,
-self.__slice_index: Optional[int] = None
-self.__slice_date: Optional[datetime] = None
+self.__slice_index: int | None = None
+self.__slice_date: datetime | None = None
@@ -255,8 +255,8 @@ class DataProvider:
-timeframe: Optional[str] = None,
-candle_type: Optional[CandleType] = None,
+timeframe: str | None = None,
+candle_type: CandleType | None = None,
@@ -349,7 +349,7 @@ class DataProvider:
-self, pair: str, timeframe: Optional[str] = None, candle_type: str = ""
+self, pair: str, timeframe: str | None = None, candle_type: str = ""
@@ -437,7 +437,7 @@ class DataProvider:
-helping_pairs: Optional[ListPairsWithTimeframes] = None,
+helping_pairs: ListPairsWithTimeframes | None = None,
@@ -471,7 +471,7 @@ class DataProvider:
-self, pair: str, timeframe: Optional[str] = None, copy: bool = True, candle_type: str = ""
+self, pair: str, timeframe: str | None = None, copy: bool = True, candle_type: str = ""
@@ -497,7 +497,7 @@ class DataProvider:
-self, pair: str, timeframe: Optional[str] = None, copy: bool = True, candle_type: str = ""
+self, pair: str, timeframe: str | None = None, copy: bool = True, candle_type: str = ""
@@ -529,7 +529,7 @@ class DataProvider:
-def market(self, pair: str) -> Optional[dict[str, Any]]:
+def market(self, pair: str) -> dict[str, Any] | None:
@@ -1,5 +1,4 @@
-from typing import Optional
@@ -37,7 +36,7 @@ class FeatherDataHandler(IDataHandler):
-self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
+self, pair: str, timeframe: str, timerange: TimeRange | None, candle_type: CandleType
@@ -108,7 +107,7 @@ class FeatherDataHandler(IDataHandler):
-self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
+self, pair: str, trading_mode: TradingMode, timerange: TimeRange | None = None
@@ -1,5 +1,4 @@
-from typing import Optional
@@ -45,7 +44,7 @@ class HDF5DataHandler(IDataHandler):
-self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
+self, pair: str, timeframe: str, timerange: TimeRange | None, candle_type: CandleType
@@ -134,7 +133,7 @@ class HDF5DataHandler(IDataHandler):
-self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
+self, pair: str, trading_mode: TradingMode, timerange: TimeRange | None = None
@@ -10,7 +10,6 @@ from abc import ABC, abstractmethod
-from typing import Optional
@@ -126,7 +125,7 @@ class IDataHandler(ABC):
-self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
+self, pair: str, timeframe: str, timerange: TimeRange | None, candle_type: CandleType
@@ -247,7 +246,7 @@ class IDataHandler(ABC):
-self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
+self, pair: str, trading_mode: TradingMode, timerange: TimeRange | None = None
@@ -282,7 +281,7 @@ class IDataHandler(ABC):
-self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
+self, pair: str, trading_mode: TradingMode, timerange: TimeRange | None = None
@@ -370,7 +369,7 @@ class IDataHandler(ABC):
-timerange: Optional[TimeRange] = None,
+timerange: TimeRange | None = None,
@@ -566,7 +565,7 @@ def get_datahandlerclass(datatype: str) -> type[IDataHandler]:
-datadir: Path, data_format: Optional[str] = None, data_handler: Optional[IDataHandler] = None
+datadir: Path, data_format: str | None = None, data_handler: IDataHandler | None = None
@@ -1,5 +1,4 @@
-from typing import Optional
@@ -45,7 +44,7 @@ class JsonDataHandler(IDataHandler):
-self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
+self, pair: str, timeframe: str, timerange: TimeRange | None, candle_type: CandleType
@@ -119,7 +118,7 @@ class JsonDataHandler(IDataHandler):
-self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
+self, pair: str, trading_mode: TradingMode, timerange: TimeRange | None = None
@@ -1,5 +1,4 @@
-from typing import Optional
@@ -35,7 +34,7 @@ class ParquetDataHandler(IDataHandler):
-self, pair: str, timeframe: str, timerange: Optional[TimeRange], candle_type: CandleType
+self, pair: str, timeframe: str, timerange: TimeRange | None, candle_type: CandleType
@@ -106,7 +105,7 @@ class ParquetDataHandler(IDataHandler):
-self, pair: str, trading_mode: TradingMode, timerange: Optional[TimeRange] = None
+self, pair: str, trading_mode: TradingMode, timerange: TimeRange | None = None
@@ -2,7 +2,6 @@ import logging
-from typing import Optional
@@ -37,12 +36,12 @@ def load_pair_history(
-timerange: Optional[TimeRange] = None,
+timerange: TimeRange | None = None,
-data_format: Optional[str] = None,
-data_handler: Optional[IDataHandler] = None,
+data_format: str | None = None,
+data_handler: IDataHandler | None = None,
@@ -79,13 +78,13 @@ def load_data(
-timerange: Optional[TimeRange] = None,
+timerange: TimeRange | None = None,
-user_futures_funding_rate: Optional[int] = None,
+user_futures_funding_rate: int | None = None,
@@ -137,8 +136,8 @@ def refresh_data(
-data_format: Optional[str] = None,
-timerange: Optional[TimeRange] = None,
+data_format: str | None = None,
+timerange: TimeRange | None = None,
@@ -168,11 +167,11 @@ def refresh_data(
-timerange: Optional[TimeRange],
+timerange: TimeRange | None,
-) -> tuple[DataFrame, Optional[int], Optional[int]]:
+) -> tuple[DataFrame, int | None, int | None]:
@@ -220,8 +219,8 @@ def _download_pair_history(
-data_handler: Optional[IDataHandler] = None,
-timerange: Optional[TimeRange] = None,
+data_handler: IDataHandler | None = None,
+timerange: TimeRange | None = None,
@@ -322,10 +321,10 @@ def refresh_backtest_ohlcv_data(
-timerange: Optional[TimeRange] = None,
+timerange: TimeRange | None = None,
-data_format: Optional[str] = None,
+data_format: str | None = None,
@@ -404,7 +403,7 @@ def _download_trades_history(
-timerange: Optional[TimeRange] = None,
+timerange: TimeRange | None = None,
@@ -3,7 +3,6 @@
-from typing import Optional
@@ -53,7 +52,7 @@ class Binance(Exchange):
-def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
+def get_tickers(self, symbols: list[str] | None = None, cached: bool = False) -> Tickers:
@@ -106,7 +105,7 @@ class Binance(Exchange):
-until_ms: Optional[int] = None,
+until_ms: int | None = None,
@@ -144,9 +143,7 @@ class Binance(Exchange):
-def fetch_funding_rates(
-self, symbols: Optional[list[str]] = None
-) -> dict[str, dict[str, float]]:
+def fetch_funding_rates(self, symbols: list[str] | None = None) -> dict[str, dict[str, float]]:
@@ -177,7 +174,7 @@ class Binance(Exchange):
-) -> Optional[float]:
+) -> float | None:
@@ -2,7 +2,6 @@
-from typing import Optional
@@ -17,7 +16,7 @@ class Bitpanda(Exchange):
-self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
+self, order_id: str, pair: str, since: datetime, params: dict | None = None
@@ -2,7 +2,7 @@
-from typing import Any, Optional
+from typing import Any
@@ -115,7 +115,7 @@ class Bybit(Exchange):
-self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
+self, timeframe: str, candle_type: CandleType, since_ms: int | None = None
@@ -157,7 +157,7 @@ class Bybit(Exchange):
-) -> Optional[float]:
+) -> float | None:
@@ -230,7 +230,7 @@ class Bybit(Exchange):
-self, pair: str, since: datetime, params: Optional[dict] = None
+self, pair: str, since: datetime, params: dict | None = None
@@ -248,7 +248,7 @@ class Bybit(Exchange):
-def fetch_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> CcxtOrder:
+def fetch_order(self, order_id: str, pair: str, params: dict | None = None) -> CcxtOrder:
@@ -1,8 +1,9 @@
+from collections.abc import Callable
-from typing import Any, Callable, Optional, TypeVar, cast, overload
+from typing import Any, TypeVar, cast, overload
@@ -172,7 +173,7 @@ def retrier(_func: F, *, retries=API_RETRY_COUNT) -> F: ...
-def retrier(_func: Optional[F] = None, *, retries=API_RETRY_COUNT):
+def retrier(_func: F | None = None, *, retries=API_RETRY_COUNT):
@@ -185,7 +186,7 @@ def retrier(_func: Optional[F] = None, *, retries=API_RETRY_COUNT):
-if isinstance(ex, (DDosProtection, RetryableOrderError)):
+if isinstance(ex, DDosProtection | RetryableOrderError):
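Besides annotations, the retrier hunk above also passes a | union directly to isinstance(), which Python 3.10 accepts in place of a tuple of classes. A small sketch with stand-in exception classes (not freqtrade's own):

class DDosProtection(Exception): ...


class RetryableOrderError(Exception): ...


err = DDosProtection("rate limited")
# Equivalent checks on Python 3.10+; the second form relies on PEP 604 unions.
assert isinstance(err, (DDosProtection, RetryableOrderError))
assert isinstance(err, DDosProtection | RetryableOrderError)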
@@ -12,7 +12,7 @@ from copy import deepcopy
-from typing import Any, Literal, Optional, TypeGuard, Union
+from typing import Any, Literal, TypeGuard
@@ -170,7 +170,7 @@ class Exchange:
-exchange_config: Optional[ExchangeConfig] = None,
+exchange_config: ExchangeConfig | None = None,
@@ -182,7 +182,7 @@ class Exchange:
-self._exchange_ws: Optional[ExchangeWS] = None
+self._exchange_ws: ExchangeWS | None = None
@@ -453,7 +453,7 @@ class Exchange:
-self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
+self, timeframe: str, candle_type: CandleType, since_ms: int | None = None
@@ -473,8 +473,8 @@ class Exchange:
-base_currencies: Optional[list[str]] = None,
-quote_currencies: Optional[list[str]] = None,
+base_currencies: list[str] | None = None,
+quote_currencies: list[str] | None = None,
@@ -567,7 +567,7 @@ class Exchange:
-def get_contract_size(self, pair: str) -> Optional[float]:
+def get_contract_size(self, pair: str) -> float | None:
@@ -710,7 +710,7 @@ class Exchange:
-def validate_timeframes(self, timeframe: Optional[str]) -> None:
+def validate_timeframes(self, timeframe: str | None) -> None:
@@ -840,7 +840,7 @@ class Exchange:
-margin_mode: Optional[MarginMode], # Only None when trading_mode = TradingMode.SPOT
+margin_mode: MarginMode | None, # Only None when trading_mode = TradingMode.SPOT
@@ -856,7 +856,7 @@ class Exchange:
-def get_option(self, param: str, default: Optional[Any] = None) -> Any:
+def get_option(self, param: str, default: Any | None = None) -> Any:
@@ -873,7 +873,7 @@ class Exchange:
-def get_precision_amount(self, pair: str) -> Optional[float]:
+def get_precision_amount(self, pair: str) -> float | None:
@@ -881,7 +881,7 @@ class Exchange:
-def get_precision_price(self, pair: str) -> Optional[float]:
+def get_precision_price(self, pair: str) -> float | None:
@@ -921,8 +921,8 @@ class Exchange:
-self, pair: str, price: float, stoploss: float, leverage: Optional[float] = 1.0
-) -> Optional[float]:
+self, pair: str, price: float, stoploss: float, leverage: float | None = 1.0
+) -> float | None:
@@ -940,8 +940,8 @@ class Exchange:
-leverage: Optional[float] = 1.0,
-) -> Optional[float]:
+leverage: float | None = 1.0,
+) -> float | None:
@@ -1002,7 +1002,7 @@ class Exchange:
-params: Optional[dict] = None,
+params: dict | None = None,
@@ -1033,7 +1033,7 @@ class Exchange:
-orderbook: Optional[OrderBook] = None
+orderbook: OrderBook | None = None
@@ -1088,7 +1088,7 @@ class Exchange:
-self, pair: str, side: str, amount: float, rate: float, orderbook: Optional[OrderBook]
+self, pair: str, side: str, amount: float, rate: float, orderbook: OrderBook | None
@@ -1136,7 +1136,7 @@ class Exchange:
-orderbook: Optional[OrderBook] = None,
+orderbook: OrderBook | None = None,
@@ -1158,7 +1158,7 @@ class Exchange:
-self, order: CcxtOrder, immediate: bool = False, orderbook: Optional[OrderBook] = None
+self, order: CcxtOrder, immediate: bool = False, orderbook: OrderBook | None = None
@@ -1494,7 +1494,7 @@ class Exchange:
-def fetch_order(self, order_id: str, pair: str, params: Optional[dict] = None) -> CcxtOrder:
+def fetch_order(self, order_id: str, pair: str, params: dict | None = None) -> CcxtOrder:
@@ -1524,7 +1524,7 @@ class Exchange:
-self, order_id: str, pair: str, params: Optional[dict] = None
+self, order_id: str, pair: str, params: dict | None = None
@@ -1551,9 +1551,7 @@ class Exchange:
-def cancel_order(
-self, order_id: str, pair: str, params: Optional[dict] = None
-) -> dict[str, Any]:
+def cancel_order(self, order_id: str, pair: str, params: dict | None = None) -> dict[str, Any]:
@@ -1581,9 +1579,7 @@ class Exchange:
-def cancel_stoploss_order(
-self, order_id: str, pair: str, params: Optional[dict] = None
-) -> dict:
+def cancel_stoploss_order(self, order_id: str, pair: str, params: dict | None = None) -> dict:
@@ -1668,7 +1664,7 @@ class Exchange:
-def fetch_positions(self, pair: Optional[str] = None) -> list[CcxtPosition]:
+def fetch_positions(self, pair: str | None = None) -> list[CcxtPosition]:
@@ -1703,7 +1699,7 @@ class Exchange:
-self, pair: str, since: datetime, params: Optional[dict] = None
+self, pair: str, since: datetime, params: dict | None = None
@@ -1767,7 +1763,7 @@ class Exchange:
-def fetch_bids_asks(self, symbols: Optional[list[str]] = None, cached: bool = False) -> dict:
+def fetch_bids_asks(self, symbols: list[str] | None = None, cached: bool = False) -> dict:
@@ -1800,7 +1796,7 @@ class Exchange:
-def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
+def get_tickers(self, symbols: list[str] | None = None, cached: bool = False) -> Tickers:
@@ -1860,7 +1856,7 @@ class Exchange:
-limit: int, limit_range: Optional[list[int]], range_required: bool = True
+limit: int, limit_range: list[int] | None, range_required: bool = True
@@ -1924,8 +1920,8 @@ class Exchange:
-order_book: Optional[OrderBook] = None,
-ticker: Optional[Ticker] = None,
+order_book: OrderBook | None = None,
+ticker: Ticker | None = None,
@@ -1974,7 +1970,7 @@ class Exchange:
-) -> Optional[float]:
+) -> float | None:
@@ -2053,7 +2049,7 @@ class Exchange:
-self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
+self, order_id: str, pair: str, since: datetime, params: dict | None = None
@@ -2168,7 +2164,7 @@ class Exchange:
-) -> Optional[float]:
+) -> float | None:
@@ -2208,7 +2204,7 @@ class Exchange:
-) -> tuple[float, str, Optional[float]]:
+) -> tuple[float, str, float | None]:
@@ -2233,7 +2229,7 @@ class Exchange:
-until_ms: Optional[int] = None,
+until_ms: int | None = None,
@@ -2267,7 +2263,7 @@ class Exchange:
-until_ms: Optional[int] = None,
+until_ms: int | None = None,
@@ -2312,7 +2308,7 @@ class Exchange:
-since_ms: Optional[int],
+since_ms: int | None,
@@ -2381,7 +2377,7 @@ class Exchange:
-self, pair_list: ListPairsWithTimeframes, since_ms: Optional[int], cache: bool
+self, pair_list: ListPairsWithTimeframes, since_ms: int | None, cache: bool
@@ -2458,9 +2454,9 @@ class Exchange:
-since_ms: Optional[int] = None,
+since_ms: int | None = None,
-drop_incomplete: Optional[bool] = None,
+drop_incomplete: bool | None = None,
@@ -2554,7 +2550,7 @@ class Exchange:
-since_ms: Optional[int] = None,
+since_ms: int | None = None,
@@ -2628,7 +2624,7 @@ class Exchange:
-since_ms: Optional[int] = None,
+since_ms: int | None = None,
@@ -2687,7 +2683,7 @@ class Exchange:
-) -> tuple[PairWithTimeframe, Optional[DataFrame]]:
+) -> tuple[PairWithTimeframe, DataFrame | None]:
@@ -2831,7 +2827,7 @@ class Exchange:
-self, pair: str, since: Optional[int] = None, params: Optional[dict] = None
+self, pair: str, since: int | None = None, params: dict | None = None
@@ -2891,7 +2887,7 @@ class Exchange:
-self, pair: str, until: int, since: Optional[int] = None, from_id: Optional[str] = None
+self, pair: str, until: int, since: int | None = None, from_id: str | None = None
@@ -2946,7 +2942,7 @@ class Exchange:
-self, pair: str, until: int, since: Optional[int] = None
+self, pair: str, until: int, since: int | None = None
@@ -2987,9 +2983,9 @@ class Exchange:
-since: Optional[int] = None,
-until: Optional[int] = None,
-from_id: Optional[str] = None,
+since: int | None = None,
+until: int | None = None,
+from_id: str | None = None,
@@ -3018,9 +3014,9 @@ class Exchange:
-since: Optional[int] = None,
-until: Optional[int] = None,
-from_id: Optional[str] = None,
+since: int | None = None,
+until: int | None = None,
+from_id: str | None = None,
@@ -3049,7 +3045,7 @@ class Exchange:
-def _get_funding_fees_from_exchange(self, pair: str, since: Union[datetime, int]) -> float:
+def _get_funding_fees_from_exchange(self, pair: str, since: datetime | int) -> float:
@@ -3180,8 +3176,8 @@ class Exchange:
-self, stake_currency: str, cache_time: Optional[timedelta] = None
-) -> Optional[dict[str, list[dict]]]:
+self, stake_currency: str, cache_time: timedelta | None = None
+) -> dict[str, list[dict]] | None:
@@ -3226,7 +3222,7 @@ class Exchange:
-def get_max_leverage(self, pair: str, stake_amount: Optional[float]) -> float:
+def get_max_leverage(self, pair: str, stake_amount: float | None) -> float:
@@ -3304,7 +3300,7 @@ class Exchange:
-pair: Optional[str] = None,
+pair: str | None = None,
@@ -3356,7 +3352,7 @@ class Exchange:
-params: Optional[dict] = None,
+params: dict | None = None,
@@ -3391,7 +3387,7 @@ class Exchange:
-close_date: Optional[datetime] = None,
+close_date: datetime | None = None,
@@ -3444,7 +3440,7 @@ class Exchange:
-funding_rates: DataFrame, mark_rates: DataFrame, futures_funding_rate: Optional[int] = None
+funding_rates: DataFrame, mark_rates: DataFrame, futures_funding_rate: int | None = None
@@ -3485,7 +3481,7 @@ class Exchange:
-time_in_ratio: Optional[float] = None,
+time_in_ratio: float | None = None,
@@ -3543,8 +3539,8 @@ class Exchange:
-open_trades: Optional[list] = None,
-) -> Optional[float]:
+open_trades: list | None = None,
+) -> float | None:
@@ -3592,7 +3588,7 @@ class Exchange:
-) -> Optional[float]:
+) -> float | None:
@@ -3643,7 +3639,7 @@ class Exchange:
-) -> tuple[float, Optional[float]]:
+) -> tuple[float, float | None]:
@@ -1,4 +1,4 @@
-from typing import Any, Literal, Optional, TypedDict
+from typing import Any, Literal, TypedDict
@@ -35,7 +35,7 @@ class FtHas(TypedDict, total=False):
-l2_limit_range: Optional[list[int]]
+l2_limit_range: list[int] | None
@@ -52,14 +52,14 @@ class FtHas(TypedDict, total=False):
-ask: Optional[float]
-askVolume: Optional[float]
-bid: Optional[float]
-bidVolume: Optional[float]
-last: Optional[float]
-quoteVolume: Optional[float]
-baseVolume: Optional[float]
-percentage: Optional[float]
+ask: float | None
+askVolume: float | None
+bid: float | None
+bidVolume: float | None
+last: float | None
+quoteVolume: float | None
+baseVolume: float | None
+percentage: float | None
@@ -70,9 +70,9 @@ class OrderBook(TypedDict):
-timestamp: Optional[int]
-datetime: Optional[str]
-nonce: Optional[int]
+timestamp: int | None
+datetime: str | None
+nonce: int | None
@@ -89,9 +89,9 @@ class CcxtPosition(TypedDict):
-collateral: Optional[float]
-initialMargin: Optional[float]
-liquidationPrice: Optional[float]
+collateral: float | None
+initialMargin: float | None
+liquidationPrice: float | None
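The TypedDict hunks above follow the same rule: the keys stay required, but their values are now annotated as X | None instead of Optional[X]. A hypothetical, trimmed-down analogue:

from typing import TypedDict


class MiniTicker(TypedDict):
    # Keys are required; values may be None (PEP 604 unions instead of Optional).
    symbol: str
    bid: float | None
    ask: float | None


ticker: MiniTicker = {"symbol": "BTC/USDT", "bid": None, "ask": 65000.0}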
@@ -5,7 +5,7 @@ Exchange support utils
-from typing import Any, Optional
+from typing import Any
@@ -33,20 +33,18 @@ from freqtrade.util import FtPrecise
-def is_exchange_known_ccxt(
-exchange_name: str, ccxt_module: Optional[CcxtModuleType] = None
-) -> bool:
+def is_exchange_known_ccxt(exchange_name: str, ccxt_module: CcxtModuleType | None = None) -> bool:
-def ccxt_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> list[str]:
+def ccxt_exchanges(ccxt_module: CcxtModuleType | None = None) -> list[str]:
-def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> list[str]:
+def available_exchanges(ccxt_module: CcxtModuleType | None = None) -> list[str]:
@@ -54,7 +52,7 @@ def available_exchanges(ccxt_module: Optional[CcxtModuleType] = None) -> list[str]:
-def validate_exchange(exchange: str) -> tuple[bool, str, Optional[ccxt.Exchange]]:
+def validate_exchange(exchange: str) -> tuple[bool, str, ccxt.Exchange | None]:
@@ -137,9 +135,7 @@ def list_available_exchanges(all_exchanges: bool) -> list[ValidExchangesType]:
-def date_minus_candles(
-timeframe: str, candle_count: int, date: Optional[datetime] = None
-) -> datetime:
+def date_minus_candles(timeframe: str, candle_count: int, date: datetime | None = None) -> datetime:
@@ -166,7 +162,7 @@ def market_is_active(market: dict) -> bool:
-def amount_to_contracts(amount: float, contract_size: Optional[float]) -> float:
+def amount_to_contracts(amount: float, contract_size: float | None) -> float:
@@ -179,7 +175,7 @@ def amount_to_contracts(amount: float, contract_size: Optional[float]) -> float:
-def contracts_to_amount(num_contracts: float, contract_size: Optional[float]) -> float:
+def contracts_to_amount(num_contracts: float, contract_size: float | None) -> float:
@@ -194,7 +190,7 @@ def contracts_to_amount(num_contracts: float, contract_size: Optional[float]) -> float:
-amount: float, amount_precision: Optional[float], precisionMode: Optional[int]
+amount: float, amount_precision: float | None, precisionMode: int | None
@@ -224,9 +220,9 @@ def amount_to_precision(
-amount_precision: Optional[float],
-precisionMode: Optional[int],
-contract_size: Optional[float],
+amount_precision: float | None,
+precisionMode: int | None,
+contract_size: float | None,
@@ -285,8 +281,8 @@ def __price_to_precision_significant_digits(
-price_precision: Optional[float],
-precisionMode: Optional[int],
+price_precision: float | None,
+precisionMode: int | None,
@@ -1,5 +1,4 @@
from datetime import datetime, timezone
from typing import Optional

import ccxt
from ccxt import ROUND_DOWN, ROUND_UP
@@ -51,7 +50,7 @@ def timeframe_to_resample_freq(timeframe: str) -> str:
return resample_interval


def timeframe_to_prev_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
def timeframe_to_prev_date(timeframe: str, date: datetime | None = None) -> datetime:
"""
Use Timeframe and determine the candle start date for this date.
Does not round when given a candle start date.
@@ -66,7 +65,7 @@ def timeframe_to_prev_date(timeframe: str, date: Optional[datetime] = None) -> d
return dt_from_ts(new_timestamp)


def timeframe_to_next_date(timeframe: str, date: Optional[datetime] = None) -> datetime:
def timeframe_to_next_date(timeframe: str, date: datetime | None = None) -> datetime:
"""
Use Timeframe and determine next candle.
:param timeframe: timeframe in string format (e.g. "5m")

@@ -2,7 +2,6 @@

import logging
from datetime import datetime
from typing import Optional

from freqtrade.constants import BuySell
from freqtrade.enums import MarginMode, PriceType, TradingMode
@@ -74,7 +73,7 @@ class Gate(Exchange):
return params

def get_trades_for_order(
self, order_id: str, pair: str, since: datetime, params: Optional[dict] = None
self, order_id: str, pair: str, since: datetime, params: dict | None = None
) -> list:
trades = super().get_trades_for_order(order_id, pair, since, params)

@@ -103,7 +102,7 @@ class Gate(Exchange):
return safe_value_fallback2(order, order, "id_stop", "id")

def fetch_stoploss_order(
self, order_id: str, pair: str, params: Optional[dict] = None
self, order_id: str, pair: str, params: dict | None = None
) -> CcxtOrder:
order = self.fetch_order(order_id=order_id, pair=pair, params={"stop": True})
if order.get("status", "open") == "closed":
@@ -121,7 +120,5 @@ class Gate(Exchange):
return order1
return order

def cancel_stoploss_order(
self, order_id: str, pair: str, params: Optional[dict] = None
) -> dict:
def cancel_stoploss_order(self, order_id: str, pair: str, params: dict | None = None) -> dict:
return self.cancel_order(order_id=order_id, pair=pair, params={"stop": True})

@ -2,7 +2,7 @@
|
|||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
import ccxt
|
||||
from pandas import DataFrame
|
||||
|
@ -50,7 +50,7 @@ class Kraken(Exchange):
|
|||
|
||||
return parent_check and market.get("darkpool", False) is False
|
||||
|
||||
def get_tickers(self, symbols: Optional[list[str]] = None, cached: bool = False) -> Tickers:
|
||||
def get_tickers(self, symbols: list[str] | None = None, cached: bool = False) -> Tickers:
|
||||
# Only fetch tickers for current stake currency
|
||||
# Otherwise the request for kraken becomes too large.
|
||||
symbols = list(self.get_markets(quote_currencies=[self._config["stake_currency"]]))
|
||||
|
@ -99,7 +99,7 @@ class Kraken(Exchange):
|
|||
def _set_leverage(
|
||||
self,
|
||||
leverage: float,
|
||||
pair: Optional[str] = None,
|
||||
pair: str | None = None,
|
||||
accept_fail: bool = False,
|
||||
):
|
||||
"""
|
||||
|
@ -137,7 +137,7 @@ class Kraken(Exchange):
|
|||
is_short: bool,
|
||||
open_date: datetime,
|
||||
close_date: datetime,
|
||||
time_in_ratio: Optional[float] = None,
|
||||
time_in_ratio: float | None = None,
|
||||
) -> float:
|
||||
"""
|
||||
# ! This method will always error when run by Freqtrade because time_in_ratio is never
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Optional
|
||||
|
||||
import ccxt
|
||||
|
||||
|
@ -60,7 +59,7 @@ class Okx(Exchange):
|
|||
_ccxt_params: dict = {"options": {"brokerId": "ffb5405ad327SUDE"}}
|
||||
|
||||
def ohlcv_candle_limit(
|
||||
self, timeframe: str, candle_type: CandleType, since_ms: Optional[int] = None
|
||||
self, timeframe: str, candle_type: CandleType, since_ms: int | None = None
|
||||
) -> int:
|
||||
"""
|
||||
Exchange ohlcv candle limit
|
||||
|
@ -210,7 +209,7 @@ class Okx(Exchange):
|
|||
|
||||
@retrier(retries=API_RETRY_COUNT)
|
||||
def fetch_stoploss_order(
|
||||
self, order_id: str, pair: str, params: Optional[dict] = None
|
||||
self, order_id: str, pair: str, params: dict | None = None
|
||||
) -> CcxtOrder:
|
||||
if self._config["dry_run"]:
|
||||
return self.fetch_dry_run_order(order_id)
|
||||
|
@ -263,9 +262,7 @@ class Okx(Exchange):
|
|||
return safe_value_fallback2(order, order, "id_stop", "id")
|
||||
return order["id"]
|
||||
|
||||
def cancel_stoploss_order(
|
||||
self, order_id: str, pair: str, params: Optional[dict] = None
|
||||
) -> dict:
|
||||
def cancel_stoploss_order(self, order_id: str, pair: str, params: dict | None = None) -> dict:
|
||||
params1 = {"stop": True}
|
||||
# 'ordType': 'conditional'
|
||||
#
|
||||
|
|
|
@ -2,7 +2,6 @@ import logging
|
|||
import random
|
||||
from abc import abstractmethod
|
||||
from enum import Enum
|
||||
from typing import Optional, Union
|
||||
|
||||
import gymnasium as gym
|
||||
import numpy as np
|
||||
|
@ -140,7 +139,7 @@ class BaseEnvironment(gym.Env):
|
|||
self._end_tick: int = len(self.prices) - 1
|
||||
self._done: bool = False
|
||||
self._current_tick: int = self._start_tick
|
||||
self._last_trade_tick: Optional[int] = None
|
||||
self._last_trade_tick: int | None = None
|
||||
self._position = Positions.Neutral
|
||||
self._position_history: list = [None]
|
||||
self.total_reward: float = 0
|
||||
|
@ -173,8 +172,8 @@ class BaseEnvironment(gym.Env):
|
|||
def tensorboard_log(
|
||||
self,
|
||||
metric: str,
|
||||
value: Optional[Union[int, float]] = None,
|
||||
inc: Optional[bool] = None,
|
||||
value: int | float | None = None,
|
||||
inc: bool | None = None,
|
||||
category: str = "custom",
|
||||
):
|
||||
"""
|
||||
|
|
|
@ -2,9 +2,10 @@ import copy
|
|||
import importlib
|
||||
import logging
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Callable
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Optional, Union
|
||||
from typing import Any
|
||||
|
||||
import gymnasium as gym
|
||||
import numpy as np
|
||||
|
@ -49,9 +50,9 @@ class BaseReinforcementLearningModel(IFreqaiModel):
|
|||
)
|
||||
th.set_num_threads(self.max_threads)
|
||||
self.reward_params = self.freqai_info["rl_config"]["model_reward_parameters"]
|
||||
self.train_env: Union[VecMonitor, SubprocVecEnv, gym.Env] = gym.Env()
|
||||
self.eval_env: Union[VecMonitor, SubprocVecEnv, gym.Env] = gym.Env()
|
||||
self.eval_callback: Optional[MaskableEvalCallback] = None
|
||||
self.train_env: VecMonitor | SubprocVecEnv | gym.Env = gym.Env()
|
||||
self.eval_env: VecMonitor | SubprocVecEnv | gym.Env = gym.Env()
|
||||
self.eval_callback: MaskableEvalCallback | None = None
|
||||
self.model_type = self.freqai_info["rl_config"]["model_type"]
|
||||
self.rl_config = self.freqai_info["rl_config"]
|
||||
self.df_raw: DataFrame = DataFrame()
|
||||
|
|
|
@ -5,7 +5,7 @@ import random
|
|||
import shutil
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
import numpy as np
|
||||
import numpy.typing as npt
|
||||
|
@ -111,7 +111,7 @@ class FreqaiDataKitchen:
|
|||
def set_paths(
|
||||
self,
|
||||
pair: str,
|
||||
trained_timestamp: Optional[int] = None,
|
||||
trained_timestamp: int | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Set the paths to the data for the present coin/botloop
|
||||
|
|
|
@@ -5,7 +5,7 @@ from abc import ABC, abstractmethod
from collections import deque
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Literal, Optional
from typing import Any, Literal

import datasieve.transforms as ds
import numpy as np
@@ -106,7 +106,7 @@ class IFreqaiModel(ABC):
self._threads: list[threading.Thread] = []
self._stop_event = threading.Event()
self.metadata: dict[str, Any] = self.dd.load_global_metadata_from_disk()
self.data_provider: Optional[DataProvider] = None
self.data_provider: DataProvider | None = None
self.max_system_threads = max(int(psutil.cpu_count() * 2 - 2), 1)
self.can_short = True # overridden in start() with strategy.can_short
self.model: Any = None
@@ -294,7 +294,9 @@ class IFreqaiModel(ABC):
# tr_backtest is the backtesting time range e.g. the week directly
# following tr_train. Both of these windows slide through the
# entire backtest
for tr_train, tr_backtest in zip(dk.training_timeranges, dk.backtesting_timeranges):
for tr_train, tr_backtest in zip(
dk.training_timeranges, dk.backtesting_timeranges, strict=False
):
(_, _) = self.dd.get_pair_dict_info(pair)
train_it += 1
total_trains = len(dk.backtesting_timeranges)

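For context: several hunks in this diff add strict=False to existing zip() calls. The strict keyword exists from Python 3.10, and a lint rule such as ruff's B905 (zip-without-explicit-strict) is the usual driver for making it explicit; strict=False keeps the old truncate-at-the-shortest behaviour, while strict=True raises when the iterables differ in length. A minimal standalone sketch (the list contents are illustrative, not freqtrade data):

# Sketch of zip(strict=...) semantics (Python 3.10+).
training_ranges = ["w1", "w2", "w3"]
backtest_ranges = ["w2", "w3"]  # deliberately shorter

# strict=False keeps the pre-3.10 behaviour: stop at the shortest iterable.
pairs = list(zip(training_ranges, backtest_ranges, strict=False))
print(pairs)  # [('w1', 'w2'), ('w2', 'w3')]

# strict=True raises ValueError here because the lengths differ.
try:
    list(zip(training_ranges, backtest_ranges, strict=True))
except ValueError as err:
    print(f"strict=True raised: {err}")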
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
import torch as th
|
||||
from stable_baselines3.common.callbacks import ProgressBarCallback
|
||||
|
@ -78,7 +78,7 @@ class ReinforcementLearner(BaseReinforcementLearningModel):
|
|||
model = self.dd.model_dictionary[dk.pair]
|
||||
model.set_env(self.train_env)
|
||||
callbacks: list[Any] = [self.eval_callback, self.tensorboard_callback]
|
||||
progressbar_callback: Optional[ProgressBarCallback] = None
|
||||
progressbar_callback: ProgressBarCallback | None = None
|
||||
if self.rl_config.get("progress_bar", False):
|
||||
progressbar_callback = ProgressBarCallback()
|
||||
callbacks.insert(0, progressbar_callback)
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from enum import Enum
|
||||
from typing import Any, Union
|
||||
from typing import Any
|
||||
|
||||
from stable_baselines3.common.callbacks import BaseCallback
|
||||
from stable_baselines3.common.logger import HParam
|
||||
|
@ -27,7 +27,7 @@ class TensorboardCallback(BaseCallback):
|
|||
# "batch_size": self.model.batch_size,
|
||||
# "n_steps": self.model.n_steps,
|
||||
}
|
||||
metric_dict: dict[str, Union[float, int]] = {
|
||||
metric_dict: dict[str, float | int] = {
|
||||
"eval/mean_reward": 0,
|
||||
"rollout/ep_rew_mean": 0,
|
||||
"rollout/ep_len_mean": 0,
|
||||
|
|
|
@ -45,7 +45,7 @@ class TensorBoardCallback(BaseTensorBoardCallback):
|
|||
return False
|
||||
|
||||
evals = ["validation", "train"]
|
||||
for metric, eval_ in zip(evals_log.items(), evals):
|
||||
for metric, eval_ in zip(evals_log.items(), evals, strict=False):
|
||||
for metric_name, log in metric[1].items():
|
||||
score = log[-1][0] if isinstance(log[-1], tuple) else log[-1]
|
||||
self.writer.add_scalar(f"{eval_}-{metric_name}", score, epoch)
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
import pandas as pd
|
||||
import torch
|
||||
|
@ -50,8 +50,8 @@ class PyTorchModelTrainer(PyTorchTrainerInterface):
|
|||
self.criterion = criterion
|
||||
self.model_meta_data = model_meta_data
|
||||
self.device = device
|
||||
self.n_epochs: Optional[int] = kwargs.get("n_epochs", 10)
|
||||
self.n_steps: Optional[int] = kwargs.get("n_steps", None)
|
||||
self.n_epochs: int | None = kwargs.get("n_epochs", 10)
|
||||
self.n_steps: int | None = kwargs.get("n_steps", None)
|
||||
if self.n_steps is None and not self.n_epochs:
|
||||
raise Exception("Either `n_steps` or `n_epochs` should be set.")
|
||||
|
||||
|
|
|
@ -107,7 +107,7 @@ def plot_feature_importance(
|
|||
# Extract feature importance from model
|
||||
models = {}
|
||||
if "FreqaiMultiOutputRegressor" in str(model.__class__):
|
||||
for estimator, label in zip(model.estimators_, dk.label_list):
|
||||
for estimator, label in zip(model.estimators_, dk.label_list, strict=False):
|
||||
models[label] = estimator
|
||||
else:
|
||||
models[dk.label_list[0]] = model
|
||||
|
|
|
@ -9,7 +9,7 @@ from datetime import datetime, time, timedelta, timezone
|
|||
from math import isclose
|
||||
from threading import Lock
|
||||
from time import sleep
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from schedule import Scheduler
|
||||
|
||||
|
@ -112,7 +112,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
|
||||
self.trading_mode: TradingMode = self.config.get("trading_mode", TradingMode.SPOT)
|
||||
self.margin_mode: MarginMode = self.config.get("margin_mode", MarginMode.NONE)
|
||||
self.last_process: Optional[datetime] = None
|
||||
self.last_process: datetime | None = None
|
||||
|
||||
# RPC runs in separate threads, can start handling external commands just after
|
||||
# initialization, even before Freqtradebot has a chance to start its throttling,
|
||||
|
@ -326,7 +326,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
}
|
||||
self.rpc.send_msg(msg)
|
||||
|
||||
def _refresh_active_whitelist(self, trades: Optional[list[Trade]] = None) -> list[str]:
|
||||
def _refresh_active_whitelist(self, trades: list[Trade] | None = None) -> list[str]:
|
||||
"""
|
||||
Refresh active whitelist from pairlist or edge and extend it with
|
||||
pairs that have open trades.
|
||||
|
@ -863,14 +863,14 @@ class FreqtradeBot(LoggingMixin):
|
|||
self,
|
||||
pair: str,
|
||||
stake_amount: float,
|
||||
price: Optional[float] = None,
|
||||
price: float | None = None,
|
||||
*,
|
||||
is_short: bool = False,
|
||||
ordertype: Optional[str] = None,
|
||||
enter_tag: Optional[str] = None,
|
||||
trade: Optional[Trade] = None,
|
||||
ordertype: str | None = None,
|
||||
enter_tag: str | None = None,
|
||||
trade: Trade | None = None,
|
||||
mode: EntryExecuteMode = "initial",
|
||||
leverage_: Optional[float] = None,
|
||||
leverage_: float | None = None,
|
||||
) -> bool:
|
||||
"""
|
||||
Executes an entry for the given pair
|
||||
|
@ -1079,13 +1079,13 @@ class FreqtradeBot(LoggingMixin):
|
|||
def get_valid_enter_price_and_stake(
|
||||
self,
|
||||
pair: str,
|
||||
price: Optional[float],
|
||||
price: float | None,
|
||||
stake_amount: float,
|
||||
trade_side: LongShort,
|
||||
entry_tag: Optional[str],
|
||||
trade: Optional[Trade],
|
||||
entry_tag: str | None,
|
||||
trade: Trade | None,
|
||||
mode: EntryExecuteMode,
|
||||
leverage_: Optional[float],
|
||||
leverage_: float | None,
|
||||
) -> tuple[float, float, float]:
|
||||
"""
|
||||
Validate and eventually adjust (within limits) limit, amount and leverage
|
||||
|
@ -1181,7 +1181,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
self,
|
||||
trade: Trade,
|
||||
order: Order,
|
||||
order_type: Optional[str],
|
||||
order_type: str | None,
|
||||
fill: bool = False,
|
||||
sub_trade: bool = False,
|
||||
) -> None:
|
||||
|
@ -1352,7 +1352,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
return False
|
||||
|
||||
def _check_and_execute_exit(
|
||||
self, trade: Trade, exit_rate: float, enter: bool, exit_: bool, exit_tag: Optional[str]
|
||||
self, trade: Trade, exit_rate: float, enter: bool, exit_: bool, exit_tag: str | None
|
||||
) -> bool:
|
||||
"""
|
||||
Check and execute trade exit
|
||||
|
@ -1612,7 +1612,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
self.emergency_exit(trade, order["price"], order["amount"])
|
||||
|
||||
def emergency_exit(
|
||||
self, trade: Trade, price: float, sub_trade_amt: Optional[float] = None
|
||||
self, trade: Trade, price: float, sub_trade_amt: float | None = None
|
||||
) -> None:
|
||||
try:
|
||||
self.execute_trade_exit(
|
||||
|
@ -1642,7 +1642,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
)
|
||||
trade.delete()
|
||||
|
||||
def replace_order(self, order: CcxtOrder, order_obj: Optional[Order], trade: Trade) -> None:
|
||||
def replace_order(self, order: CcxtOrder, order_obj: Order | None, trade: Trade) -> None:
|
||||
"""
|
||||
Check if current analyzed entry order should be replaced or simply cancelled.
|
||||
To simply cancel the existing order(no replacement) adjust_entry_price() should return None
|
||||
|
@ -1749,7 +1749,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
order: CcxtOrder,
|
||||
order_obj: Order,
|
||||
reason: str,
|
||||
replacing: Optional[bool] = False,
|
||||
replacing: bool | None = False,
|
||||
) -> bool:
|
||||
"""
|
||||
entry cancel - cancel order
|
||||
|
@ -1943,9 +1943,9 @@ class FreqtradeBot(LoggingMixin):
|
|||
limit: float,
|
||||
exit_check: ExitCheckTuple,
|
||||
*,
|
||||
exit_tag: Optional[str] = None,
|
||||
ordertype: Optional[str] = None,
|
||||
sub_trade_amt: Optional[float] = None,
|
||||
exit_tag: str | None = None,
|
||||
ordertype: str | None = None,
|
||||
sub_trade_amt: float | None = None,
|
||||
) -> bool:
|
||||
"""
|
||||
Executes a trade exit for the given trade and limit
|
||||
|
@ -2054,10 +2054,10 @@ class FreqtradeBot(LoggingMixin):
|
|||
def _notify_exit(
|
||||
self,
|
||||
trade: Trade,
|
||||
order_type: Optional[str],
|
||||
order_type: str | None,
|
||||
fill: bool = False,
|
||||
sub_trade: bool = False,
|
||||
order: Optional[Order] = None,
|
||||
order: Order | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Sends rpc notification when a sell occurred.
|
||||
|
@ -2170,7 +2170,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
# Send the message
|
||||
self.rpc.send_msg(msg)
|
||||
|
||||
def order_obj_or_raise(self, order_id: str, order_obj: Optional[Order]) -> Order:
|
||||
def order_obj_or_raise(self, order_id: str, order_obj: Order | None) -> Order:
|
||||
if not order_obj:
|
||||
raise DependencyException(
|
||||
f"Order_obj not found for {order_id}. This should not have happened."
|
||||
|
@ -2184,8 +2184,8 @@ class FreqtradeBot(LoggingMixin):
|
|||
def update_trade_state(
|
||||
self,
|
||||
trade: Trade,
|
||||
order_id: Optional[str],
|
||||
action_order: Optional[CcxtOrder] = None,
|
||||
order_id: str | None,
|
||||
action_order: CcxtOrder | None = None,
|
||||
*,
|
||||
stoploss_order: bool = False,
|
||||
send_msg: bool = True,
|
||||
|
@ -2322,7 +2322,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
amount: float,
|
||||
fee_abs: float,
|
||||
order_obj: Order,
|
||||
) -> Optional[float]:
|
||||
) -> float | None:
|
||||
"""
|
||||
Applies the fee to amount (either from Order or from Trades).
|
||||
Can eat into dust if more than the required asset is available.
|
||||
|
@ -2359,7 +2359,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
except DependencyException as exception:
|
||||
logger.warning("Could not update trade amount: %s", exception)
|
||||
|
||||
def get_real_amount(self, trade: Trade, order: CcxtOrder, order_obj: Order) -> Optional[float]:
|
||||
def get_real_amount(self, trade: Trade, order: CcxtOrder, order_obj: Order) -> float | None:
|
||||
"""
|
||||
Detect and update trade fee.
|
||||
Calls trade.update_fee() upon correct detection.
|
||||
|
@ -2420,7 +2420,7 @@ class FreqtradeBot(LoggingMixin):
|
|||
|
||||
def fee_detection_from_trades(
|
||||
self, trade: Trade, order: CcxtOrder, order_obj: Order, order_amount: float, trades: list
|
||||
) -> Optional[float]:
|
||||
) -> float | None:
|
||||
"""
|
||||
fee-detection fallback to Trades.
|
||||
Either uses provided trades list or the result of fetch_my_trades to get correct fee.
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from typing_extensions import TypedDict
|
||||
|
||||
|
@ -26,7 +26,7 @@ class BacktestHistoryEntryType(BacktestMetadataType):
|
|||
filename: str
|
||||
strategy: str
|
||||
notes: str
|
||||
backtest_start_ts: Optional[int]
|
||||
backtest_end_ts: Optional[int]
|
||||
timeframe: Optional[str]
|
||||
timeframe_detail: Optional[str]
|
||||
backtest_start_ts: int | None
|
||||
backtest_end_ts: int | None
|
||||
timeframe: str | None
|
||||
timeframe_detail: str | None
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
# Used for list-exchanges
|
||||
from typing import Optional
|
||||
|
||||
from typing_extensions import TypedDict
|
||||
|
||||
|
@ -17,5 +16,5 @@ class ValidExchangesType(TypedDict):
|
|||
comment: str
|
||||
dex: bool
|
||||
is_alias: bool
|
||||
alias_for: Optional[str]
|
||||
alias_for: str | None
|
||||
trade_modes: list[TradeModeType]
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from freqtrade.enums import MarginMode
|
||||
from freqtrade.exceptions import DependencyException
|
||||
|
@ -12,7 +11,7 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
def update_liquidation_prices(
|
||||
trade: Optional[LocalTrade] = None,
|
||||
trade: LocalTrade | None = None,
|
||||
*,
|
||||
exchange: Exchange,
|
||||
wallets: Wallets,
|
||||
|
|
|
@@ -6,11 +6,11 @@ Read the documentation to know what cli arguments you need.

import logging
import sys
from typing import Any, Optional
from typing import Any


# check min. python version
if sys.version_info < (3, 10): # pragma: no cover
if sys.version_info < (3, 10): # pragma: no cover # noqa: UP036
sys.exit("Freqtrade requires Python version >= 3.10")

from freqtrade import __version__
@@ -24,7 +24,7 @@ from freqtrade.system import asyncio_setup, gc_set_threshold
logger = logging.getLogger("freqtrade")


def main(sysargv: Optional[list[str]] = None) -> None:
def main(sysargv: list[str] | None = None) -> None:
"""
This function will initiate the bot and start the trading loop.
:return: None

@ -7,7 +7,7 @@ import logging
|
|||
from collections.abc import Iterator, Mapping
|
||||
from io import StringIO
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, TextIO, Union
|
||||
from typing import Any, TextIO
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import pandas as pd
|
||||
|
@ -129,10 +129,10 @@ def round_dict(d, n):
|
|||
return {k: (round(v, n) if isinstance(v, float) else v) for k, v in d.items()}
|
||||
|
||||
|
||||
DictMap = Union[dict[str, Any], Mapping[str, Any]]
|
||||
DictMap = dict[str, Any] | Mapping[str, Any]
|
||||
|
||||
|
||||
def safe_value_fallback(obj: DictMap, key1: str, key2: Optional[str] = None, default_value=None):
|
||||
def safe_value_fallback(obj: DictMap, key1: str, key2: str | None = None, default_value=None):
|
||||
"""
|
||||
Search a value in obj, return this if it's not None.
|
||||
Then search key2 in obj - return that if it's not none - then use default_value.
|
||||
|
@ -161,7 +161,7 @@ def safe_value_fallback2(dict1: DictMap, dict2: DictMap, key1: str, key2: str, d
|
|||
return default_value
|
||||
|
||||
|
||||
def plural(num: float, singular: str, plural: Optional[str] = None) -> str:
|
||||
def plural(num: float, singular: str, plural: str | None = None) -> str:
|
||||
return singular if (num == 1 or num == -1) else plural or singular + "s"
|
||||
|
||||
|
||||
|
|
|
@@ -1,4 +1,4 @@
from typing import Callable
from collections.abc import Callable

from cachetools import TTLCache, cached

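For context: the hunk above (and several others in this diff) moves Callable from typing to collections.abc, the import preferred since PEP 585 deprecated the typing aliases; pyupgrade-style rules handle this once the target version allows it. A minimal sketch of the preferred import in use; retry_call is an illustrative name, not taken from freqtrade:

# Sketch: Callable imported from collections.abc instead of typing (PEP 585).
from collections.abc import Callable


def retry_call(func: Callable[[], int], attempts: int = 3) -> int | None:
    # Hypothetical helper: call func up to `attempts` times, return the first result.
    for _ in range(attempts):
        try:
            return func()
        except RuntimeError:
            continue
    return None


print(retry_call(lambda: 42))  # 42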
@ -1,7 +1,7 @@
|
|||
import logging
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Union
|
||||
from typing import Any
|
||||
|
||||
import pandas as pd
|
||||
from rich.text import Text
|
||||
|
@ -21,7 +21,7 @@ class LookaheadAnalysisSubFunctions:
|
|||
def text_table_lookahead_analysis_instances(
|
||||
config: dict[str, Any],
|
||||
lookahead_instances: list[LookaheadAnalysis],
|
||||
caption: Union[str, None] = None,
|
||||
caption: str | None = None,
|
||||
):
|
||||
headers = [
|
||||
"filename",
|
||||
|
@ -243,7 +243,7 @@ class LookaheadAnalysisSubFunctions:
|
|||
|
||||
# report the results
|
||||
if lookaheadAnalysis_instances:
|
||||
caption: Union[str, None] = None
|
||||
caption: str | None = None
|
||||
if any(
|
||||
[
|
||||
any(
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import hashlib
|
||||
from copy import deepcopy
|
||||
from pathlib import Path
|
||||
from typing import Union
|
||||
|
||||
import rapidjson
|
||||
|
||||
|
@ -38,7 +37,7 @@ def get_strategy_run_id(strategy) -> str:
|
|||
return digest.hexdigest().lower()
|
||||
|
||||
|
||||
def get_backtest_metadata_filename(filename: Union[Path, str]) -> Path:
|
||||
def get_backtest_metadata_filename(filename: Path | str) -> Path:
|
||||
"""Return metadata filename for specified backtest results file."""
|
||||
filename = Path(filename)
|
||||
return filename.parent / Path(f"{filename.stem}.meta{filename.suffix}")
|
||||
|
|
|
@ -8,7 +8,7 @@ import logging
|
|||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from numpy import nan
|
||||
from pandas import DataFrame
|
||||
|
@ -110,7 +110,7 @@ class Backtesting:
|
|||
backtesting.start()
|
||||
"""
|
||||
|
||||
def __init__(self, config: Config, exchange: Optional[Exchange] = None) -> None:
|
||||
def __init__(self, config: Config, exchange: Exchange | None = None) -> None:
|
||||
LoggingMixin.show_output = False
|
||||
self.config = config
|
||||
self.results: BacktestResultType = get_BacktestResultType_default()
|
||||
|
@ -685,7 +685,7 @@ class Backtesting:
|
|||
)
|
||||
|
||||
def _try_close_open_order(
|
||||
self, order: Optional[Order], trade: LocalTrade, current_date: datetime, row: tuple
|
||||
self, order: Order | None, trade: LocalTrade, current_date: datetime, row: tuple
|
||||
) -> bool:
|
||||
"""
|
||||
Check if an order is open and if it should've filled.
|
||||
|
@ -742,8 +742,8 @@ class Backtesting:
|
|||
row: tuple,
|
||||
exit_: ExitCheckTuple,
|
||||
current_time: datetime,
|
||||
amount: Optional[float] = None,
|
||||
) -> Optional[LocalTrade]:
|
||||
amount: float | None = None,
|
||||
) -> LocalTrade | None:
|
||||
if exit_.exit_flag:
|
||||
trade.close_date = current_time
|
||||
exit_reason = exit_.exit_reason
|
||||
|
@ -822,8 +822,8 @@ class Backtesting:
|
|||
sell_row: tuple,
|
||||
close_rate: float,
|
||||
amount: float,
|
||||
exit_reason: Optional[str],
|
||||
) -> Optional[LocalTrade]:
|
||||
exit_reason: str | None,
|
||||
) -> LocalTrade | None:
|
||||
self.order_id_counter += 1
|
||||
exit_candle_time = sell_row[DATE_IDX].to_pydatetime()
|
||||
order_type = self.strategy.order_types["exit"]
|
||||
|
@ -859,7 +859,7 @@ class Backtesting:
|
|||
|
||||
def _check_trade_exit(
|
||||
self, trade: LocalTrade, row: tuple, current_time: datetime
|
||||
) -> Optional[LocalTrade]:
|
||||
) -> LocalTrade | None:
|
||||
self._run_funding_fees(trade, current_time)
|
||||
|
||||
# Check if we need to adjust our current positions
|
||||
|
@ -909,10 +909,10 @@ class Backtesting:
|
|||
stake_amount: float,
|
||||
direction: LongShort,
|
||||
current_time: datetime,
|
||||
entry_tag: Optional[str],
|
||||
trade: Optional[LocalTrade],
|
||||
entry_tag: str | None,
|
||||
trade: LocalTrade | None,
|
||||
order_type: str,
|
||||
price_precision: Optional[float],
|
||||
price_precision: float | None,
|
||||
) -> tuple[float, float, float, float]:
|
||||
if order_type == "limit":
|
||||
new_rate = strategy_safe_wrapper(
|
||||
|
@ -1004,12 +1004,12 @@ class Backtesting:
|
|||
pair: str,
|
||||
row: tuple,
|
||||
direction: LongShort,
|
||||
stake_amount: Optional[float] = None,
|
||||
trade: Optional[LocalTrade] = None,
|
||||
requested_rate: Optional[float] = None,
|
||||
requested_stake: Optional[float] = None,
|
||||
entry_tag1: Optional[str] = None,
|
||||
) -> Optional[LocalTrade]:
|
||||
stake_amount: float | None = None,
|
||||
trade: LocalTrade | None = None,
|
||||
requested_rate: float | None = None,
|
||||
requested_stake: float | None = None,
|
||||
entry_tag1: str | None = None,
|
||||
) -> LocalTrade | None:
|
||||
"""
|
||||
:param trade: Trade to adjust - initial entry if None
|
||||
:param requested_rate: Adjusted entry rate
|
||||
|
@ -1178,7 +1178,7 @@ class Backtesting:
|
|||
self.rejected_trades += 1
|
||||
return False
|
||||
|
||||
def check_for_trade_entry(self, row) -> Optional[LongShort]:
|
||||
def check_for_trade_entry(self, row) -> LongShort | None:
|
||||
enter_long = row[LONG_IDX] == 1
|
||||
exit_long = row[ELONG_IDX] == 1
|
||||
enter_short = self._can_short and row[SHORT_IDX] == 1
|
||||
|
@ -1216,7 +1216,7 @@ class Backtesting:
|
|||
|
||||
def check_order_cancel(
|
||||
self, trade: LocalTrade, order: Order, current_time: datetime
|
||||
) -> Optional[bool]:
|
||||
) -> bool | None:
|
||||
"""
|
||||
Check if current analyzed order has to be canceled.
|
||||
Returns True if the trade should be Deleted (initial order was canceled),
|
||||
|
@ -1298,7 +1298,7 @@ class Backtesting:
|
|||
|
||||
def validate_row(
|
||||
self, data: dict, pair: str, row_index: int, current_time: datetime
|
||||
) -> Optional[tuple]:
|
||||
) -> tuple | None:
|
||||
try:
|
||||
# Row is treated as "current incomplete candle".
|
||||
# entry / exit signals are shifted by 1 to compensate for this.
|
||||
|
@ -1332,7 +1332,7 @@ class Backtesting:
|
|||
row: tuple,
|
||||
pair: str,
|
||||
current_time: datetime,
|
||||
trade_dir: Optional[LongShort],
|
||||
trade_dir: LongShort | None,
|
||||
can_enter: bool,
|
||||
) -> None:
|
||||
"""
|
||||
|
@ -1354,15 +1354,15 @@ class Backtesting:
|
|||
row: tuple,
|
||||
pair: str,
|
||||
current_time: datetime,
|
||||
trade_dir: Optional[LongShort],
|
||||
trade_dir: LongShort | None,
|
||||
can_enter: bool,
|
||||
) -> Optional[LongShort]:
|
||||
) -> LongShort | None:
|
||||
"""
|
||||
NOTE: This method is used by Hyperopt at each iteration. Please keep it optimized.
|
||||
|
||||
Backtesting processing for one candle/pair.
|
||||
"""
|
||||
exiting_dir: Optional[LongShort] = None
|
||||
exiting_dir: LongShort | None = None
|
||||
if not self._position_stacking and len(LocalTrade.bt_trades_open_pp[pair]) > 0:
|
||||
# position_stacking not supported for now.
|
||||
exiting_dir = "short" if LocalTrade.bt_trades_open_pp[pair][0].is_short else "long"
|
||||
|
@ -1481,7 +1481,7 @@ class Backtesting:
|
|||
self.dataprovider._set_dataframe_max_index(self.required_startup + row_index)
|
||||
self.dataprovider._set_dataframe_max_date(current_time)
|
||||
current_detail_time: datetime = row[DATE_IDX].to_pydatetime()
|
||||
trade_dir: Optional[LongShort] = self.check_for_trade_entry(row)
|
||||
trade_dir: LongShort | None = self.check_for_trade_entry(row)
|
||||
|
||||
if (
|
||||
(trade_dir is not None or len(LocalTrade.bt_trades_open_pp[pair]) > 0)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import logging
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from pandas import DataFrame
|
||||
|
||||
|
@ -28,7 +28,7 @@ class BaseAnalysis:
|
|||
def __init__(self, config: dict[str, Any], strategy_obj: dict):
|
||||
self.failed_bias_check = True
|
||||
self.full_varHolder = VarHolder()
|
||||
self.exchange: Optional[Any] = None
|
||||
self.exchange: Any | None = None
|
||||
self._fee = None
|
||||
|
||||
# pull variables the scope of the lookahead_analysis-instance
|
||||
|
|
|
@ -11,7 +11,7 @@ import warnings
|
|||
from datetime import datetime, timezone
|
||||
from math import ceil
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
import rapidjson
|
||||
from joblib import Parallel, cpu_count, delayed, dump, load, wrap_non_picklable_objects
|
||||
|
@ -125,7 +125,7 @@ class Hyperopt:
|
|||
|
||||
self.market_change = 0.0
|
||||
self.num_epochs_saved = 0
|
||||
self.current_best_epoch: Optional[dict[str, Any]] = None
|
||||
self.current_best_epoch: dict[str, Any] | None = None
|
||||
|
||||
if HyperoptTools.has_space(self.config, "sell"):
|
||||
# Make sure use_exit_signal is enabled
|
||||
|
@ -171,7 +171,7 @@ class Hyperopt:
|
|||
|
||||
# Return a dict where the keys are the names of the dimensions
|
||||
# and the values are taken from the list of parameters.
|
||||
return {d.name: v for d, v in zip(dimensions, raw_params)}
|
||||
return {d.name: v for d, v in zip(dimensions, raw_params, strict=False)}
|
||||
|
||||
def _save_result(self, epoch: dict) -> None:
|
||||
"""
|
||||
|
@ -479,7 +479,7 @@ class Hyperopt:
|
|||
delayed(wrap_non_picklable_objects(self.generate_optimizer))(v) for v in asked
|
||||
)
|
||||
|
||||
def _set_random_state(self, random_state: Optional[int]) -> int:
|
||||
def _set_random_state(self, random_state: int | None) -> int:
|
||||
return random_state or random.randint(1, 2**16 - 1) # noqa: S311
|
||||
|
||||
def advise_and_trim(self, data: dict[str, DataFrame]) -> dict[str, DataFrame]:
|
||||
|
@ -551,7 +551,7 @@ class Hyperopt:
|
|||
is_random = [True for _ in range(len(asked))]
|
||||
is_random_non_tried += [
|
||||
rand
|
||||
for x, rand in zip(asked, is_random)
|
||||
for x, rand in zip(asked, is_random, strict=False)
|
||||
if x not in self.opt.Xi and x not in asked_non_tried
|
||||
]
|
||||
asked_non_tried += [
|
||||
|
|
|
@@ -5,8 +5,8 @@ This module implements a convenience auto-hyperopt class, which can be used toge
"""

import logging
from collections.abc import Callable
from contextlib import suppress
from typing import Callable

from freqtrade.exceptions import OperationalException

@@ -6,7 +6,7 @@ This module defines the interface to apply for hyperopt
import logging
import math
from abc import ABC
from typing import Union
from typing import TypeAlias

from sklearn.base import RegressorMixin
from skopt.space import Categorical, Dimension, Integer
@@ -20,7 +20,7 @@ from freqtrade.strategy import IStrategy

logger = logging.getLogger(__name__)

EstimatorType = Union[RegressorMixin, str]
EstimatorType: TypeAlias = RegressorMixin | str


class IHyperOpt(ABC):

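For context: the hunk above converts a module-level alias from Union[...] to an explicitly annotated alias using typing.TypeAlias (PEP 613, available from Python 3.10) combined with PEP 604 unions. A minimal standalone sketch of that shape; the alias and function names are illustrative, not from the diff:

# Sketch of an explicitly annotated type alias with PEP 604 unions (Python 3.10+).
from typing import TypeAlias

Number: TypeAlias = int | float


def half(value: Number) -> float:
    # Works for ints and floats alike.
    return value / 2


print(half(3), half(2.5))  # 1.5 1.25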
@ -1,6 +1,6 @@
|
|||
import sys
|
||||
from os import get_terminal_size
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from rich.align import Align
|
||||
from rich.console import Console
|
||||
|
@ -37,7 +37,7 @@ class HyperoptOutput:
|
|||
self.table.add_column("Objective", justify="right")
|
||||
self.table.add_column("Max Drawdown (Acct)", justify="right")
|
||||
|
||||
def print(self, console: Optional[Console] = None, *, print_colorized=True):
|
||||
def print(self, console: Console | None = None, *, print_colorized=True):
|
||||
if not console:
|
||||
console = Console(
|
||||
color_system="auto" if print_colorized else None,
|
||||
|
@ -57,7 +57,7 @@ class HyperoptOutput:
|
|||
stake_currency = config["stake_currency"]
|
||||
self._results.extend(results)
|
||||
|
||||
max_rows: Optional[int] = None
|
||||
max_rows: int | None = None
|
||||
|
||||
if self._streaming:
|
||||
try:
|
||||
|
|
|
@ -3,7 +3,7 @@ from collections.abc import Iterator
|
|||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
import numpy as np
|
||||
import rapidjson
|
||||
|
@ -44,7 +44,7 @@ class HyperoptStateContainer:
|
|||
|
||||
class HyperoptTools:
|
||||
@staticmethod
|
||||
def get_strategy_filename(config: Config, strategy_name: str) -> Optional[Path]:
|
||||
def get_strategy_filename(config: Config, strategy_name: str) -> Path | None:
|
||||
"""
|
||||
Get Strategy-location (filename) from strategy_name
|
||||
"""
|
||||
|
@ -188,7 +188,7 @@ class HyperoptTools:
|
|||
total_epochs: int,
|
||||
print_json: bool,
|
||||
no_header: bool = False,
|
||||
header_str: Optional[str] = None,
|
||||
header_str: str | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Display details of the hyperopt result
|
||||
|
@ -257,7 +257,7 @@ class HyperoptTools:
|
|||
|
||||
@staticmethod
|
||||
def _params_pretty_print(
|
||||
params, space: str, header: str, non_optimized: Optional[dict] = None
|
||||
params, space: str, header: str, non_optimized: dict | None = None
|
||||
) -> None:
|
||||
if space in params or (non_optimized and space in non_optimized):
|
||||
space_params = HyperoptTools._space_params(params, space, 5)
|
||||
|
@ -299,7 +299,7 @@ class HyperoptTools:
|
|||
print(result)
|
||||
|
||||
@staticmethod
|
||||
def _space_params(params, space: str, r: Optional[int] = None) -> dict:
|
||||
def _space_params(params, space: str, r: int | None = None) -> dict:
|
||||
d = params.get(space)
|
||||
if d:
|
||||
# Round floats to `r` digits after the decimal point if requested
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import logging
|
||||
from typing import Any, Literal, Union
|
||||
from typing import Any, Literal
|
||||
|
||||
from freqtrade.constants import UNLIMITED_STAKE_AMOUNT, Config
|
||||
from freqtrade.ft_types import BacktestResultType
|
||||
|
@ -18,7 +18,7 @@ def _get_line_floatfmt(stake_currency: str) -> list[str]:
|
|||
|
||||
|
||||
def _get_line_header(
|
||||
first_column: Union[str, list[str]], stake_currency: str, direction: str = "Trades"
|
||||
first_column: str | list[str], stake_currency: str, direction: str = "Trades"
|
||||
) -> list[str]:
|
||||
"""
|
||||
Generate header lines (goes in line with _generate_result_line())
|
||||
|
@ -172,7 +172,7 @@ def text_table_strategy(strategy_results, stake_currency: str, title: str):
|
|||
dd_pad_per = max([len(dd) for dd in drawdown])
|
||||
drawdown = [
|
||||
f'{t["max_drawdown_abs"]:>{dd_pad_abs}} {stake_currency} {dd:>{dd_pad_per}}%'
|
||||
for t, dd in zip(strategy_results, drawdown)
|
||||
for t, dd in zip(strategy_results, drawdown, strict=False)
|
||||
]
|
||||
|
||||
output = [
|
||||
|
@ -186,7 +186,7 @@ def text_table_strategy(strategy_results, stake_currency: str, title: str):
|
|||
generate_wins_draws_losses(t["wins"], t["draws"], t["losses"]),
|
||||
drawdown,
|
||||
]
|
||||
for t, drawdown in zip(strategy_results, drawdown)
|
||||
for t, drawdown in zip(strategy_results, drawdown, strict=False)
|
||||
]
|
||||
print_rich_table(output, headers, summary=title)
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from pandas import DataFrame
|
||||
|
||||
|
@ -35,7 +34,7 @@ def store_backtest_stats(
|
|||
stats: BacktestResultType,
|
||||
dtappendix: str,
|
||||
*,
|
||||
market_change_data: Optional[DataFrame] = None,
|
||||
market_change_data: DataFrame | None = None,
|
||||
) -> Path:
|
||||
"""
|
||||
Stores backtest results
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import logging
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any, Literal, Union
|
||||
from typing import Any, Literal
|
||||
|
||||
import numpy as np
|
||||
from pandas import DataFrame, Series, concat, to_datetime
|
||||
|
@ -69,7 +69,7 @@ def generate_rejected_signals(
|
|||
|
||||
|
||||
def _generate_result_line(
|
||||
result: DataFrame, starting_balance: int, first_column: Union[str, list[str]]
|
||||
result: DataFrame, starting_balance: int, first_column: str | list[str]
|
||||
) -> dict:
|
||||
"""
|
||||
Generate one result dict, with "first_column" as key.
|
||||
|
@ -143,7 +143,7 @@ def generate_pair_metrics(
|
|||
|
||||
|
||||
def generate_tag_metrics(
|
||||
tag_type: Union[Literal["enter_tag", "exit_reason"], list[Literal["enter_tag", "exit_reason"]]],
|
||||
tag_type: Literal["enter_tag", "exit_reason"] | list[Literal["enter_tag", "exit_reason"]],
|
||||
starting_balance: int,
|
||||
results: DataFrame,
|
||||
skip_nan: bool = False,
|
||||
|
@ -208,7 +208,7 @@ def _get_resample_from_period(period: str) -> str:
|
|||
|
||||
|
||||
def generate_periodic_breakdown_stats(
|
||||
trade_list: Union[list, DataFrame], period: str
|
||||
trade_list: list | DataFrame, period: str
|
||||
) -> list[dict[str, Any]]:
|
||||
results = trade_list if not isinstance(trade_list, list) else DataFrame.from_records(trade_list)
|
||||
if len(results) == 0:
|
||||
|
@ -559,7 +559,7 @@ def generate_strategy_stats(
|
|||
|
||||
def generate_backtest_stats(
|
||||
btdata: dict[str, DataFrame],
|
||||
all_results: dict[str, dict[str, Union[DataFrame, dict]]],
|
||||
all_results: dict[str, dict[str, DataFrame | dict]],
|
||||
min_date: datetime,
|
||||
max_date: datetime,
|
||||
) -> BacktestResultType:
|
||||
|
|
|
@ -2,7 +2,7 @@ import json
|
|||
import logging
|
||||
from collections.abc import Sequence
|
||||
from datetime import datetime
|
||||
from typing import Any, ClassVar, Optional
|
||||
from typing import Any, ClassVar
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Integer, String, Text, UniqueConstraint, select
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
@ -42,7 +42,7 @@ class _CustomData(ModelBase):
|
|||
cd_type: Mapped[str] = mapped_column(String(25), nullable=False)
|
||||
cd_value: Mapped[str] = mapped_column(Text, nullable=False)
|
||||
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, default=dt_now)
|
||||
updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
|
||||
updated_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
|
||||
|
||||
# Empty container value - not persisted, but filled with cd_value on query
|
||||
value: Any = None
|
||||
|
@ -62,7 +62,7 @@ class _CustomData(ModelBase):
|
|||
|
||||
@classmethod
|
||||
def query_cd(
|
||||
cls, key: Optional[str] = None, trade_id: Optional[int] = None
|
||||
cls, key: str | None = None, trade_id: int | None = None
|
||||
) -> Sequence["_CustomData"]:
|
||||
"""
|
||||
Get all CustomData, if trade_id is not specified
|
||||
|
@ -117,7 +117,7 @@ class CustomDataWrapper:
|
|||
_CustomData.session.commit()
|
||||
|
||||
@staticmethod
|
||||
def get_custom_data(*, trade_id: int, key: Optional[str] = None) -> list[_CustomData]:
|
||||
def get_custom_data(*, trade_id: int, key: str | None = None) -> list[_CustomData]:
|
||||
if CustomDataWrapper.use_db:
|
||||
filters = [
|
||||
_CustomData.ft_trade_id == trade_id,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from typing import ClassVar, Optional, Union
|
||||
from typing import ClassVar
|
||||
|
||||
from sqlalchemy import String
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
@ -8,7 +8,7 @@ from sqlalchemy.orm import Mapped, mapped_column
|
|||
from freqtrade.persistence.base import ModelBase, SessionType
|
||||
|
||||
|
||||
ValueTypes = Union[str, datetime, float, int]
|
||||
ValueTypes = str | datetime | float | int
|
||||
|
||||
|
||||
class ValueTypesEnum(str, Enum):
|
||||
|
@ -37,10 +37,10 @@ class _KeyValueStoreModel(ModelBase):
|
|||
|
||||
value_type: Mapped[ValueTypesEnum] = mapped_column(String(20), nullable=False)
|
||||
|
||||
string_value: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
|
||||
datetime_value: Mapped[Optional[datetime]]
|
||||
float_value: Mapped[Optional[float]]
|
||||
int_value: Mapped[Optional[int]]
|
||||
string_value: Mapped[str | None] = mapped_column(String(255), nullable=True)
|
||||
datetime_value: Mapped[datetime | None]
|
||||
float_value: Mapped[float | None]
|
||||
int_value: Mapped[int | None]
|
||||
|
||||
|
||||
class KeyValueStore:
|
||||
|
@ -97,7 +97,7 @@ class KeyValueStore:
|
|||
_KeyValueStoreModel.session.commit()
|
||||
|
||||
@staticmethod
|
||||
def get_value(key: KeyStoreKeys) -> Optional[ValueTypes]:
|
||||
def get_value(key: KeyStoreKeys) -> ValueTypes | None:
|
||||
"""
|
||||
Get the value for the given key.
|
||||
:param key: Key to get the value for
|
||||
|
@ -121,7 +121,7 @@ class KeyValueStore:
|
|||
raise ValueError(f"Unknown value type {kv.value_type}") # pragma: no cover
|
||||
|
||||
@staticmethod
|
||||
def get_string_value(key: KeyStoreKeys) -> Optional[str]:
|
||||
def get_string_value(key: KeyStoreKeys) -> str | None:
|
||||
"""
|
||||
Get the value for the given key.
|
||||
:param key: Key to get the value for
|
||||
|
@ -139,7 +139,7 @@ class KeyValueStore:
|
|||
return kv.string_value
|
||||
|
||||
@staticmethod
|
||||
def get_datetime_value(key: KeyStoreKeys) -> Optional[datetime]:
|
||||
def get_datetime_value(key: KeyStoreKeys) -> datetime | None:
|
||||
"""
|
||||
Get the value for the given key.
|
||||
:param key: Key to get the value for
|
||||
|
@ -157,7 +157,7 @@ class KeyValueStore:
|
|||
return kv.datetime_value.replace(tzinfo=timezone.utc)
|
||||
|
||||
@staticmethod
|
||||
def get_float_value(key: KeyStoreKeys) -> Optional[float]:
|
||||
def get_float_value(key: KeyStoreKeys) -> float | None:
|
||||
"""
|
||||
Get the value for the given key.
|
||||
:param key: Key to get the value for
|
||||
|
@ -175,7 +175,7 @@ class KeyValueStore:
|
|||
return kv.float_value
|
||||
|
||||
@staticmethod
|
||||
def get_int_value(key: KeyStoreKeys) -> Optional[int]:
|
||||
def get_int_value(key: KeyStoreKeys) -> int | None:
|
||||
"""
|
||||
Get the value for the given key.
|
||||
:param key: Key to get the value for
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import inspect, select, text, update
|
||||
|
||||
|
@ -32,8 +31,8 @@ def get_backup_name(tabs: list[str], backup_prefix: str):
|
|||
|
||||
|
||||
def get_last_sequence_ids(engine, trade_back_name: str, order_back_name: str):
|
||||
order_id: Optional[int] = None
|
||||
trade_id: Optional[int] = None
|
||||
order_id: int | None = None
|
||||
trade_id: int | None = None
|
||||
|
||||
if engine.name == "postgresql":
|
||||
with engine.begin() as connection:
|
||||
|
|
|
@ -5,7 +5,7 @@ This module contains the class to persist trades into SQLite
|
|||
import logging
|
||||
import threading
|
||||
from contextvars import ContextVar
|
||||
from typing import Any, Final, Optional
|
||||
from typing import Any, Final
|
||||
|
||||
from sqlalchemy import create_engine, inspect
|
||||
from sqlalchemy.exc import NoSuchModuleError
|
||||
|
@ -25,10 +25,10 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
REQUEST_ID_CTX_KEY: Final[str] = "request_id"
|
||||
_request_id_ctx_var: ContextVar[Optional[str]] = ContextVar(REQUEST_ID_CTX_KEY, default=None)
|
||||
_request_id_ctx_var: ContextVar[str | None] = ContextVar(REQUEST_ID_CTX_KEY, default=None)
|
||||
|
||||
|
||||
def get_request_or_thread_id() -> Optional[str]:
|
||||
def get_request_or_thread_id() -> str | None:
|
||||
"""
|
||||
Helper method to get either async context (for fastapi requests), or thread id
|
||||
"""
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from datetime import datetime, timezone
|
||||
from typing import Any, ClassVar, Optional
|
||||
from typing import Any, ClassVar
|
||||
|
||||
from sqlalchemy import ScalarResult, String, or_, select
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
@ -21,7 +21,7 @@ class PairLock(ModelBase):
|
|||
pair: Mapped[str] = mapped_column(String(25), nullable=False, index=True)
|
||||
# lock direction - long, short or * (for both)
|
||||
side: Mapped[str] = mapped_column(String(25), nullable=False, default="*")
|
||||
reason: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
|
||||
reason: Mapped[str | None] = mapped_column(String(255), nullable=True)
|
||||
# Time the pair was locked (start time)
|
||||
lock_time: Mapped[datetime] = mapped_column(nullable=False)
|
||||
# Time until the pair is locked (end time)
|
||||
|
@ -39,7 +39,7 @@ class PairLock(ModelBase):
|
|||
|
||||
@staticmethod
|
||||
def query_pair_locks(
|
||||
pair: Optional[str], now: datetime, side: str = "*"
|
||||
pair: str | None, now: datetime, side: str = "*"
|
||||
) -> ScalarResult["PairLock"]:
|
||||
"""
|
||||
Get all currently active locks for this pair
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import logging
|
||||
from collections.abc import Sequence
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
|
@ -36,9 +35,9 @@ class PairLocks:
|
|||
def lock_pair(
|
||||
pair: str,
|
||||
until: datetime,
|
||||
reason: Optional[str] = None,
|
||||
reason: str | None = None,
|
||||
*,
|
||||
now: Optional[datetime] = None,
|
||||
now: datetime | None = None,
|
||||
side: str = "*",
|
||||
) -> PairLock:
|
||||
"""
|
||||
|
@ -68,7 +67,7 @@ class PairLocks:
|
|||
|
||||
@staticmethod
|
||||
def get_pair_locks(
|
||||
pair: Optional[str], now: Optional[datetime] = None, side: str = "*"
|
||||
pair: str | None, now: datetime | None = None, side: str = "*"
|
||||
) -> Sequence[PairLock]:
|
||||
"""
|
||||
Get all currently active locks for this pair
|
||||
|
@ -96,8 +95,8 @@ class PairLocks:
|
|||
|
||||
@staticmethod
|
||||
def get_pair_longest_lock(
|
||||
pair: str, now: Optional[datetime] = None, side: str = "*"
|
||||
) -> Optional[PairLock]:
|
||||
pair: str, now: datetime | None = None, side: str = "*"
|
||||
) -> PairLock | None:
|
||||
"""
|
||||
Get the lock that expires the latest for the pair given.
|
||||
"""
|
||||
|
@ -106,7 +105,7 @@ class PairLocks:
|
|||
return locks[0] if locks else None
|
||||
|
||||
@staticmethod
|
||||
def unlock_pair(pair: str, now: Optional[datetime] = None, side: str = "*") -> None:
|
||||
def unlock_pair(pair: str, now: datetime | None = None, side: str = "*") -> None:
|
||||
"""
|
||||
Release all locks for this pair.
|
||||
:param pair: Pair to unlock
|
||||
|
@ -124,7 +123,7 @@ class PairLocks:
|
|||
PairLock.session.commit()
|
||||
|
||||
@staticmethod
|
||||
def unlock_reason(reason: str, now: Optional[datetime] = None) -> None:
|
||||
def unlock_reason(reason: str, now: datetime | None = None) -> None:
|
||||
"""
|
||||
Release all locks for this reason.
|
||||
:param reason: Which reason to unlock
|
||||
|
@ -155,7 +154,7 @@ class PairLocks:
|
|||
lock.active = False
|
||||
|
||||
@staticmethod
|
||||
def is_global_lock(now: Optional[datetime] = None, side: str = "*") -> bool:
|
||||
def is_global_lock(now: datetime | None = None, side: str = "*") -> bool:
|
||||
"""
|
||||
:param now: Datetime object (generated via datetime.now(timezone.utc)).
|
||||
defaults to datetime.now(timezone.utc)
|
||||
|
@ -166,7 +165,7 @@ class PairLocks:
|
|||
return len(PairLocks.get_pair_locks("*", now, side)) > 0
|
||||
|
||||
@staticmethod
|
||||
def is_pair_locked(pair: str, now: Optional[datetime] = None, side: str = "*") -> bool:
|
||||
def is_pair_locked(pair: str, now: datetime | None = None, side: str = "*") -> bool:
|
||||
"""
|
||||
:param pair: Pair to check for
|
||||
:param now: Datetime object (generated via datetime.now(timezone.utc)).
|
||||
|
|
|
@ -97,26 +97,24 @@ class Order(ModelBase):
|
|||
ft_cancel_reason: Mapped[str] = mapped_column(String(CUSTOM_TAG_MAX_LENGTH), nullable=True)
|
||||
|
||||
order_id: Mapped[str] = mapped_column(String(255), nullable=False, index=True)
|
||||
status: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
|
||||
symbol: Mapped[Optional[str]] = mapped_column(String(25), nullable=True)
|
||||
order_type: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
|
||||
status: Mapped[str | None] = mapped_column(String(255), nullable=True)
|
||||
symbol: Mapped[str | None] = mapped_column(String(25), nullable=True)
|
||||
order_type: Mapped[str | None] = mapped_column(String(50), nullable=True)
|
||||
side: Mapped[str] = mapped_column(String(25), nullable=True)
|
||||
price: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
average: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
amount: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
filled: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
remaining: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
cost: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
stop_price: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
price: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
average: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
amount: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
filled: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
remaining: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
cost: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
stop_price: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
order_date: Mapped[datetime] = mapped_column(nullable=True, default=dt_now)
|
||||
order_filled_date: Mapped[Optional[datetime]] = mapped_column(nullable=True)
|
||||
order_update_date: Mapped[Optional[datetime]] = mapped_column(nullable=True)
|
||||
funding_fee: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
order_filled_date: Mapped[datetime | None] = mapped_column(nullable=True)
|
||||
order_update_date: Mapped[datetime | None] = mapped_column(nullable=True)
|
||||
funding_fee: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
|
||||
ft_fee_base: Mapped[Optional[float]] = mapped_column(Float(), nullable=True)
|
||||
ft_order_tag: Mapped[Optional[str]] = mapped_column(
|
||||
String(CUSTOM_TAG_MAX_LENGTH), nullable=True
|
||||
)
|
||||
ft_fee_base: Mapped[float | None] = mapped_column(Float(), nullable=True)
|
||||
ft_order_tag: Mapped[str | None] = mapped_column(String(CUSTOM_TAG_MAX_LENGTH), nullable=True)
|
||||
|
||||
@property
|
||||
def order_date_utc(self) -> datetime:
|
||||
|
@ -124,7 +122,7 @@ class Order(ModelBase):
|
|||
return self.order_date.replace(tzinfo=timezone.utc)
|
||||
|
||||
@property
|
||||
def order_filled_utc(self) -> Optional[datetime]:
|
||||
def order_filled_utc(self) -> datetime | None:
|
||||
"""last order-date with UTC timezoneinfo"""
|
||||
return (
|
||||
self.order_filled_date.replace(tzinfo=timezone.utc) if self.order_filled_date else None
|
||||
|
@ -337,8 +335,8 @@ class Order(ModelBase):
|
|||
order: CcxtOrder,
|
||||
pair: str,
|
||||
side: str,
|
||||
amount: Optional[float] = None,
|
||||
price: Optional[float] = None,
|
||||
amount: float | None = None,
|
||||
price: float | None = None,
|
||||
) -> Self:
|
||||
"""
|
||||
Parse an order from a ccxt object and return a new order Object.
|
||||
|
@ -395,57 +393,57 @@ class LocalTrade:
|
|||
|
||||
exchange: str = ""
|
||||
pair: str = ""
|
||||
base_currency: Optional[str] = ""
|
||||
stake_currency: Optional[str] = ""
|
||||
base_currency: str | None = ""
|
||||
stake_currency: str | None = ""
|
||||
is_open: bool = True
|
||||
fee_open: float = 0.0
|
||||
fee_open_cost: Optional[float] = None
|
||||
fee_open_currency: Optional[str] = ""
|
||||
fee_close: Optional[float] = 0.0
|
||||
fee_close_cost: Optional[float] = None
|
||||
fee_close_currency: Optional[str] = ""
|
||||
fee_open_cost: float | None = None
|
||||
fee_open_currency: str | None = ""
|
||||
fee_close: float | None = 0.0
|
||||
fee_close_cost: float | None = None
|
||||
fee_close_currency: str | None = ""
|
||||
open_rate: float = 0.0
|
||||
open_rate_requested: Optional[float] = None
|
||||
open_rate_requested: float | None = None
|
||||
# open_trade_value - calculated via _calc_open_trade_value
|
||||
open_trade_value: float = 0.0
|
||||
close_rate: Optional[float] = None
|
||||
close_rate_requested: Optional[float] = None
|
||||
close_profit: Optional[float] = None
|
||||
close_profit_abs: Optional[float] = None
|
||||
close_rate: float | None = None
|
||||
close_rate_requested: float | None = None
|
||||
close_profit: float | None = None
|
||||
close_profit_abs: float | None = None
|
||||
stake_amount: float = 0.0
|
||||
max_stake_amount: Optional[float] = 0.0
|
||||
max_stake_amount: float | None = 0.0
|
||||
amount: float = 0.0
|
||||
amount_requested: Optional[float] = None
|
||||
amount_requested: float | None = None
|
||||
open_date: datetime
|
||||
close_date: Optional[datetime] = None
|
||||
close_date: datetime | None = None
|
||||
# absolute value of the stop loss
|
||||
stop_loss: float = 0.0
|
||||
# percentage value of the stop loss
|
||||
stop_loss_pct: Optional[float] = 0.0
|
||||
stop_loss_pct: float | None = 0.0
|
||||
# absolute value of the initial stop loss
|
||||
initial_stop_loss: Optional[float] = 0.0
|
||||
initial_stop_loss: float | None = 0.0
|
||||
# percentage value of the initial stop loss
|
||||
initial_stop_loss_pct: Optional[float] = None
|
||||
initial_stop_loss_pct: float | None = None
|
||||
is_stop_loss_trailing: bool = False
|
||||
# absolute value of the highest reached price
|
||||
max_rate: Optional[float] = None
|
||||
max_rate: float | None = None
|
||||
# Lowest price reached
|
||||
min_rate: Optional[float] = None
|
||||
exit_reason: Optional[str] = ""
|
||||
exit_order_status: Optional[str] = ""
|
||||
strategy: Optional[str] = ""
|
||||
enter_tag: Optional[str] = None
|
||||
timeframe: Optional[int] = None
|
||||
min_rate: float | None = None
|
||||
exit_reason: str | None = ""
|
||||
exit_order_status: str | None = ""
|
||||
strategy: str | None = ""
|
||||
enter_tag: str | None = None
|
||||
timeframe: int | None = None
|
||||
|
||||
trading_mode: TradingMode = TradingMode.SPOT
|
||||
amount_precision: Optional[float] = None
|
||||
price_precision: Optional[float] = None
|
||||
precision_mode: Optional[int] = None
|
||||
precision_mode_price: Optional[int] = None
|
||||
contract_size: Optional[float] = None
|
||||
amount_precision: float | None = None
|
||||
price_precision: float | None = None
|
||||
precision_mode: int | None = None
|
||||
precision_mode_price: int | None = None
|
||||
contract_size: float | None = None
|
||||
|
||||
# Leverage trading properties
|
||||
liquidation_price: Optional[float] = None
|
||||
liquidation_price: float | None = None
|
||||
is_short: bool = False
|
||||
leverage: float = 1.0
|
||||
|
||||
|
@ -453,10 +451,10 @@ class LocalTrade:
|
|||
interest_rate: float = 0.0
|
||||
|
||||
# Futures properties
|
||||
funding_fees: Optional[float] = None
|
||||
funding_fees: float | None = None
|
||||
# Used to keep running funding fees - between the last filled order and now
|
||||
# Shall not be used for calculations!
|
||||
funding_fee_running: Optional[float] = None
|
||||
funding_fee_running: float | None = None
|
||||
|
||||
@property
|
||||
def stoploss_or_liquidation(self) -> float:
|
||||
|
@ -469,7 +467,7 @@ class LocalTrade:
|
|||
return self.stop_loss
|
||||
|
||||
@property
|
||||
def buy_tag(self) -> Optional[str]:
|
||||
def buy_tag(self) -> str | None:
|
||||
"""
|
||||
Compatibility between buy_tag (old) and enter_tag (new)
|
||||
Consider buy_tag deprecated
|
||||
|
@ -496,7 +494,7 @@ class LocalTrade:
|
|||
return self.amount
|
||||
|
||||
@property
|
||||
def _date_last_filled_utc(self) -> Optional[datetime]:
|
||||
def _date_last_filled_utc(self) -> datetime | None:
|
||||
"""Date of the last filled order"""
|
||||
orders = self.select_filled_orders()
|
||||
if orders:
|
||||
|
@ -512,7 +510,7 @@ class LocalTrade:
|
|||
return max([self.open_date_utc, dt_last_filled])
|
||||
|
||||
@property
|
||||
def date_entry_fill_utc(self) -> Optional[datetime]:
|
||||
def date_entry_fill_utc(self) -> datetime | None:
|
||||
"""Date of the first filled order"""
|
||||
orders = self.select_filled_orders(self.entry_side)
|
||||
if orders and len(
|
||||
|
@ -764,7 +762,7 @@ class LocalTrade:
|
|||
self.max_rate = max(current_price, self.max_rate or self.open_rate)
|
||||
self.min_rate = min(current_price_low, self.min_rate or self.open_rate)
|
||||
|
||||
def set_liquidation_price(self, liquidation_price: Optional[float]):
|
||||
def set_liquidation_price(self, liquidation_price: float | None):
|
||||
"""
|
||||
Method you should use to set self.liquidation_price.
|
||||
Assures stop_loss is not past the liquidation price
|
||||
|
@ -796,7 +794,7 @@ class LocalTrade:
|
|||
def adjust_stop_loss(
|
||||
self,
|
||||
current_price: float,
|
||||
stoploss: Optional[float],
|
||||
stoploss: float | None,
|
||||
initial: bool = False,
|
||||
allow_refresh: bool = False,
|
||||
) -> None:
|
||||
|
@ -936,7 +934,7 @@ class LocalTrade:
|
|||
)
|
||||
|
||||
def update_fee(
|
||||
self, fee_cost: float, fee_currency: Optional[str], fee_rate: Optional[float], side: str
|
||||
self, fee_cost: float, fee_currency: str | None, fee_rate: float | None, side: str
|
||||
) -> None:
|
||||
"""
|
||||
Update Fee parameters. Only acts once per side
|
||||
|
@ -1044,7 +1042,7 @@ class LocalTrade:
|
|||
|
||||
return interest(exchange_name=self.exchange, borrowed=borrowed, rate=rate, hours=hours)
|
||||
|
||||
def _calc_base_close(self, amount: FtPrecise, rate: float, fee: Optional[float]) -> FtPrecise:
|
||||
def _calc_base_close(self, amount: FtPrecise, rate: float, fee: float | None) -> FtPrecise:
|
||||
close_trade = amount * FtPrecise(rate)
|
||||
fees = close_trade * FtPrecise(fee or 0.0)
|
||||
|
||||
|
@ -1053,7 +1051,7 @@ class LocalTrade:
|
|||
else:
|
||||
return close_trade - fees
|
||||
|
||||
def calc_close_trade_value(self, rate: float, amount: Optional[float] = None) -> float:
|
||||
def calc_close_trade_value(self, rate: float, amount: float | None = None) -> float:
|
||||
"""
|
||||
Calculate the Trade's close value including fees
|
||||
:param rate: rate to compare with.
|
||||
|
@ -1092,7 +1090,7 @@ class LocalTrade:
|
|||
)
|
||||
|
||||
def calc_profit(
|
||||
self, rate: float, amount: Optional[float] = None, open_rate: Optional[float] = None
|
||||
self, rate: float, amount: float | None = None, open_rate: float | None = None
|
||||
) -> float:
|
||||
"""
|
||||
Calculate the absolute profit in stake currency between Close and Open trade
|
||||
|
@ -1106,7 +1104,7 @@ class LocalTrade:
|
|||
return prof.profit_abs
|
||||
|
||||
def calculate_profit(
|
||||
self, rate: float, amount: Optional[float] = None, open_rate: Optional[float] = None
|
||||
self, rate: float, amount: float | None = None, open_rate: float | None = None
|
||||
) -> ProfitStruct:
|
||||
"""
|
||||
Calculate profit metrics (absolute, ratio, total, total ratio).
|
||||
|
@ -1154,7 +1152,7 @@ class LocalTrade:
|
|||
)
|
||||
|
||||
def calc_profit_ratio(
|
||||
self, rate: float, amount: Optional[float] = None, open_rate: Optional[float] = None
|
||||
self, rate: float, amount: float | None = None, open_rate: float | None = None
|
||||
) -> float:
|
||||
"""
|
||||
Calculates the profit as ratio (including fee).
|
||||
|
@ -1254,7 +1252,7 @@ class LocalTrade:
|
|||
self.close_profit = (close_profit_abs / total_stake) * self.leverage
|
||||
self.close_profit_abs = close_profit_abs
|
||||
|
||||
def select_order_by_order_id(self, order_id: str) -> Optional[Order]:
|
||||
def select_order_by_order_id(self, order_id: str) -> Order | None:
|
||||
"""
|
||||
Finds order object by Order id.
|
||||
:param order_id: Exchange order id
|
||||
|
@ -1266,10 +1264,10 @@ class LocalTrade:
|
|||
|
||||
def select_order(
|
||||
self,
|
||||
order_side: Optional[str] = None,
|
||||
is_open: Optional[bool] = None,
|
||||
order_side: str | None = None,
|
||||
is_open: bool | None = None,
|
||||
only_filled: bool = False,
|
||||
) -> Optional[Order]:
|
||||
) -> Order | None:
|
||||
"""
|
||||
Finds the latest order for this order side and status
|
||||
:param order_side: ft_order_side of the order (either 'buy', 'sell' or 'stoploss')
|
||||
|
@ -1289,7 +1287,7 @@ class LocalTrade:
|
|||
else:
|
||||
return None
|
||||
|
||||
def select_filled_orders(self, order_side: Optional[str] = None) -> list["Order"]:
|
||||
def select_filled_orders(self, order_side: str | None = None) -> list["Order"]:
|
||||
"""
|
||||
Finds filled orders for this order side.
|
||||
Will not return open orders which have already partially filled.
|
||||
|
@ -1340,7 +1338,7 @@ class LocalTrade:
|
|||
return data[0].value
|
||||
return default
|
||||
|
||||
def get_custom_data_entry(self, key: str) -> Optional[_CustomData]:
|
||||
def get_custom_data_entry(self, key: str) -> _CustomData | None:
|
||||
"""
|
||||
Get custom data for this trade
|
||||
:param key: key of the custom data
|
||||
|
@ -1393,7 +1391,7 @@ class LocalTrade:
|
|||
return len(self.select_filled_orders("sell"))
|
||||
|
||||
@property
|
||||
def sell_reason(self) -> Optional[str]:
|
||||
def sell_reason(self) -> str | None:
|
||||
"""DEPRECATED! Please use exit_reason instead."""
|
||||
return self.exit_reason
|
||||
|
||||
|
@ -1404,10 +1402,10 @@ class LocalTrade:
|
|||
@staticmethod
|
||||
def get_trades_proxy(
|
||||
*,
|
||||
pair: Optional[str] = None,
|
||||
is_open: Optional[bool] = None,
|
||||
open_date: Optional[datetime] = None,
|
||||
close_date: Optional[datetime] = None,
|
||||
pair: str | None = None,
|
||||
is_open: bool | None = None,
|
||||
open_date: datetime | None = None,
|
||||
close_date: datetime | None = None,
|
||||
) -> list["LocalTrade"]:
|
||||
"""
|
||||
Helper function to query Trades.
|
||||
|
@ -1636,92 +1634,92 @@ class Trade(ModelBase, LocalTrade):
|
|||
|
||||
exchange: Mapped[str] = mapped_column(String(25), nullable=False) # type: ignore
|
||||
pair: Mapped[str] = mapped_column(String(25), nullable=False, index=True) # type: ignore
|
||||
base_currency: Mapped[Optional[str]] = mapped_column(String(25), nullable=True) # type: ignore
|
||||
stake_currency: Mapped[Optional[str]] = mapped_column(String(25), nullable=True) # type: ignore
|
||||
base_currency: Mapped[str | None] = mapped_column(String(25), nullable=True) # type: ignore
|
||||
stake_currency: Mapped[str | None] = mapped_column(String(25), nullable=True) # type: ignore
|
||||
is_open: Mapped[bool] = mapped_column(nullable=False, default=True, index=True) # type: ignore
|
||||
fee_open: Mapped[float] = mapped_column(Float(), nullable=False, default=0.0) # type: ignore
|
||||
fee_open_cost: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
fee_open_currency: Mapped[Optional[str]] = mapped_column( # type: ignore
|
||||
fee_open_cost: Mapped[float | None] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
fee_open_currency: Mapped[str | None] = mapped_column( # type: ignore
|
||||
String(25), nullable=True
|
||||
)
|
||||
fee_close: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
fee_close: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=False, default=0.0
|
||||
)
|
||||
fee_close_cost: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
fee_close_currency: Mapped[Optional[str]] = mapped_column( # type: ignore
|
||||
fee_close_cost: Mapped[float | None] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
fee_close_currency: Mapped[str | None] = mapped_column( # type: ignore
|
||||
String(25), nullable=True
|
||||
)
|
||||
open_rate: Mapped[float] = mapped_column(Float()) # type: ignore
|
||||
open_rate_requested: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
open_rate_requested: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=True
|
||||
)
|
||||
# open_trade_value - calculated via _calc_open_trade_value
|
||||
open_trade_value: Mapped[float] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
close_rate: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
close_rate_requested: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
close_rate: Mapped[float | None] = mapped_column(Float()) # type: ignore
|
||||
close_rate_requested: Mapped[float | None] = mapped_column(Float()) # type: ignore
|
||||
realized_profit: Mapped[float] = mapped_column( # type: ignore
|
||||
Float(), default=0.0, nullable=True
|
||||
)
|
||||
close_profit: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
close_profit_abs: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
close_profit: Mapped[float | None] = mapped_column(Float()) # type: ignore
|
||||
close_profit_abs: Mapped[float | None] = mapped_column(Float()) # type: ignore
|
||||
stake_amount: Mapped[float] = mapped_column(Float(), nullable=False) # type: ignore
|
||||
max_stake_amount: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
max_stake_amount: Mapped[float | None] = mapped_column(Float()) # type: ignore
|
||||
amount: Mapped[float] = mapped_column(Float()) # type: ignore
|
||||
amount_requested: Mapped[Optional[float]] = mapped_column(Float()) # type: ignore
|
||||
amount_requested: Mapped[float | None] = mapped_column(Float()) # type: ignore
|
||||
open_date: Mapped[datetime] = mapped_column( # type: ignore
|
||||
nullable=False, default=datetime.now
|
||||
)
|
||||
close_date: Mapped[Optional[datetime]] = mapped_column() # type: ignore
|
||||
close_date: Mapped[datetime | None] = mapped_column() # type: ignore
|
||||
# absolute value of the stop loss
|
||||
stop_loss: Mapped[float] = mapped_column(Float(), nullable=True, default=0.0) # type: ignore
|
||||
# percentage value of the stop loss
|
||||
stop_loss_pct: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
stop_loss_pct: Mapped[float | None] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
# absolute value of the initial stop loss
|
||||
initial_stop_loss: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
initial_stop_loss: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=True, default=0.0
|
||||
)
|
||||
# percentage value of the initial stop loss
|
||||
initial_stop_loss_pct: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
initial_stop_loss_pct: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=True
|
||||
)
|
||||
is_stop_loss_trailing: Mapped[bool] = mapped_column( # type: ignore
|
||||
nullable=False, default=False
|
||||
)
|
||||
# absolute value of the highest reached price
|
||||
max_rate: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
max_rate: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=True, default=0.0
|
||||
)
|
||||
# Lowest price reached
|
||||
min_rate: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
exit_reason: Mapped[Optional[str]] = mapped_column( # type: ignore
|
||||
min_rate: Mapped[float | None] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
exit_reason: Mapped[str | None] = mapped_column( # type: ignore
|
||||
String(CUSTOM_TAG_MAX_LENGTH), nullable=True
|
||||
)
|
||||
exit_order_status: Mapped[Optional[str]] = mapped_column( # type: ignore
|
||||
exit_order_status: Mapped[str | None] = mapped_column( # type: ignore
|
||||
String(100), nullable=True
|
||||
)
|
||||
strategy: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) # type: ignore
|
||||
enter_tag: Mapped[Optional[str]] = mapped_column( # type: ignore
|
||||
strategy: Mapped[str | None] = mapped_column(String(100), nullable=True) # type: ignore
|
||||
enter_tag: Mapped[str | None] = mapped_column( # type: ignore
|
||||
String(CUSTOM_TAG_MAX_LENGTH), nullable=True
|
||||
)
|
||||
timeframe: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # type: ignore
|
||||
timeframe: Mapped[int | None] = mapped_column(Integer, nullable=True) # type: ignore
|
||||
|
||||
trading_mode: Mapped[TradingMode] = mapped_column( # type: ignore
|
||||
Enum(TradingMode), nullable=True
|
||||
)
|
||||
amount_precision: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
amount_precision: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=True
|
||||
)
|
||||
price_precision: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
precision_mode: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) # type: ignore
|
||||
precision_mode_price: Mapped[Optional[int]] = mapped_column( # type: ignore
|
||||
price_precision: Mapped[float | None] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
precision_mode: Mapped[int | None] = mapped_column(Integer, nullable=True) # type: ignore
|
||||
precision_mode_price: Mapped[int | None] = mapped_column( # type: ignore
|
||||
Integer, nullable=True
|
||||
)
|
||||
contract_size: Mapped[Optional[float]] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
contract_size: Mapped[float | None] = mapped_column(Float(), nullable=True) # type: ignore
|
||||
|
||||
# Leverage trading properties
|
||||
leverage: Mapped[float] = mapped_column(Float(), nullable=True, default=1.0) # type: ignore
|
||||
is_short: Mapped[bool] = mapped_column(nullable=False, default=False) # type: ignore
|
||||
liquidation_price: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
liquidation_price: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=True
|
||||
)
|
||||
|
||||
|
@ -1731,10 +1729,10 @@ class Trade(ModelBase, LocalTrade):
|
|||
)
|
||||
|
||||
# Futures properties
|
||||
funding_fees: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
funding_fees: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=True, default=None
|
||||
)
|
||||
funding_fee_running: Mapped[Optional[float]] = mapped_column( # type: ignore
|
||||
funding_fee_running: Mapped[float | None] = mapped_column( # type: ignore
|
||||
Float(), nullable=True, default=None
|
||||
)
|
||||
|
||||
|
@ -1773,10 +1771,10 @@ class Trade(ModelBase, LocalTrade):
|
|||
@staticmethod
|
||||
def get_trades_proxy(
|
||||
*,
|
||||
pair: Optional[str] = None,
|
||||
is_open: Optional[bool] = None,
|
||||
open_date: Optional[datetime] = None,
|
||||
close_date: Optional[datetime] = None,
|
||||
pair: str | None = None,
|
||||
is_open: bool | None = None,
|
||||
open_date: datetime | None = None,
|
||||
close_date: datetime | None = None,
|
||||
) -> list["LocalTrade"]:
|
||||
"""
|
||||
Helper function to query Trades.
|
||||
|
@ -1939,7 +1937,7 @@ class Trade(ModelBase, LocalTrade):
|
|||
]
|
||||
|
||||
@staticmethod
|
||||
def get_enter_tag_performance(pair: Optional[str]) -> list[dict[str, Any]]:
|
||||
def get_enter_tag_performance(pair: str | None) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Returns List of dicts containing all Trades, based on buy tag performance
|
||||
Can either be average for all pairs or a specific pair provided
|
||||
|
@ -1974,7 +1972,7 @@ class Trade(ModelBase, LocalTrade):
|
|||
]
|
||||
|
||||
@staticmethod
|
||||
def get_exit_reason_performance(pair: Optional[str]) -> list[dict[str, Any]]:
|
||||
def get_exit_reason_performance(pair: str | None) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Returns List of dicts containing all Trades, based on exit reason performance
|
||||
Can either be average for all pairs or a specific pair provided
|
||||
|
@ -2008,7 +2006,7 @@ class Trade(ModelBase, LocalTrade):
|
|||
]
|
||||
|
||||
@staticmethod
|
||||
def get_mix_tag_performance(pair: Optional[str]) -> list[dict[str, Any]]:
|
||||
def get_mix_tag_performance(pair: str | None) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Returns List of dicts containing all Trades, based on entry_tag + exit_reason performance
|
||||
Can either be average for all pairs or a specific pair provided
|
||||
|
@ -2065,7 +2063,7 @@ class Trade(ModelBase, LocalTrade):
|
|||
return resp
|
||||
|
||||
@staticmethod
|
||||
def get_best_pair(start_date: Optional[datetime] = None):
|
||||
def get_best_pair(start_date: datetime | None = None):
|
||||
"""
|
||||
Get best pair with closed trade.
|
||||
NOTE: Not supported in Backtesting.
|
||||
|
@ -2085,7 +2083,7 @@ class Trade(ModelBase, LocalTrade):
|
|||
return best_pair
|
||||
|
||||
@staticmethod
|
||||
def get_trading_volume(start_date: Optional[datetime] = None) -> float:
|
||||
def get_trading_volume(start_date: datetime | None = None) -> float:
|
||||
"""
|
||||
Get Trade volume based on Orders
|
||||
NOTE: Not supported in Backtesting.
|
||||
|
|
|
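A minimal, hypothetical sketch (not taken from the diff) showing that SQLAlchemy 2.0's typed ORM accepts the PEP 604 form inside Mapped[...], which is what the Order and Trade columns above rely on. The DemoOrder model, its table name, and its columns are invented, and SQLAlchemy 2.0+ is assumed to be installed.

from sqlalchemy import Float, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class DemoOrder(Base):
    __tablename__ = "demo_order"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Mapped[str | None] is treated like Mapped[Optional[str]]: the column is
    # nullable and type checkers see an optional attribute.
    status: Mapped[str | None] = mapped_column(String(255), nullable=True)
    price: Mapped[float | None] = mapped_column(Float(), nullable=True)


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)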
@ -1,7 +1,6 @@
|
|||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
@ -406,7 +405,7 @@ def add_areas(fig, row: int, data: pd.DataFrame, indicators) -> make_subplots:
|
|||
return fig
|
||||
|
||||
|
||||
def create_scatter(data, column_name, color, direction) -> Optional[go.Scatter]:
|
||||
def create_scatter(data, column_name, color, direction) -> go.Scatter | None:
|
||||
if column_name in data.columns:
|
||||
df_short = data[data[column_name] == 1]
|
||||
if len(df_short) > 0:
|
||||
|
@ -432,11 +431,11 @@ def create_scatter(data, column_name, color, direction) -> Optional[go.Scatter]:
|
|||
def generate_candlestick_graph(
|
||||
pair: str,
|
||||
data: pd.DataFrame,
|
||||
trades: Optional[pd.DataFrame] = None,
|
||||
trades: pd.DataFrame | None = None,
|
||||
*,
|
||||
indicators1: Optional[list[str]] = None,
|
||||
indicators2: Optional[list[str]] = None,
|
||||
plot_config: Optional[dict[str, dict]] = None,
|
||||
indicators1: list[str] | None = None,
|
||||
indicators2: list[str] | None = None,
|
||||
plot_config: dict[str, dict] | None = None,
|
||||
) -> go.Figure:
|
||||
"""
|
||||
Generate the graph from the data generated by Backtesting or from DB
|
||||
|
|
|
@ -5,7 +5,6 @@ Minimum age (days listed) pair list filter
|
|||
import logging
|
||||
from copy import deepcopy
|
||||
from datetime import timedelta
|
||||
from typing import Optional
|
||||
|
||||
from pandas import DataFrame
|
||||
|
||||
|
@ -126,7 +125,7 @@ class AgeFilter(IPairList):
|
|||
self.log_once(f"Validated {len(pairlist)} pairs.", logger.info)
|
||||
return pairlist
|
||||
|
||||
def _validate_pair_loc(self, pair: str, daily_candles: Optional[DataFrame]) -> bool:
|
||||
def _validate_pair_loc(self, pair: str, daily_candles: DataFrame | None) -> bool:
|
||||
"""
|
||||
Validate age for the ticker
|
||||
:param pair: Pair that's currently validated
|
||||
|
|
|
@ -6,7 +6,7 @@ import logging
|
|||
from abc import ABC, abstractmethod
|
||||
from copy import deepcopy
|
||||
from enum import Enum
|
||||
from typing import Any, Literal, Optional, TypedDict, Union
|
||||
from typing import Any, Literal, TypedDict
|
||||
|
||||
from freqtrade.constants import Config
|
||||
from freqtrade.exceptions import OperationalException
|
||||
|
@ -25,37 +25,37 @@ class __PairlistParameterBase(TypedDict):
|
|||
|
||||
class __NumberPairlistParameter(__PairlistParameterBase):
|
||||
type: Literal["number"]
|
||||
default: Union[int, float, None]
|
||||
default: int | float | None
|
||||
|
||||
|
||||
class __StringPairlistParameter(__PairlistParameterBase):
|
||||
type: Literal["string"]
|
||||
default: Union[str, None]
|
||||
default: str | None
|
||||
|
||||
|
||||
class __OptionPairlistParameter(__PairlistParameterBase):
|
||||
type: Literal["option"]
|
||||
default: Union[str, None]
|
||||
default: str | None
|
||||
options: list[str]
|
||||
|
||||
|
||||
class __ListPairListParamenter(__PairlistParameterBase):
|
||||
type: Literal["list"]
|
||||
default: Union[list[str], None]
|
||||
default: list[str] | None
|
||||
|
||||
|
||||
class __BoolPairlistParameter(__PairlistParameterBase):
|
||||
type: Literal["boolean"]
|
||||
default: Union[bool, None]
|
||||
default: bool | None
|
||||
|
||||
|
||||
PairlistParameter = Union[
|
||||
__NumberPairlistParameter,
|
||||
__StringPairlistParameter,
|
||||
__OptionPairlistParameter,
|
||||
__BoolPairlistParameter,
|
||||
__ListPairListParamenter,
|
||||
]
|
||||
PairlistParameter = (
|
||||
__NumberPairlistParameter
|
||||
| __StringPairlistParameter
|
||||
| __OptionPairlistParameter
|
||||
| __BoolPairlistParameter
|
||||
| __ListPairListParamenter
|
||||
)
|
||||
|
||||
|
||||
class SupportsBacktesting(str, Enum):
|
||||
|
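A minimal, hypothetical sketch (not taken from the diff) showing that a parenthesised chain of | between TypedDict classes, as used for PairlistParameter above, behaves like the old Union[...] alias. The _NumberParam, _StringParam, and DemoParameter names are invented.

from typing import Literal, TypedDict


class _NumberParam(TypedDict):
    type: Literal["number"]
    default: int | float | None


class _StringParam(TypedDict):
    type: Literal["string"]
    default: str | None


# Equivalent to Union[_NumberParam, _StringParam] on Python 3.10+
DemoParameter = _NumberParam | _StringParam


def describe(param: DemoParameter) -> str:
    return f"{param['type']} (default={param['default']})"


print(describe({"type": "number", "default": 5}))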
@ -153,7 +153,7 @@ class IPairList(LoggingMixin, ABC):
|
|||
-> Please overwrite in subclasses
|
||||
"""
|
||||
|
||||
def _validate_pair(self, pair: str, ticker: Optional[Ticker]) -> bool:
|
||||
def _validate_pair(self, pair: str, ticker: Ticker | None) -> bool:
|
||||
"""
|
||||
Check one pair against Pairlist Handler's specific conditions.
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ defined period or as coming from ticker
|
|||
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from cachetools import TTLCache
|
||||
from pandas import DataFrame
|
||||
|
@ -46,7 +46,7 @@ class PercentChangePairList(IPairList):
|
|||
self._lookback_days = self._pairlistconfig.get("lookback_days", 0)
|
||||
self._lookback_timeframe = self._pairlistconfig.get("lookback_timeframe", "1d")
|
||||
self._lookback_period = self._pairlistconfig.get("lookback_period", 0)
|
||||
self._sort_direction: Optional[str] = self._pairlistconfig.get("sort_direction", "desc")
|
||||
self._sort_direction: str | None = self._pairlistconfig.get("sort_direction", "desc")
|
||||
self._def_candletype = self._config["candle_type_def"]
|
||||
|
||||
if (self._lookback_days > 0) & (self._lookback_period > 0):
|
||||
|
@ -311,7 +311,7 @@ class PercentChangePairList(IPairList):
|
|||
else:
|
||||
filtered_tickers[i]["percentage"] = tickers[p["symbol"]]["percentage"]
|
||||
|
||||
def _validate_pair(self, pair: str, ticker: Optional[Ticker]) -> bool:
|
||||
def _validate_pair(self, pair: str, ticker: Ticker | None) -> bool:
|
||||
"""
|
||||
Validate the percentage change for the ticker.
|
||||
:param pair: Pair that's currently validated
|
||||
|
|
|
@ -3,7 +3,6 @@ Precision pair list filter
|
|||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange import ROUND_UP
|
||||
|
@ -50,7 +49,7 @@ class PrecisionFilter(IPairList):
|
|||
def description() -> str:
|
||||
return "Filters low-value coins which would not allow setting stoplosses."
|
||||
|
||||
def _validate_pair(self, pair: str, ticker: Optional[Ticker]) -> bool:
|
||||
def _validate_pair(self, pair: str, ticker: Ticker | None) -> bool:
|
||||
"""
|
||||
Check if pair has enough room to add a stoploss to avoid "unsellable" buys of very
|
||||
low value pairs.
|
||||
|
|
|
@ -3,7 +3,6 @@ Price pair list filter
|
|||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange.exchange_types import Ticker
|
||||
|
@ -101,7 +100,7 @@ class PriceFilter(IPairList):
|
|||
},
|
||||
}
|
||||
|
||||
def _validate_pair(self, pair: str, ticker: Optional[Ticker]) -> bool:
|
||||
def _validate_pair(self, pair: str, ticker: Ticker | None) -> bool:
|
||||
"""
|
||||
Check if one price-step (pip) is greater than a certain barrier.
|
||||
:param pair: Pair that's currently validated
|
||||
|
|
|
@ -5,7 +5,6 @@ Provides pair list from Leader data
|
|||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange.exchange_types import Tickers
|
||||
|
@ -83,7 +82,7 @@ class ProducerPairList(IPairList):
|
|||
},
|
||||
}
|
||||
|
||||
def _filter_pairlist(self, pairlist: Optional[list[str]]):
|
||||
def _filter_pairlist(self, pairlist: list[str] | None):
|
||||
upstream_pairlist = self._pairlistmanager._dataprovider.get_producer_pairs(
|
||||
self._producer_name
|
||||
)
|
||||
|
|
|
@ -3,7 +3,6 @@ Spread pair list filter
|
|||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from freqtrade.exceptions import OperationalException
|
||||
from freqtrade.exchange.exchange_types import Ticker
|
||||
|
@ -61,7 +60,7 @@ class SpreadFilter(IPairList):
|
|||
},
|
||||
}
|
||||
|
||||
def _validate_pair(self, pair: str, ticker: Optional[Ticker]) -> bool:
|
||||
def _validate_pair(self, pair: str, ticker: Ticker | None) -> bool:
|
||||
"""
|
||||
Validate spread for the ticker
|
||||
:param pair: Pair that's currently validated
|
||||
|
|
|
@ -5,7 +5,6 @@ Volatility pairlist filter
|
|||
import logging
|
||||
import sys
|
||||
from datetime import timedelta
|
||||
from typing import Optional
|
||||
|
||||
import numpy as np
|
||||
from cachetools import TTLCache
|
||||
|
@ -37,7 +36,7 @@ class VolatilityFilter(IPairList):
|
|||
self._max_volatility = self._pairlistconfig.get("max_volatility", sys.maxsize)
|
||||
self._refresh_period = self._pairlistconfig.get("refresh_period", 1440)
|
||||
self._def_candletype = self._config["candle_type_def"]
|
||||
self._sort_direction: Optional[str] = self._pairlistconfig.get("sort_direction", None)
|
||||
self._sort_direction: str | None = self._pairlistconfig.get("sort_direction", None)
|
||||
|
||||
self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period)
|
||||
|
||||
|
@ -147,7 +146,7 @@ class VolatilityFilter(IPairList):
|
|||
)
|
||||
return resulting_pairlist
|
||||
|
||||
def _calculate_volatility(self, pair: str, daily_candles: DataFrame) -> Optional[float]:
|
||||
def _calculate_volatility(self, pair: str, daily_candles: DataFrame) -> float | None:
|
||||
# Check symbol in cache
|
||||
if (volatility_avg := self._pair_cache.get(pair, None)) is not None:
|
||||
return volatility_avg
|
||||
|
|
|
@ -4,7 +4,6 @@ Rate of change pairlist filter
|
|||
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Optional
|
||||
|
||||
from cachetools import TTLCache
|
||||
from pandas import DataFrame
|
||||
|
@ -31,7 +30,7 @@ class RangeStabilityFilter(IPairList):
|
|||
self._max_rate_of_change = self._pairlistconfig.get("max_rate_of_change")
|
||||
self._refresh_period = self._pairlistconfig.get("refresh_period", 86400)
|
||||
self._def_candletype = self._config["candle_type_def"]
|
||||
self._sort_direction: Optional[str] = self._pairlistconfig.get("sort_direction", None)
|
||||
self._sort_direction: str | None = self._pairlistconfig.get("sort_direction", None)
|
||||
|
||||
self._pair_cache: TTLCache = TTLCache(maxsize=1000, ttl=self._refresh_period)
|
||||
|
||||
|
@ -143,7 +142,7 @@ class RangeStabilityFilter(IPairList):
|
|||
)
|
||||
return resulting_pairlist
|
||||
|
||||
def _calculate_rate_of_change(self, pair: str, daily_candles: DataFrame) -> Optional[float]:
|
||||
def _calculate_rate_of_change(self, pair: str, daily_candles: DataFrame) -> float | None:
|
||||
# Check symbol in cache
|
||||
if (pct_change := self._pair_cache.get(pair, None)) is not None:
|
||||
return pct_change
|
||||
|
|
|
@ -4,7 +4,6 @@ PairList manager class
|
|||
|
||||
import logging
|
||||
from functools import partial
|
||||
from typing import Optional
|
||||
|
||||
from cachetools import TTLCache, cached
|
||||
|
||||
|
@ -24,16 +23,14 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
class PairListManager(LoggingMixin):
|
||||
def __init__(
|
||||
self, exchange, config: Config, dataprovider: Optional[DataProvider] = None
|
||||
) -> None:
|
||||
def __init__(self, exchange, config: Config, dataprovider: DataProvider | None = None) -> None:
|
||||
self._exchange = exchange
|
||||
self._config = config
|
||||
self._whitelist = self._config["exchange"].get("pair_whitelist")
|
||||
self._blacklist = self._config["exchange"].get("pair_blacklist", [])
|
||||
self._pairlist_handlers: list[IPairList] = []
|
||||
self._tickers_needed = False
|
||||
self._dataprovider: Optional[DataProvider] = dataprovider
|
||||
self._dataprovider: DataProvider | None = dataprovider
|
||||
for pairlist_handler_config in self._config.get("pairlists", []):
|
||||
pairlist_handler = PairListResolver.load_pairlist(
|
||||
pairlist_handler_config["method"],
|
||||
|
@ -193,7 +190,7 @@ class PairListManager(LoggingMixin):
|
|||
return whitelist
|
||||
|
||||
def create_pair_list(
|
||||
self, pairs: list[str], timeframe: Optional[str] = None
|
||||
self, pairs: list[str], timeframe: str | None = None
|
||||
) -> ListPairsWithTimeframes:
|
||||
"""
|
||||
Create list of pair tuples with (pair, timeframe)
|
||||
|
|
|
@ -4,7 +4,7 @@ Protection manager class
|
|||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.constants import Config, LongShort
|
||||
from freqtrade.exceptions import ConfigurationError
|
||||
|
@ -47,9 +47,7 @@ class ProtectionManager:
|
|||
"""
|
||||
return [{p.name: p.short_desc()} for p in self._protection_handlers]
|
||||
|
||||
def global_stop(
|
||||
self, now: Optional[datetime] = None, side: LongShort = "long"
|
||||
) -> Optional[PairLock]:
|
||||
def global_stop(self, now: datetime | None = None, side: LongShort = "long") -> PairLock | None:
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
result = None
|
||||
|
@ -64,8 +62,8 @@ class ProtectionManager:
|
|||
return result
|
||||
|
||||
def stop_per_pair(
|
||||
self, pair, now: Optional[datetime] = None, side: LongShort = "long"
|
||||
) -> Optional[PairLock]:
|
||||
self, pair, now: datetime | None = None, side: LongShort = "long"
|
||||
) -> PairLock | None:
|
||||
if not now:
|
||||
now = datetime.now(timezone.utc)
|
||||
result = None
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional
|
||||
|
||||
from freqtrade.constants import LongShort
|
||||
from freqtrade.persistence import Trade
|
||||
|
@ -26,7 +25,7 @@ class CooldownPeriod(IProtection):
|
|||
"""
|
||||
return f"{self.name} - Cooldown period {self.unlock_reason_time_element}."
|
||||
|
||||
def _cooldown_period(self, pair: str, date_now: datetime) -> Optional[ProtectionReturn]:
|
||||
def _cooldown_period(self, pair: str, date_now: datetime) -> ProtectionReturn | None:
|
||||
"""
|
||||
Get last trade for this pair
|
||||
"""
|
||||
|
@ -53,7 +52,7 @@ class CooldownPeriod(IProtection):
|
|||
|
||||
return None
|
||||
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for all pairs
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
@ -65,7 +64,7 @@ class CooldownPeriod(IProtection):
|
|||
|
||||
def stop_per_pair(
|
||||
self, pair: str, date_now: datetime, side: LongShort
|
||||
) -> Optional[ProtectionReturn]:
|
||||
) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for this pair
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
|
|
@ -2,7 +2,7 @@ import logging
|
|||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.constants import Config, LongShort
|
||||
from freqtrade.exchange import timeframe_to_minutes
|
||||
|
@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
|
|||
class ProtectionReturn:
|
||||
lock: bool
|
||||
until: datetime
|
||||
reason: Optional[str]
|
||||
reason: str | None
|
||||
lock_side: str = "*"
|
||||
|
||||
|
||||
|
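A minimal, hypothetical sketch (not taken from the diff) of a dataclass using the PEP 604 spelling for an optional field, mirroring the ProtectionReturn change above. DemoProtectionReturn is an invented name.

from dataclasses import dataclass
from datetime import datetime, timedelta, timezone


@dataclass
class DemoProtectionReturn:
    lock: bool
    until: datetime
    reason: str | None          # previously Optional[str]
    lock_side: str = "*"


print(DemoProtectionReturn(
    lock=True,
    until=datetime.now(timezone.utc) + timedelta(minutes=5),
    reason=None,
))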
@ -31,10 +31,10 @@ class IProtection(LoggingMixin, ABC):
|
|||
def __init__(self, config: Config, protection_config: dict[str, Any]) -> None:
|
||||
self._config = config
|
||||
self._protection_config = protection_config
|
||||
self._stop_duration_candles: Optional[int] = None
|
||||
self._stop_duration_candles: int | None = None
|
||||
self._stop_duration: int = 0
|
||||
self._lookback_period_candles: Optional[int] = None
|
||||
self._unlock_at: Optional[str] = None
|
||||
self._lookback_period_candles: int | None = None
|
||||
self._unlock_at: str | None = None
|
||||
|
||||
tf_in_min = timeframe_to_minutes(config["timeframe"])
|
||||
if "stop_duration_candles" in protection_config:
|
||||
|
@ -102,7 +102,7 @@ class IProtection(LoggingMixin, ABC):
|
|||
"""
|
||||
|
||||
@abstractmethod
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for all pairs
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
@ -111,7 +111,7 @@ class IProtection(LoggingMixin, ABC):
|
|||
@abstractmethod
|
||||
def stop_per_pair(
|
||||
self, pair: str, date_now: datetime, side: LongShort
|
||||
) -> Optional[ProtectionReturn]:
|
||||
) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for this pair
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.constants import Config, LongShort
|
||||
from freqtrade.persistence import Trade
|
||||
|
@ -41,7 +41,7 @@ class LowProfitPairs(IProtection):
|
|||
|
||||
def _low_profit(
|
||||
self, date_now: datetime, pair: str, side: LongShort
|
||||
) -> Optional[ProtectionReturn]:
|
||||
) -> ProtectionReturn | None:
|
||||
"""
|
||||
Evaluate recent trades for pair
|
||||
"""
|
||||
|
@ -81,7 +81,7 @@ class LowProfitPairs(IProtection):
|
|||
|
||||
return None
|
||||
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for all pairs
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
@ -92,7 +92,7 @@ class LowProfitPairs(IProtection):
|
|||
|
||||
def stop_per_pair(
|
||||
self, pair: str, date_now: datetime, side: LongShort
|
||||
) -> Optional[ProtectionReturn]:
|
||||
) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for this pair
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
@ -42,7 +42,7 @@ class MaxDrawdown(IProtection):
|
|||
f"locking {self.unlock_reason_time_element}."
|
||||
)
|
||||
|
||||
def _max_drawdown(self, date_now: datetime) -> Optional[ProtectionReturn]:
|
||||
def _max_drawdown(self, date_now: datetime) -> ProtectionReturn | None:
|
||||
"""
|
||||
Evaluate recent trades for drawdown ...
|
||||
"""
|
||||
|
@ -81,7 +81,7 @@ class MaxDrawdown(IProtection):
|
|||
|
||||
return None
|
||||
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for all pairs
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
@ -92,7 +92,7 @@ class MaxDrawdown(IProtection):
|
|||
|
||||
def stop_per_pair(
|
||||
self, pair: str, date_now: datetime, side: LongShort
|
||||
) -> Optional[ProtectionReturn]:
|
||||
) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for this pair
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.constants import Config, LongShort
|
||||
from freqtrade.enums import ExitType
|
||||
|
@ -42,8 +42,8 @@ class StoplossGuard(IProtection):
|
|||
)
|
||||
|
||||
def _stoploss_guard(
|
||||
self, date_now: datetime, pair: Optional[str], side: LongShort
|
||||
) -> Optional[ProtectionReturn]:
|
||||
self, date_now: datetime, pair: str | None, side: LongShort
|
||||
) -> ProtectionReturn | None:
|
||||
"""
|
||||
Evaluate recent trades
|
||||
"""
|
||||
|
@ -86,7 +86,7 @@ class StoplossGuard(IProtection):
|
|||
lock_side=(side if self._only_per_side else "*"),
|
||||
)
|
||||
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> Optional[ProtectionReturn]:
|
||||
def global_stop(self, date_now: datetime, side: LongShort) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for all pairs
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
@ -99,7 +99,7 @@ class StoplossGuard(IProtection):
|
|||
|
||||
def stop_per_pair(
|
||||
self, pair: str, date_now: datetime, side: LongShort
|
||||
) -> Optional[ProtectionReturn]:
|
||||
) -> ProtectionReturn | None:
|
||||
"""
|
||||
Stops trading (position entering) for this pair
|
||||
This must evaluate to true for the whole period of the "cooldown period".
|
||||
|
|
|
@ -4,7 +4,7 @@ This module loads custom exchanges
|
|||
|
||||
import logging
|
||||
from inspect import isclass
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
import freqtrade.exchange as exchanges
|
||||
from freqtrade.constants import Config, ExchangeConfig
|
||||
|
@ -26,7 +26,7 @@ class ExchangeResolver(IResolver):
|
|||
def load_exchange(
|
||||
config: Config,
|
||||
*,
|
||||
exchange_config: Optional[ExchangeConfig] = None,
|
||||
exchange_config: ExchangeConfig | None = None,
|
||||
validate: bool = True,
|
||||
load_leverage_tiers: bool = False,
|
||||
) -> Exchange:
|
||||
|
|
|
@ -10,7 +10,7 @@ import logging
|
|||
import sys
|
||||
from collections.abc import Iterator
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, Union
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.constants import Config
|
||||
from freqtrade.exceptions import OperationalException
|
||||
|
@ -43,17 +43,17 @@ class IResolver:
|
|||
# Child classes need to override this
|
||||
object_type: type[Any]
|
||||
object_type_str: str
|
||||
user_subdir: Optional[str] = None
|
||||
initial_search_path: Optional[Path] = None
|
||||
user_subdir: str | None = None
|
||||
initial_search_path: Path | None = None
|
||||
# Optional config setting containing a path (strategy_path, freqaimodel_path)
|
||||
extra_path: Optional[str] = None
|
||||
extra_path: str | None = None
|
||||
|
||||
@classmethod
|
||||
def build_search_paths(
|
||||
cls,
|
||||
config: Config,
|
||||
user_subdir: Optional[str] = None,
|
||||
extra_dirs: Optional[list[str]] = None,
|
||||
user_subdir: str | None = None,
|
||||
extra_dirs: list[str] | None = None,
|
||||
) -> list[Path]:
|
||||
abs_paths: list[Path] = []
|
||||
if cls.initial_search_path:
|
||||
|
@ -74,7 +74,7 @@ class IResolver:
|
|||
|
||||
@classmethod
|
||||
def _get_valid_object(
|
||||
cls, module_path: Path, object_name: Optional[str], enum_failed: bool = False
|
||||
cls, module_path: Path, object_name: str | None, enum_failed: bool = False
|
||||
) -> Iterator[Any]:
|
||||
"""
|
||||
Generator returning objects with matching object_type and object_name in the path given.
|
||||
|
@ -131,7 +131,7 @@ class IResolver:
|
|||
@classmethod
|
||||
def _search_object(
|
||||
cls, directory: Path, *, object_name: str, add_source: bool = False
|
||||
) -> Union[tuple[Any, Path], tuple[None, None]]:
|
||||
) -> tuple[Any, Path] | tuple[None, None]:
|
||||
"""
|
||||
Search for the object name in the given directory
|
||||
:param directory: relative or absolute directory path
|
||||
|
@ -161,7 +161,7 @@ class IResolver:
|
|||
@classmethod
|
||||
def _load_object(
|
||||
cls, paths: list[Path], *, object_name: str, add_source: bool = False, kwargs: dict
|
||||
) -> Optional[Any]:
|
||||
) -> Any | None:
|
||||
"""
|
||||
Try to load object from path list.
|
||||
"""
|
||||
|
@ -184,7 +184,7 @@ class IResolver:
|
|||
|
||||
@classmethod
|
||||
def load_object(
|
||||
cls, object_name: str, config: Config, *, kwargs: dict, extra_dir: Optional[str] = None
|
||||
cls, object_name: str, config: Config, *, kwargs: dict, extra_dir: str | None = None
|
||||
) -> Any:
|
||||
"""
|
||||
Searches and loads the specified object as configured in the child class.
|
||||
|
@ -245,7 +245,7 @@ class IResolver:
|
|||
directory: Path,
|
||||
enum_failed: bool,
|
||||
recursive: bool = False,
|
||||
basedir: Optional[Path] = None,
|
||||
basedir: Path | None = None,
|
||||
) -> list[dict[str, Any]]:
|
||||
"""
|
||||
Searches a directory for valid objects
|
||||
|
|
|
@ -10,7 +10,7 @@ from base64 import urlsafe_b64decode
|
|||
from inspect import getfullargspec
|
||||
from os import walk
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
|
||||
from freqtrade.configuration.config_validation import validate_migrated_strategy_settings
|
||||
from freqtrade.constants import REQUIRED_ORDERTIF, REQUIRED_ORDERTYPES, USERPATH_STRATEGIES, Config
|
||||
|
@ -35,7 +35,7 @@ class StrategyResolver(IResolver):
|
|||
extra_path = "strategy_path"
|
||||
|
||||
@staticmethod
|
||||
def load_strategy(config: Optional[Config] = None) -> IStrategy:
|
||||
def load_strategy(config: Config | None = None) -> IStrategy:
|
||||
"""
|
||||
Load the custom class from config parameter
|
||||
:param config: configuration dictionary or None
|
||||
|
@ -246,7 +246,7 @@ class StrategyResolver(IResolver):
|
|||
|
||||
@staticmethod
|
||||
def _load_strategy(
|
||||
strategy_name: str, config: Config, extra_dir: Optional[str] = None
|
||||
strategy_name: str, config: Config, extra_dir: str | None = None
|
||||
) -> IStrategy:
|
||||
"""
|
||||
Searches and loads the specified strategy.
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import logging
|
||||
import secrets
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any, Union
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, WebSocket, status
|
||||
|
@ -55,7 +55,7 @@ def get_user_from_token(token, secret_key: str, token_type: str = "access") -> s
|
|||
# https://github.com/tiangolo/fastapi/blob/master/fastapi/security/api_key.py
|
||||
async def validate_ws_token(
|
||||
ws: WebSocket,
|
||||
ws_token: Union[str, None] = Query(default=None, alias="token"),
|
||||
ws_token: str | None = Query(default=None, alias="token"),
|
||||
api_config: dict[str, Any] = Depends(get_api_config),
|
||||
):
|
||||
secret_ws_token = api_config.get("ws_token", None)
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from datetime import date, datetime
|
||||
from typing import Any, Optional, Union
|
||||
from typing import Any
|
||||
|
||||
from pydantic import AwareDatetime, BaseModel, RootModel, SerializeAsAny
|
||||
|
||||
|
@ -9,9 +9,9 @@ from freqtrade.ft_types import ValidExchangesType
|
|||
|
||||
|
||||
class ExchangeModePayloadMixin(BaseModel):
|
||||
trading_mode: Optional[TradingMode] = None
|
||||
margin_mode: Optional[MarginMode] = None
|
||||
exchange: Optional[str] = None
|
||||
trading_mode: TradingMode | None = None
|
||||
margin_mode: MarginMode | None = None
|
||||
exchange: str | None = None
|
||||
|
||||
|
||||
class Ping(BaseModel):
|
||||
|
@ -43,12 +43,12 @@ class BackgroundTaskStatus(BaseModel):
|
|||
job_category: str
|
||||
status: str
|
||||
running: bool
|
||||
progress: Optional[float] = None
|
||||
error: Optional[str] = None
|
||||
progress: float | None = None
|
||||
error: str | None = None
|
||||
|
||||
|
||||
class BackgroundTaskResult(BaseModel):
|
||||
error: Optional[str] = None
|
||||
error: str | None = None
|
||||
status: str
|
||||
|
||||
|
||||
|
@ -61,9 +61,9 @@ class Balance(BaseModel):
|
|||
free: float
|
||||
balance: float
|
||||
used: float
|
||||
bot_owned: Optional[float] = None
|
||||
bot_owned: float | None = None
|
||||
est_stake: float
|
||||
est_stake_bot: Optional[float] = None
|
||||
est_stake_bot: float | None = None
|
||||
stake: str
|
||||
# Starting with 2.x
|
||||
side: str
|
||||
|
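A minimal, hypothetical sketch (not taken from the diff) of how Pydantic models accept a PEP 604 union with a default, matching optional fields such as bot_owned above. DemoBalance is an invented model and Pydantic v2 (which provides model_dump) is assumed.

from pydantic import BaseModel


class DemoBalance(BaseModel):
    free: float
    bot_owned: float | None = None   # optional field, defaults to None


print(DemoBalance(free=1.5))
print(DemoBalance(free=1.5, bot_owned=0.2).model_dump())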
@ -160,7 +160,7 @@ class Profit(BaseModel):
|
|||
max_drawdown_start_timestamp: int
|
||||
max_drawdown_end: str
|
||||
max_drawdown_end_timestamp: int
|
||||
trading_volume: Optional[float] = None
|
||||
trading_volume: float | None = None
|
||||
bot_start_timestamp: int
|
||||
bot_start_date: str
|
||||
|
||||
|
@ -173,7 +173,7 @@ class SellReason(BaseModel):
|
|||
|
||||
class Stats(BaseModel):
|
||||
exit_reasons: dict[str, SellReason]
|
||||
durations: dict[str, Optional[float]]
|
||||
durations: dict[str, float | None]
|
||||
|
||||
|
||||
class DailyWeeklyMonthlyRecord(BaseModel):
|
||||
|
@ -192,50 +192,50 @@ class DailyWeeklyMonthly(BaseModel):
|
|||
|
||||
|
||||
class UnfilledTimeout(BaseModel):
|
||||
entry: Optional[int] = None
|
||||
exit: Optional[int] = None
|
||||
unit: Optional[str] = None
|
||||
exit_timeout_count: Optional[int] = None
|
||||
entry: int | None = None
|
||||
exit: int | None = None
|
||||
unit: str | None = None
|
||||
exit_timeout_count: int | None = None
|
||||
|
||||
|
||||
class OrderTypes(BaseModel):
|
||||
entry: OrderTypeValues
|
||||
exit: OrderTypeValues
|
||||
emergency_exit: Optional[OrderTypeValues] = None
|
||||
force_exit: Optional[OrderTypeValues] = None
|
||||
force_entry: Optional[OrderTypeValues] = None
|
||||
emergency_exit: OrderTypeValues | None = None
|
||||
force_exit: OrderTypeValues | None = None
|
||||
force_entry: OrderTypeValues | None = None
|
||||
stoploss: OrderTypeValues
|
||||
stoploss_on_exchange: bool
|
||||
stoploss_on_exchange_interval: Optional[int] = None
|
||||
stoploss_on_exchange_interval: int | None = None
|
||||
|
||||
|
||||
class ShowConfig(BaseModel):
|
||||
version: str
|
||||
strategy_version: Optional[str] = None
|
||||
strategy_version: str | None = None
|
||||
api_version: float
|
||||
dry_run: bool
|
||||
trading_mode: str
|
||||
short_allowed: bool
|
||||
stake_currency: str
|
||||
stake_amount: str
|
||||
available_capital: Optional[float] = None
|
||||
available_capital: float | None = None
|
||||
stake_currency_decimals: int
|
||||
max_open_trades: IntOrInf
|
||||
minimal_roi: dict[str, Any]
|
||||
stoploss: Optional[float] = None
|
||||
stoploss: float | None = None
|
||||
stoploss_on_exchange: bool
-trailing_stop: Optional[bool] = None
-trailing_stop_positive: Optional[float] = None
-trailing_stop_positive_offset: Optional[float] = None
-trailing_only_offset_is_reached: Optional[bool] = None
-unfilledtimeout: Optional[UnfilledTimeout] = None  # Empty in webserver mode
-order_types: Optional[OrderTypes] = None
-use_custom_stoploss: Optional[bool] = None
-timeframe: Optional[str] = None
+trailing_stop: bool | None = None
+trailing_stop_positive: float | None = None
+trailing_stop_positive_offset: float | None = None
+trailing_only_offset_is_reached: bool | None = None
+unfilledtimeout: UnfilledTimeout | None = None  # Empty in webserver mode
+order_types: OrderTypes | None = None
+use_custom_stoploss: bool | None = None
+timeframe: str | None = None
timeframe_ms: int
timeframe_min: int
exchange: str
-strategy: Optional[str] = None
+strategy: str | None = None
force_entry_enable: bool
exit_pricing: dict[str, Any]
entry_pricing: dict[str, Any]

@@ -250,18 +250,18 @@ class OrderSchema(BaseModel):
pair: str
order_id: str
status: str
-remaining: Optional[float] = None
+remaining: float | None = None
amount: float
safe_price: float
cost: float
-filled: Optional[float] = None
+filled: float | None = None
ft_order_side: str
order_type: str
is_open: bool
-order_timestamp: Optional[int] = None
-order_filled_timestamp: Optional[int] = None
-ft_fee_base: Optional[float] = None
-ft_order_tag: Optional[str] = None
+order_timestamp: int | None = None
+order_filled_timestamp: int | None = None
+ft_fee_base: float | None = None
+ft_order_tag: str | None = None


class TradeSchema(BaseModel):

@@ -275,80 +275,80 @@ class TradeSchema(BaseModel):
amount: float
amount_requested: float
stake_amount: float
-max_stake_amount: Optional[float] = None
+max_stake_amount: float | None = None
strategy: str
-enter_tag: Optional[str] = None
+enter_tag: str | None = None
timeframe: int
-fee_open: Optional[float] = None
-fee_open_cost: Optional[float] = None
-fee_open_currency: Optional[str] = None
-fee_close: Optional[float] = None
-fee_close_cost: Optional[float] = None
-fee_close_currency: Optional[str] = None
+fee_open: float | None = None
+fee_open_cost: float | None = None
+fee_open_currency: str | None = None
+fee_close: float | None = None
+fee_close_cost: float | None = None
+fee_close_currency: str | None = None

open_date: str
open_timestamp: int
-open_fill_date: Optional[str]
-open_fill_timestamp: Optional[int]
+open_fill_date: str | None
+open_fill_timestamp: int | None
open_rate: float
-open_rate_requested: Optional[float] = None
+open_rate_requested: float | None = None
open_trade_value: float

-close_date: Optional[str] = None
-close_timestamp: Optional[int] = None
-close_rate: Optional[float] = None
-close_rate_requested: Optional[float] = None
+close_date: str | None = None
+close_timestamp: int | None = None
+close_rate: float | None = None
+close_rate_requested: float | None = None

-close_profit: Optional[float] = None
-close_profit_pct: Optional[float] = None
-close_profit_abs: Optional[float] = None
+close_profit: float | None = None
+close_profit_pct: float | None = None
+close_profit_abs: float | None = None

-profit_ratio: Optional[float] = None
-profit_pct: Optional[float] = None
-profit_abs: Optional[float] = None
-profit_fiat: Optional[float] = None
+profit_ratio: float | None = None
+profit_pct: float | None = None
+profit_abs: float | None = None
+profit_fiat: float | None = None

realized_profit: float
-realized_profit_ratio: Optional[float] = None
+realized_profit_ratio: float | None = None

-exit_reason: Optional[str] = None
-exit_order_status: Optional[str] = None
+exit_reason: str | None = None
+exit_order_status: str | None = None

-stop_loss_abs: Optional[float] = None
-stop_loss_ratio: Optional[float] = None
-stop_loss_pct: Optional[float] = None
-stoploss_last_update: Optional[str] = None
-stoploss_last_update_timestamp: Optional[int] = None
-initial_stop_loss_abs: Optional[float] = None
-initial_stop_loss_ratio: Optional[float] = None
-initial_stop_loss_pct: Optional[float] = None
+stop_loss_abs: float | None = None
+stop_loss_ratio: float | None = None
+stop_loss_pct: float | None = None
+stoploss_last_update: str | None = None
+stoploss_last_update_timestamp: int | None = None
+initial_stop_loss_abs: float | None = None
+initial_stop_loss_ratio: float | None = None
+initial_stop_loss_pct: float | None = None

-min_rate: Optional[float] = None
-max_rate: Optional[float] = None
+min_rate: float | None = None
+max_rate: float | None = None
has_open_orders: bool
orders: list[OrderSchema]

-leverage: Optional[float] = None
-interest_rate: Optional[float] = None
-liquidation_price: Optional[float] = None
-funding_fees: Optional[float] = None
-trading_mode: Optional[TradingMode] = None
+leverage: float | None = None
+interest_rate: float | None = None
+liquidation_price: float | None = None
+funding_fees: float | None = None
+trading_mode: TradingMode | None = None

-amount_precision: Optional[float] = None
-price_precision: Optional[float] = None
-precision_mode: Optional[int] = None
+amount_precision: float | None = None
+price_precision: float | None = None
+precision_mode: int | None = None


class OpenTradeSchema(TradeSchema):
-stoploss_current_dist: Optional[float] = None
-stoploss_current_dist_pct: Optional[float] = None
-stoploss_current_dist_ratio: Optional[float] = None
-stoploss_entry_dist: Optional[float] = None
-stoploss_entry_dist_ratio: Optional[float] = None
+stoploss_current_dist: float | None = None
+stoploss_current_dist_pct: float | None = None
+stoploss_current_dist_ratio: float | None = None
+stoploss_entry_dist: float | None = None
+stoploss_entry_dist_ratio: float | None = None
current_rate: float
total_profit_abs: float
-total_profit_fiat: Optional[float] = None
-total_profit_ratio: Optional[float] = None
+total_profit_fiat: float | None = None
+total_profit_ratio: float | None = None


class TradeResponse(BaseModel):

@@ -358,7 +358,7 @@ class TradeResponse(BaseModel):
total_trades: int


-ForceEnterResponse = RootModel[Union[TradeSchema, StatusMsg]]
+ForceEnterResponse = RootModel[TradeSchema | StatusMsg]


class LockModel(BaseModel):

@@ -370,7 +370,7 @@ class LockModel(BaseModel):
lock_timestamp: int
pair: str
side: str
-reason: Optional[str] = None
+reason: str | None = None


class Locks(BaseModel):

@@ -382,12 +382,12 @@ class LocksPayload(BaseModel):
pair: str
side: str = "*"  # Default to both sides
until: AwareDatetime
-reason: Optional[str] = None
+reason: str | None = None


class DeleteLockRequest(BaseModel):
-pair: Optional[str] = None
-lockid: Optional[int] = None
+pair: str | None = None
+lockid: int | None = None


class Logs(BaseModel):

@@ -398,17 +398,17 @@ class Logs(BaseModel):
class ForceEnterPayload(BaseModel):
pair: str
side: SignalDirection = SignalDirection.LONG
-price: Optional[float] = None
-ordertype: Optional[OrderTypeValues] = None
-stakeamount: Optional[float] = None
-entry_tag: Optional[str] = None
-leverage: Optional[float] = None
+price: float | None = None
+ordertype: OrderTypeValues | None = None
+stakeamount: float | None = None
+entry_tag: str | None = None
+leverage: float | None = None


class ForceExitPayload(BaseModel):
-tradeid: Union[str, int]
-ordertype: Optional[OrderTypeValues] = None
-amount: Optional[float] = None
+tradeid: str | int
+ordertype: OrderTypeValues | None = None
+amount: float | None = None


class BlacklistPayload(BaseModel):

@@ -430,7 +430,7 @@ class WhitelistResponse(BaseModel):


class WhitelistEvaluateResponse(BackgroundTaskResult):
-result: Optional[WhitelistResponse] = None
+result: WhitelistResponse | None = None


class DeleteTrade(BaseModel):

@@ -445,7 +445,7 @@ class PlotConfig_(BaseModel):
subplots: dict[str, Any]


-PlotConfig = RootModel[Union[PlotConfig_, dict]]
+PlotConfig = RootModel[PlotConfig_ | dict]


class StrategyListResponse(BaseModel):

@@ -489,7 +489,7 @@ class FreqAIModelListResponse(BaseModel):
class StrategyResponse(BaseModel):
strategy: str
code: str
-timeframe: Optional[str]
+timeframe: str | None


class AvailablePairs(BaseModel):

@@ -501,14 +501,14 @@ class AvailablePairs(BaseModel):
class PairCandlesRequest(BaseModel):
pair: str
timeframe: str
-limit: Optional[int] = None
-columns: Optional[list[str]] = None
+limit: int | None = None
+columns: list[str] | None = None


class PairHistoryRequest(PairCandlesRequest):
timerange: str
strategy: str
-freqaimodel: Optional[str] = None
+freqaimodel: str | None = None


class PairHistory(BaseModel):

@@ -540,16 +540,16 @@ class BacktestFreqAIInputs(BaseModel):

class BacktestRequest(BaseModel):
strategy: str
-timeframe: Optional[str] = None
-timeframe_detail: Optional[str] = None
-timerange: Optional[str] = None
-max_open_trades: Optional[IntOrInf] = None
-stake_amount: Optional[Union[str, float]] = None
+timeframe: str | None = None
+timeframe_detail: str | None = None
+timerange: str | None = None
+max_open_trades: IntOrInf | None = None
+stake_amount: str | float | None = None
enable_protections: bool
-dry_run_wallet: Optional[float] = None
-backtest_cache: Optional[str] = None
-freqaimodel: Optional[str] = None
-freqai: Optional[BacktestFreqAIInputs] = None
+dry_run_wallet: float | None = None
+backtest_cache: str | None = None
+freqaimodel: str | None = None
+freqai: BacktestFreqAIInputs | None = None


class BacktestResponse(BaseModel):

@@ -558,9 +558,9 @@ class BacktestResponse(BaseModel):
status_msg: str
step: str
progress: float
-trade_count: Optional[float] = None
+trade_count: float | None = None
# TODO: Properly type backtestresult...
-backtest_result: Optional[dict[str, Any]] = None
+backtest_result: dict[str, Any] | None = None


# TODO: This is a copy of BacktestHistoryEntryType

@@ -569,11 +569,11 @@ class BacktestHistoryEntry(BaseModel):
strategy: str
run_id: str
backtest_start_time: int
-notes: Optional[str] = ""
-backtest_start_ts: Optional[int] = None
-backtest_end_ts: Optional[int] = None
-timeframe: Optional[str] = None
-timeframe_detail: Optional[str] = None
+notes: str | None = ""
+backtest_start_ts: int | None = None
+backtest_end_ts: int | None = None
+timeframe: str | None = None
+timeframe_detail: str | None = None


class BacktestMetadataUpdate(BaseModel):

@@ -593,9 +593,9 @@ class SysInfo(BaseModel):


class Health(BaseModel):
-last_process: Optional[datetime] = None
-last_process_ts: Optional[int] = None
-bot_start: Optional[datetime] = None
-bot_start_ts: Optional[int] = None
-bot_startup: Optional[datetime] = None
-bot_startup_ts: Optional[int] = None
+last_process: datetime | None = None
+last_process_ts: int | None = None
+bot_start: datetime | None = None
+bot_start_ts: int | None = None
+bot_startup: datetime | None = None
+bot_startup_ts: int | None = None
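The schema changes above are a mechanical rewrite of typing.Optional[X] into the PEP 604 spelling X | None, which is the form produced by pyupgrade-style rules once the target version is Python 3.10. Both spellings denote the same type, so Pydantic validation is unchanged. A minimal sketch illustrating the pattern (the OrderSketch model is hypothetical and not part of freqtrade):

    from pydantic import BaseModel


    class OrderSketch(BaseModel):
        # Old spelling: remaining: Optional[float] = None (needs "from typing import Optional")
        # New spelling: identical semantics, no typing import required on Python 3.10+.
        remaining: float | None = None


    print(OrderSketch().remaining)               # None
    print(OrderSketch(remaining=1.5).remaining)  # 1.5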
@@ -1,6 +1,5 @@
import logging
from copy import deepcopy
-from typing import Optional

from fastapi import APIRouter, Depends, Query
from fastapi.exceptions import HTTPException

@@ -119,17 +118,17 @@ def count(rpc: RPC = Depends(get_rpc)):


@router.get("/entries", response_model=list[Entry], tags=["info"])
-def entries(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
+def entries(pair: str | None = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_enter_tag_performance(pair)


@router.get("/exits", response_model=list[Exit], tags=["info"])
-def exits(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
+def exits(pair: str | None = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_exit_reason_performance(pair)


@router.get("/mix_tags", response_model=list[MixTag], tags=["info"])
-def mix_tags(pair: Optional[str] = None, rpc: RPC = Depends(get_rpc)):
+def mix_tags(pair: str | None = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_mix_tag_performance(pair)


@@ -216,7 +215,7 @@ def edge(rpc: RPC = Depends(get_rpc)):


@router.get("/show_config", response_model=ShowConfig, tags=["info"])
-def show_config(rpc: Optional[RPC] = Depends(get_rpc_optional), config=Depends(get_config)):
+def show_config(rpc: RPC | None = Depends(get_rpc_optional), config=Depends(get_config)):
state = ""
strategy_version = None
if rpc:

@@ -304,7 +303,7 @@ def add_locks(payload: list[LocksPayload], rpc: RPC = Depends(get_rpc)):


@router.get("/logs", response_model=Logs, tags=["info"])
-def logs(limit: Optional[int] = None):
+def logs(limit: int | None = None):
return RPC._rpc_get_logs(limit)


@@ -330,9 +329,7 @@ def reload_config(rpc: RPC = Depends(get_rpc)):


@router.get("/pair_candles", response_model=PairHistory, tags=["candle data"])
-def pair_candles(
-pair: str, timeframe: str, limit: Optional[int] = None, rpc: RPC = Depends(get_rpc)
-):
+def pair_candles(pair: str, timeframe: str, limit: int | None = None, rpc: RPC = Depends(get_rpc)):
return rpc._rpc_analysed_dataframe(pair, timeframe, limit, None)


@@ -350,7 +347,7 @@ def pair_history(
timeframe: str,
timerange: str,
strategy: str,
-freqaimodel: Optional[str] = None,
+freqaimodel: str | None = None,
config=Depends(get_config),
exchange=Depends(get_exchange),
):

@@ -396,9 +393,9 @@ def pair_history_filtered(

@router.get("/plot_config", response_model=PlotConfig, tags=["candle data"])
def plot_config(
-strategy: Optional[str] = None,
+strategy: str | None = None,
config=Depends(get_config),
-rpc: Optional[RPC] = Depends(get_rpc_optional),
+rpc: RPC | None = Depends(get_rpc_optional),
):
if not strategy:
if not rpc:

@@ -494,9 +491,9 @@ def list_freqaimodels(config=Depends(get_config)):

@router.get("/available_pairs", response_model=AvailablePairs, tags=["candle data"])
def list_available_pairs(
-timeframe: Optional[str] = None,
-stake_currency: Optional[str] = None,
-candletype: Optional[CandleType] = None,
+timeframe: str | None = None,
+stake_currency: str | None = None,
+candletype: CandleType | None = None,
config=Depends(get_config),
):
dh = get_datahandler(config["datadir"], config.get("dataformat_ohlcv"))
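For the FastAPI endpoints above, str | None = None behaves exactly like Optional[str] = None: the parameter stays an optional query parameter that defaults to None. A minimal sketch of the pattern (hypothetical app and route, not the freqtrade API server):

    from fastapi import FastAPI

    app = FastAPI()


    @app.get("/entries")
    def entries(pair: str | None = None) -> dict[str, str | None]:
        # "pair" is an optional query parameter; omitting ?pair=... yields None.
        return {"pair": pair}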
@@ -1,5 +1,5 @@
from collections.abc import AsyncIterator
-from typing import Any, Optional
+from typing import Any
from uuid import uuid4

from fastapi import Depends, HTTPException

@@ -14,13 +14,13 @@ from freqtrade.rpc.rpc import RPC, RPCException
from .webserver import ApiServer


-def get_rpc_optional() -> Optional[RPC]:
+def get_rpc_optional() -> RPC | None:
if ApiServer._has_rpc:
return ApiServer._rpc
return None


-async def get_rpc() -> Optional[AsyncIterator[RPC]]:
+async def get_rpc() -> AsyncIterator[RPC] | None:
_rpc = get_rpc_optional()
if _rpc:
request_id = str(uuid4())
@@ -1,5 +1,4 @@
from pathlib import Path
-from typing import Optional

from fastapi import APIRouter
from fastapi.exceptions import HTTPException

@@ -42,7 +41,7 @@ async def index_html(rest_of_path: str):
filename = uibase / rest_of_path
# It's security relevant to check "relative_to".
# Without this, Directory-traversal is possible.
-media_type: Optional[str] = None
+media_type: str | None = None
if filename.suffix == ".js":
# Force text/javascript for .js files - Circumvent faulty system configuration
media_type = "application/javascript"
@@ -1,6 +1,6 @@
import logging
from ipaddress import ip_address
-from typing import Any, Optional
+from typing import Any

import orjson
import uvicorn

@@ -39,7 +39,7 @@ class ApiServer(RPCHandler):
_has_rpc: bool = False
_config: Config = {}
# websocket message stuff
-_message_stream: Optional[MessageStream] = None
+_message_stream: MessageStream | None = None

def __new__(cls, *args, **kwargs):
"""
@@ -1,4 +1,4 @@
-from typing import Any, Literal, Optional, TypedDict
+from typing import Any, Literal, TypedDict
from uuid import uuid4

from freqtrade.exchange.exchange import Exchange

@@ -8,9 +8,9 @@ class JobsContainer(TypedDict):
category: Literal["pairlist"]
is_running: bool
status: str
-progress: Optional[float]
+progress: float | None
result: Any
-error: Optional[str]
+error: str | None


class ApiBG:
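In the TypedDict above, float | None (like Optional[float] before it) only widens the value type; the key itself remains required. A minimal sketch, unrelated to the freqtrade class, showing that distinction:

    from typing import TypedDict


    class JobSketch(TypedDict):
        status: str
        progress: float | None  # key must be present, but its value may be None


    job: JobSketch = {"status": "running", "progress": None}  # "progress" cannot be omitted
    print(job["progress"])  # None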
@@ -4,7 +4,7 @@ import time
from collections import deque
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
-from typing import Any, Optional, Union
+from typing import Any
from uuid import uuid4

from fastapi import WebSocketDisconnect

@@ -30,7 +30,7 @@ class WebSocketChannel:
def __init__(
self,
websocket: WebSocketType,
-channel_id: Optional[str] = None,
+channel_id: str | None = None,
serializer_cls: type[WebSocketSerializer] = HybridJSONWebSocketSerializer,
send_throttle: float = 0.01,
):

@@ -80,9 +80,7 @@ class WebSocketChannel:
# maximum of 3 seconds per message
self._send_high_limit = min(max(self.avg_send_time * 2, 1), 3)

-async def send(
-self, message: Union[WSMessageSchemaType, dict[str, Any]], use_timeout: bool = False
-):
+async def send(self, message: WSMessageSchemaType | dict[str, Any], use_timeout: bool = False):
"""
Send a message on the wrapped websocket. If the sending
takes too long, it will raise a TimeoutError and
@@ -1,4 +1,4 @@
-from typing import Any, Union
+from typing import Any

from fastapi import WebSocket as FastAPIWebSocket
from websockets.client import WebSocketClientProtocol as WebSocket

@@ -13,7 +13,7 @@ class WebSocketProxy:
"""

def __init__(self, websocket: WebSocketType):
-self._websocket: Union[FastAPIWebSocket, WebSocket] = websocket
+self._websocket: FastAPIWebSocket | WebSocket = websocket

@property
def raw_websocket(self):
@@ -1,6 +1,6 @@
import logging
from abc import ABC, abstractmethod
-from typing import Any, Union
+from typing import Any

import orjson
import rapidjson

@@ -26,7 +26,7 @@ class WebSocketSerializer(ABC):
def _deserialize(self, data):
raise NotImplementedError()

-async def send(self, data: Union[WSMessageSchemaType, dict[str, Any]]):
+async def send(self, data: WSMessageSchemaType | dict[str, Any]):
await self._websocket.send(self._serialize(data))

async def recv(self) -> bytes:
@@ -1,5 +1,5 @@
from datetime import datetime
-from typing import Any, Optional, TypedDict
+from typing import Any, TypedDict

from pandas import DataFrame
from pydantic import BaseModel, ConfigDict

@@ -14,18 +14,18 @@ class BaseArbitraryModel(BaseModel):

class WSRequestSchema(BaseArbitraryModel):
type: RPCRequestType
-data: Optional[Any] = None
+data: Any | None = None


class WSMessageSchemaType(TypedDict):
# Type for typing to avoid doing pydantic typechecks.
type: RPCMessageType
-data: Optional[dict[str, Any]]
+data: dict[str, Any] | None


class WSMessageSchema(BaseArbitraryModel):
type: RPCMessageType
-data: Optional[Any] = None
+data: Any | None = None
model_config = ConfigDict(extra="allow")

@@ -8,8 +8,9 @@ from it
import asyncio
import logging
import socket
+from collections.abc import Callable
from threading import Thread
-from typing import Any, Callable, TypedDict, Union
+from typing import Any, TypedDict

import websockets
from pydantic import ValidationError

@@ -42,7 +43,7 @@ class Producer(TypedDict):
logger = logging.getLogger(__name__)


-def schema_to_dict(schema: Union[WSMessageSchema, WSRequestSchema]):
+def schema_to_dict(schema: WSMessageSchema | WSRequestSchema):
return schema.model_dump(exclude_none=True)


@@ -282,9 +283,7 @@ class ExternalMessageConsumer:
logger.debug(e, exc_info=e)
raise

-def send_producer_request(
-self, producer_name: str, request: Union[WSRequestSchema, dict[str, Any]]
-):
+def send_producer_request(self, producer_name: str, request: WSRequestSchema | dict[str, Any]):
"""
Publish a message to the producer's message stream to be
sent by the channel task.
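Besides the Optional/Union rewrites, the import block above moves Callable from typing to collections.abc; the typing alias has been deprecated since Python 3.9, and the collections.abc form is subscriptable in the same way. A minimal sketch (hypothetical helper, not taken from the repository):

    from collections.abc import Callable


    def apply_twice(func: Callable[[int], int], value: int) -> int:
        # Apply the given callable twice; the annotation uses collections.abc.Callable.
        return func(func(value))


    print(apply_twice(lambda x: x + 1, 3))  # 5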
@@ -7,7 +7,7 @@ from abc import abstractmethod
from collections.abc import Generator, Sequence
from datetime import date, datetime, timedelta, timezone
from math import isnan
-from typing import Any, Optional, Union
+from typing import Any

import psutil
from dateutil.relativedelta import relativedelta

@@ -97,7 +97,7 @@ class RPC:
"""

# Bind _fiat_converter if needed
-_fiat_converter: Optional[CryptoToFiatConverter] = None
+_fiat_converter: CryptoToFiatConverter | None = None

def __init__(self, freqtrade) -> None:
"""

@@ -112,7 +112,7 @@ class RPC:

@staticmethod
def _rpc_show_config(
-config, botstate: Union[State, str], strategy_version: Optional[str] = None
+config, botstate: State | str, strategy_version: str | None = None
) -> dict[str, Any]:
"""
Return a dict of config options.

@@ -168,7 +168,7 @@ class RPC:
}
return val

-def _rpc_trade_status(self, trade_ids: Optional[list[int]] = None) -> list[dict[str, Any]]:
+def _rpc_trade_status(self, trade_ids: list[int] | None = None) -> list[dict[str, Any]]:
"""
Below follows the RPC backend it is prefixed with rpc_ to raise awareness that it is
a remotely exposed function

@@ -184,11 +184,11 @@ class RPC:
else:
results = []
for trade in trades:
-current_profit_fiat: Optional[float] = None
-total_profit_fiat: Optional[float] = None
+current_profit_fiat: float | None = None
+total_profit_fiat: float | None = None

# prepare open orders details
-oo_details: Optional[str] = ""
+oo_details: str | None = ""
oo_details_lst = [
f"({oo.order_type} {oo.side} rem={oo.safe_remaining:.8f})"
for oo in trade.open_orders

@@ -197,7 +197,7 @@ class RPC:
oo_details = ", ".join(oo_details_lst)

total_profit_abs = 0.0
-total_profit_ratio: Optional[float] = None
+total_profit_ratio: float | None = None
# calculate profit and send message to user
if trade.is_open:
try:

@@ -487,7 +487,7 @@ class RPC:
return {"exit_reasons": exit_reasons, "durations": durations}

def _rpc_trade_statistics(
-self, stake_currency: str, fiat_display_currency: str, start_date: Optional[datetime] = None
+self, stake_currency: str, fiat_display_currency: str, start_date: datetime | None = None
) -> dict[str, Any]:
"""Returns cumulative profit statistics"""

@@ -682,7 +682,7 @@ class RPC:
est_bot_stake = amount
else:
pair = self._freqtrade.exchange.get_valid_pair_combination(coin, stake_currency)
-rate: Optional[float] = tickers.get(pair, {}).get("last", None)
+rate: float | None = tickers.get(pair, {}).get("last", None)
if rate:
if pair.startswith(stake_currency) and not pair.endswith(stake_currency):
rate = 1.0 / rate

@@ -851,7 +851,7 @@ class RPC:
return {"status": "Reloaded from orders from exchange"}

def __exec_force_exit(
-self, trade: Trade, ordertype: Optional[str], amount: Optional[float] = None
+self, trade: Trade, ordertype: str | None, amount: float | None = None
) -> bool:
# Check if there is there are open orders
trade_entry_cancelation_registry = []

@@ -882,7 +882,7 @@ class RPC:
order_type = ordertype or self._freqtrade.strategy.order_types.get(
"force_exit", self._freqtrade.strategy.order_types["exit"]
)
-sub_amount: Optional[float] = None
+sub_amount: float | None = None
if amount and amount < trade.amount:
# Partial exit ...
min_exit_stake = self._freqtrade.exchange.get_min_pair_stake_amount(

@@ -901,7 +901,7 @@ class RPC:
return False

def _rpc_force_exit(
-self, trade_id: str, ordertype: Optional[str] = None, *, amount: Optional[float] = None
+self, trade_id: str, ordertype: str | None = None, *, amount: float | None = None
) -> dict[str, str]:
"""
Handler for forceexit <id>.

@@ -960,14 +960,14 @@ class RPC:
def _rpc_force_entry(
self,
pair: str,
-price: Optional[float],
+price: float | None,
*,
-order_type: Optional[str] = None,
+order_type: str | None = None,
order_side: SignalDirection = SignalDirection.LONG,
-stake_amount: Optional[float] = None,
-enter_tag: Optional[str] = "force_entry",
-leverage: Optional[float] = None,
-) -> Optional[Trade]:
+stake_amount: float | None = None,
+enter_tag: str | None = "force_entry",
+leverage: float | None = None,
+) -> Trade | None:
"""
Handler for forcebuy <asset> <price>
Buys a pair trade at the given or current price

@@ -977,7 +977,7 @@ class RPC:
# check if valid pair

# check if pair already has an open pair
-trade: Optional[Trade] = Trade.get_trades(
+trade: Trade | None = Trade.get_trades(
[Trade.is_open.is_(True), Trade.pair == pair]
).first()
is_short = order_side == SignalDirection.SHORT

@@ -1052,7 +1052,7 @@ class RPC:
)
Trade.commit()

-def _rpc_delete(self, trade_id: int) -> dict[str, Union[str, int]]:
+def _rpc_delete(self, trade_id: int) -> dict[str, str | int]:
"""
Handler for delete <id>.
Delete the given trade and close eventually existing open orders.

@@ -1093,7 +1093,7 @@ class RPC:
"cancel_order_count": c_count,
}

-def _rpc_list_custom_data(self, trade_id: int, key: Optional[str]) -> list[dict[str, Any]]:
+def _rpc_list_custom_data(self, trade_id: int, key: str | None) -> list[dict[str, Any]]:
# Query for trade
trade = Trade.get_trades(trade_filter=[Trade.id == trade_id]).first()
if trade is None:

@@ -1128,21 +1128,21 @@ class RPC:

return pair_rates

-def _rpc_enter_tag_performance(self, pair: Optional[str]) -> list[dict[str, Any]]:
+def _rpc_enter_tag_performance(self, pair: str | None) -> list[dict[str, Any]]:
"""
Handler for buy tag performance.
Shows a performance statistic from finished trades
"""
return Trade.get_enter_tag_performance(pair)

-def _rpc_exit_reason_performance(self, pair: Optional[str]) -> list[dict[str, Any]]:
+def _rpc_exit_reason_performance(self, pair: str | None) -> list[dict[str, Any]]:
"""
Handler for exit reason performance.
Shows a performance statistic from finished trades
"""
return Trade.get_exit_reason_performance(pair)

-def _rpc_mix_tag_performance(self, pair: Optional[str]) -> list[dict[str, Any]]:
+def _rpc_mix_tag_performance(self, pair: str | None) -> list[dict[str, Any]]:
"""
Handler for mix tag (enter_tag + exit_reason) performance.
Shows a performance statistic from finished trades

@@ -1174,7 +1174,7 @@ class RPC:
return {"lock_count": len(locks), "locks": [lock.to_json() for lock in locks]}

def _rpc_delete_lock(
-self, lockid: Optional[int] = None, pair: Optional[str] = None
+self, lockid: int | None = None, pair: str | None = None
) -> dict[str, Any]:
"""Delete specific lock(s)"""
locks: Sequence[PairLock] = []

@@ -1192,9 +1192,7 @@ class RPC:

return self._rpc_locks()

-def _rpc_add_lock(
-self, pair: str, until: datetime, reason: Optional[str], side: str
-) -> PairLock:
+def _rpc_add_lock(self, pair: str, until: datetime, reason: str | None, side: str) -> PairLock:
lock = PairLocks.lock_pair(
pair=pair,
until=until,

@@ -1224,7 +1222,7 @@ class RPC:
resp["errors"] = errors
return resp

-def _rpc_blacklist(self, add: Optional[list[str]] = None) -> dict:
+def _rpc_blacklist(self, add: list[str] | None = None) -> dict:
"""Returns the currently active blacklist"""
errors = {}
if add:

@@ -1249,7 +1247,7 @@ class RPC:
return res

@staticmethod
-def _rpc_get_logs(limit: Optional[int]) -> dict[str, Any]:
+def _rpc_get_logs(limit: int | None) -> dict[str, Any]:
"""Returns the last X logs"""
if limit:
buffer = bufferHandler.buffer[-limit:]

@@ -1286,7 +1284,7 @@ class RPC:
timeframe: str,
dataframe: DataFrame,
last_analyzed: datetime,
-selected_cols: Optional[list[str]],
+selected_cols: list[str] | None,
) -> dict[str, Any]:
has_content = len(dataframe) != 0
dataframe_columns = list(dataframe.columns)

@@ -1355,7 +1353,7 @@ class RPC:
return res

def _rpc_analysed_dataframe(
-self, pair: str, timeframe: str, limit: Optional[int], selected_cols: Optional[list[str]]
+self, pair: str, timeframe: str, limit: int | None, selected_cols: list[str] | None
) -> dict[str, Any]:
"""Analyzed dataframe in Dict form"""

@@ -1365,7 +1363,7 @@ class RPC:
)

def __rpc_analysed_dataframe_raw(
-self, pair: str, timeframe: str, limit: Optional[int]
+self, pair: str, timeframe: str, limit: int | None
) -> tuple[DataFrame, datetime]:
"""
Get the dataframe and last analyze from the dataprovider

@@ -1383,7 +1381,7 @@ class RPC:
return _data, last_analyzed

def _ws_all_analysed_dataframes(
-self, pairlist: list[str], limit: Optional[int]
+self, pairlist: list[str], limit: int | None
) -> Generator[dict[str, Any], None, None]:
"""
Get the analysed dataframes of each pair in the pairlist.

@@ -1403,7 +1401,7 @@ class RPC:

yield {"key": (pair, timeframe, candle_type), "df": dataframe, "la": last_analyzed}

-def _ws_request_analyzed_df(self, limit: Optional[int] = None, pair: Optional[str] = None):
+def _ws_request_analyzed_df(self, limit: int | None = None, pair: str | None = None):
"""Historical Analyzed Dataframes for WebSocket"""
pairlist = [pair] if pair else self._freqtrade.active_pair_whitelist

@@ -1415,7 +1413,7 @@ class RPC:

@staticmethod
def _rpc_analysed_history_full(
-config: Config, pair: str, timeframe: str, exchange, selected_cols: Optional[list[str]]
+config: Config, pair: str, timeframe: str, exchange, selected_cols: list[str] | None
) -> dict[str, Any]:
timerange_parsed = TimeRange.parse_timerange(config.get("timerange"))

@@ -1482,9 +1480,9 @@ class RPC:
"ram_pct": psutil.virtual_memory().percent,
}

-def health(self) -> dict[str, Optional[Union[str, int]]]:
+def health(self) -> dict[str, str | int | None]:
last_p = self._freqtrade.last_process
-res: dict[str, Union[None, str, int]] = {
+res: dict[str, None | str | int] = {
"last_process": None,
"last_process_loc": None,
"last_process_ts": None,
@@ -1,5 +1,5 @@
from datetime import datetime
-from typing import Any, Literal, Optional, TypedDict, Union
+from typing import Any, Literal, TypedDict

from freqtrade.constants import PairWithTimeframe
from freqtrade.enums import RPCMessageType

@@ -31,7 +31,7 @@ class RPCProtectionMsg(RPCSendMsgBase):
type: Literal[RPCMessageType.PROTECTION_TRIGGER, RPCMessageType.PROTECTION_TRIGGER_GLOBAL]
id: int
pair: str
-base_currency: Optional[str]
+base_currency: str | None
lock_time: str
lock_timestamp: int
lock_end_time: str

@@ -48,23 +48,23 @@ class RPCWhitelistMsg(RPCSendMsgBase):

class __RPCEntryExitMsgBase(RPCSendMsgBase):
trade_id: int
-buy_tag: Optional[str]
-enter_tag: Optional[str]
+buy_tag: str | None
+enter_tag: str | None
exchange: str
pair: str
base_currency: str
quote_currency: str
-leverage: Optional[float]
+leverage: float | None
direction: str
limit: float
open_rate: float
order_type: str
stake_amount: float
stake_currency: str
-fiat_currency: Optional[str]
+fiat_currency: str | None
amount: float
open_date: datetime
-current_rate: Optional[float]
+current_rate: float | None
sub_trade: bool


@@ -84,11 +84,11 @@ class RPCExitMsg(__RPCEntryExitMsgBase):
close_rate: float
profit_amount: float
profit_ratio: float
-exit_reason: Optional[str]
+exit_reason: str | None
close_date: datetime
-# current_rate: Optional[float]
-order_rate: Optional[float]
-final_profit_ratio: Optional[float]
+# current_rate: float | None
+order_rate: float | None
+final_profit_ratio: float | None
is_final_exit: bool


@@ -98,7 +98,7 @@ class RPCExitCancelMsg(__RPCEntryExitMsgBase):
gain: ProfitLossStr
profit_amount: float
profit_ratio: float
-exit_reason: Optional[str]
+exit_reason: str | None
close_date: datetime


@@ -122,18 +122,18 @@ class RPCNewCandleMsg(RPCSendMsgBase):
data: PairWithTimeframe


-RPCOrderMsg = Union[RPCEntryMsg, RPCExitMsg, RPCExitCancelMsg, RPCCancelMsg]
+RPCOrderMsg = RPCEntryMsg | RPCExitMsg | RPCExitCancelMsg | RPCCancelMsg


-RPCSendMsg = Union[
-RPCStatusMsg,
-RPCStrategyMsg,
-RPCProtectionMsg,
-RPCWhitelistMsg,
-RPCEntryMsg,
-RPCCancelMsg,
-RPCExitMsg,
-RPCExitCancelMsg,
-RPCAnalyzedDFMsg,
-RPCNewCandleMsg,
-]
+RPCSendMsg = (
+RPCStatusMsg
+| RPCStrategyMsg
+| RPCProtectionMsg
+| RPCWhitelistMsg
+| RPCEntryMsg
+| RPCCancelMsg
+| RPCExitMsg
+| RPCExitCancelMsg
+| RPCAnalyzedDFMsg
+| RPCNewCandleMsg
+)
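Unlike annotations, module-level aliases such as RPCOrderMsg and RPCSendMsg above are evaluated at import time, so the X | Y form genuinely requires Python 3.10 or newer at runtime; this is why the alias rewrite goes hand in hand with raising the target version. A minimal sketch of the same pattern, using hypothetical message classes rather than freqtrade's:

    from dataclasses import dataclass


    @dataclass
    class EntrySketchMsg:
        pair: str


    @dataclass
    class ExitSketchMsg:
        pair: str
        profit: float


    # Evaluated eagerly at import time: creates a types.UnionType on Python 3.10+.
    OrderSketchMsg = EntrySketchMsg | ExitSketchMsg


    def describe(msg: OrderSketchMsg) -> str:
        # PEP 604 unions also work directly with isinstance() on Python 3.10+.
        return "entry" if isinstance(msg, EntrySketchMsg) else "exit"


    print(describe(EntrySketchMsg("BTC/USDT")))  # entry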
@@ -8,7 +8,7 @@ import asyncio
import json
import logging
import re
-from collections.abc import Coroutine
+from collections.abc import Callable, Coroutine
from copy import deepcopy
from dataclasses import dataclass
from datetime import date, datetime, timedelta

@@ -17,7 +17,7 @@ from html import escape
from itertools import chain
from math import isnan
from threading import Thread
-from typing import Any, Callable, Literal, Optional, Union
+from typing import Any, Literal

from tabulate import tabulate
from telegram import (

@@ -146,7 +146,7 @@ class Telegram(RPCHandler):
Validates the keyboard configuration from telegram config
section.
"""
-self._keyboard: list[list[Union[str, KeyboardButton]]] = [
+self._keyboard: list[list[str | KeyboardButton]] = [
["/daily", "/profit", "/balance"],
["/status", "/status table", "/performance"],
["/count", "/start", "/stop", "/help"],

@@ -499,7 +499,7 @@ class Telegram(RPCHandler):
profit_fiat_extra = f" / {profit_fiat:.3f} {fiat_currency}"
return profit_fiat_extra

-def compose_message(self, msg: RPCSendMsg) -> Optional[str]:
+def compose_message(self, msg: RPCSendMsg) -> str | None:
if msg["type"] == RPCMessageType.ENTRY or msg["type"] == RPCMessageType.ENTRY_FILL:
message = self._format_entry_msg(msg)

@@ -1308,7 +1308,7 @@ class Telegram(RPCHandler):
await query.answer()
await query.edit_message_text(text="Force exit canceled.")
return
-trade: Optional[Trade] = Trade.get_trades(trade_filter=Trade.id == trade_id).first()
+trade: Trade | None = Trade.get_trades(trade_filter=Trade.id == trade_id).first()
await query.answer()
if trade:
await query.edit_message_text(

@@ -1318,7 +1318,7 @@ class Telegram(RPCHandler):
else:
await query.edit_message_text(text=f"Trade {trade_id} not found.")

-async def _force_enter_action(self, pair, price: Optional[float], order_side: SignalDirection):
+async def _force_enter_action(self, pair, price: float | None, order_side: SignalDirection):
if pair != "cancel":
try:

@@ -2006,10 +2006,10 @@ class Telegram(RPCHandler):
msg: str,
parse_mode: str = ParseMode.MARKDOWN,
disable_notification: bool = False,
-keyboard: Optional[list[list[InlineKeyboardButton]]] = None,
+keyboard: list[list[InlineKeyboardButton]] | None = None,
callback_path: str = "",
reload_able: bool = False,
-query: Optional[CallbackQuery] = None,
+query: CallbackQuery | None = None,
) -> None:
"""
Send given markdown message

@@ -2018,7 +2018,7 @@ class Telegram(RPCHandler):
:param parse_mode: telegram parse mode
:return: None
"""
-reply_markup: Union[InlineKeyboardMarkup, ReplyKeyboardMarkup]
+reply_markup: InlineKeyboardMarkup | ReplyKeyboardMarkup
if query:
await self._update_msg(
query=query,
@@ -4,7 +4,7 @@ This module manages webhook communication

import logging
import time
-from typing import Any, Optional
+from typing import Any

from requests import RequestException, post


@@ -44,7 +44,7 @@ class Webhook(RPCHandler):
"""
pass

-def _get_value_dict(self, msg: RPCSendMsg) -> Optional[dict[str, Any]]:
+def _get_value_dict(self, msg: RPCSendMsg) -> dict[str, Any] | None:
whconfig = self._config["webhook"]
if msg["type"].value in whconfig:
# Explicit types should have priority
@@ -6,7 +6,7 @@ This module defines a base class for auto-hyperoptable strategies.
import logging
from collections.abc import Iterator
from pathlib import Path
-from typing import Any, Optional, Union
+from typing import Any

from freqtrade.constants import Config
from freqtrade.exceptions import OperationalException

@@ -39,7 +39,7 @@ class HyperStrategyMixin:
# Init/loading of parameters is done as part of ft_bot_start().

def enumerate_parameters(
-self, category: Optional[str] = None
+self, category: str | None = None
) -> Iterator[tuple[str, BaseParameter]]:
"""
Find all optimizable parameters and return (name, attr) iterator.

@@ -190,7 +190,7 @@ class HyperStrategyMixin:


def detect_parameters(
-obj: Union[HyperStrategyMixin, type[HyperStrategyMixin]], category: str
+obj: HyperStrategyMixin | type[HyperStrategyMixin], category: str
) -> Iterator[tuple[str, BaseParameter]]:
"""
Detect all parameters for 'category' for "obj"
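Across all of the files above the rewrite is purely mechanical, because the old and new spellings denote the same types; typing.get_args returns the same members for both forms, which is why type checkers, Pydantic and FastAPI treat them identically. A small illustration, not taken from the repository:

    from typing import Optional, Union, get_args

    print(get_args(Optional[str]))                           # (<class 'str'>, <class 'NoneType'>)
    print(get_args(str | None))                              # (<class 'str'>, <class 'NoneType'>)
    print(get_args(Union[int, str]) == get_args(int | str))  # True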