freqtrade_origin/freqtrade/data/converter/orderflow.py

"""
Functions to convert orderflow data from public_trades
"""

import logging
import time

import numpy as np
import pandas as pd

from freqtrade.constants import DEFAULT_ORDERFLOW_COLUMNS, Config
from freqtrade.exceptions import DependencyException


logger = logging.getLogger(__name__)


def _init_dataframe_with_trades_columns(dataframe: pd.DataFrame):
    """
    Populates a dataframe with trades columns
    :param dataframe: Dataframe to populate
    """
    dataframe["trades"] = dataframe.apply(lambda _: [], axis=1)
    dataframe["orderflow"] = dataframe.apply(lambda _: {}, axis=1)
    dataframe["bid"] = np.nan
    dataframe["ask"] = np.nan
    dataframe["delta"] = np.nan
    dataframe["min_delta"] = np.nan
    dataframe["max_delta"] = np.nan
    dataframe["total_trades"] = np.nan
    dataframe["stacked_imbalances_bid"] = np.nan
    dataframe["stacked_imbalances_ask"] = np.nan


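# Note (added commentary, not in the upstream file): the helper below maps each public
# trade onto the OHLCV candle it belongs to. For example, with timeframe "5m" a trade
# timestamped 12:03:17 gets candle_start 12:00:00 and candle_end 12:05:00, where
# candle_end is derived via timeframe_to_next_date.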
def _calculate_ohlcv_candle_start_and_end(df: pd.DataFrame, timeframe: str):
    # imported locally, presumably to avoid a circular import at module load time
    from freqtrade.exchange import timeframe_to_next_date, timeframe_to_resample_freq

    timeframe_frequency = timeframe_to_resample_freq(timeframe)
    # calculate ohlcv candle start and end
    if df is not None and not df.empty:
        df["datetime"] = pd.to_datetime(df["date"], unit="ms")
        df["candle_start"] = df["datetime"].dt.floor(timeframe_frequency)
        # used in _now_is_time_to_refresh_trades
        df["candle_end"] = df["candle_start"].apply(
            lambda candle_start: timeframe_to_next_date(timeframe, candle_start)
        )
        df.drop(columns=["datetime"], inplace=True)


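# Illustrative sketch (not part of the upstream file): populate_dataframe_with_trades
# reads the "orderflow" section of the config. The values below are placeholders taken
# from the docstring examples further down in this module:
#
#     config = {
#         "timeframe": "5m",
#         "orderflow": {
#             "scale": 0.5,                  # price bin size
#             "imbalance_ratio": 3,          # bid/ask ratio that counts as an imbalance
#             "imbalance_volume": 10,        # minimum volume for an imbalance to count
#             "stacked_imbalance_range": 3,  # consecutive imbalanced levels required
#         },
#     }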
def populate_dataframe_with_trades(
    config: Config, dataframe: pd.DataFrame, trades: pd.DataFrame
) -> pd.DataFrame:
"""
Populates a dataframe with trades
:param dataframe: Dataframe to populate
:param trades: Trades to populate with
:return: Dataframe with trades populated
"""
2024-05-15 15:09:32 +00:00
config_orderflow = config["orderflow"]
timeframe = config["timeframe"]
2024-02-12 12:20:52 +00:00
# create columns for trades
_init_dataframe_with_trades_columns(dataframe)
df = dataframe.copy()
try:
start_time = time.time()
# calculate ohlcv candle start and end
_calculate_ohlcv_candle_start_and_end(trades, timeframe)
# slice of trades that are before current ohlcv candles to make groupby faster
trades = trades.loc[trades.candle_start >= df.date[0]]
2024-02-12 12:20:52 +00:00
        trades.reset_index(inplace=True, drop=True)
        # group trades by candle start
        trades_grouped_by_candle_start = trades.groupby("candle_start", group_keys=False)
        for candle_start in trades_grouped_by_candle_start.groups:
            trades_grouped_df = trades[candle_start == trades["candle_start"]]
            is_between = candle_start == df["date"]
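            # is_between is a boolean mask over the ohlcv dataframe: True for the
            # (normally single) row whose open date equals this group's candle_start.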
            if np.any(is_between == True):  # noqa: E712
                from freqtrade.exchange import timeframe_to_next_date

                candle_next = timeframe_to_next_date(timeframe, candle_start)
                # warn if there are no trades at the next candle,
                # because this candle is probably not finished yet
                if candle_next not in trades_grouped_by_candle_start.groups:
                    logger.warning(
                        f"candle at {candle_start} with {len(trades_grouped_df)} trades "
                        f"might be unfinished, because no finished trades at {candle_next}"
                    )
                # add trades to each candle
                df.loc[is_between, "trades"] = df.loc[is_between, "trades"].apply(
                    lambda _: trades_grouped_df
                )
                # calculate orderflow for each candle
                df.loc[is_between, "orderflow"] = df.loc[is_between, "orderflow"].apply(
                    lambda _: trades_to_volumeprofile_with_total_delta_bid_ask(
                        pd.DataFrame(trades_grouped_df), scale=config_orderflow["scale"]
                    )
                )
                # calculate imbalances for each candle's orderflow
                df.loc[is_between, "imbalances"] = df.loc[is_between, "orderflow"].apply(
                    lambda x: trades_orderflow_to_imbalances(
                        x,
                        imbalance_ratio=config_orderflow["imbalance_ratio"],
                        imbalance_volume=config_orderflow["imbalance_volume"],
                    )
                )
                _stacked_imb = config_orderflow["stacked_imbalance_range"]
                df.loc[is_between, "stacked_imbalances_bid"] = df.loc[
                    is_between, "imbalances"
                ].apply(lambda x: stacked_imbalance_bid(x, stacked_imbalance_range=_stacked_imb))
                df.loc[is_between, "stacked_imbalances_ask"] = df.loc[
                    is_between, "imbalances"
                ].apply(lambda x: stacked_imbalance_ask(x, stacked_imbalance_range=_stacked_imb))
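                # Side convention used below: amounts of trades whose side is not "buy"
                # feed the "bid" aggregates (sells hitting the bid), while amounts of
                # trades whose side is not "sell" feed the "ask" aggregates (buys lifting
                # the ask); delta therefore ends up as ask volume minus bid volume.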
                buy = df.loc[is_between, "bid"].apply(
                    lambda _: np.where(
                        trades_grouped_df["side"].str.contains("buy"),
                        0,
                        trades_grouped_df["amount"],
                    )
                )
                sell = df.loc[is_between, "ask"].apply(
                    lambda _: np.where(
                        trades_grouped_df["side"].str.contains("sell"),
                        0,
                        trades_grouped_df["amount"],
                    )
                )
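                # Running delta across this candle's trades: delta accumulates
                # (buy amount - sell amount) trade by trade, while max_delta and
                # min_delta record the extremes the running delta reaches.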
                deltas_per_trade = sell - buy
                min_delta = 0
                max_delta = 0
                delta = 0
                for deltas in deltas_per_trade:
                    for d in deltas:
                        delta += d
                        if delta > max_delta:
                            max_delta = delta
                        if delta < min_delta:
                            min_delta = delta
                df.loc[is_between, "max_delta"] = max_delta
                df.loc[is_between, "min_delta"] = min_delta
                df.loc[is_between, "bid"] = np.where(
                    trades_grouped_df["side"].str.contains("buy"), 0, trades_grouped_df["amount"]
                ).sum()
                df.loc[is_between, "ask"] = np.where(
                    trades_grouped_df["side"].str.contains("sell"), 0, trades_grouped_df["amount"]
                ).sum()
                df.loc[is_between, "delta"] = (
                    df.loc[is_between, "ask"] - df.loc[is_between, "bid"]
                )
                min_delta = np.min(deltas_per_trade)
                max_delta = np.max(deltas_per_trade)

                df.loc[is_between, "total_trades"] = len(trades_grouped_df)
                # copy to avoid memory leaks
                dataframe.loc[is_between] = df.loc[is_between].copy()
            else:
                logger.debug(f"Found NO candles for trades starting with {candle_start}")
        logger.debug(f"trades.groups_keys in {time.time() - start_time} seconds")

        logger.debug(f"trades.singleton_iterate in {time.time() - start_time} seconds")

    except Exception as e:
        logger.exception("Error populating dataframe with trades")
        raise DependencyException(e)

    return dataframe


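# Illustrative sketch (not from the upstream file): with scale=0.5, trades at prices
# 100.1 and 100.2 are rounded to the 100.0 bin and a trade at 100.3 to the 100.5 bin;
# the function below then sums bid/ask amounts and trade counts per bin and returns
# one row per price level.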
def trades_to_volumeprofile_with_total_delta_bid_ask(trades: pd.DataFrame, scale: float):
    """
    :param trades: dataframe of public trades
    :param scale: scale aka bin size e.g. 0.5
    :return: trades binned to levels according to scale aka orderflow
    """
    df = pd.DataFrame([], columns=DEFAULT_ORDERFLOW_COLUMNS)
    # bid columns hold amounts/counts of sell-side trades, ask columns those of buy-side trades
    df["bid_amount"] = np.where(trades["side"].str.contains("buy"), 0, trades["amount"])
    df["ask_amount"] = np.where(trades["side"].str.contains("sell"), 0, trades["amount"])
    df["bid"] = np.where(trades["side"].str.contains("buy"), 0, 1)
    df["ask"] = np.where(trades["side"].str.contains("sell"), 0, 1)
    # round the prices to the nearest multiple of the scale
    df["price"] = ((trades["price"] / scale).round() * scale).astype("float64").values
    if df.empty:
        df["total"] = np.nan
        df["delta"] = np.nan
        return df

    df["delta"] = df["ask_amount"] - df["bid_amount"]
    df["total_volume"] = df["ask_amount"] + df["bid_amount"]
    df["total_trades"] = df["ask"] + df["bid"]

    # group to bins aka apply scale
    df = df.groupby("price").sum(numeric_only=True)
    return df


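# Illustrative sketch (not from the upstream file): imbalances compare bid volume at one
# price level against ask volume one level higher (ask is shifted by -1). With
# imbalance_ratio=3 and imbalance_volume=10, a level with bid=30 facing ask=5 on the
# level above counts as a bid imbalance only if that level's total_volume is at least 10.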
def trades_orderflow_to_imbalances(df: pd.DataFrame, imbalance_ratio: int, imbalance_volume: int):
    """
    :param df: dataframe with bid and ask
    :param imbalance_ratio: imbalance_ratio e.g. 3
    :param imbalance_volume: imbalance volume e.g. 10
    :return: dataframe with bid and ask imbalance
    """
    bid = df.bid
    ask = df.ask.shift(-1)
    bid_imbalance = (bid / ask) > (imbalance_ratio)
    # overwrite bid_imbalance with False if volume is not big enough
    bid_imbalance_filtered = np.where(df.total_volume < imbalance_volume, False, bid_imbalance)
    ask_imbalance = (ask / bid) > (imbalance_ratio)
    # overwrite ask_imbalance with False if volume is not big enough
    ask_imbalance_filtered = np.where(df.total_volume < imbalance_volume, False, ask_imbalance)
    dataframe = pd.DataFrame(
        {"bid_imbalance": bid_imbalance_filtered, "ask_imbalance": ask_imbalance_filtered},
        index=df.index,
    )
    return dataframe


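# Illustrative sketch (not from the upstream file): given a bid_imbalance column of
# [True, True, True, False, True] and stacked_imbalance_range=3, the consecutive-run
# count reaches 3 at the third level, so stacked_imbalance returns the price of that
# level; with should_reverse=True the last qualifying level is returned instead.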
def stacked_imbalance(
    df: pd.DataFrame, label: str, stacked_imbalance_range: int, should_reverse: bool
):
"""
y * (y.groupby((y != y.shift()).cumsum()).cumcount() + 1)
https://stackoverflow.com/questions/27626542/counting-consecutive-positive-values-in-python-pandas-array
"""
2024-05-15 15:09:32 +00:00
imbalance = df[f"{label}_imbalance"]
2024-02-12 12:20:52 +00:00
int_series = pd.Series(np.where(imbalance, 1, 0))
2024-05-15 15:09:32 +00:00
stacked = int_series * (
int_series.groupby((int_series != int_series.shift()).cumsum()).cumcount() + 1
2024-02-12 12:20:52 +00:00
)
2024-05-15 15:09:32 +00:00
max_stacked_imbalance_idx = stacked.index[stacked >= stacked_imbalance_range]
2024-02-12 12:20:52 +00:00
stacked_imbalance_price = np.nan
if not max_stacked_imbalance_idx.empty:
2024-05-15 15:09:32 +00:00
idx = (
max_stacked_imbalance_idx[0]
if not should_reverse
else np.flipud(max_stacked_imbalance_idx)[0]
)
2024-02-12 12:20:52 +00:00
stacked_imbalance_price = imbalance.index[idx]
return stacked_imbalance_price
2024-03-16 15:26:17 +00:00
def stacked_imbalance_bid(df: pd.DataFrame, stacked_imbalance_range: int):
    return stacked_imbalance(df, "bid", stacked_imbalance_range, should_reverse=False)


def stacked_imbalance_ask(df: pd.DataFrame, stacked_imbalance_range: int):
    return stacked_imbalance(df, "ask", stacked_imbalance_range, should_reverse=True)


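# Note (added commentary, not in the upstream file): the helper below collapses an
# orderflow dataframe into a volume profile by summing bid and ask volume per price
# "level" and taking their difference as each level's delta.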
def orderflow_to_volume_profile(df: pd.DataFrame):
    """
    :param df: orderflow dataframe
    :return: volume profile dataframe
    """
    bid = df.groupby("level").bid.sum()
    ask = df.groupby("level").ask.sum()
    delta = df.groupby("level").ask.sum() - df.groupby("level").bid.sum()
    df = pd.DataFrame({"bid": bid, "ask": ask, "delta": delta})
    return df