freqtrade_origin/freqtrade/data/history/datahandlers/jsondatahandler.py

import logging

import numpy as np
from pandas import DataFrame, read_json, to_datetime

from freqtrade import misc
from freqtrade.configuration import TimeRange
from freqtrade.constants import DEFAULT_DATAFRAME_COLUMNS, DEFAULT_TRADES_COLUMNS
from freqtrade.data.converter import trades_dict_to_list, trades_list_to_df
from freqtrade.enums import CandleType, TradingMode

from .idatahandler import IDataHandler


logger = logging.getLogger(__name__)


class JsonDataHandler(IDataHandler):
    _use_zip = False
    _columns = DEFAULT_DATAFRAME_COLUMNS

    def ohlcv_store(
        self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
    ) -> None:
        """
        Store data in json format "values".
            format looks as follows:
            [[<date>,<open>,<high>,<low>,<close>,<volume>]]
        :param pair: Pair - used to generate filename
        :param timeframe: Timeframe - used to generate filename
        :param data: Dataframe containing OHLCV data
        :param candle_type: Any of the enum CandleType (must match trading mode!)
        :return: None
        """
        filename = self._pair_data_filename(self._datadir, pair, timeframe, candle_type)
        self.create_dir_if_needed(filename)
        _data = data.copy()
        # Convert date to integer milliseconds since epoch
        _data["date"] = _data["date"].astype(np.int64) // 1000 // 1000
        # Reset index, select only appropriate columns and save as json
        _data.reset_index(drop=True).loc[:, self._columns].to_json(
            filename, orient="values", compression="gzip" if self._use_zip else None
        )
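
    # Illustrative note (concrete values are an assumption, not part of the original
    # module): with DEFAULT_DATAFRAME_COLUMNS of date/open/high/low/close/volume,
    # a stored file holds one array per candle, with the date as an integer
    # millisecond epoch, e.g.
    #   [[1672531200000, 16541.7, 16545.0, 16533.1, 16540.2, 123.45], ...]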

    def _ohlcv_load(
        self, pair: str, timeframe: str, timerange: TimeRange | None, candle_type: CandleType
    ) -> DataFrame:
        """
        Internal method used to load data for one pair from disk.
        Implements the loading and conversion to a Pandas dataframe.
        Timerange trimming and dataframe validation happen outside of this method.
        :param pair: Pair to load data
        :param timeframe: Timeframe (e.g. "5m")
        :param timerange: Limit data to be loaded to this timerange.
                          Optionally implemented by subclasses to avoid loading
                          all data where possible.
        :param candle_type: Any of the enum CandleType (must match trading mode!)
        :return: DataFrame with ohlcv data, or empty DataFrame
        """
        filename = self._pair_data_filename(
            self._datadir, pair, timeframe, candle_type=candle_type
        )
        if not filename.exists():
            # Fallback mode for 1M files
            filename = self._pair_data_filename(
                self._datadir, pair, timeframe, candle_type=candle_type, no_timeframe_modify=True
            )
            if not filename.exists():
                return DataFrame(columns=self._columns)
        try:
            pairdata = read_json(filename, orient="values")
            pairdata.columns = self._columns
        except ValueError:
            logger.error(f"Could not load data for {pair}.")
            return DataFrame(columns=self._columns)
        pairdata = pairdata.astype(
            dtype={
                "open": "float",
                "high": "float",
                "low": "float",
                "close": "float",
                "volume": "float",
            }
        )
        pairdata["date"] = to_datetime(pairdata["date"], unit="ms", utc=True)
        return pairdata
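
    # Note on the date round trip: ohlcv_store() narrows nanosecond timestamps to
    # millisecond epoch integers (astype(np.int64) // 1000 // 1000), and the load
    # above restores them with to_datetime(..., unit="ms", utc=True), so a
    # store/load cycle keeps candle timestamps at millisecond resolution in UTC.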

    def ohlcv_append(
        self, pair: str, timeframe: str, data: DataFrame, candle_type: CandleType
    ) -> None:
        """
        Append data to existing data structures
        :param pair: Pair
        :param timeframe: Timeframe this ohlcv data is for
        :param data: Data to append.
        :param candle_type: Any of the enum CandleType (must match trading mode!)
        """
        raise NotImplementedError()

    def _trades_store(self, pair: str, data: DataFrame, trading_mode: TradingMode) -> None:
        """
        Store trades data (DataFrame) to file
        :param pair: Pair - used for filename
        :param data: Dataframe containing trades
                     column sequence as in DEFAULT_TRADES_COLUMNS
        :param trading_mode: Trading mode to use (used to determine the filename)
        """
        filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
        trades = data.values.tolist()
        misc.file_dump_json(filename, trades, is_zip=self._use_zip)
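
    # Illustrative note (the concrete column names are an assumption based on the
    # constant, not on this file): data.values.tolist() flattens the trades
    # DataFrame into rows in DEFAULT_TRADES_COLUMNS order, e.g.
    #   [[<timestamp_ms>, <id>, <type>, <side>, <price>, <amount>, <cost>], ...]
    # and _trades_load() below rebuilds the DataFrame via trades_list_to_df().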

    def trades_append(self, pair: str, data: DataFrame):
        """
        Append data to existing files
        :param pair: Pair - used for filename
        :param data: Dataframe containing trades
                     column sequence as in DEFAULT_TRADES_COLUMNS
        """
        raise NotImplementedError()

    def _trades_load(
        self, pair: str, trading_mode: TradingMode, timerange: TimeRange | None = None
    ) -> DataFrame:
        """
        Load a pair from file, either .json.gz or .json
        # TODO: respect timerange ...
        :param pair: Load trades for this pair
        :param trading_mode: Trading mode to use (used to determine the filename)
        :param timerange: Timerange to load trades for - currently not implemented
        :return: Dataframe containing trades
        """
        filename = self._pair_trades_filename(self._datadir, pair, trading_mode)
        tradesdata = misc.file_load_json(filename)

        if not tradesdata:
            return DataFrame(columns=DEFAULT_TRADES_COLUMNS)

        if isinstance(tradesdata[0], dict):
            # Convert trades dict to list
            logger.info("Old trades format detected - converting")
            tradesdata = trades_dict_to_list(tradesdata)

        return trades_list_to_df(tradesdata, convert=False)

    @classmethod
    def _get_file_extension(cls):
        return "json.gz" if cls._use_zip else "json"


class JsonGzDataHandler(JsonDataHandler):
    _use_zip = True
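

# Hedged usage sketch (not part of the original module; it assumes IDataHandler
# takes the data directory as its constructor argument and exposes a public
# ohlcv_load() wrapper around _ohlcv_load() above - paths and pair are examples):
#
#     from pathlib import Path
#     from freqtrade.enums import CandleType
#
#     handler = JsonGzDataHandler(Path("user_data/data/binance"))
#     df = handler.ohlcv_load("BTC/USDT", "5m", candle_type=CandleType.SPOT)
#     print(df.tail())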