# freqtrade_origin/freqtrade/data/history.py

"""
Handle historic data (ohlcv).
Includes:
* load data for a pair (or a list of pairs) from disk
* download data from exchange and store to disk
"""
import logging
import operator
from copy import deepcopy
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

import arrow
from pandas import DataFrame

from freqtrade import OperationalException, misc
from freqtrade.configuration import TimeRange
from freqtrade.data.converter import parse_ticker_dataframe, trades_to_ohlcv
from freqtrade.exchange import Exchange, timeframe_to_minutes, timeframe_to_seconds

logger = logging.getLogger(__name__)


def trim_tickerlist(tickerlist: List[Dict], timerange: TimeRange) -> List[Dict]:
    """
    Trim tickerlist based on given timerange
    """
    if not tickerlist:
        return tickerlist

    start_index = 0
    stop_index = len(tickerlist)

    if timerange.starttype == 'date':
        while (start_index < len(tickerlist) and
               tickerlist[start_index][0] < timerange.startts * 1000):
            start_index += 1

    if timerange.stoptype == 'date':
        while (stop_index > 0 and
               tickerlist[stop_index - 1][0] > timerange.stopts * 1000):
            stop_index -= 1

    if start_index > stop_index:
        raise ValueError(f'The timerange [{timerange.startts},{timerange.stopts}] is incorrect')

    return tickerlist[start_index:stop_index]
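
# Illustrative usage of trim_tickerlist (a sketch, not part of the original
# module): candle rows follow the ccxt ohlcv layout
# [timestamp_ms, open, high, low, close, volume], and TimeRange is assumed to
# take (starttype, stoptype, startts, stopts) with timestamps in seconds:
#
#     timerange = TimeRange('date', None, 1546300800, 0)  # 2019-01-01 onwards
#     trimmed = trim_tickerlist(candles, timerange)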


def trim_dataframe(df: DataFrame, timerange: TimeRange, df_date_col: str = 'date') -> DataFrame:
    """
    Trim dataframe based on given timerange
    :param df: Dataframe to trim
    :param timerange: timerange (use start and end date if available)
    :param df_date_col: Column in the dataframe to use as Date column
    :return: trimmed dataframe
    """
    if timerange.starttype == 'date':
        start = datetime.fromtimestamp(timerange.startts, tz=timezone.utc)
        df = df.loc[df[df_date_col] >= start, :]
    if timerange.stoptype == 'date':
        stop = datetime.fromtimestamp(timerange.stopts, tz=timezone.utc)
        df = df.loc[df[df_date_col] <= stop, :]
    return df
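
# The dataframe counterpart (illustrative sketch): rows whose `df_date_col`
# falls exactly on a boundary are kept, since both comparisons are inclusive.
#
#     timerange = TimeRange('date', 'date', 1546300800, 1548979200)  # Jan 2019
#     df = trim_dataframe(df, timerange)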


def load_tickerdata_file(datadir: Path, pair: str, timeframe: str,
                         timerange: Optional[TimeRange] = None) -> Optional[list]:
    """
    Load a pair from file, either .json.gz or .json
    :return: tickerlist or empty list if unsuccessful
    """
    filename = pair_data_filename(datadir, pair, timeframe)
    pairdata = misc.file_load_json(filename)
    if not pairdata:
        return []

    if timerange:
        pairdata = trim_tickerlist(pairdata, timerange)
    return pairdata


def store_tickerdata_file(datadir: Path, pair: str,
                          timeframe: str, data: list, is_zip: bool = False):
    """
    Stores tickerdata to file
    """
    filename = pair_data_filename(datadir, pair, timeframe)
    misc.file_dump_json(filename, data, is_zip=is_zip)


def load_trades_file(datadir: Path, pair: str,
                     timerange: Optional[TimeRange] = None) -> List[Dict]:
    """
    Load a pair from file, either .json.gz or .json
    :return: tradelist or empty list if unsuccessful
    """
    filename = pair_trades_filename(datadir, pair)
    tradesdata = misc.file_load_json(filename)
    if not tradesdata:
        return []

    return tradesdata


def store_trades_file(datadir: Path, pair: str,
                      data: list, is_zip: bool = True):
    """
    Stores trades data to file
    """
    filename = pair_trades_filename(datadir, pair)
    misc.file_dump_json(filename, data, is_zip=is_zip)


def _validate_pairdata(pair, pairdata, timerange: TimeRange):
    if timerange.starttype == 'date' and pairdata[0][0] > timerange.startts * 1000:
        logger.warning('Missing data at start for pair %s, data starts at %s',
                       pair, arrow.get(pairdata[0][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))
    if timerange.stoptype == 'date' and pairdata[-1][0] < timerange.stopts * 1000:
        logger.warning('Missing data at end for pair %s, data ends at %s',
                       pair, arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S'))


def load_pair_history(pair: str,
                      timeframe: str,
                      datadir: Path,
                      timerange: Optional[TimeRange] = None,
                      refresh_pairs: bool = False,
                      exchange: Optional[Exchange] = None,
                      fill_up_missing: bool = True,
                      drop_incomplete: bool = True,
                      startup_candles: int = 0,
                      ) -> DataFrame:
    """
    Loads cached ticker history for the given pair.
    :param pair: Pair to load data for
    :param timeframe: Ticker timeframe (e.g. "5m")
    :param datadir: Path to the data storage location.
    :param timerange: Limit data to be loaded to this timerange
    :param refresh_pairs: Refresh pairs from exchange.
        (Note: Requires exchange to be passed as well.)
    :param exchange: Exchange object (needed when using "refresh_pairs")
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param drop_incomplete: Drop last candle assuming it may be incomplete.
    :param startup_candles: Additional candles to load at the start of the period
    :return: DataFrame with ohlcv data, or empty DataFrame
    """
    timerange_startup = deepcopy(timerange)
    if startup_candles > 0 and timerange_startup:
        timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles)

    # The user forced the refresh of pairs
    if refresh_pairs:
        _download_pair_history(datadir=datadir,
                               exchange=exchange,
                               pair=pair,
                               timeframe=timeframe,
                               timerange=timerange)

    pairdata = load_tickerdata_file(datadir, pair, timeframe, timerange=timerange_startup)

    if pairdata:
        if timerange_startup:
            _validate_pairdata(pair, pairdata, timerange_startup)
        return parse_ticker_dataframe(pairdata, timeframe, pair=pair,
                                      fill_missing=fill_up_missing,
                                      drop_incomplete=drop_incomplete)
    else:
        logger.warning(
            f'No history data for pair: "{pair}", timeframe: {timeframe}. '
            'Use `freqtrade download-data` to download the data'
        )
        return DataFrame()
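
# Typical call (illustrative sketch; the datadir path is an assumption, and
# TimeRange.parse_timerange is assumed to accept a '<start>-<stop>' string):
#
#     df = load_pair_history(pair='ETH/BTC', timeframe='5m',
#                            datadir=Path('user_data/data/binance'),
#                            timerange=TimeRange.parse_timerange('20190101-20190201'),
#                            startup_candles=100)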


def load_data(datadir: Path,
              timeframe: str,
              pairs: List[str],
              refresh_pairs: bool = False,
              exchange: Optional[Exchange] = None,
              timerange: Optional[TimeRange] = None,
              fill_up_missing: bool = True,
              startup_candles: int = 0,
              fail_without_data: bool = False
              ) -> Dict[str, DataFrame]:
    """
    Loads ticker history data for a list of pairs
    :param datadir: Path to the data storage location.
    :param timeframe: Ticker Timeframe (e.g. "5m")
    :param pairs: List of pairs to load
    :param refresh_pairs: Refresh pairs from exchange.
        (Note: Requires exchange to be passed as well.)
    :param exchange: Exchange object (needed when using "refresh_pairs")
    :param timerange: Limit data to be loaded to this timerange
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param startup_candles: Additional candles to load at the start of the period
    :param fail_without_data: Raise OperationalException if no data is found.
    :return: dict(<pair>:<Dataframe>)
    TODO: refresh_pairs is still used by edge to keep the data up to date.
        This should be replaced in the future. Instead, writing the current candles to disk
        from dataprovider should be implemented, as this would avoid loading ohlcv data twice.
        exchange and refresh_pairs are then not needed here nor in load_pair_history.
    """
    result: Dict[str, DataFrame] = {}
    if startup_candles > 0 and timerange:
        logger.info(f'Using indicator startup period: {startup_candles} ...')

    for pair in pairs:
        hist = load_pair_history(pair=pair, timeframe=timeframe,
                                 datadir=datadir, timerange=timerange,
                                 refresh_pairs=refresh_pairs,
                                 exchange=exchange,
                                 fill_up_missing=fill_up_missing,
                                 startup_candles=startup_candles)
        if not hist.empty:
            result[pair] = hist

    if fail_without_data and not result:
        raise OperationalException("No data found. Terminating.")
    return result
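
# Sketch (illustrative): loading several pairs returns a dict keyed by pair;
# pairs without local data are skipped silently unless fail_without_data is set.
#
#     data = load_data(datadir=Path('user_data/data/binance'), timeframe='5m',
#                      pairs=['ETH/BTC', 'XRP/BTC'], fail_without_data=True)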


def pair_data_filename(datadir: Path, pair: str, timeframe: str) -> Path:
    pair_s = pair.replace("/", "_")
    filename = datadir.joinpath(f'{pair_s}-{timeframe}.json')
    return filename


def pair_trades_filename(datadir: Path, pair: str) -> Path:
    pair_s = pair.replace("/", "_")
    filename = datadir.joinpath(f'{pair_s}-trades.json.gz')
    return filename
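
# For example (paths are illustrative):
#     pair_data_filename(Path('user_data/data'), 'ETH/BTC', '5m')
#         -> user_data/data/ETH_BTC-5m.json
#     pair_trades_filename(Path('user_data/data'), 'ETH/BTC')
#         -> user_data/data/ETH_BTC-trades.json.gz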


def _load_cached_data_for_updating(datadir: Path, pair: str, timeframe: str,
                                   timerange: Optional[TimeRange]) -> Tuple[List[Any],
                                                                            Optional[int]]:
    """
    Load cached data to download more data.
    If timerange is passed in, checks whether data from a time before the stored data will be
    downloaded.
    If that's the case then what's available should be completely overwritten.
    Only used by download_pair_history().
    """
    since_ms = None

    # user sets timerange, so find the start time
    if timerange:
        if timerange.starttype == 'date':
            since_ms = timerange.startts * 1000
        elif timerange.stoptype == 'line':
            num_minutes = timerange.stopts * timeframe_to_minutes(timeframe)
            since_ms = arrow.utcnow().shift(minutes=num_minutes).timestamp * 1000

    # read the cached file
    # Intentionally don't pass timerange in - since we need to load the full dataset.
    data = load_tickerdata_file(datadir, pair, timeframe)
    # remove the last item, it could be an incomplete candle
    if data:
        data.pop()
    else:
        data = []

    if data:
        if since_ms and since_ms < data[0][0]:
            # Earlier data than existing data requested, redownload all
            data = []
        else:
            # a part of the data was already downloaded, so download only the missing part
            since_ms = data[-1][0] + 1

    return (data, since_ms)
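
# Worked example (illustrative): with cached 5m candles ending at
# 1546300800000 and no timerange, the possibly-incomplete last candle is
# popped, so the newest kept candle is 1546300500000 and
# since_ms = 1546300500001 - one millisecond later, letting the next
# download resume without overlap.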


def _download_pair_history(datadir: Path,
                           exchange: Optional[Exchange],
                           pair: str,
                           timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None) -> bool:
    """
    Download latest candles from the exchange for the pair and timeframe passed in parameters
    The data is downloaded starting from the last correct data that
    exists in a cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param timeframe: Ticker Timeframe (e.g. "5m")
    :param timerange: range of time to download
    :return: bool with success state
    """
    if not exchange:
        raise OperationalException(
            "Exchange needs to be initialized when downloading pair history data"
        )
    try:
        logger.info(
            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
            f'and store in {datadir}.'
        )

        data, since_ms = _load_cached_data_for_updating(datadir, pair, timeframe, timerange)

        logger.debug("Current Start: %s", misc.format_ms_time(data[0][0]) if data else 'None')
        logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               int(arrow.utcnow().shift(
                                                   days=-30).float_timestamp) * 1000
                                               )
        data.extend(new_data)

        logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
        logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

        store_tickerdata_file(datadir, pair, timeframe, data=data)
        return True

    except Exception as e:
        logger.error(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. '
            f'Error: {e}'
        )
        return False


def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str],
                                datadir: Path, timerange: Optional[TimeRange] = None,
                                erase=False) -> List[str]:
    """
    Refresh stored ohlcv data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    pairs_not_available = []
    for pair in pairs:
        if pair not in exchange.markets:
            pairs_not_available.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue
        for timeframe in timeframes:

            dl_file = pair_data_filename(datadir, pair, timeframe)
            if erase and dl_file.exists():
                logger.info(
                    f'Deleting existing data for pair {pair}, interval {timeframe}.')
                dl_file.unlink()

            logger.info(f'Downloading pair {pair}, interval {timeframe}.')
            _download_pair_history(datadir=datadir, exchange=exchange,
                                   pair=pair, timeframe=str(timeframe),
                                   timerange=timerange)
    return pairs_not_available
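
# Sketch of how the download-data subcommand drives this (illustrative; the
# exchange object, timerange and datadir are assumed to be set up by the caller):
#
#     pairs_not_avail = refresh_backtest_ohlcv_data(
#         exchange, pairs=['ETH/BTC'], timeframes=['5m', '1h'],
#         datadir=Path('user_data/data/binance'), timerange=timerange)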


def _download_trades_history(datadir: Path,
                             exchange: Exchange,
                             pair: str,
                             timerange: Optional[TimeRange] = None) -> bool:
    """
    Download trade history from the exchange.
    Appends to previously downloaded trades data.
    """
    try:
        since = timerange.startts * 1000 if timerange and timerange.starttype == 'date' else None

        trades = load_trades_file(datadir, pair)

        from_id = trades[-1]['id'] if trades else None

        logger.debug("Current Start: %s", trades[0]['datetime'] if trades else 'None')
        logger.debug("Current End: %s", trades[-1]['datetime'] if trades else 'None')

        # Default since to 30 days if nothing is given
        new_trades = exchange.get_historic_trades(pair=pair,
                                                  since=since if since else
                                                  int(arrow.utcnow().shift(
                                                      days=-30).float_timestamp) * 1000,
                                                  from_id=from_id,
                                                  )
        trades.extend(new_trades[1])
        store_trades_file(datadir, pair, trades)

        logger.debug("New Start: %s", trades[0]['datetime'])
        logger.debug("New End: %s", trades[-1]['datetime'])
        logger.info(f"New Amount of trades: {len(trades)}")
        return True

    except Exception as e:
        logger.error(
            f'Failed to download historic trades for pair: "{pair}". '
            f'Error: {e}'
        )
        return False
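
# Note on incremental downloads (editorial comment): from_id is the
# exchange-side id of the newest stored trade; passing it to
# get_historic_trades lets pagination resume where the previous download
# stopped instead of re-fetching the full history, while `since` mainly
# matters for the initial download.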


def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
                                 timerange: TimeRange, erase=False) -> List[str]:
    """
    Refresh stored trades data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    pairs_not_available = []
    for pair in pairs:
        if pair not in exchange.markets:
            pairs_not_available.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue

        dl_file = pair_trades_filename(datadir, pair)
        if erase and dl_file.exists():
            logger.info(
                f'Deleting existing data for pair {pair}.')
            dl_file.unlink()

        logger.info(f'Downloading trades for pair {pair}.')
        _download_trades_history(datadir=datadir, exchange=exchange,
                                 pair=pair,
                                 timerange=timerange)
    return pairs_not_available


def convert_trades_to_ohlcv(pairs: List[str], timeframes: List[str],
                            datadir: Path, timerange: TimeRange, erase=False) -> None:
    """
    Convert stored trades data to ohlcv data
    """
    for pair in pairs:
        trades = load_trades_file(datadir, pair)
        for timeframe in timeframes:
            ohlcv_file = pair_data_filename(datadir, pair, timeframe)
            if erase and ohlcv_file.exists():
                logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
                ohlcv_file.unlink()
            ohlcv = trades_to_ohlcv(trades, timeframe)
            # Store ohlcv
            store_tickerdata_file(datadir, pair, timeframe, data=ohlcv)


def get_timeframe(data: Dict[str, DataFrame]) -> Tuple[arrow.Arrow, arrow.Arrow]:
    """
    Get the maximum timeframe for the given backtest data
    :param data: dictionary with preprocessed backtesting data
    :return: tuple containing min_date, max_date
    """
    timeframe = [
        (arrow.get(frame['date'].min()), arrow.get(frame['date'].max()))
        for frame in data.values()
    ]
    return min(timeframe, key=operator.itemgetter(0))[0], \
        max(timeframe, key=operator.itemgetter(1))[1]
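
# Example (illustrative): with two preprocessed frames, the result spans from
# the earliest start to the latest end across all pairs:
#
#     min_date, max_date = get_timeframe({'ETH/BTC': df1, 'XRP/BTC': df2})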


def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
                           max_date: datetime, timeframe_min: int) -> bool:
    """
    Validates preprocessed backtesting data for missing values and shows warnings about it.
    :param data: preprocessed backtesting data (as DataFrame)
    :param pair: pair used for log output.
    :param min_date: start-date of the data
    :param max_date: end-date of the data
    :param timeframe_min: ticker Timeframe in minutes
    """
    # total difference in minutes / timeframe-minutes
    expected_frames = int((max_date - min_date).total_seconds() // 60 // timeframe_min)
    found_missing = False
    dflen = len(data)
    if dflen < expected_frames:
        found_missing = True
        logger.warning("%s has missing frames: expected %s, got %s, that's %s missing values",
                       pair, expected_frames, dflen, expected_frames - dflen)
    return found_missing
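
# Worked example (illustrative): one day of 5m candles spans 1440 minutes, so
# expected_frames = 1440 // 5 = 288; a dataframe with 280 rows logs
# "... expected 288, got 280, that's 8 missing values" and returns True.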