Mirror of https://github.com/freqtrade/freqtrade.git (synced 2024-11-10 10:21:59 +00:00)

Merge pull request #10093 from freqtrade/feat/btmarketchange

Market change visualization

Commit f8a2569739
@@ -238,6 +238,16 @@ def update_backtest_metadata(filename: Path, strategy: str, content: Dict[str, A
     file_dump_json(get_backtest_metadata_filename(filename), metadata)
 
 
+def get_backtest_market_change(filename: Path, include_ts: bool = True) -> pd.DataFrame:
+    """
+    Read backtest market change file.
+    """
+    df = pd.read_feather(filename)
+    if include_ts:
+        df.loc[:, '__date_ts'] = df.loc[:, 'date'].astype(np.int64) // 1000 // 1000
+    return df
+
+
 def find_existing_backtest_stats(dirname: Union[Path, str], run_ids: Dict[str, str],
                                  min_backtest_date: Optional[datetime] = None) -> Dict[str, Any]:
     """
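Note (not part of the diff): a minimal standalone sketch of consuming the new market-change export, mirroring what get_backtest_market_change() above does. The file path is illustrative; __date_ts is the date column converted from nanosecond timestamps to milliseconds since epoch, the unit charting frontends typically expect.

from pathlib import Path

import numpy as np
import pandas as pd


def read_market_change(filename: Path, include_ts: bool = True) -> pd.DataFrame:
    # Mirrors get_backtest_market_change(): load the feather file written by the backtest export.
    df = pd.read_feather(filename)
    if include_ts:
        # datetime64[ns] -> milliseconds since epoch
        df.loc[:, '__date_ts'] = df.loc[:, 'date'].astype(np.int64) // 1000 // 1000
    return df


if __name__ == '__main__':
    # Illustrative filename; the actual name depends on the backtest timestamp appendix.
    path = Path('user_data/backtest_results/backtest-result-2024-01-01_15-05-25_market_change.feather')
    print(read_market_change(path)[['date', 'mean', 'rel_mean', 'count']].head())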
@@ -30,8 +30,25 @@ def calculate_market_change(data: Dict[str, pd.DataFrame], column: str = "close"
     return float(np.mean(tmp_means))
 
 
-def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
-                                 column: str = "close") -> pd.DataFrame:
+def combine_dataframes_by_column(
+        data: Dict[str, pd.DataFrame], column: str = "close") -> pd.DataFrame:
     """
     Combine multiple dataframes "column"
     :param data: Dict of Dataframes, dict key should be pair.
+    :param column: Column in the original dataframes to use
+    :return: DataFrame with the column renamed to the dict key.
+    :raise: ValueError if no data is provided.
+    """
+    if not data:
+        raise ValueError("No data provided.")
+    df_comb = pd.concat([data[pair].set_index('date').rename(
+        {column: pair}, axis=1)[pair] for pair in data], axis=1)
+    return df_comb
+
+
+def combined_dataframes_with_rel_mean(
+        data: Dict[str, pd.DataFrame], fromdt: datetime, todt: datetime,
+        column: str = "close") -> pd.DataFrame:
+    """
+    Combine multiple dataframes "column"
+    :param data: Dict of Dataframes, dict key should be pair.
@@ -40,8 +57,26 @@ def combine_dataframes_with_mean(data: Dict[str, pd.DataFrame],
         named mean, containing the mean of all pairs.
     :raise: ValueError if no data is provided.
     """
-    df_comb = pd.concat([data[pair].set_index('date').rename(
-        {column: pair}, axis=1)[pair] for pair in data], axis=1)
+    df_comb = combine_dataframes_by_column(data, column)
+    # Trim dataframes to the given timeframe
+    df_comb = df_comb.iloc[(df_comb.index >= fromdt) & (df_comb.index < todt)]
+    df_comb['count'] = df_comb.count(axis=1)
+    df_comb['mean'] = df_comb.mean(axis=1)
+    df_comb['rel_mean'] = df_comb['mean'].pct_change().fillna(0).cumsum()
+    return df_comb[['mean', 'rel_mean', 'count']]
+
+
+def combine_dataframes_with_mean(
+        data: Dict[str, pd.DataFrame], column: str = "close") -> pd.DataFrame:
+    """
+    Combine multiple dataframes "column"
+    :param data: Dict of Dataframes, dict key should be pair.
+    :param column: Column in the original dataframes to use
+    :return: DataFrame with the column renamed to the dict key, and a column
+        named mean, containing the mean of all pairs.
+    :raise: ValueError if no data is provided.
+    """
+    df_comb = combine_dataframes_by_column(data, column)
 
     df_comb['mean'] = df_comb.mean(axis=1)
 
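Note (not part of the diff): a simplified, self-contained sketch of the metric the new combined_dataframes_with_rel_mean() produces, using two synthetic pairs. Column selection is made explicit here for clarity (the production function derives count/mean from the combined frame directly); rel_mean is the cumulative sum of the mean's candle-to-candle percentage change, i.e. the market change relative to the start of the trimmed window.

from datetime import datetime, timezone

import pandas as pd

dates = pd.date_range('2024-01-01', periods=6, freq='5min', tz='UTC')
data = {
    'ETH/BTC': pd.DataFrame({'date': dates, 'close': [100.0, 101.0, 102.0, 101.0, 103.0, 104.0]}),
    'ADA/BTC': pd.DataFrame({'date': dates, 'close': [10.0, 10.2, 10.1, 10.3, 10.4, 10.5]}),
}

# Combine each pair's 'close' column, keyed by pair name (what combine_dataframes_by_column does).
df_comb = pd.concat(
    [data[pair].set_index('date').rename({'close': pair}, axis=1)[pair] for pair in data],
    axis=1)

# Trim to the requested window, then derive count / mean / rel_mean.
fromdt = datetime(2024, 1, 1, 0, 5, tzinfo=timezone.utc)
todt = datetime(2024, 1, 1, 0, 25, tzinfo=timezone.utc)
df_comb = df_comb.iloc[(df_comb.index >= fromdt) & (df_comb.index < todt)]
pairs = list(data)
df_comb['count'] = df_comb[pairs].count(axis=1)   # pairs with data on each candle
df_comb['mean'] = df_comb[pairs].mean(axis=1)     # average close across pairs
df_comb['rel_mean'] = df_comb['mean'].pct_change().fillna(0).cumsum()
print(df_comb[['mean', 'rel_mean', 'count']])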
@@ -19,6 +19,7 @@ from freqtrade.data import history
 from freqtrade.data.btanalysis import find_existing_backtest_stats, trade_list_to_dataframe
 from freqtrade.data.converter import trim_dataframe, trim_dataframes
 from freqtrade.data.dataprovider import DataProvider
+from freqtrade.data.metrics import combined_dataframes_with_rel_mean
 from freqtrade.enums import (BacktestState, CandleType, ExitCheckTuple, ExitType, RunMode,
                              TradingMode)
 from freqtrade.exceptions import DependencyException, OperationalException
@@ -1394,7 +1395,7 @@ class Backtesting:
         """
         Run backtesting end-to-end
         """
-        data: Dict[str, Any] = {}
+        data: Dict[str, DataFrame] = {}
 
         data, timerange = self.load_bt_data()
         self.load_bt_data_detail()
@@ -1421,7 +1422,9 @@ class Backtesting:
         self.results = results
         dt_appendix = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
         if self.config.get('export', 'none') in ('trades', 'signals'):
-            store_backtest_stats(self.config['exportfilename'], self.results, dt_appendix)
+            combined_res = combined_dataframes_with_rel_mean(data, min_date, max_date)
+            store_backtest_stats(self.config['exportfilename'], self.results, dt_appendix,
+                                 market_change_data=combined_res)
 
         if (self.config.get('export', 'none') == 'signals' and
                 self.dataprovider.runmode == RunMode.BACKTEST):
@@ -1,6 +1,8 @@
 import logging
 from pathlib import Path
-from typing import Dict
+from typing import Dict, Optional
 
+from pandas import DataFrame
+
 from freqtrade.constants import LAST_BT_RESULT_FN
 from freqtrade.misc import file_dump_joblib, file_dump_json
@@ -11,7 +13,7 @@ from freqtrade.types import BacktestResultType
 logger = logging.getLogger(__name__)
 
 
-def generate_filename(recordfilename: Path, appendix: str, suffix: str) -> Path:
+def _generate_filename(recordfilename: Path, appendix: str, suffix: str) -> Path:
     """
     Generates a filename based on the provided parameters.
     :param recordfilename: Path object, which can either be a filename or a directory.
@@ -29,7 +31,8 @@ def generate_filename(recordfilename: Path, appendix: str, suffix: str) -> Path:
 
 
 def store_backtest_stats(
-        recordfilename: Path, stats: BacktestResultType, dtappendix: str) -> Path:
+        recordfilename: Path, stats: BacktestResultType, dtappendix: str, *,
+        market_change_data: Optional[DataFrame] = None) -> Path:
     """
     Stores backtest results
     :param recordfilename: Path object, which can either be a filename or a directory.
@@ -38,7 +41,7 @@ def store_backtest_stats(
     :param stats: Dataframe containing the backtesting statistics
     :param dtappendix: Datetime to use for the filename
     """
-    filename = generate_filename(recordfilename, dtappendix, '.json')
+    filename = _generate_filename(recordfilename, dtappendix, '.json')
 
     # Store metadata separately.
     file_dump_json(get_backtest_metadata_filename(filename), stats['metadata'])
@@ -53,6 +56,11 @@ def store_backtest_stats(
     latest_filename = Path.joinpath(filename.parent, LAST_BT_RESULT_FN)
     file_dump_json(latest_filename, {'latest_backtest': str(filename.name)})
 
+    if market_change_data is not None:
+        filename_mc = _generate_filename(recordfilename, f"{dtappendix}_market_change", '.feather')
+        market_change_data.reset_index().to_feather(
+            filename_mc, compression_level=9, compression='lz4')
+
     return filename
 
 
@@ -69,7 +77,7 @@ def _store_backtest_analysis_data(
     :param dtappendix: Datetime to use for the filename
     :param name: Name to use for the file, e.g. signals, rejected
     """
-    filename = generate_filename(recordfilename, f"{dtappendix}_{name}", '.pkl')
+    filename = _generate_filename(recordfilename, f"{dtappendix}_{name}", '.pkl')
 
     file_dump_joblib(filename, data)
 
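Note (not part of the diff): a minimal end-to-end sketch of the new export path, wiring combined_dataframes_with_rel_mean() into store_backtest_stats() the way the backtesting hunk above does. The synthetic candles and the temporary export directory are illustrative; the bare-bones stats dict follows the pattern used by the storage test further down in this PR.

from datetime import datetime, timezone
from pathlib import Path
from tempfile import mkdtemp

import pandas as pd

from freqtrade.data.metrics import combined_dataframes_with_rel_mean
from freqtrade.optimize.optimize_reports import store_backtest_stats

# Synthetic candles for a single pair (illustrative only).
dates = pd.date_range('2024-01-01', periods=20, freq='5min', tz='UTC')
data = {'ETH/BTC': pd.DataFrame({'date': dates, 'close': [100.0 + i for i in range(20)]})}

combined_res = combined_dataframes_with_rel_mean(
    data,
    datetime(2024, 1, 1, tzinfo=timezone.utc),
    datetime(2024, 1, 2, tzinfo=timezone.utc))

export_dir = Path(mkdtemp())
# Minimal stats structure, as used by test_store_backtest_stats_real in this PR.
stats = {'metadata': {}, 'strategy': {}, 'strategy_comparison': []}
store_backtest_stats(export_dir, stats, datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
                     market_change_data=combined_res)
# Expect the .json / .meta.json results, the LAST_BT_RESULT_FN pointer,
# and a backtest-result-<ts>_market_change.feather file.
print(sorted(p.name for p in export_dir.iterdir()))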
@@ -10,15 +10,16 @@ from fastapi.exceptions import HTTPException
 
 from freqtrade.configuration.config_validation import validate_config_consistency
 from freqtrade.constants import Config
-from freqtrade.data.btanalysis import (delete_backtest_result, get_backtest_result,
-                                       get_backtest_resultlist, load_and_merge_backtest_result,
-                                       update_backtest_metadata)
+from freqtrade.data.btanalysis import (delete_backtest_result, get_backtest_market_change,
+                                       get_backtest_result, get_backtest_resultlist,
+                                       load_and_merge_backtest_result, update_backtest_metadata)
 from freqtrade.enums import BacktestState
 from freqtrade.exceptions import ConfigurationError, DependencyException, OperationalException
 from freqtrade.exchange.common import remove_exchange_credentials
 from freqtrade.misc import deep_merge_dicts, is_file_in_dir
-from freqtrade.rpc.api_server.api_schemas import (BacktestHistoryEntry, BacktestMetadataUpdate,
-                                                  BacktestRequest, BacktestResponse)
+from freqtrade.rpc.api_server.api_schemas import (BacktestHistoryEntry, BacktestMarketChange,
+                                                  BacktestMetadataUpdate, BacktestRequest,
+                                                  BacktestResponse)
 from freqtrade.rpc.api_server.deps import get_config
 from freqtrade.rpc.api_server.webserver_bgwork import ApiBG
 from freqtrade.rpc.rpc import RPCException
@@ -32,8 +33,10 @@ router = APIRouter()
 
 
 def __run_backtest_bg(btconfig: Config):
+    from freqtrade.data.metrics import combined_dataframes_with_rel_mean
     from freqtrade.optimize.optimize_reports import generate_backtest_stats, store_backtest_stats
     from freqtrade.resolvers import StrategyResolver
 
     asyncio.set_event_loop(asyncio.new_event_loop())
     try:
         # Reload strategy
@@ -89,11 +92,14 @@ def __run_backtest_bg(btconfig: Config):
             min_date=min_date, max_date=max_date)
 
         if btconfig.get('export', 'none') == 'trades':
+            combined_res = combined_dataframes_with_rel_mean(ApiBG.bt['data'], min_date, max_date)
            fn = store_backtest_stats(
-                btconfig['exportfilename'], ApiBG.bt['bt'].results,
-                datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
+                btconfig['exportfilename'],
+                ApiBG.bt['bt'].results,
+                datetime.now().strftime("%Y-%m-%d_%H-%M-%S"),
+                market_change_data=combined_res
             )
-            ApiBG.bt['bt'].results['metadata'][strategy_name]['filename'] = str(fn.name)
+            ApiBG.bt['bt'].results['metadata'][strategy_name]['filename'] = str(fn.stem)
             ApiBG.bt['bt'].results['metadata'][strategy_name]['strategy'] = strategy_name
 
         logger.info("Backtest finished.")
@@ -308,3 +314,20 @@ def api_update_backtest_history_entry(file: str, body: BacktestMetadataUpdate,
         raise HTTPException(status_code=400, detail=str(e))
 
     return get_backtest_result(file_abs)
+
+
+@router.get('/backtest/history/{file}/market_change', response_model=BacktestMarketChange,
+            tags=['webserver', 'backtest'])
+def api_get_backtest_market_change(file: str, config=Depends(get_config)):
+    bt_results_base: Path = config['user_data_dir'] / 'backtest_results'
+    file_abs = (bt_results_base / f"{file}_market_change").with_suffix('.feather')
+    # Ensure file is in backtest_results directory
+    if not is_file_in_dir(file_abs, bt_results_base):
+        raise HTTPException(status_code=404, detail="File not found.")
+    df = get_backtest_market_change(file_abs)
+
+    return {
+        'columns': df.columns.tolist(),
+        'data': df.values.tolist(),
+        'length': len(df),
+    }
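Note (not part of the diff): a sketch of consuming the new endpoint from a client. The base URL, credentials, and result name are assumptions (the freqtrade webserver REST API is typically served under /api/v1 with HTTP basic auth on port 8080); the columns/data/length payload shape is the one returned by api_get_backtest_market_change() above.

import pandas as pd
import requests

# Assumed local webserver setup; adjust host, port and credentials to your config.
BASE_URL = 'http://127.0.0.1:8080/api/v1'
AUTH = ('freqtrader', 'SuperSecretPassword')
# Result name without the _market_change.feather suffix, e.g. taken from /backtest/history.
RESULT = 'backtest-result-2024-01-01_15-05-25'

resp = requests.get(f'{BASE_URL}/backtest/history/{RESULT}/market_change', auth=AUTH, timeout=10)
resp.raise_for_status()
payload = resp.json()

# Rebuild a DataFrame from the columns/data envelope.
df = pd.DataFrame(payload['data'], columns=payload['columns'])
assert len(df) == payload['length']
print(df[['date', 'mean', 'rel_mean', 'count']].head())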
@@ -558,6 +558,12 @@ class BacktestMetadataUpdate(BaseModel):
     notes: str = ''
 
 
+class BacktestMarketChange(BaseModel):
+    columns: List[str]
+    length: int
+    data: List[List[Any]]
+
+
 class SysInfo(BaseModel):
     cpu_pct: List[float]
     ram_pct: float
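Note (not part of the diff): the BacktestMarketChange schema added above is a thin columns/data/length envelope around a DataFrame. A quick self-contained sketch of how a market-change frame maps onto it; the model is redefined locally here so the snippet runs without freqtrade installed, and the sample values are illustrative.

from typing import Any, List

import pandas as pd
from pydantic import BaseModel


class BacktestMarketChange(BaseModel):
    # Mirrors the schema added in the hunk above.
    columns: List[str]
    length: int
    data: List[List[Any]]


df = pd.DataFrame({
    'date': ['2018-01-01T00:00:00Z', '2018-01-01T00:05:00Z'],
    'count': [2, 2],
    'mean': [2555.0, 2556.0],
    'rel_mean': [0.0, 0.022],
})

payload = BacktestMarketChange(
    columns=df.columns.tolist(),
    data=df.values.tolist(),
    length=len(df),
)
print(payload.length, payload.columns)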
@@ -16,7 +16,7 @@ from freqtrade.data.metrics import (calculate_cagr, calculate_calmar, calculate_
                                     calculate_expectancy, calculate_market_change,
                                     calculate_max_drawdown, calculate_sharpe, calculate_sortino,
                                     calculate_underwater, combine_dataframes_with_mean,
-                                    create_cum_profit)
+                                    combined_dataframes_with_rel_mean, create_cum_profit)
 from freqtrade.exceptions import OperationalException
 from freqtrade.util import dt_utc
 from tests.conftest import CURRENT_TEST_STRATEGY, create_mock_trades
@@ -251,10 +251,29 @@ def test_combine_dataframes_with_mean(testdatadir):
     assert "mean" in df.columns
 
 
+def test_combined_dataframes_with_rel_mean(testdatadir):
+    pairs = ["ETH/BTC", "ADA/BTC"]
+    data = load_data(datadir=testdatadir, pairs=pairs, timeframe='5m')
+    df = combined_dataframes_with_rel_mean(
+        data,
+        datetime(2018, 1, 12, tzinfo=timezone.utc),
+        datetime(2018, 1, 28, tzinfo=timezone.utc)
+    )
+    assert isinstance(df, DataFrame)
+    assert "ETH/BTC" not in df.columns
+    assert "ADA/BTC" not in df.columns
+    assert "mean" in df.columns
+    assert "rel_mean" in df.columns
+    assert "count" in df.columns
+    assert df.iloc[0]['count'] == 2
+    assert df.iloc[-1]['count'] == 2
+    assert len(df) < len(data['ETH/BTC'])
+
+
 def test_combine_dataframes_with_mean_no_data(testdatadir):
     pairs = ["ETH/BTC", "ADA/BTC"]
     data = load_data(datadir=testdatadir, pairs=pairs, timeframe='6m')
-    with pytest.raises(ValueError, match=r"No objects to concatenate"):
+    with pytest.raises(ValueError, match=r"No data provided\."):
         combine_dataframes_with_mean(data)
 
 
@@ -229,6 +229,28 @@ def test_store_backtest_stats(testdatadir, mocker):
     assert str(dump_mock.call_args_list[0][0][0]).startswith(str(testdatadir / 'testresult'))
 
 
+def test_store_backtest_stats_real(tmp_path):
+    data = {'metadata': {}, 'strategy': {}, 'strategy_comparison': []}
+    store_backtest_stats(tmp_path, data, '2022_01_01_15_05_13')
+
+    assert (tmp_path / 'backtest-result-2022_01_01_15_05_13.json').is_file()
+    assert (tmp_path / 'backtest-result-2022_01_01_15_05_13.meta.json').is_file()
+    assert not (tmp_path / 'backtest-result-2022_01_01_15_05_13_market_change.feather').is_file()
+    assert (tmp_path / LAST_BT_RESULT_FN).is_file()
+    fn = get_latest_backtest_filename(tmp_path)
+    assert fn == 'backtest-result-2022_01_01_15_05_13.json'
+
+    store_backtest_stats(tmp_path, data, '2024_01_01_15_05_25', market_change_data=pd.DataFrame())
+    assert (tmp_path / 'backtest-result-2024_01_01_15_05_25.json').is_file()
+    assert (tmp_path / 'backtest-result-2024_01_01_15_05_25.meta.json').is_file()
+    assert (tmp_path / 'backtest-result-2024_01_01_15_05_25_market_change.feather').is_file()
+    assert (tmp_path / LAST_BT_RESULT_FN).is_file()
+
+    # Last file reference should be updated
+    fn = get_latest_backtest_filename(tmp_path)
+    assert fn == 'backtest-result-2024_01_01_15_05_25.json'
+
+
 def test_store_backtest_candles(testdatadir, mocker):
 
     dump_mock = mocker.patch(
@@ -2249,6 +2249,42 @@ def test_api_patch_backtest_history_entry(botclient, tmp_path: Path):
     assert fileres[CURRENT_TEST_STRATEGY]['notes'] == 'FooBar'
 
 
+def test_api_patch_backtest_market_change(botclient, tmp_path: Path):
+    ftbot, client = botclient
+
+    # Create a temporary directory and file
+    bt_results_base = tmp_path / "backtest_results"
+    bt_results_base.mkdir()
+    file_path = bt_results_base / "test_22_market_change.feather"
+    df = pd.DataFrame({
+        'date': ['2018-01-01T00:00:00Z', '2018-01-01T00:05:00Z'],
+        'count': [2, 4],
+        'mean': [2555, 2556],
+        'rel_mean': [0, 0.022],
+    })
+    df['date'] = pd.to_datetime(df['date'])
+    df.to_feather(file_path, compression_level=9, compression='lz4')
+    # Nonexisting file
+    rc = client_get(client, f"{BASE_URI}/backtest/history/randomFile.json/market_change")
+    assert_response(rc, 503)
+
+    ftbot.config['user_data_dir'] = tmp_path
+    ftbot.config['runmode'] = RunMode.WEBSERVER
+
+    rc = client_get(client, f"{BASE_URI}/backtest/history/randomFile.json/market_change")
+    assert_response(rc, 404)
+
+    rc = client_get(client, f"{BASE_URI}/backtest/history/test_22/market_change")
+    assert_response(rc, 200)
+    result = rc.json()
+    assert result['length'] == 2
+    assert result['columns'] == ['date', 'count', 'mean', 'rel_mean', '__date_ts']
+    assert result['data'] == [
+        ['2018-01-01T00:00:00Z', 2, 2555, 0.0, 1514764800000],
+        ['2018-01-01T00:05:00Z', 4, 2556, 0.022, 1514765100000]
+    ]
+
+
 def test_health(botclient):
     _ftbot, client = botclient
 