from datetime import datetime, timedelta, timezone
from pathlib import Path
from unittest.mock import MagicMock

import pytest
from pandas import DataFrame, DateOffset, Timestamp, to_datetime

from freqtrade.configuration import TimeRange
from freqtrade.constants import LAST_BT_RESULT_FN
from freqtrade.data.btanalysis import (
    BT_DATA_COLUMNS,
    analyze_trade_parallelism,
    extract_trades_of_period,
    get_latest_backtest_filename,
    get_latest_hyperopt_file,
    load_backtest_data,
    load_backtest_metadata,
    load_trades,
    load_trades_from_db,
)
from freqtrade.data.history import load_data, load_pair_history
from freqtrade.data.metrics import (
    calculate_cagr,
    calculate_calmar,
    calculate_csum,
    calculate_expectancy,
    calculate_market_change,
    calculate_max_drawdown,
    calculate_sharpe,
    calculate_sortino,
    calculate_underwater,
    combine_dataframes_with_mean,
    combined_dataframes_with_rel_mean,
    create_cum_profit,
)
from freqtrade.exceptions import OperationalException
from freqtrade.util import dt_utc
from tests.conftest import CURRENT_TEST_STRATEGY, create_mock_trades
from tests.conftest_trades import MOCK_TRADE_COUNT


def test_get_latest_backtest_filename(testdatadir, mocker):
    with pytest.raises(ValueError, match=r"Directory .* does not exist\."):
        get_latest_backtest_filename(testdatadir / "does_not_exist")

    with pytest.raises(ValueError, match=r"Directory .* does not seem to contain .*"):
        get_latest_backtest_filename(testdatadir)

    testdir_bt = testdatadir / "backtest_results"
    res = get_latest_backtest_filename(testdir_bt)
    assert res == "backtest-result.json"

    res = get_latest_backtest_filename(str(testdir_bt))
    assert res == "backtest-result.json"

    mocker.patch("freqtrade.data.btanalysis.json_load", return_value={})

    with pytest.raises(ValueError, match=r"Invalid '.last_result.json' format."):
        get_latest_backtest_filename(testdir_bt)


def test_get_latest_hyperopt_file(testdatadir):
    res = get_latest_hyperopt_file(testdatadir / "does_not_exist", "testfile.pickle")
    assert res == testdatadir / "does_not_exist/testfile.pickle"

    res = get_latest_hyperopt_file(testdatadir.parent)
    assert res == testdatadir.parent / "hyperopt_results.pickle"

    res = get_latest_hyperopt_file(str(testdatadir.parent))
    assert res == testdatadir.parent / "hyperopt_results.pickle"

    # Test with absolute path
    with pytest.raises(
        OperationalException,
        match="--hyperopt-filename expects only the filename, not an absolute path.",
    ):
        get_latest_hyperopt_file(str(testdatadir.parent), str(testdatadir.parent))


def test_load_backtest_metadata(mocker, testdatadir):
    res = load_backtest_metadata(testdatadir / "nonexistant.file.json")
    assert res == {}

    mocker.patch("freqtrade.data.btanalysis.get_backtest_metadata_filename")
    mocker.patch("freqtrade.data.btanalysis.json_load", side_effect=Exception())
    with pytest.raises(
        OperationalException, match=r"Unexpected error.*loading backtest metadata\."
    ):
        load_backtest_metadata(testdatadir / "nonexistant.file.json")


def test_load_backtest_data_old_format(testdatadir, mocker):
    filename = testdatadir / "backtest-result_test222.json"
    mocker.patch("freqtrade.data.btanalysis.load_backtest_stats", return_value=[])

    with pytest.raises(
        OperationalException,
        match=r"Backtest-results with only trades data are no longer supported.",
    ):
        load_backtest_data(filename)


def test_load_backtest_data_new_format(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)
    assert isinstance(bt_data, DataFrame)
    assert set(bt_data.columns) == set(BT_DATA_COLUMNS)
    assert len(bt_data) == 179

    # Test loading from string (must yield same result)
    bt_data2 = load_backtest_data(str(filename))
    assert bt_data.equals(bt_data2)

    # Test loading from folder (must yield same result)
    bt_data3 = load_backtest_data(testdatadir / "backtest_results")
    assert bt_data.equals(bt_data3)

    with pytest.raises(ValueError, match=r"File .* does not exist\."):
        load_backtest_data("filename" + "nofile")

    with pytest.raises(ValueError, match=r"Unknown dataformat."):
        load_backtest_data(testdatadir / "backtest_results" / LAST_BT_RESULT_FN)


def test_load_backtest_data_multi(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result_multistrat.json"
    for strategy in ("StrategyTestV2", "TestStrategy"):
        bt_data = load_backtest_data(filename, strategy=strategy)
        assert isinstance(bt_data, DataFrame)
        assert set(bt_data.columns) == set(BT_DATA_COLUMNS)
        assert len(bt_data) == 179

        # Test loading from string (must yield same result)
        bt_data2 = load_backtest_data(str(filename), strategy=strategy)
        assert bt_data.equals(bt_data2)

    with pytest.raises(ValueError, match=r"Strategy XYZ not available in the backtest result\."):
        load_backtest_data(filename, strategy="XYZ")

    with pytest.raises(ValueError, match=r"Detected backtest result with more than one strategy.*"):
        load_backtest_data(filename)


@pytest.mark.usefixtures("init_persistence")
@pytest.mark.parametrize("is_short", [False, True])
def test_load_trades_from_db(default_conf, fee, is_short, mocker):
    create_mock_trades(fee, is_short)
    # remove init so it does not init again
    init_mock = mocker.patch("freqtrade.data.btanalysis.init_db", MagicMock())

    trades = load_trades_from_db(db_url=default_conf["db_url"])
    assert init_mock.call_count == 1
    assert len(trades) == MOCK_TRADE_COUNT
    assert isinstance(trades, DataFrame)
    assert "pair" in trades.columns
    assert "open_date" in trades.columns
    assert "profit_ratio" in trades.columns

    for col in BT_DATA_COLUMNS:
        if col not in ["index", "open_at_end"]:
            assert col in trades.columns
    trades = load_trades_from_db(db_url=default_conf["db_url"], strategy=CURRENT_TEST_STRATEGY)
    assert len(trades) == 4
    trades = load_trades_from_db(db_url=default_conf["db_url"], strategy="NoneStrategy")
    assert len(trades) == 0


def test_extract_trades_of_period(testdatadir):
    pair = "UNITTEST/BTC"
    # 2017-11-14 06:07:00
    timerange = TimeRange("date", None, 1510639620, 0)

    data = load_pair_history(pair=pair, timeframe="1m", datadir=testdatadir, timerange=timerange)

    trades = DataFrame(
        {
            "pair": [pair, pair, pair, pair],
            "profit_ratio": [0.0, 0.1, -0.2, -0.5],
            "profit_abs": [0.0, 1, -2, -5],
            "open_date": to_datetime(
                [
                    datetime(2017, 11, 13, 15, 40, 0, tzinfo=timezone.utc),
                    datetime(2017, 11, 14, 9, 41, 0, tzinfo=timezone.utc),
                    datetime(2017, 11, 14, 14, 20, 0, tzinfo=timezone.utc),
                    datetime(2017, 11, 15, 3, 40, 0, tzinfo=timezone.utc),
                ],
                utc=True,
            ),
            "close_date": to_datetime(
                [
                    datetime(2017, 11, 13, 16, 40, 0, tzinfo=timezone.utc),
                    datetime(2017, 11, 14, 10, 41, 0, tzinfo=timezone.utc),
                    datetime(2017, 11, 14, 15, 25, 0, tzinfo=timezone.utc),
                    datetime(2017, 11, 15, 3, 55, 0, tzinfo=timezone.utc),
                ],
                utc=True,
            ),
        }
    )
    trades1 = extract_trades_of_period(data, trades)
    # First and last trade are dropped as they are out of range
    assert len(trades1) == 2
    assert trades1.iloc[0].open_date == datetime(2017, 11, 14, 9, 41, 0, tzinfo=timezone.utc)
    assert trades1.iloc[0].close_date == datetime(2017, 11, 14, 10, 41, 0, tzinfo=timezone.utc)
    assert trades1.iloc[-1].open_date == datetime(2017, 11, 14, 14, 20, 0, tzinfo=timezone.utc)
    assert trades1.iloc[-1].close_date == datetime(2017, 11, 14, 15, 25, 0, tzinfo=timezone.utc)


def test_analyze_trade_parallelism(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)

    res = analyze_trade_parallelism(bt_data, "5m")
    assert isinstance(res, DataFrame)
    assert "open_trades" in res.columns
    assert res["open_trades"].max() == 3
    assert res["open_trades"].min() == 0


def test_load_trades(default_conf, mocker):
    db_mock = mocker.patch("freqtrade.data.btanalysis.load_trades_from_db", MagicMock())
    bt_mock = mocker.patch("freqtrade.data.btanalysis.load_backtest_data", MagicMock())

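    # Expected routing (inferred from the assertions below): passing "DB" should go
    # through load_trades_from_db, "file" through load_backtest_data, while
    # no_trades=True should skip loading altogether.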
    load_trades(
        "DB",
        db_url=default_conf.get("db_url"),
        exportfilename=default_conf.get("exportfilename"),
        no_trades=False,
        strategy=CURRENT_TEST_STRATEGY,
    )

    assert db_mock.call_count == 1
    assert bt_mock.call_count == 0

    db_mock.reset_mock()
    bt_mock.reset_mock()
    default_conf["exportfilename"] = Path("testfile.json")
    load_trades(
        "file",
        db_url=default_conf.get("db_url"),
        exportfilename=default_conf.get("exportfilename"),
    )

    assert db_mock.call_count == 0
    assert bt_mock.call_count == 1

    db_mock.reset_mock()
    bt_mock.reset_mock()
    default_conf["exportfilename"] = "testfile.json"
    load_trades(
        "file",
        db_url=default_conf.get("db_url"),
        exportfilename=default_conf.get("exportfilename"),
        no_trades=True,
    )

    assert db_mock.call_count == 0
    assert bt_mock.call_count == 0


def test_calculate_market_change(testdatadir):
    pairs = ["ETH/BTC", "ADA/BTC"]
    data = load_data(datadir=testdatadir, pairs=pairs, timeframe="5m")
    result = calculate_market_change(data)
    assert isinstance(result, float)
    assert pytest.approx(result) == 0.01100002


def test_combine_dataframes_with_mean(testdatadir):
    pairs = ["ETH/BTC", "ADA/BTC"]
    data = load_data(datadir=testdatadir, pairs=pairs, timeframe="5m")
    df = combine_dataframes_with_mean(data)
    assert isinstance(df, DataFrame)
    assert "ETH/BTC" in df.columns
    assert "ADA/BTC" in df.columns
    assert "mean" in df.columns


def test_combined_dataframes_with_rel_mean(testdatadir):
    pairs = ["ETH/BTC", "ADA/BTC"]
    data = load_data(datadir=testdatadir, pairs=pairs, timeframe="5m")
    df = combined_dataframes_with_rel_mean(
        data, datetime(2018, 1, 12, tzinfo=timezone.utc), datetime(2018, 1, 28, tzinfo=timezone.utc)
    )
    assert isinstance(df, DataFrame)
    assert "ETH/BTC" not in df.columns
    assert "ADA/BTC" not in df.columns
    assert "mean" in df.columns
    assert "rel_mean" in df.columns
    assert "count" in df.columns
    assert df.iloc[0]["count"] == 2
    assert df.iloc[-1]["count"] == 2
    assert len(df) < len(data["ETH/BTC"])


def test_combine_dataframes_with_mean_no_data(testdatadir):
    pairs = ["ETH/BTC", "ADA/BTC"]
    data = load_data(datadir=testdatadir, pairs=pairs, timeframe="6m")
    with pytest.raises(ValueError, match=r"No data provided\."):
        combine_dataframes_with_mean(data)


def test_create_cum_profit(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)
    timerange = TimeRange.parse_timerange("20180110-20180112")

    df = load_pair_history(pair="TRX/BTC", timeframe="5m", datadir=testdatadir, timerange=timerange)

    cum_profits = create_cum_profit(
        df.set_index("date"), bt_data[bt_data["pair"] == "TRX/BTC"], "cum_profits", timeframe="5m"
    )
    assert "cum_profits" in cum_profits.columns
    assert cum_profits.iloc[0]["cum_profits"] == 0
    assert pytest.approx(cum_profits.iloc[-1]["cum_profits"]) == 9.0225563e-05


def test_create_cum_profit1(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)
    # Move close-time to "off" the candle, to make sure the logic still works
    bt_data["close_date"] = bt_data.loc[:, "close_date"] + DateOffset(seconds=20)
    timerange = TimeRange.parse_timerange("20180110-20180112")

    df = load_pair_history(pair="TRX/BTC", timeframe="5m", datadir=testdatadir, timerange=timerange)

    cum_profits = create_cum_profit(
        df.set_index("date"), bt_data[bt_data["pair"] == "TRX/BTC"], "cum_profits", timeframe="5m"
    )
    assert "cum_profits" in cum_profits.columns
    assert cum_profits.iloc[0]["cum_profits"] == 0
    assert pytest.approx(cum_profits.iloc[-1]["cum_profits"]) == 9.0225563e-05

    with pytest.raises(ValueError, match="Trade dataframe empty."):
        create_cum_profit(
            df.set_index("date"),
            bt_data[bt_data["pair"] == "NOTAPAIR"],
            "cum_profits",
            timeframe="5m",
        )


def test_calculate_max_drawdown(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)
    _, hdate, lowdate, hval, lval, drawdown = calculate_max_drawdown(
        bt_data, value_col="profit_abs"
    )
    assert isinstance(drawdown, float)
    assert pytest.approx(drawdown) == 0.29753914
    assert isinstance(hdate, Timestamp)
    assert isinstance(lowdate, Timestamp)
    assert isinstance(hval, float)
    assert isinstance(lval, float)
    assert hdate == Timestamp("2018-01-16 19:30:00", tz="UTC")
    assert lowdate == Timestamp("2018-01-16 22:25:00", tz="UTC")

    underwater = calculate_underwater(bt_data)
    assert isinstance(underwater, DataFrame)

    with pytest.raises(ValueError, match="Trade dataframe empty."):
        calculate_max_drawdown(DataFrame())

    with pytest.raises(ValueError, match="Trade dataframe empty."):
        calculate_underwater(DataFrame())


def test_calculate_csum(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)
    csum_min, csum_max = calculate_csum(bt_data)

    assert isinstance(csum_min, float)
    assert isinstance(csum_max, float)
    assert csum_min < csum_max
    assert csum_min < 0.0001
    assert csum_max > 0.0002
    csum_min1, csum_max1 = calculate_csum(bt_data, 5)

    assert csum_min1 == csum_min + 5
    assert csum_max1 == csum_max + 5

    with pytest.raises(ValueError, match="Trade dataframe empty."):
        csum_min, csum_max = calculate_csum(DataFrame())


def test_calculate_expectancy(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)

    expectancy, expectancy_ratio = calculate_expectancy(DataFrame())
    assert expectancy == 0.0
    assert expectancy_ratio == 100

    expectancy, expectancy_ratio = calculate_expectancy(bt_data)
    assert isinstance(expectancy, float)
    assert isinstance(expectancy_ratio, float)
    assert pytest.approx(expectancy) == 5.820687070932315e-06
    assert pytest.approx(expectancy_ratio) == 0.07151374226574791

    data = {"profit_abs": [100, 200, 50, -150, 300, -100, 80, -30]}
    df = DataFrame(data)
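    # Hand check of the expected values below (a sketch, assuming expectancy is the
    # mean profit per trade and expectancy_ratio is ((1 + avg_win / avg_loss) * win_rate) - 1):
    #   mean profit = 450 / 8 = 56.25
    #   avg_win = 730 / 5 = 146, avg_loss = 280 / 3 ~ 93.33, win_rate = 5 / 8
    #   ratio = (1 + 146 / 93.33) * 0.625 - 1 ~ 0.6027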
    expectancy, expectancy_ratio = calculate_expectancy(df)

    assert pytest.approx(expectancy) == 56.25
    assert pytest.approx(expectancy_ratio) == 0.60267857


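# The ratio tests below call calculate_sortino / calculate_sharpe / calculate_calmar
# with the trade dataframe, the earliest open date, the latest close date and 0.01
# (presumably the starting balance); an empty dataframe is expected to return 0.0.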
def test_calculate_sortino(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)

    sortino = calculate_sortino(DataFrame(), None, None, 0)
    assert sortino == 0.0

    sortino = calculate_sortino(
        bt_data,
        bt_data["open_date"].min(),
        bt_data["close_date"].max(),
        0.01,
    )
    assert isinstance(sortino, float)
    assert pytest.approx(sortino) == 35.17722


def test_calculate_sharpe(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)

    sharpe = calculate_sharpe(DataFrame(), None, None, 0)
    assert sharpe == 0.0

    sharpe = calculate_sharpe(
        bt_data,
        bt_data["open_date"].min(),
        bt_data["close_date"].max(),
        0.01,
    )
    assert isinstance(sharpe, float)
    assert pytest.approx(sharpe) == 44.5078669


def test_calculate_calmar(testdatadir):
    filename = testdatadir / "backtest_results/backtest-result.json"
    bt_data = load_backtest_data(filename)

    calmar = calculate_calmar(DataFrame(), None, None, 0)
    assert calmar == 0.0

    calmar = calculate_calmar(
        bt_data,
        bt_data["open_date"].min(),
        bt_data["close_date"].max(),
        0.01,
    )
    assert isinstance(calmar, float)
    assert pytest.approx(calmar) == 559.040508


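# The expected values below are consistent with the usual CAGR formula, assuming
# calculate_cagr(days, start, end) computes (end / start) ** (365 / days) - 1,
# e.g. (176000 / 64900) ** (365 / 1095) - 1 ~ 0.3945
# and (1500 / 1000) ** (365 / 100) - 1 ~ 3.3927.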
@pytest.mark.parametrize(
    "start,end,days,expected",
    [
        (64900, 176000, 3 * 365, 0.3945),
        (64900, 176000, 365, 1.7119),
        (1000, 1000, 365, 0.0),
        (1000, 1500, 365, 0.5),
        (1000, 1500, 100, 3.3927),  # sub year
        (0.01000000, 0.01762792, 120, 4.6087),  # sub year BTC values
    ],
)
def test_calculate_cagr(start, end, days, expected):
    assert round(calculate_cagr(days, start, end), 4) == expected


def test_calculate_max_drawdown2():
    values = [
        0.011580,
        0.010048,
        0.011340,
        0.012161,
        0.010416,
        0.010009,
        0.020024,
        -0.024662,
        -0.022350,
        0.020496,
        -0.029859,
        -0.030511,
        0.010041,
        0.010872,
        -0.025782,
        0.010400,
        0.012374,
        0.012467,
        0.114741,
        0.010303,
        0.010088,
        -0.033961,
        0.010680,
        0.010886,
        -0.029274,
        0.011178,
        0.010693,
        0.010711,
    ]

    dates = [dt_utc(2020, 1, 1) + timedelta(days=i) for i in range(len(values))]
    df = DataFrame(zip(values, dates), columns=["profit", "open_date"])
    # sort by profit and reset index
    df = df.sort_values("profit").reset_index(drop=True)
    df1 = df.copy()
    drawdown, hdate, ldate, hval, lval, drawdown_rel = calculate_max_drawdown(
        df, date_col="open_date", value_col="profit"
    )
    # Ensure df has not been altered.
    assert df.equals(df1)

    assert isinstance(drawdown, float)
    assert isinstance(drawdown_rel, float)
    # High must be before low
    assert hdate < ldate
    # High value must be higher than low value
    assert hval > lval
    assert drawdown == 0.091755

    df = DataFrame(zip(values[:5], dates[:5]), columns=["profit", "open_date"])
    with pytest.raises(ValueError, match="No losing trade, therefore no drawdown."):
        calculate_max_drawdown(df, date_col="open_date", value_col="profit")

    df1 = DataFrame(zip(values[:5], dates[:5]), columns=["profit", "open_date"])
    df1.loc[:, "profit"] = df1["profit"] * -1
    # No winning trade ...
    drawdown, hdate, ldate, hval, lval, drawdown_rel = calculate_max_drawdown(
        df1, date_col="open_date", value_col="profit"
    )
    assert drawdown == 0.043965


@pytest.mark.parametrize(
    "profits,relative,highd,lowdays,result,result_rel",
    [
        ([0.0, -500.0, 500.0, 10000.0, -1000.0], False, 3, 4, 1000.0, 0.090909),
        ([0.0, -500.0, 500.0, 10000.0, -1000.0], True, 0, 1, 500.0, 0.5),
    ],
)
def test_calculate_max_drawdown_abs(profits, relative, highd, lowdays, result, result_rel):
    """
    Test case from issue https://github.com/freqtrade/freqtrade/issues/6655
    [1000, 500, 1000, 11000, 10000] # absolute results
    [1000, 50%, 0%, 0%, ~9%] # Relative drawdowns
    """
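    # With a starting balance of 1000, the cumulative balances are the ones shown in
    # the docstring. The absolute max drawdown is the 11000 -> 10000 dip (1000.0,
    # i.e. 1000 / 11000 ~ 0.090909 relative); with relative=True the 1000 -> 500
    # dip (500.0, 50%) is picked instead.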
    init_date = datetime(2020, 1, 1, tzinfo=timezone.utc)
    dates = [init_date + timedelta(days=i) for i in range(len(profits))]
    df = DataFrame(zip(profits, dates), columns=["profit_abs", "open_date"])
    # sort by profit and reset index
    df = df.sort_values("profit_abs").reset_index(drop=True)
    df1 = df.copy()
    drawdown, hdate, ldate, hval, lval, drawdown_rel = calculate_max_drawdown(
        df, date_col="open_date", starting_balance=1000, relative=relative
    )
    # Ensure df has not been altered.
    assert df.equals(df1)

    assert isinstance(drawdown, float)
    assert isinstance(drawdown_rel, float)
    assert hdate == init_date + timedelta(days=highd)
    assert ldate == init_date + timedelta(days=lowdays)

    # High must be before low
    assert hdate < ldate
    # High value must be higher than low value
    assert hval > lval
    assert drawdown == result
    assert pytest.approx(drawdown_rel) == result_rel